/******/ (() => { // webpackBootstrap /******/ var __webpack_modules__ = ({ /***/ 24318: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.deleteCache = exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; const core = __importStar(__nccwpck_require__(37484)); const path = __importStar(__nccwpck_require__(16928)); const utils = __importStar(__nccwpck_require__(48625)); const cacheHttpClient = __importStar(__nccwpck_require__(9809)); const tar_1 = __nccwpck_require__(81739); class ValidationError extends Error { constructor(message) { super(message); this.name = 'ValidationError'; Object.setPrototypeOf(this, ValidationError.prototype); } } exports.ValidationError = ValidationError; class ReserveCacheError extends Error { constructor(message) { super(message); this.name = 'ReserveCacheError'; Object.setPrototypeOf(this, ReserveCacheError.prototype); } } exports.ReserveCacheError = ReserveCacheError; function checkPaths(paths) { if (!paths || paths.length === 0) { throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); } } function checkKey(key) { if (key.length > 512) { throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); } const regex = /^[^,]*$/; if (!regex.test(key)) { throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); } } /** * isFeatureAvailable to check the presence of Actions cache service * * @returns boolean return true if Actions cache service feature is available, otherwise false */ function isFeatureAvailable() { return !!process.env['ACTIONS_CACHE_URL']; } exports.isFeatureAvailable = isFeatureAvailable; /** * Restores cache from keys * * @param paths a list of file paths to restore from the cache * 
@param primaryKey an explicit key for restoring the cache * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key * @param downloadOptions cache download options * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { var _a; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; core.debug('Resolved Keys:'); core.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } for (const key of keys) { checkKey(key); } const compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod, enableCrossOsArchive }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { // Cache not found return undefined; } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { core.info('Lookup only - skipping download'); return cacheEntry.cacheKey; } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry const beforeDownload = Date.now(); yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); const downloadTimeMs = Date.now() - beforeDownload; if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); const beforeExtract = Date.now(); yield (0, tar_1.extractTar)(archivePath, compressionMethod); const extractTimeMs = Date.now() - beforeExtract; core.info('Cache restored successfully'); yield cacheHttpClient.reportCacheRestore({ cacheKey: cacheEntry.cacheKey, cacheVersion: cacheEntry.cacheVersion, scope: cacheEntry.scope, size: archiveFileSize, downloadConcurrency: (_a = options === null || options === void 0 ? void 0 : options.downloadConcurrency) !== null && _a !== void 0 ? 
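/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * How a consumer might call the restoreCache/lookupOnly path shown above to probe for a
 * cache hit without downloading the archive. The module path, cache paths, and key names
 * are hypothetical placeholders.
 *
 *   const cache = require('./cache');
 *   async function hasCache() {
 *     // lookupOnly short-circuits after getCacheEntry and returns the matched key (or undefined).
 *     const hit = await cache.restoreCache(['node_modules'], 'npm-linux-abc123', ['npm-linux-'], { lookupOnly: true });
 *     return hit !== undefined;
 *   }
 */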
_a : 8, downloadTimeMs, extractTimeMs }); return cacheEntry.cacheKey; } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) { throw error; } else { // Supress all non-validation cache related errors because caching should be optional core.warning(`Failed to restore: ${error.message}`); } } finally { // Try to delete the archive to save space try { yield utils.unlinkFile(archivePath); } catch (error) { core.debug(`Failed to delete archive: ${error}`); } } return undefined; }); } exports.restoreCache = restoreCache; /** * Saves a list of files with the specified key * * @param paths a list of file paths to be cached * @param key an explicit key for restoring the cache * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @param options cache upload options * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache(paths, key, options, enableCrossOsArchive = false) { var _a, _b, _c, _d, _e, _f; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(key); const compressionMethod = yield utils.getCompressionMethod(); const cachePaths = yield utils.resolvePaths(paths); core.debug('Cache Paths:'); core.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); try { const beforeArchive = Date.now(); yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); const archiveTimeMs = Date.now() - beforeArchive; if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 4.99 * 1024 * 1024 * 1024; // 4.99 per cache limit const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.debug(`File Size: ${archiveFileSize}`); // For GHES, this check will take place in ReserveCache API with enterprise file size limit if (archiveFileSize > fileSizeLimit && !utils.isGhes()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } core.debug('Reserving Cache'); const version = cacheHttpClient.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, version, { compressionMethod, enableCrossOsArchive, cacheSize: archiveFileSize, uploadConcurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency }); let uploadId; let urls; if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.uploadId) { uploadId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.uploadId; urls = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _c === void 0 ? void 0 : _c.urls; } else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? 
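/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * The pre-reserve size gate used in saveCache above, extracted as a standalone helper.
 * The ~4.99 GiB figure mirrors the fileSizeLimit constant; note that the original error
 * string still says "10GB" even though the gate is ~4.99 GiB. The helper name is hypothetical.
 *
 *   const CACHE_SIZE_LIMIT_BYTES = 4.99 * 1024 * 1024 * 1024;
 *   function assertWithinSizeLimit(archiveFileSize, isGhes) {
 *     // On GHES the limit is enforced server-side by the ReserveCache API instead.
 *     if (archiveFileSize > CACHE_SIZE_LIMIT_BYTES && !isGhes) {
 *       const mb = Math.round(archiveFileSize / (1024 * 1024));
 *       throw new Error(`Cache size of ~${mb} MB (${archiveFileSize} B) is over the size limit, not saving cache.`);
 *     }
 *   }
 */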
void 0 : reserveCacheResponse.statusCode) === 400) { throw new Error((_e = (_d = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _d === void 0 ? void 0 : _d.message) !== null && _e !== void 0 ? _e : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_f = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _f === void 0 ? void 0 : _f.message}`); } core.debug(`Saving Cache (ID: ${uploadId})`); yield cacheHttpClient.saveCache(key, version, uploadId, urls, archivePath, archiveTimeMs, options); } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) { throw error; } else if (typedError.name === ReserveCacheError.name) { core.info(`Failed to save: ${typedError.message}`); } else { core.warning(`Failed to save: ${typedError.message}`); } } finally { // Try to delete the archive to save space try { yield utils.unlinkFile(archivePath); } catch (error) { core.debug(`Failed to delete archive: ${error}`); } } //Return a 0 for competibility return 0; }); } exports.saveCache = saveCache; /** * Delete a list of caches with the specified keys * @param keys a list of keys for deleting the cache */ function deleteCache(keys) { return __awaiter(this, void 0, void 0, function* () { core.debug('Deleting Cache'); core.debug(`Cache Keys: ${keys}`); try { yield cacheHttpClient.deleteCache(keys); } catch (error) { core.warning(`Failed to delete: ${error.message}`); } }); } exports.deleteCache = deleteCache; //# sourceMappingURL=cache.js.map /***/ }), /***/ 9809: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.deleteCache = exports.saveCache = exports.reserveCache = exports.reportCacheRestore = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; const core = __importStar(__nccwpck_require__(37484)); const http_client_1 = __nccwpck_require__(54844); const auth_1 = __nccwpck_require__(44552); const crypto = __importStar(__nccwpck_require__(76982)); const fs = __importStar(__nccwpck_require__(79896)); const url_1 = __nccwpck_require__(87016); const utils = __importStar(__nccwpck_require__(48625)); const downloadUtils_1 = __nccwpck_require__(7557); const options_1 = __nccwpck_require__(10390); const requestUtils_1 = __nccwpck_require__(76668); const version_1 = __nccwpck_require__(54308); const versionSalt = '1.0'; function getCacheApiUrl(resource) { const baseUrl = process.env['BUILDJET_CACHE_URL'] || 'https://cache-api.buildjet.com/'; if (!baseUrl) { throw new Error('Cache Service Url not found, unable to restore cache.'); } const url = `${baseUrl}_apis/artifactcache/${resource}`; core.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type, apiVersion) { return `${type};api-version=${apiVersion}`; } function getRequestOptions() { const requestOptions = { headers: { Accept: createAcceptHeader('application/json', '6.0-preview.1'), 'Action-Cache-Url': process.env['ACTIONS_CACHE_URL'] || '', 'Github-Repository': process.env['GITHUB_REPOSITORY'] || '', 'Github-Repository-Id': process.env['GITHUB_REPOSITORY_ID'] || '', 'Github-Repository-Owner': process.env['GITHUB_REPOSITORY_OWNER'] || '', 'Github-Repository-Owner-Id': process.env['GITHUB_REPOSITORY_OWNER_ID'] || '' } }; return requestOptions; } function createHttpClient() { const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; const actionRepository = process.env['GITHUB_ACTION_REPOSITORY'] || ''; const runnerArch = process.env['RUNNER_ARCH'] || ''; const runnerOs = process.env['RUNNER_OS'] || ''; const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient(`BuildJet-cache/${version_1.LIB_VERSION} (${runnerOs}; ${runnerArch}) ${actionRepository}`, [bearerCredentialHandler], getRequestOptions()); } function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { const components = paths; // Add compression method to cache version to restore // compressed cache as per compression method if (compressionMethod) { components.push(compressionMethod); } // Only check for windows platforms if enableCrossOsArchive is false if (process.platform === 'win32' && !enableCrossOsArchive) { components.push('windows-only'); } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); return crypto .createHash('sha256') .update(components.join('|')) .digest('hex'); } exports.getCacheVersion = getCacheVersion; function getCacheEntry(keys, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
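/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * The cache "version" computed by getCacheVersion above is a sha256 over the path list,
 * the compression method, an optional 'windows-only' marker, and the versionSalt, joined
 * with '|'. A minimal standalone equivalent (it copies the array instead of mutating the
 * caller's paths, unlike the code above):
 *
 *   const crypto = require('crypto');
 *   function cacheVersion(paths, compressionMethod, enableCrossOsArchive = false, salt = '1.0') {
 *     const components = [...paths];
 *     if (compressionMethod) components.push(compressionMethod);
 *     if (process.platform === 'win32' && !enableCrossOsArchive) components.push('windows-only');
 *     components.push(salt);
 *     return crypto.createHash('sha256').update(components.join('|')).digest('hex');
 *   }
 */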
void 0 : options.enableCrossOsArchive); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); // Cache not found if (response.statusCode === 204) { // List cache for primary key only if cache miss occurs if (core.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; } if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { // Cache achiveLocation not found. This should never happen, and hence bail out. throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); core.debug(`Cache Result:`); core.debug(JSON.stringify(cacheResult)); return cacheResult; }); } exports.getCacheEntry = getCacheEntry; function printCachesListForDiagnostics(key, httpClient, version) { return __awaiter(this, void 0, void 0, function* () { const resource = `caches?key=${encodeURIComponent(key)}`; const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 200) { const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } }); } function downloadCache(archiveLocation, archivePath, options) { var _a; return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); const downloadOptions = (0, options_1.getDownloadOptions)(options); if (process.env['PARALLEL_DOWNLOAD'] == 'false') { //Use parallel download as default unless diasbled yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } else { yield (0, downloadUtils_1.downloadCachMultiConnection)(archiveLocation, archivePath, (_a = options === null || options === void 0 ? void 0 : options.downloadConcurrency) !== null && _a !== void 0 ? 
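/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * The selection made by downloadCache above: multi-connection download is the default,
 * PARALLEL_DOWNLOAD=false in the environment falls back to the single-connection client,
 * and concurrency defaults to 8 when options.downloadConcurrency is not set.
 *
 *   async function download(archiveLocation, archivePath, options) {
 *     if (process.env['PARALLEL_DOWNLOAD'] === 'false') {
 *       await downloadCacheHttpClient(archiveLocation, archivePath);
 *     } else {
 *       const connections = (options && options.downloadConcurrency) || 8;
 *       await downloadCachMultiConnection(archiveLocation, archivePath, connections);
 *     }
 *   }
 */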
_a : 8); } }); } exports.downloadCache = downloadCache; function reportCacheRestore(report) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const response = yield (0, requestUtils_1.retryTypedResponse)('cacheRestoreReport', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('cacheRestoreReport'), report); })); if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { throw new Error(`Cache service responded with ${response.statusCode}`); } }); } exports.reportCacheRestore = reportCacheRestore; // Reserve Cache function reserveCache(key, version, options) { var _a; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const reserveCacheRequest = { key, version, cacheSize: options.cacheSize, chunks: (_a = options.uploadConcurrency) !== null && _a !== void 0 ? _a : 4 }; const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); })); return response; }); } exports.reserveCache = reserveCache; function getContentRange(start, end) { // Format: `bytes start-end/filesize // start and end are inclusive // filesize can be * // For a 200 byte chunk starting at byte 0: // Content-Range: bytes 0-199/* return `bytes ${start}-${end}/*`; } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { 'Content-Type': 'application/octet-stream', 'Content-Length': end - start + 1 }; const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { return httpClient.sendStream('PUT', resourceUrl, openStream(), additionalHeaders); })); core.debug(JSON.stringify(uploadChunkResponse.message.headers)); core.debug(JSON.stringify(yield uploadChunkResponse.readBody())); if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); } return uploadChunkResponse.message.headers.etag; }); } function uploadFile(httpClient, urls, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { // Upload Chunks const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const fd = fs.openSync(archivePath, 'r'); core.debug('Awaiting all uploads'); let offset = 0; const maxChunkSize = Math.ceil(fileSize / urls.length); try { const eTags = yield Promise.all(urls.map((url) => __awaiter(this, void 0, void 0, function* () { while (offset < fileSize) { const chunkSize = Math.min(fileSize - offset, maxChunkSize); const start = offset; const end = offset + chunkSize - 1; offset += chunkSize; return yield uploadChunk(httpClient, url, () => fs .createReadStream(archivePath, { fd, start, end, autoClose: false }) .on('error', error => { throw new Error(`Cache upload failed because file read failed with ${error.message}`); }), start, end); } }))); return eTags; } finally { fs.closeSync(fd); } }); } function saveCache(key, version, uploadId, urls, archivePath, archiveTimeMs, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = 
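/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * uploadFile above splits the archive into one chunk per pre-signed URL, using
 * maxChunkSize = ceil(fileSize / urls.length), and getContentRange formats each PUT's
 * inclusive byte range (with an asterisk for the unknown total size). A standalone helper
 * (hypothetical name) that computes the same ranges:
 *
 *   function chunkRanges(fileSize, urlCount) {
 *     const maxChunkSize = Math.ceil(fileSize / urlCount);
 *     const ranges = [];
 *     for (let offset = 0; offset < fileSize; offset += maxChunkSize) {
 *       // end is inclusive, matching the Content-Range convention used above
 *       ranges.push({ start: offset, end: Math.min(offset + maxChunkSize, fileSize) - 1 });
 *     }
 *     return ranges;
 *   }
 */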
createHttpClient(); core.debug('Upload cache'); const uploadHttpClient = new http_client_1.HttpClient('actions/cache'); const beforeUpload = Date.now(); const eTags = yield uploadFile(uploadHttpClient, urls, archivePath, options); const uploadTimeMs = Date.now() - beforeUpload; // Commit Cache core.debug('Commiting cache'); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); let i = 1; const parts = eTags.map((eTag) => { const part = { partNumber: i++, eTag }; return part; }); const commitCacheRequest = { key, version, uploadId, cacheSize, parts, uploadTimeMs, archiveTimeMs }; const commitCacheResponse = yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl(`commitCache`), commitCacheRequest); })); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } core.info('Cache saved successfully'); }); } exports.saveCache = saveCache; function deleteCache(keys) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}`; const response = yield (0, requestUtils_1.retryHttpClientResponse)('deleteCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.del(getCacheApiUrl(resource)); })); if (!(0, requestUtils_1.isSuccessStatusCode)(response.message.statusCode)) { throw new Error(`Cache service responded with ${response.message.statusCode}`); } }); } exports.deleteCache = deleteCache; //# sourceMappingURL=cacheHttpClient.js.map /***/ }), /***/ 48625: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
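/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * The commit step in saveCache above pairs each chunk's ETag with a 1-based part number,
 * multipart-upload style, before POSTing the commitCache request.
 *
 *   function buildParts(eTags) {
 *     return eTags.map((eTag, index) => ({ partNumber: index + 1, eTag }));
 *   }
 */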
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues = (this && this.__asyncValues) || function (o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m = o[Symbol.asyncIterator], i; return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0; const core = __importStar(__nccwpck_require__(37484)); const exec = __importStar(__nccwpck_require__(95236)); const glob = __importStar(__nccwpck_require__(16938)); const io = __importStar(__nccwpck_require__(94994)); const fs = __importStar(__nccwpck_require__(79896)); const path = __importStar(__nccwpck_require__(16928)); const semver = __importStar(__nccwpck_require__(39318)); const util = __importStar(__nccwpck_require__(39023)); const uuid_1 = __nccwpck_require__(87723); const constants_1 = __nccwpck_require__(82957); // From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 function createTempDirectory() { return __awaiter(this, void 0, void 0, function* () { const IS_WINDOWS = process.platform === 'win32'; let tempDirectory = process.env['RUNNER_TEMP'] || ''; if (!tempDirectory) { let baseLocation; if (IS_WINDOWS) { // On Windows use the USERPROFILE env variable baseLocation = process.env['USERPROFILE'] || 'C:\\'; } else { if (process.platform === 'darwin') { baseLocation = '/Users'; } else { baseLocation = '/home'; } } tempDirectory = path.join(baseLocation, 'actions', 'temp'); } const dest = path.join(tempDirectory, (0, uuid_1.v4)()); yield io.mkdirP(dest); return dest; }); } exports.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { return fs.statSync(filePath).size; } exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { var e_1, _a; var _b; return __awaiter(this, void 0, void 0, function* () { const paths = []; const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd(); const globber = yield glob.create(patterns.join('\n'), { implicitDescendants: false }); try { for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) { const file = _d.value; const relativeFile = path .relative(workspace, file) .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); core.debug(`Matched: ${relativeFile}`); // Paths are made relative so the tar entries are all relative to the root of the workspace. 
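/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * The normalization applied to each glob match above, as a standalone helper: paths are
 * made workspace-relative and backslash separators are rewritten to '/', so tar entries
 * stay portable across platforms; an empty result means the match is the workspace itself.
 *
 *   const path = require('path');
 *   function toWorkspaceRelative(workspace, file) {
 *     const rel = path.relative(workspace, file).replace(new RegExp(`\\${path.sep}`, 'g'), '/');
 *     return rel === '' ? '.' : rel;
 *   }
 */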
if (relativeFile === '') { // path.relative returns empty string if workspace and file are equal paths.push('.'); } else { paths.push(`${relativeFile}`); } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c); } finally { if (e_1) throw e_1.error; } } return paths; }); } exports.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter(this, void 0, void 0, function* () { return util.promisify(fs.unlink)(filePath); }); } exports.unlinkFile = unlinkFile; function getVersion(app, additionalArgs = []) { return __awaiter(this, void 0, void 0, function* () { let versionOutput = ''; additionalArgs.push('--version'); core.debug(`Checking ${app} ${additionalArgs.join(' ')}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, silent: true, listeners: { stdout: (data) => (versionOutput += data.toString()), stderr: (data) => (versionOutput += data.toString()) } }); } catch (err) { core.debug(err.message); } versionOutput = versionOutput.trim(); core.debug(versionOutput); return versionOutput; }); } // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { const versionOutput = yield getVersion('zstd', ['--quiet']); const version = semver.clean(versionOutput); core.debug(`zstd version: ${version}`); if (versionOutput === '') { return constants_1.CompressionMethod.Gzip; } else { return constants_1.CompressionMethod.ZstdWithoutLong; } }); } exports.getCompressionMethod = getCompressionMethod; function getCacheFileName(compressionMethod) { return compressionMethod === constants_1.CompressionMethod.Gzip ? constants_1.CacheFilename.Gzip : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion('tar'); return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); } return value; } exports.assertDefined = assertDefined; function isGhes() { const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; } exports.isGhes = isGhes; //# sourceMappingURL=cacheUtils.js.map /***/ }), /***/ 82957: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0; var CacheFilename; (function (CacheFilename) { CacheFilename["Gzip"] = "cache.tgz"; CacheFilename["Zstd"] = "cache.tzst"; })(CacheFilename = exports.CacheFilename || (exports.CacheFilename = {})); var CompressionMethod; (function (CompressionMethod) { CompressionMethod["Gzip"] = "gzip"; // Long range mode was added to zstd in v1.3.2. 
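/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * getCompressionMethod above probes for a zstd binary and falls back to gzip when none is
 * found; when zstd exists it returns ZstdWithoutLong, presumably so archives stay readable
 * by zstd builds that predate --long support (see the enum comment below).
 *
 *   async function pickCompression() {
 *     const versionOutput = await getVersion('zstd', ['--quiet']);
 *     return versionOutput === '' ? CompressionMethod.Gzip : CompressionMethod.ZstdWithoutLong;
 *   }
 */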
// This enum is for earlier version of zstd that does not have --long support CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); var ArchiveToolType; (function (ArchiveToolType) { ArchiveToolType["GNU"] = "gnu"; ArchiveToolType["BSD"] = "bsd"; })(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. exports.DefaultRetryDelay = 5000; // Socket timeout in milliseconds during download. If no traffic is received // over the socket during this period, the socket is destroyed and the download // is aborted. exports.SocketTimeout = 5000; // The default path of GNUtar on hosted Windows runners exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; // The default path of BSDtar on hosted Windows runners exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; exports.TarFilename = 'cache.tar'; exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), /***/ 7557: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.downloadCacheHttpClient = exports.downloadCachMultiConnection = exports.DownloadProgress = void 0; const core = __importStar(__nccwpck_require__(37484)); const http_client_1 = __nccwpck_require__(54844); const fs = __importStar(__nccwpck_require__(79896)); const fsPromises = __importStar(__nccwpck_require__(91943)); const stream = __importStar(__nccwpck_require__(2203)); const util = __importStar(__nccwpck_require__(39023)); const utils = __importStar(__nccwpck_require__(48625)); const constants_1 = __nccwpck_require__(82957); const requestUtils_1 = __nccwpck_require__(76668); /** * Pipes the body of a HTTP response to a stream * * @param response the HTTP response * @param output the writable stream */ function pipeResponseToStream(response, output, progress) { return __awaiter(this, void 0, void 0, function* () { const pipeline = util.promisify(stream.pipeline); yield pipeline(response.message, new stream.Transform({ transform(chunk, encoding, callback) { if (progress) { progress.setReceivedBytes(progress.getTransferredBytes() + chunk.length); } this.push(chunk); callback(); } }), output); }); } /** * Class for tracking the download state and displaying stats. */ class DownloadProgress { constructor(contentLength) { this.contentLength = contentLength; this.segmentIndex = 0; this.segmentSize = 0; this.segmentOffset = 0; this.receivedBytes = 0; this.displayedComplete = false; this.startTime = Date.now(); } /** * Progress to the next segment. Only call this method when the previous segment * is complete. * * @param segmentSize the length of the next segment */ nextSegment(segmentSize) { this.segmentOffset = this.segmentOffset + this.segmentSize; this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. * * @param receivedBytes the number of bytes received */ setReceivedBytes(receivedBytes) { this.receivedBytes = receivedBytes; } /** * Returns the total number of bytes transferred. */ getTransferredBytes() { return this.segmentOffset + this.receivedBytes; } /** * Returns true if the download is complete. */ isDone() { return this.getTransferredBytes() === this.contentLength; } /** * Prints the current download stats. Once the download completes, this will print one * last line and then stop. */ display() { if (this.displayedComplete) { return; } const transferredBytes = this.segmentOffset + this.receivedBytes; const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1000)).toFixed(1); core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } } /** * Starts the timer that displays the stats. 
* * @param delayInMs the delay between each write */ startDisplayTimer(delayInMs = 1000) { const displayCallback = () => { this.display(); if (!this.isDone()) { this.timeoutHandle = setTimeout(displayCallback, delayInMs); } }; this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** * Stops the timer that displays the stats. As this typically indicates the download * is complete, this will display one last line, unless the last line has already * been written. */ stopDisplayTimer() { if (this.timeoutHandle) { clearTimeout(this.timeoutHandle); this.timeoutHandle = undefined; } this.display(); } } exports.DownloadProgress = DownloadProgress; /** * Download the cache using the Actions toolkit http-client with multiple connections * * @param archiveLocation the URL for the cache * @param archivePath the local path where the cache is saved * @param connections number of connections to use */ function downloadCachMultiConnection(archiveLocation, archivePath, connections) { return __awaiter(this, void 0, void 0, function* () { let fileHandle; let downloadProgress; try { fileHandle = yield fsPromises.open(archivePath, 'w+'); const httpClient = new http_client_1.HttpClient('actions/cache'); //Request 1 byte to get total content size const metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation, { Range: 'bytes=0-1' }); })); const contentRange = metadataResponse.message.headers['content-range']; if (!contentRange) { console.log(yield metadataResponse.readBody()); throw new Error("Range request not supported by server"); } const match = contentRange === null || contentRange === void 0 ? void 0 : contentRange.match(/bytes \d+-\d+\/(\d+)/); if (!match) { throw new Error("Content-Range header in server response not in correct format"); } const totalLength = parseInt(match[1]); yield fileHandle.truncate(totalLength); yield fileHandle.sync(); downloadProgress = new DownloadProgress(totalLength); downloadProgress.startDisplayTimer(); const segmentSize = Math.ceil(totalLength / connections); const promises = []; for (let i = 0; i < connections; i++) { promises.push((() => __awaiter(this, void 0, void 0, function* () { const rangeStart = i * segmentSize; const rangeEnd = Math.min((i + 1) * segmentSize - 1, totalLength - 1); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation, { Range: `bytes=${rangeStart}-${rangeEnd}` }); })); const writeStream = fs.createWriteStream(archiveLocation, { fd: fileHandle.fd, autoClose: false, start: rangeStart }); yield pipeResponseToStream(downloadResponse, writeStream, downloadProgress); }))()); } yield Promise.all(promises); } finally { downloadProgress === null || downloadProgress === void 0 ? void 0 : downloadProgress.stopDisplayTimer(); yield (fileHandle === null || fileHandle === void 0 ? 
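/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * The segment arithmetic used by downloadCachMultiConnection above: the total size is read
 * from the Content-Range header of an initial ranged probe (Range: bytes=0-1), then split
 * into contiguous ranges, one per connection. A standalone helper (hypothetical name):
 *
 *   function segmentRanges(totalLength, connections) {
 *     const segmentSize = Math.ceil(totalLength / connections);
 *     const ranges = [];
 *     for (let i = 0; i < connections; i++) {
 *       const start = i * segmentSize;
 *       if (start >= totalLength) break; // tiny files need fewer segments than connections
 *       ranges.push({ start, end: Math.min((i + 1) * segmentSize - 1, totalLength - 1) });
 *     }
 *     return ranges;
 *   }
 */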
void 0 : fileHandle.close()); } }); } exports.downloadCachMultiConnection = downloadCachMultiConnection; /** * Download the cache using the Actions toolkit http-client * * @param archiveLocation the URL for the cache * @param archivePath the local path where the cache is saved */ function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const writeStream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient('actions/cache'); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); const contentLengthHeader = downloadResponse.message.headers['content-length']; let downloadProgress; if (contentLengthHeader) { downloadProgress = new DownloadProgress(parseInt(contentLengthHeader)); } // Abort download if no traffic received over the socket. downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); try { downloadProgress === null || downloadProgress === void 0 ? void 0 : downloadProgress.startDisplayTimer(); yield pipeResponseToStream(downloadResponse, writeStream, downloadProgress); } finally { downloadProgress === null || downloadProgress === void 0 ? void 0 : downloadProgress.startDisplayTimer(); } // Validate download size. if (contentLengthHeader) { const expectedLength = parseInt(contentLengthHeader); const actualLength = utils.getArchiveFileSizeInBytes(archivePath); if (actualLength !== expectedLength) { throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { core.debug('Unable to validate download, no Content-Length header'); } }); } exports.downloadCacheHttpClient = downloadCacheHttpClient; const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () { let timeoutHandle; const timeoutPromise = new Promise(resolve => { timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs); }); return Promise.race([promise, timeoutPromise]).then(result => { clearTimeout(timeoutHandle); return result; }); }); //# sourceMappingURL=downloadUtils.js.map /***/ }), /***/ 76668: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0; const core = __importStar(__nccwpck_require__(37484)); const http_client_1 = __nccwpck_require__(54844); const constants_1 = __nccwpck_require__(82957); function isSuccessStatusCode(statusCode) { if (!statusCode) { return false; } return statusCode >= 200 && statusCode < 300; } exports.isSuccessStatusCode = isSuccessStatusCode; function isServerErrorStatusCode(statusCode) { if (!statusCode) { return true; } return statusCode >= 500; } exports.isServerErrorStatusCode = isServerErrorStatusCode; function isRetryableStatusCode(statusCode) { if (!statusCode) { return false; } const retryableStatusCodes = [ http_client_1.HttpCodes.BadGateway, http_client_1.HttpCodes.ServiceUnavailable, http_client_1.HttpCodes.GatewayTimeout ]; return retryableStatusCodes.includes(statusCode); } exports.isRetryableStatusCode = isRetryableStatusCode; function sleep(milliseconds) { return __awaiter(this, void 0, void 0, function* () { return new Promise(resolve => setTimeout(resolve, milliseconds)); }); } function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) { return __awaiter(this, void 0, void 0, function* () { let errorMessage = ''; let attempt = 1; while (attempt <= maxAttempts) { let response = undefined; let statusCode = undefined; let isRetryable = false; try { response = yield method(); } catch (error) { if (onError) { response = onError(error); } isRetryable = true; errorMessage = error.message; } if (response) { statusCode = getStatusCode(response); if (!isServerErrorStatusCode(statusCode)) { return response; } } if (statusCode) { isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { core.debug(`${name} - Error is not retryable`); break; } yield sleep(delay); attempt++; } throw Error(`${name} failed: ${errorMessage}`); }); } exports.retry = retry; function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { return __awaiter(this, void 0, void 0, function* () { return 
yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, // If the error object contains the statusCode property, extract it and return // an TypedResponse so it can be processed by the retry logic. (error) => { if (error instanceof http_client_1.HttpClientError) { return { statusCode: error.statusCode, result: null, headers: {}, error }; } else { return undefined; } }); }); } exports.retryTypedResponse = retryTypedResponse; function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { return __awaiter(this, void 0, void 0, function* () { return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); }); } exports.retryHttpClientResponse = retryHttpClientResponse; //# sourceMappingURL=requestUtils.js.map /***/ }), /***/ 81739: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
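/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * How the retry helpers defined in the requestUtils module above are used: any non-5xx
 * response is returned immediately; on a 5xx, only 502/503/504 are retried, up to
 * DefaultRetryAttempts (2) with DefaultRetryDelay (5000 ms) between attempts, after which
 * the last error is thrown. The key and version below are placeholders.
 *
 *   const response = await retryTypedResponse('getCacheEntry', () =>
 *     httpClient.getJson(getCacheApiUrl(`cache?keys=${encodeURIComponent('my-key')}&version=abc123`)));
 */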
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createTar = exports.extractTar = exports.listTar = void 0; const exec_1 = __nccwpck_require__(95236); const io = __importStar(__nccwpck_require__(94994)); const fs_1 = __nccwpck_require__(79896); const path = __importStar(__nccwpck_require__(16928)); const utils = __importStar(__nccwpck_require__(48625)); const constants_1 = __nccwpck_require__(82957); const IS_WINDOWS = process.platform === 'win32'; // Returns tar path and type: BSD or GNU function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { const gnuTar = yield utils.getGnuTarPathOnWindows(); const systemTar = constants_1.SystemTarPathOnWindows; if (gnuTar) { // Use GNUtar as default on windows return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if ((0, fs_1.existsSync)(systemTar)) { return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } case 'darwin': { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else { return { path: yield io.which('tar', true), type: constants_1.ArchiveToolType.BSD }; } } default: break; } // Default assumption is GNU tar is present in path return { path: yield io.which('tar', true), type: constants_1.ArchiveToolType.GNU }; }); } // Return arguments for tar as per tarPath, compressionMethod, method type and os function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { const args = [`"${tarPath.path}"`]; const cacheFileName = utils.getCacheFileName(compressionMethod); const tarFile = 'cache.tar'; const workingDirectory = getWorkingDirectory(); // Speficic args for BSD tar on windows for workaround const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; // Method specific args switch (type) { case 'create': args.push('--posix', '-cf', BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); break; case 'extract': args.push('-xf', BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); break; case 'list': args.push('-tf', BSD_TAR_ZSTD ? 
tarFile : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); break; } // Platform specific args if (tarPath.type === constants_1.ArchiveToolType.GNU) { switch (process.platform) { case 'win32': args.push('--force-local'); break; case 'darwin': args.push('--delay-directory-restore'); break; } } return args; }); } // Returns commands to run tar and compression program function getCommands(compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { let args; const tarPath = yield getTarPath(); const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); const compressionArgs = type !== 'create' ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; if (BSD_TAR_ZSTD && type !== 'create') { args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; } else { args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; } if (BSD_TAR_ZSTD) { return args; } return [args.join(' ')]; }); } function getWorkingDirectory() { var _a; return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method function getDecompressionProgram(tarPath, compressionMethod, archivePath) { return __awaiter(this, void 0, void 0, function* () { // -d: Decompress. // unzstd is equivalent to 'zstd -d' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // Using 30 here because we also support 32-bit self-hosted runners. const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ 'zstd -d --long=30 --force -o', constants_1.TarFilename, archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : [ '--use-compress-program', IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' ]; case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ 'zstd -d --force -o', constants_1.TarFilename, archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; default: return ['-z']; } }); } // Used for creating the archive // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. // zstdmt is equivalent to 'zstd -T0' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // Using 30 here because we also support 32-bit self-hosted runners. // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. function getCompressionProgram(tarPath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { const cacheFileName = utils.getCacheFileName(compressionMethod); const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? 
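/*
 * Editor's note: illustrative sketch only (a comment, not executed by this bundle).
 * Outside the Windows BSD-tar-with-zstd special case, compression is delegated to zstd via
 * tar's --use-compress-program flag, so getCommands above typically yields a single command
 * roughly like the following (placeholders in angle brackets; <tar> is the detected binary,
 * and GNU tar additionally gets --force-local on Windows or --delay-directory-restore on macOS):
 *
 *   create:  <tar> --posix -cf cache.tzst --exclude cache.tzst -P -C <workspace> --files-from manifest.txt --use-compress-program zstdmt
 *   extract: <tar> -xf <archive>.tzst -P -C <workspace> --use-compress-program unzstd
 */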
[ 'zstd -T0 --long=30 --force -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename ] : [ '--use-compress-program', IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' ]; case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ 'zstd -T0 --force -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename ] : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; default: return ['-z']; } }); } // Executes all commands as separate processes function execCommands(commands, cwd) { return __awaiter(this, void 0, void 0, function* () { for (const command of commands) { try { yield (0, exec_1.exec)(command, undefined, { cwd, env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' }) }); } catch (error) { throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); } } }); } // List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { const commands = yield getCommands(compressionMethod, 'list', archivePath); yield execCommands(commands); }); } exports.listTar = listTar; // Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); const commands = yield getCommands(compressionMethod, 'extract', archivePath); yield execCommands(commands); }); } exports.extractTar = extractTar; // Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits (0, fs_1.writeFileSync)(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); const commands = yield getCommands(compressionMethod, 'create'); yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; //# sourceMappingURL=tar.js.map /***/ }), /***/ 54308: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.LIB_VERSION = void 0; exports.LIB_VERSION = "0.2.0"; //# sourceMappingURL=version.js.map /***/ }), /***/ 10390: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getDownloadOptions = exports.getUploadOptions = void 0; const core = __importStar(__nccwpck_require__(37484)); /** * Returns a copy of the upload options with defaults filled in. * * @param copy the original upload options */ function getUploadOptions(copy) { const result = { uploadConcurrency: 4, uploadChunkSize: 32 * 1024 * 1024 }; if (copy) { if (typeof copy.uploadConcurrency === 'number') { result.uploadConcurrency = copy.uploadConcurrency; } if (typeof copy.uploadChunkSize === 'number') { result.uploadChunkSize = copy.uploadChunkSize; } } core.debug(`Upload concurrency: ${result.uploadConcurrency}`); core.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports.getUploadOptions = getUploadOptions; /** * Returns a copy of the download options with defaults filled in. * * @param copy the original download options */ function getDownloadOptions(copy) { const result = { useAzureSdk: true, downloadConcurrency: 8, timeoutInMs: 30000, segmentTimeoutInMs: 600000, lookupOnly: false }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { result.useAzureSdk = copy.useAzureSdk; } if (typeof copy.downloadConcurrency === 'number') { result.downloadConcurrency = copy.downloadConcurrency; } if (typeof copy.timeoutInMs === 'number') { result.timeoutInMs = copy.timeoutInMs; } if (typeof copy.segmentTimeoutInMs === 'number') { result.segmentTimeoutInMs = copy.segmentTimeoutInMs; } if (typeof copy.lookupOnly === 'boolean') { result.lookupOnly = copy.lookupOnly; } } const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000; } core.debug(`Use Azure SDK: ${result.useAzureSdk}`); core.debug(`Download concurrency: ${result.downloadConcurrency}`); core.debug(`Request timeout (ms): ${result.timeoutInMs}`); core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); core.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports.getDownloadOptions = getDownloadOptions; //# sourceMappingURL=options.js.map /***/ }), /***/ 16938: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.create = void 0; const internal_globber_1 = __nccwpck_require__(70915); /** * Constructs a globber * * @param patterns Patterns separated by newlines * @param options Glob options */ function create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { return yield internal_globber_1.DefaultGlobber.create(patterns, options); }); } exports.create = create; //# sourceMappingURL=glob.js.map /***/ }), /***/ 35200: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOptions = void 0; const core = __importStar(__nccwpck_require__(37484)); /** * Returns a copy with defaults filled in. */ function getOptions(copy) { const result = { followSymbolicLinks: true, implicitDescendants: true, omitBrokenSymbolicLinks: true }; if (copy) { if (typeof copy.followSymbolicLinks === 'boolean') { result.followSymbolicLinks = copy.followSymbolicLinks; core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === 'boolean') { result.implicitDescendants = copy.implicitDescendants; core.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; } exports.getOptions = getOptions; //# sourceMappingURL=internal-glob-options-helper.js.map /***/ }), /***/ 70915: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues = (this && this.__asyncValues) || function (o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m = o[Symbol.asyncIterator], i; return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var g = generator.apply(thisArg, _arguments || []), i, q = []; return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } function fulfill(value) { resume("next", value); } function reject(value) { resume("throw", value); } function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DefaultGlobber = void 0; const core = __importStar(__nccwpck_require__(37484)); const fs = __importStar(__nccwpck_require__(79896)); const globOptionsHelper = __importStar(__nccwpck_require__(35200)); const path = __importStar(__nccwpck_require__(16928)); const patternHelper = __importStar(__nccwpck_require__(78703)); const internal_match_kind_1 = __nccwpck_require__(91104); const internal_pattern_1 = __nccwpck_require__(9158); const internal_search_state_1 = __nccwpck_require__(91054); const IS_WINDOWS = process.platform === 'win32'; class DefaultGlobber { constructor(options) { this.patterns = []; this.searchPaths = []; this.options = globOptionsHelper.getOptions(options); } getSearchPaths() { // Return a copy return this.searchPaths.slice(); } glob() { var e_1, _a; return __awaiter(this, void 0, void 0, function* () { const result = []; try { for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { const itemPath = _c.value; result.push(itemPath); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); } finally { if (e_1) throw e_1.error; } } return result; }); } globGenerator() { return __asyncGenerator(this, arguments, function* globGenerator_1() { // Fill in defaults options const options = globOptionsHelper.getOptions(this.options); // Implicit descendants? const patterns = []; for (const pattern of this.patterns) { patterns.push(pattern); if (options.implicitDescendants && (pattern.trailingSeparator || pattern.segments[pattern.segments.length - 1] !== '**')) { patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); } } // Push the search paths const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { core.debug(`Search path '${searchPath}'`); // Exists? try { // Intentionally using lstat. Detection for broken symlink // will be performed later (if following symlinks). yield __await(fs.promises.lstat(searchPath)); } catch (err) { if (err.code === 'ENOENT') { continue; } throw err; } stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); } // Search const traversalChain = []; // used to detect cycles while (stack.length) { // Pop const item = stack.pop(); // Match? const match = patternHelper.match(patterns, item.path); const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); if (!match && !partialMatch) { continue; } // Stat const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) // Broken symlink, or symlink cycle detected, or no longer exists ); // Broken symlink, or symlink cycle detected, or no longer exists if (!stats) { continue; } // Directory if (stats.isDirectory()) { // Matched if (match & internal_match_kind_1.MatchKind.Directory) { yield yield __await(item.path); } // Descend? 
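// A directory path is emitted when the Directory bit of the match result is set. Children are
// pushed onto the stack (and the walk descends) only when the path matched or some non-negated
// pattern could still match a descendant (partialMatch); otherwise the branch is pruned.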
else if (!partialMatch) { continue; } // Push the child items in reverse const childLevel = item.level + 1; const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } // File else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await(item.path); } } }); } /** * Constructs a DefaultGlobber */ static create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { const result = new DefaultGlobber(options); if (IS_WINDOWS) { patterns = patterns.replace(/\r\n/g, '\n'); patterns = patterns.replace(/\r/g, '\n'); } const lines = patterns.split('\n').map(x => x.trim()); for (const line of lines) { // Empty or comment if (!line || line.startsWith('#')) { continue; } // Pattern else { result.patterns.push(new internal_pattern_1.Pattern(line)); } } result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); return result; }); } static stat(item, options, traversalChain) { return __awaiter(this, void 0, void 0, function* () { // Note: // `stat` returns info about the target of a symlink (or symlink chain) // `lstat` returns info about a symlink itself let stats; if (options.followSymbolicLinks) { try { // Use `stat` (following symlinks) stats = yield fs.promises.stat(item.path); } catch (err) { if (err.code === 'ENOENT') { if (options.omitBrokenSymbolicLinks) { core.debug(`Broken symlink '${item.path}'`); return undefined; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); } throw err; } } else { // Use `lstat` (not following symlinks) stats = yield fs.promises.lstat(item.path); } // Note, isDirectory() returns false for the lstat of a symlink if (stats.isDirectory() && options.followSymbolicLinks) { // Get the realpath const realPath = yield fs.promises.realpath(item.path); // Fixup the traversal chain to match the item level while (traversalChain.length >= item.level) { traversalChain.pop(); } // Test for a cycle if (traversalChain.some((x) => x === realPath)) { core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return undefined; } // Update the traversal chain traversalChain.push(realPath); } return stats; }); } } exports.DefaultGlobber = DefaultGlobber; //# sourceMappingURL=internal-globber.js.map /***/ }), /***/ 91104: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MatchKind = void 0; /** * Indicates whether a pattern matches a path */ var MatchKind; (function (MatchKind) { /** Not matched */ MatchKind[MatchKind["None"] = 0] = "None"; /** Matched if the path is a directory */ MatchKind[MatchKind["Directory"] = 1] = "Directory"; /** Matched if the path is a regular file */ MatchKind[MatchKind["File"] = 2] = "File"; /** Matched */ MatchKind[MatchKind["All"] = 3] = "All"; })(MatchKind = exports.MatchKind || (exports.MatchKind = {})); //# sourceMappingURL=internal-match-kind.js.map /***/ }), /***/ 9054: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; const path = __importStar(__nccwpck_require__(16928)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const IS_WINDOWS = process.platform === 'win32'; /** * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. * * For example, on Linux/macOS: * - `/ => /` * - `/hello => /` * * For example, on Windows: * - `C:\ => C:\` * - `C:\hello => C:\` * - `C: => C:` * - `C:hello => C:` * - `\ => \` * - `\hello => \` * - `\\hello => \\hello` * - `\\hello\world => \\hello\world` */ function dirname(p) { // Normalize slashes and trim unnecessary trailing slash p = safeTrimTrailingSeparator(p); // Windows UNC root, e.g. \\hello or \\hello\world if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } // Get dirname let result = path.dirname(p); // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } return result; } exports.dirname = dirname; /** * Roots the path if not already rooted. On Windows, relative roots like `\` * or `C:` are expanded based on the current working directory. */ function ensureAbsoluteRoot(root, itemPath) { assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); // Already rooted if (hasAbsoluteRoot(itemPath)) { return itemPath; } // Windows if (IS_WINDOWS) { // Check for itemPath like C: or C:foo if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { let cwd = process.cwd(); assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); // Drive letter matches cwd? Expand to cwd if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { // Drive only, e.g. C: if (itemPath.length === 2) { // Preserve specified drive letter case (upper or lower) return `${itemPath[0]}:\\${cwd.substr(3)}`; } // Drive + path, e.g. 
C:foo else { if (!cwd.endsWith('\\')) { cwd += '\\'; } // Preserve specified drive letter case (upper or lower) return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; } } // Different drive else { return `${itemPath[0]}:\\${itemPath.substr(2)}`; } } // Check for itemPath like \ or \foo else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { const cwd = process.cwd(); assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); return `${cwd[0]}:\\${itemPath.substr(1)}`; } } assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); // Otherwise ensure root ends with a separator if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { // Intentionally empty } else { // Append separator root += path.sep; } return root + itemPath; } exports.ensureAbsoluteRoot = ensureAbsoluteRoot; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\\hello\share` and `C:\hello` (and using alternate separator). */ function hasAbsoluteRoot(itemPath) { assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); // Normalize separators itemPath = normalizeSeparators(itemPath); // Windows if (IS_WINDOWS) { // E.g. \\hello\share or C:\hello return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); } // E.g. /hello return itemPath.startsWith('/'); } exports.hasAbsoluteRoot = hasAbsoluteRoot; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). */ function hasRoot(itemPath) { assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); // Normalize separators itemPath = normalizeSeparators(itemPath); // Windows if (IS_WINDOWS) { // E.g. \ or \hello or \\hello // E.g. C: or C:\hello return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); } // E.g. /hello return itemPath.startsWith('/'); } exports.hasRoot = hasRoot; /** * Removes redundant slashes and converts `/` to `\` on Windows */ function normalizeSeparators(p) { p = p || ''; // Windows if (IS_WINDOWS) { // Convert slashes on Windows p = p.replace(/\//g, '\\'); // Remove redundant slashes const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC } // Remove redundant slashes return p.replace(/\/\/+/g, '/'); } exports.normalizeSeparators = normalizeSeparators; /** * Normalizes the path separators and trims the trailing separator (when safe). * For example, `/foo/ => /foo` but `/ => /` */ function safeTrimTrailingSeparator(p) { // Short-circuit if empty if (!p) { return ''; } // Normalize separators p = normalizeSeparators(p); // No trailing slash if (!p.endsWith(path.sep)) { return p; } // Check '/' on Linux/macOS and '\' on Windows if (p === path.sep) { return p; } // On Windows check if drive root. E.g. C:\ if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { return p; } // Otherwise trim trailing slash return p.substr(0, p.length - 1); } exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; //# sourceMappingURL=internal-path-helper.js.map /***/ }), /***/ 50581: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Path = void 0; const path = __importStar(__nccwpck_require__(16928)); const pathHelper = __importStar(__nccwpck_require__(9054)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const IS_WINDOWS = process.platform === 'win32'; /** * Helper class for parsing paths into segments */ class Path { /** * Constructs a Path * @param itemPath Path or array of segments */ constructor(itemPath) { this.segments = []; // String if (typeof itemPath === 'string') { assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); // Not rooted if (!pathHelper.hasRoot(itemPath)) { this.segments = itemPath.split(path.sep); } // Rooted else { // Add all segments, while not at the root let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { // Add the segment const basename = path.basename(remaining); this.segments.unshift(basename); // Truncate the last segment remaining = dir; dir = pathHelper.dirname(remaining); } // Remainder is the root this.segments.unshift(remaining); } } // Array else { // Must not be empty assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); // Each segment for (let i = 0; i < itemPath.length; i++) { let segment = itemPath[i]; // Must not be empty assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); // Normalize slashes segment = pathHelper.normalizeSeparators(itemPath[i]); // Root segment if (i === 0 && pathHelper.hasRoot(segment)) { segment = pathHelper.safeTrimTrailingSeparator(segment); assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } // All other segments else { // Must not contain slash assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } } } /** * Converts the path to it's string representation */ toString() { // First segment let result = this.segments[0]; // All others let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { result += path.sep; } result += this.segments[i]; } return result; } } exports.Path = Path; //# sourceMappingURL=internal-path.js.map /***/ }), /***/ 78703: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = 
(this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.partialMatch = exports.match = exports.getSearchPaths = void 0; const pathHelper = __importStar(__nccwpck_require__(9054)); const internal_match_kind_1 = __nccwpck_require__(91104); const IS_WINDOWS = process.platform === 'win32'; /** * Given an array of patterns, returns an array of paths to search. * Duplicates and paths under other included paths are filtered out. */ function getSearchPaths(patterns) { // Ignore negate patterns patterns = patterns.filter(x => !x.negate); // Create a map of all search paths const searchPathMap = {}; for (const pattern of patterns) { const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; searchPathMap[key] = 'candidate'; } const result = []; for (const pattern of patterns) { // Check if already included const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; if (searchPathMap[key] === 'included') { continue; } // Check for an ancestor search path let foundAncestor = false; let tempKey = key; let parent = pathHelper.dirname(tempKey); while (parent !== tempKey) { if (searchPathMap[parent]) { foundAncestor = true; break; } tempKey = parent; parent = pathHelper.dirname(tempKey); } // Include the search pattern in the result if (!foundAncestor) { result.push(pattern.searchPath); searchPathMap[key] = 'included'; } } return result; } exports.getSearchPaths = getSearchPaths; /** * Matches the patterns against the path */ function match(patterns, itemPath) { let result = internal_match_kind_1.MatchKind.None; for (const pattern of patterns) { if (pattern.negate) { result &= ~pattern.match(itemPath); } else { result |= pattern.match(itemPath); } } return result; } exports.match = match; /** * Checks whether to descend further into the directory */ function partialMatch(patterns, itemPath) { return patterns.some(x => !x.negate && x.partialMatch(itemPath)); } exports.partialMatch = partialMatch; //# sourceMappingURL=internal-pattern-helper.js.map /***/ }), /***/ 9158: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Pattern = void 0; const os = __importStar(__nccwpck_require__(70857)); const path = __importStar(__nccwpck_require__(16928)); const pathHelper = __importStar(__nccwpck_require__(9054)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const minimatch_1 = __nccwpck_require__(43772); const internal_match_kind_1 = __nccwpck_require__(91104); const internal_path_1 = __nccwpck_require__(50581); const IS_WINDOWS = process.platform === 'win32'; class Pattern { constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { /** * Indicates whether matches should be excluded from the result set */ this.negate = false; // Pattern overload let pattern; if (typeof patternOrNegate === 'string') { pattern = patternOrNegate.trim(); } // Segments overload else { // Convert to pattern segments = segments || []; assert_1.default(segments.length, `Parameter 'segments' must not empty`); const root = Pattern.getLiteral(segments[0]); assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); pattern = new internal_path_1.Path(segments).toString().trim(); if (patternOrNegate) { pattern = `!${pattern}`; } } // Negate while (pattern.startsWith('!')) { this.negate = !this.negate; pattern = pattern.substr(1).trim(); } // Normalize slashes and ensures absolute root pattern = Pattern.fixupPattern(pattern, homedir); // Segments this.segments = new internal_path_1.Path(pattern).segments; // Trailing slash indicates the pattern should only match directories, not regular files this.trailingSeparator = pathHelper .normalizeSeparators(pattern) .endsWith(path.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); // Search path (literal path prior to the first glob segment) let foundGlob = false; const searchSegments = this.segments .map(x => Pattern.getLiteral(x)) .filter(x => !foundGlob && !(foundGlob = x === '')); this.searchPath = new internal_path_1.Path(searchSegments).toString(); // Root RegExp (required when determining partial match) this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); this.isImplicitPattern = isImplicitPattern; // Create minimatch const minimatchOptions = { dot: true, nobrace: true, nocase: IS_WINDOWS, nocomment: true, noext: true, nonegate: true }; pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); } /** * Matches the pattern against the specified path */ match(itemPath) { // Last segment is globstar? if (this.segments[this.segments.length - 1] === '**') { // Normalize slashes itemPath = pathHelper.normalizeSeparators(itemPath); // Append a trailing slash. Otherwise Minimatch will not match the directory immediately // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns // false for `/foo` but returns true for `/foo/`. 
Append a trailing slash to handle that quirk. if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { // Note, this is safe because the constructor ensures the pattern has an absolute root. // For example, formats like C: and C:foo on Windows are resolved to an absolute root. itemPath = `${itemPath}${path.sep}`; } } else { // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); } // Match if (this.minimatch.match(itemPath)) { return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; } return internal_match_kind_1.MatchKind.None; } /** * Indicates whether the pattern may match descendants of the specified path */ partialMatch(itemPath) { // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); // matchOne does not handle root path correctly if (pathHelper.dirname(itemPath) === itemPath) { return this.rootRegExp.test(itemPath); } return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); } /** * Escapes glob patterns within a path */ static globEscape(s) { return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment .replace(/\?/g, '[?]') // escape '?' .replace(/\*/g, '[*]'); // escape '*' } /** * Normalizes slashes and ensures absolute root */ static fixupPattern(pattern, homedir) { // Empty assert_1.default(pattern, 'pattern cannot be empty'); // Must not contain `.` segment, unless first segment // Must not contain `..` segment const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); // Normalize slashes pattern = pathHelper.normalizeSeparators(pattern); // Replace leading `.` segment if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); } // Replace leading `~` segment else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { homedir = homedir || os.homedir(); assert_1.default(homedir, 'Unable to determine HOME directory'); assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); pattern = Pattern.globEscape(homedir) + pattern.substr(1); } // Replace relative drive root, e.g. pattern is C: or C:foo else if (IS_WINDOWS && (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); if (pattern.length > 2 && !root.endsWith('\\')) { root += '\\'; } pattern = Pattern.globEscape(root) + pattern.substr(2); } // Replace relative root, e.g. 
pattern is \ or \foo else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); if (!root.endsWith('\\')) { root += '\\'; } pattern = Pattern.globEscape(root) + pattern.substr(1); } // Otherwise ensure absolute root else { pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); } return pathHelper.normalizeSeparators(pattern); } /** * Attempts to unescape a pattern segment to create a literal path segment. * Otherwise returns empty string. */ static getLiteral(segment) { let literal = ''; for (let i = 0; i < segment.length; i++) { const c = segment[i]; // Escape if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { literal += segment[++i]; continue; } // Wildcard else if (c === '*' || c === '?') { return ''; } // Character set else if (c === '[' && i + 1 < segment.length) { let set = ''; let closed = -1; for (let i2 = i + 1; i2 < segment.length; i2++) { const c2 = segment[i2]; // Escape if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { set += segment[++i2]; continue; } // Closed else if (c2 === ']') { closed = i2; break; } // Otherwise else { set += c2; } } // Closed? if (closed >= 0) { // Cannot convert if (set.length > 1) { return ''; } // Convert to literal if (set) { literal += set; i = closed; continue; } } // Otherwise fall thru } // Append literal += c; } return literal; } /** * Escapes regexp special characters * https://javascript.info/regexp-escaping */ static regExpEscape(s) { return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); } } exports.Pattern = Pattern; //# sourceMappingURL=internal-pattern.js.map /***/ }), /***/ 91054: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.SearchState = void 0; class SearchState { constructor(path, level) { this.path = path; this.level = level; } } exports.SearchState = SearchState; //# sourceMappingURL=internal-search-state.js.map /***/ }), /***/ 5116: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; const core = __importStar(__nccwpck_require__(37484)); const path = __importStar(__nccwpck_require__(16928)); const utils = __importStar(__nccwpck_require__(98299)); const cacheHttpClient = __importStar(__nccwpck_require__(73171)); const cacheTwirpClient = __importStar(__nccwpck_require__(96819)); const config_1 = __nccwpck_require__(17606); const tar_1 = __nccwpck_require__(95321); const constants_1 = __nccwpck_require__(58287); class ValidationError extends Error { constructor(message) { super(message); this.name = 'ValidationError'; Object.setPrototypeOf(this, ValidationError.prototype); } } exports.ValidationError = ValidationError; class ReserveCacheError extends Error { constructor(message) { super(message); this.name = 'ReserveCacheError'; Object.setPrototypeOf(this, ReserveCacheError.prototype); } } exports.ReserveCacheError = ReserveCacheError; function checkPaths(paths) { if (!paths || paths.length === 0) { throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); } } function checkKey(key) { if (key.length > 512) { throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); } const regex = /^[^,]*$/; if (!regex.test(key)) { throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); } } /** * isFeatureAvailable to check the presence of Actions cache service * * @returns boolean return true if Actions cache service feature is available, otherwise false */ function isFeatureAvailable() { return !!process.env['ACTIONS_CACHE_URL']; } exports.isFeatureAvailable = isFeatureAvailable; /** * Restores cache from keys * * @param paths a list of file paths to restore from the cache * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching. 
* @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey * @param downloadOptions cache download options * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ function restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); core.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); switch (cacheServiceVersion) { case 'v2': return yield restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); case 'v1': default: return yield restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive); } }); } exports.restoreCache = restoreCache; /** * Restores cache using the legacy Cache Service * * @param paths a list of file paths to restore from the cache * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching. * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey * @param options cache download options * @param enableCrossOsArchive an optional boolean enabled to restore on Windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ function restoreCacheV1(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter(this, void 0, void 0, function* () { restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; core.debug('Resolved Keys:'); core.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } for (const key of keys) { checkKey(key); } const compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { compressionMethod, enableCrossOsArchive }); if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { // Cache not found return undefined; } if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { core.info('Lookup only - skipping download'); return cacheEntry.cacheKey; } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); // Download the cache from the cache entry yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath, options); if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); core.info('Cache restored successfully'); return cacheEntry.cacheKey; } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) { throw error; } else { // Supress all non-validation cache related errors because caching should be optional core.warning(`Failed to restore: ${error.message}`); } } finally { // Try to delete the archive to save space try { yield utils.unlinkFile(archivePath); } catch (error) { core.debug(`Failed to delete archive: ${error}`); } } return undefined; }); } /** * Restores cache using Cache Service v2 * * @param paths a list of file paths to restore from the cache * @param primaryKey an explicit key for restoring the cache. Lookup is done with prefix matching * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for primaryKey * @param downloadOptions cache download options * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @returns string returns the key for the cache hit, otherwise returns undefined */ function restoreCacheV2(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false) { return __awaiter(this, void 0, void 0, function* () { // Override UploadOptions to force the use of Azure options = Object.assign(Object.assign({}, options), { useAzureSdk: true }); restoreKeys = restoreKeys || []; const keys = [primaryKey, ...restoreKeys]; core.debug('Resolved Keys:'); core.debug(JSON.stringify(keys)); if (keys.length > 10) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } for (const key of keys) { checkKey(key); } let archivePath = ''; try { const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); const compressionMethod = yield utils.getCompressionMethod(); const request = { key: primaryKey, restoreKeys, version: utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive) }; const response = yield twirpClient.GetCacheEntryDownloadURL(request); if (!response.ok) { core.warning(`Cache not found for keys: ${keys.join(', ')}`); return undefined; } core.info(`Cache hit for: ${request.key}`); if (options === null || options === void 0 ? 
void 0 : options.lookupOnly) { core.info('Lookup only - skipping download'); return response.matchedKey; } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive path: ${archivePath}`); core.debug(`Starting download of archive to: ${archivePath}`); yield cacheHttpClient.downloadCache(response.signedDownloadUrl, archivePath, options); const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } yield (0, tar_1.extractTar)(archivePath, compressionMethod); core.info('Cache restored successfully'); return response.matchedKey; } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) { throw error; } else { // Supress all non-validation cache related errors because caching should be optional core.warning(`Failed to restore: ${error.message}`); } } finally { try { if (archivePath) { yield utils.unlinkFile(archivePath); } } catch (error) { core.debug(`Failed to delete archive: ${error}`); } } return undefined; }); } /** * Saves a list of files with the specified key * * @param paths a list of file paths to be cached * @param key an explicit key for restoring the cache * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @param options cache upload options * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache(paths, key, options, enableCrossOsArchive = false) { return __awaiter(this, void 0, void 0, function* () { const cacheServiceVersion = (0, config_1.getCacheServiceVersion)(); core.debug(`Cache service version: ${cacheServiceVersion}`); checkPaths(paths); checkKey(key); switch (cacheServiceVersion) { case 'v2': return yield saveCacheV2(paths, key, options, enableCrossOsArchive); case 'v1': default: return yield saveCacheV1(paths, key, options, enableCrossOsArchive); } }); } exports.saveCache = saveCache; /** * Save cache using the legacy Cache Service * * @param paths * @param key * @param options * @param enableCrossOsArchive * @returns */ function saveCacheV1(paths, key, options, enableCrossOsArchive = false) { var _a, _b, _c, _d, _e; return __awaiter(this, void 0, void 0, function* () { const compressionMethod = yield utils.getCompressionMethod(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); core.debug('Cache Paths:'); core.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.debug(`File Size: ${archiveFileSize}`); // For GHES, this check will take place in ReserveCache API with enterprise file size limit if (archiveFileSize > fileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size 
of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } core.debug('Reserving Cache'); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, { compressionMethod, enableCrossOsArchive, cacheSize: archiveFileSize }); if ((_a = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cacheId) { cacheId = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _b === void 0 ? void 0 : _b.cacheId; } else if ((reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode) === 400) { throw new Error((_d = (_c = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _c === void 0 ? void 0 : _c.message) !== null && _d !== void 0 ? _d : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); } else { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`); } core.debug(`Saving Cache (ID: ${cacheId})`); yield cacheHttpClient.saveCache(cacheId, archivePath, '', options); } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) { throw error; } else if (typedError.name === ReserveCacheError.name) { core.info(`Failed to save: ${typedError.message}`); } else { core.warning(`Failed to save: ${typedError.message}`); } } finally { // Try to delete the archive to save space try { yield utils.unlinkFile(archivePath); } catch (error) { core.debug(`Failed to delete archive: ${error}`); } } return cacheId; }); } /** * Save cache using Cache Service v2 * * @param paths a list of file paths to restore from the cache * @param key an explicit key for restoring the cache * @param options cache upload options * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @returns */ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) { return __awaiter(this, void 0, void 0, function* () { // Override UploadOptions to force the use of Azure // ...options goes first because we want to override the default values // set in UploadOptions with these specific figures options = Object.assign(Object.assign({}, options), { uploadChunkSize: 64 * 1024 * 1024, uploadConcurrency: 8, useAzureSdk: true }); const compressionMethod = yield utils.getCompressionMethod(); const twirpClient = cacheTwirpClient.internalCacheTwirpClient(); let cacheId = -1; const cachePaths = yield utils.resolvePaths(paths); core.debug('Cache Paths:'); core.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, 
compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.debug(`File Size: ${archiveFileSize}`); // For GHES, this check will take place in ReserveCache API with enterprise file size limit if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`); } // Set the archive size in the options, will be used to display the upload progress options.archiveSizeBytes = archiveFileSize; core.debug('Reserving Cache'); const version = utils.getCacheVersion(paths, compressionMethod, enableCrossOsArchive); const request = { key, version }; const response = yield twirpClient.CreateCacheEntry(request); if (!response.ok) { throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache.`); } core.debug(`Attempting to upload cache located at: ${archivePath}`); yield cacheHttpClient.saveCache(cacheId, archivePath, response.signedUploadUrl, options); const finalizeRequest = { key, version, sizeBytes: `${archiveFileSize}` }; const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest); core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`); if (!finalizeResponse.ok) { throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`); } cacheId = parseInt(finalizeResponse.entryId); } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) { throw error; } else if (typedError.name === ReserveCacheError.name) { core.info(`Failed to save: ${typedError.message}`); } else { core.warning(`Failed to save: ${typedError.message}`); } } finally { // Try to delete the archive to save space try { yield utils.unlinkFile(archivePath); } catch (error) { core.debug(`Failed to delete archive: ${error}`); } } return cacheId; }); } //# sourceMappingURL=cache.js.map /***/ }), /***/ 28200: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Timestamp = void 0; const runtime_1 = __nccwpck_require__(68886); const runtime_2 = __nccwpck_require__(68886); const runtime_3 = __nccwpck_require__(68886); const runtime_4 = __nccwpck_require__(68886); const runtime_5 = __nccwpck_require__(68886); const runtime_6 = __nccwpck_require__(68886); const runtime_7 = __nccwpck_require__(68886); // @generated message type with reflection information, may provide speed optimized methods class Timestamp$Type extends runtime_7.MessageType { constructor() { super("google.protobuf.Timestamp", [ { no: 1, name: "seconds", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, { no: 2, name: "nanos", kind: "scalar", T: 5 /*ScalarType.INT32*/ } ]); } /** * Creates a new `Timestamp` for the current time. */ now() { const msg = this.create(); const ms = Date.now(); msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString(); msg.nanos = (ms % 1000) * 1000000; return msg; } /** * Converts a `Timestamp` to a JavaScript Date. */ toDate(message) { return new Date(runtime_6.PbLong.from(message.seconds).toNumber() * 1000 + Math.ceil(message.nanos / 1000000)); } /** * Converts a JavaScript Date to a `Timestamp`. 
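     * Only millisecond precision is carried over; nanos is derived from the Date's sub-second milliseconds.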
*/ fromDate(date) { const msg = this.create(); const ms = date.getTime(); msg.seconds = runtime_6.PbLong.from(Math.floor(ms / 1000)).toString(); msg.nanos = (ms % 1000) * 1000000; return msg; } /** * In JSON format, the `Timestamp` type is encoded as a string * in the RFC 3339 format. */ internalJsonWrite(message, options) { let ms = runtime_6.PbLong.from(message.seconds).toNumber() * 1000; if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) throw new Error("Unable to encode Timestamp to JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); if (message.nanos < 0) throw new Error("Unable to encode invalid Timestamp to JSON. Nanos must not be negative."); let z = "Z"; if (message.nanos > 0) { let nanosStr = (message.nanos + 1000000000).toString().substring(1); if (nanosStr.substring(3) === "000000") z = "." + nanosStr.substring(0, 3) + "Z"; else if (nanosStr.substring(6) === "000") z = "." + nanosStr.substring(0, 6) + "Z"; else z = "." + nanosStr + "Z"; } return new Date(ms).toISOString().replace(".000Z", z); } /** * In JSON format, the `Timestamp` type is encoded as a string * in the RFC 3339 format. */ internalJsonRead(json, options, target) { if (typeof json !== "string") throw new Error("Unable to parse Timestamp from JSON " + (0, runtime_5.typeofJsonValue)(json) + "."); let matches = json.match(/^([0-9]{4})-([0-9]{2})-([0-9]{2})T([0-9]{2}):([0-9]{2}):([0-9]{2})(?:Z|\.([0-9]{3,9})Z|([+-][0-9][0-9]:[0-9][0-9]))$/); if (!matches) throw new Error("Unable to parse Timestamp from JSON. Invalid format."); let ms = Date.parse(matches[1] + "-" + matches[2] + "-" + matches[3] + "T" + matches[4] + ":" + matches[5] + ":" + matches[6] + (matches[8] ? matches[8] : "Z")); if (Number.isNaN(ms)) throw new Error("Unable to parse Timestamp from JSON. Invalid value."); if (ms < Date.parse("0001-01-01T00:00:00Z") || ms > Date.parse("9999-12-31T23:59:59Z")) throw new globalThis.Error("Unable to parse Timestamp from JSON. Must be from 0001-01-01T00:00:00Z to 9999-12-31T23:59:59Z inclusive."); if (!target) target = this.create(); target.seconds = runtime_6.PbLong.from(ms / 1000).toString(); target.nanos = 0; if (matches[7]) target.nanos = (parseInt("1" + matches[7] + "0".repeat(9 - matches[7].length)) - 1000000000); return target; } create(value) { const message = { seconds: "0", nanos: 0 }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* int64 seconds */ 1: message.seconds = reader.int64().toString(); break; case /* int32 nanos */ 2: message.nanos = reader.int32(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? 
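/*
 * Illustrative sketch only (comment, not executed): the generated Timestamp
 * type defined here (exported just below as exports.Timestamp) can be used
 * roughly like this; variable names are hypothetical:
 *
 *   const ts = exports.Timestamp.now();            // seconds as a string, nanos as a number
 *   const asDate = exports.Timestamp.toDate(ts);   // back to a JavaScript Date
 *   const epoch = exports.Timestamp.fromDate(new Date(0));
 *
 * In JSON form the message is encoded as an RFC 3339 string, per the
 * internalJsonWrite / internalJsonRead methods above.
 */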
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* int64 seconds = 1; */ if (message.seconds !== "0") writer.tag(1, runtime_1.WireType.Varint).int64(message.seconds); /* int32 nanos = 2; */ if (message.nanos !== 0) writer.tag(2, runtime_1.WireType.Varint).int32(message.nanos); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message google.protobuf.Timestamp */ exports.Timestamp = new Timestamp$Type(); //# sourceMappingURL=timestamp.js.map /***/ }), /***/ 93156: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.CacheService = exports.LookupCacheEntryResponse = exports.LookupCacheEntryRequest = exports.ListCacheEntriesResponse = exports.ListCacheEntriesRequest = exports.DeleteCacheEntryResponse = exports.DeleteCacheEntryRequest = exports.GetCacheEntryDownloadURLResponse = exports.GetCacheEntryDownloadURLRequest = exports.FinalizeCacheEntryUploadResponse = exports.FinalizeCacheEntryUploadRequest = exports.CreateCacheEntryResponse = exports.CreateCacheEntryRequest = void 0; // @generated by protobuf-ts 2.9.1 with parameter long_type_string,client_none,generate_dependencies // @generated from protobuf file "results/api/v1/cache.proto" (package "github.actions.results.api.v1", syntax proto3) // tslint:disable const runtime_rpc_1 = __nccwpck_require__(44420); const runtime_1 = __nccwpck_require__(68886); const runtime_2 = __nccwpck_require__(68886); const runtime_3 = __nccwpck_require__(68886); const runtime_4 = __nccwpck_require__(68886); const runtime_5 = __nccwpck_require__(68886); const cacheentry_1 = __nccwpck_require__(55893); const cachemetadata_1 = __nccwpck_require__(89444); // @generated message type with reflection information, may provide speed optimized methods class CreateCacheEntryRequest$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.CreateCacheEntryRequest", [ { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 3, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value) { const message = { key: "", version: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; case /* string key */ 2: message.key = reader.string(); break; case /* string version */ 3: message.version = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? 
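/*
 * Illustrative sketch only (comment, not executed): the generated request type
 * defined here (exported just below as exports.CreateCacheEntryRequest) is
 * built from a partial object and can be serialized with the MessageType
 * helpers used elsewhere in this bundle; key and version values are
 * hypothetical:
 *
 *   const req = exports.CreateCacheEntryRequest.create({
 *     key: 'npm-linux-x64-abc123',
 *     version: 'deadbeef'
 *   });
 *   const bytes = exports.CreateCacheEntryRequest.toBinary(req);
 *   const roundTripped = exports.CreateCacheEntryRequest.fromBinary(bytes);
 */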
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ if (message.metadata) cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); /* string key = 2; */ if (message.key !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); /* string version = 3; */ if (message.version !== "") writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.version); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryRequest */ exports.CreateCacheEntryRequest = new CreateCacheEntryRequest$Type(); // @generated message type with reflection information, may provide speed optimized methods class CreateCacheEntryResponse$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.CreateCacheEntryResponse", [ { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, { no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value) { const message = { ok: false, signedUploadUrl: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* bool ok */ 1: message.ok = reader.bool(); break; case /* string signed_upload_url */ 2: message.signedUploadUrl = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* bool ok = 1; */ if (message.ok !== false) writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); /* string signed_upload_url = 2; */ if (message.signedUploadUrl !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.CreateCacheEntryResponse */ exports.CreateCacheEntryResponse = new CreateCacheEntryResponse$Type(); // @generated message type with reflection information, may provide speed optimized methods class FinalizeCacheEntryUploadRequest$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.FinalizeCacheEntryUploadRequest", [ { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value) { const message = { key: "", sizeBytes: "0", version: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; case /* string key */ 2: message.key = reader.string(); break; case /* int64 size_bytes */ 3: message.sizeBytes = reader.int64().toString(); break; case /* string version */ 4: message.version = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ if (message.metadata) cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); /* string key = 2; */ if (message.key !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); /* int64 size_bytes = 3; */ if (message.sizeBytes !== "0") writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); /* string version = 4; */ if (message.version !== "") writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
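/*
 * Illustrative sketch only (comment, not executed): size_bytes is an int64
 * generated with the long_type_string option, so it travels as a decimal
 * string. A finalize request might look roughly like this; the key and version
 * are hypothetical and archiveFileSize stands for a size computed by the
 * caller:
 *
 *   const finalize = exports.FinalizeCacheEntryUploadRequest.create({
 *     key: 'npm-linux-x64-abc123',
 *     version: 'deadbeef',
 *     sizeBytes: String(archiveFileSize)   // e.g. "1048576"
 *   });
 */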
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadRequest */ exports.FinalizeCacheEntryUploadRequest = new FinalizeCacheEntryUploadRequest$Type(); // @generated message type with reflection information, may provide speed optimized methods class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [ { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ } ]); } create(value) { const message = { ok: false, entryId: "0" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* bool ok */ 1: message.ok = reader.bool(); break; case /* int64 entry_id */ 2: message.entryId = reader.int64().toString(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* bool ok = 1; */ if (message.ok !== false) writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); /* int64 entry_id = 2; */ if (message.entryId !== "0") writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.FinalizeCacheEntryUploadResponse */ exports.FinalizeCacheEntryUploadResponse = new FinalizeCacheEntryUploadResponse$Type(); // @generated message type with reflection information, may provide speed optimized methods class GetCacheEntryDownloadURLRequest$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.GetCacheEntryDownloadURLRequest", [ { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value) { const message = { key: "", restoreKeys: [], version: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; case /* string key */ 2: message.key = reader.string(); break; case /* repeated string restore_keys */ 3: message.restoreKeys.push(reader.string()); break; case /* string version */ 4: message.version = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ if (message.metadata) cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); /* string key = 2; */ if (message.key !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); /* repeated string restore_keys = 3; */ for (let i = 0; i < message.restoreKeys.length; i++) writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); /* string version = 4; */ if (message.version !== "") writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLRequest */ exports.GetCacheEntryDownloadURLRequest = new GetCacheEntryDownloadURLRequest$Type(); // @generated message type with reflection information, may provide speed optimized methods class GetCacheEntryDownloadURLResponse$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.GetCacheEntryDownloadURLResponse", [ { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, { no: 2, name: "signed_download_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 3, name: "matched_key", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value) { const message = { ok: false, signedDownloadUrl: "", matchedKey: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* bool ok */ 1: message.ok = reader.bool(); break; case /* string signed_download_url */ 2: message.signedDownloadUrl = reader.string(); break; case /* string matched_key */ 3: message.matchedKey = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? 
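/*
 * Illustrative sketch only (comment, not executed): a restore lookup sends the
 * primary key plus an ordered list of restore keys, and the response carries
 * ok, signed_download_url, and matched_key. The values here are hypothetical:
 *
 *   const req = exports.GetCacheEntryDownloadURLRequest.create({
 *     key: 'npm-linux-x64-abc123',
 *     restoreKeys: ['npm-linux-x64-', 'npm-'],
 *     version: 'deadbeef'
 *   });
 */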
runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* bool ok = 1; */ if (message.ok !== false) writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); /* string signed_download_url = 2; */ if (message.signedDownloadUrl !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedDownloadUrl); /* string matched_key = 3; */ if (message.matchedKey !== "") writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.matchedKey); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.GetCacheEntryDownloadURLResponse */ exports.GetCacheEntryDownloadURLResponse = new GetCacheEntryDownloadURLResponse$Type(); // @generated message type with reflection information, may provide speed optimized methods class DeleteCacheEntryRequest$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.DeleteCacheEntryRequest", [ { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value) { const message = { key: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; case /* string key */ 2: message.key = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ if (message.metadata) cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); /* string key = 2; */ if (message.key !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryRequest */ exports.DeleteCacheEntryRequest = new DeleteCacheEntryRequest$Type(); // @generated message type with reflection information, may provide speed optimized methods class DeleteCacheEntryResponse$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.DeleteCacheEntryResponse", [ { no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, { no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ } ]); } create(value) { const message = { ok: false, entryId: "0" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* bool ok */ 1: message.ok = reader.bool(); break; case /* int64 entry_id */ 2: message.entryId = reader.int64().toString(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* bool ok = 1; */ if (message.ok !== false) writer.tag(1, runtime_1.WireType.Varint).bool(message.ok); /* int64 entry_id = 2; */ if (message.entryId !== "0") writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.DeleteCacheEntryResponse */ exports.DeleteCacheEntryResponse = new DeleteCacheEntryResponse$Type(); // @generated message type with reflection information, may provide speed optimized methods class ListCacheEntriesRequest$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.ListCacheEntriesRequest", [ { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ } ]); } create(value) { const message = { key: "", restoreKeys: [] }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; case /* string key */ 2: message.key = reader.string(); break; case /* repeated string restore_keys */ 3: message.restoreKeys.push(reader.string()); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ if (message.metadata) cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); /* string key = 2; */ if (message.key !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); /* repeated string restore_keys = 3; */ for (let i = 0; i < message.restoreKeys.length; i++) writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesRequest */ exports.ListCacheEntriesRequest = new ListCacheEntriesRequest$Type(); // @generated message type with reflection information, may provide speed optimized methods class ListCacheEntriesResponse$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.ListCacheEntriesResponse", [ { no: 1, name: "entries", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cacheentry_1.CacheEntry } ]); } create(value) { const message = { entries: [] }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* repeated github.actions.results.entities.v1.CacheEntry entries */ 1: message.entries.push(cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options)); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* repeated github.actions.results.entities.v1.CacheEntry entries = 1; */ for (let i = 0; i < message.entries.length; i++) cacheentry_1.CacheEntry.internalBinaryWrite(message.entries[i], writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.ListCacheEntriesResponse */ exports.ListCacheEntriesResponse = new ListCacheEntriesResponse$Type(); // @generated message type with reflection information, may provide speed optimized methods class LookupCacheEntryRequest$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.LookupCacheEntryRequest", [ { no: 1, name: "metadata", kind: "message", T: () => cachemetadata_1.CacheMetadata }, { no: 2, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 3, name: "restore_keys", kind: "scalar", repeat: 2 /*RepeatType.UNPACKED*/, T: 9 /*ScalarType.STRING*/ }, { no: 4, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ } ]); } create(value) { const message = { key: "", restoreKeys: [], version: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* github.actions.results.entities.v1.CacheMetadata metadata */ 1: message.metadata = cachemetadata_1.CacheMetadata.internalBinaryRead(reader, reader.uint32(), options, message.metadata); break; case /* string key */ 2: message.key = reader.string(); break; case /* repeated string restore_keys */ 3: message.restoreKeys.push(reader.string()); break; case /* string version */ 4: message.version = reader.string(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* github.actions.results.entities.v1.CacheMetadata metadata = 1; */ if (message.metadata) cachemetadata_1.CacheMetadata.internalBinaryWrite(message.metadata, writer.tag(1, runtime_1.WireType.LengthDelimited).fork(), options).join(); /* string key = 2; */ if (message.key !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.key); /* repeated string restore_keys = 3; */ for (let i = 0; i < message.restoreKeys.length; i++) writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.restoreKeys[i]); /* string version = 4; */ if (message.version !== "") writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.version); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryRequest */ exports.LookupCacheEntryRequest = new LookupCacheEntryRequest$Type(); // @generated message type with reflection information, may provide speed optimized methods class LookupCacheEntryResponse$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.api.v1.LookupCacheEntryResponse", [ { no: 1, name: "exists", kind: "scalar", T: 8 /*ScalarType.BOOL*/ }, { no: 2, name: "entry", kind: "message", T: () => cacheentry_1.CacheEntry } ]); } create(value) { const message = { exists: false }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* bool exists */ 1: message.exists = reader.bool(); break; case /* github.actions.results.entities.v1.CacheEntry entry */ 2: message.entry = cacheentry_1.CacheEntry.internalBinaryRead(reader, reader.uint32(), options, message.entry); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* bool exists = 1; */ if (message.exists !== false) writer.tag(1, runtime_1.WireType.Varint).bool(message.exists); /* github.actions.results.entities.v1.CacheEntry entry = 2; */ if (message.entry) cacheentry_1.CacheEntry.internalBinaryWrite(message.entry, writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.api.v1.LookupCacheEntryResponse */ exports.LookupCacheEntryResponse = new LookupCacheEntryResponse$Type(); /** * @generated ServiceType for protobuf service github.actions.results.api.v1.CacheService */ exports.CacheService = new runtime_rpc_1.ServiceType("github.actions.results.api.v1.CacheService", [ { name: "CreateCacheEntry", options: {}, I: exports.CreateCacheEntryRequest, O: exports.CreateCacheEntryResponse }, { name: "FinalizeCacheEntryUpload", options: {}, I: exports.FinalizeCacheEntryUploadRequest, O: exports.FinalizeCacheEntryUploadResponse }, { name: "GetCacheEntryDownloadURL", options: {}, I: exports.GetCacheEntryDownloadURLRequest, O: exports.GetCacheEntryDownloadURLResponse }, { name: "DeleteCacheEntry", options: {}, I: exports.DeleteCacheEntryRequest, O: exports.DeleteCacheEntryResponse }, { name: "ListCacheEntries", options: {}, I: exports.ListCacheEntriesRequest, O: exports.ListCacheEntriesResponse }, { name: "LookupCacheEntry", options: {}, I: exports.LookupCacheEntryRequest, O: exports.LookupCacheEntryResponse } ]); //# sourceMappingURL=cache.js.map /***/ }), /***/ 80564: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
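/*
 * Illustrative sketch only (comment, not executed): CacheServiceClientJSON
 * (defined below) only needs an rpc object exposing
 * request(service, method, contentType, data). A minimal adapter might look
 * roughly like this; fetchJson and the /twirp route prefix are hypothetical
 * and supplied by the caller:
 *
 *   const client = new CacheServiceClientJSON({
 *     request: (service, method, contentType, data) =>
 *       fetchJson('/twirp/' + service + '/' + method, contentType, data)
 *   });
 *   const res = await client.CreateCacheEntry({ key: 'k', version: 'v' });
 */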
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createCacheServiceServer = exports.CacheServiceMethodList = exports.CacheServiceMethod = exports.CacheServiceClientProtobuf = exports.CacheServiceClientJSON = void 0; const twirp_ts_1 = __nccwpck_require__(30430); const cache_1 = __nccwpck_require__(93156); class CacheServiceClientJSON { constructor(rpc) { this.rpc = rpc; this.CreateCacheEntry.bind(this); this.FinalizeCacheEntryUpload.bind(this); this.GetCacheEntryDownloadURL.bind(this); this.DeleteCacheEntry.bind(this); this.ListCacheEntries.bind(this); this.LookupCacheEntry.bind(this); } CreateCacheEntry(request) { const data = cache_1.CreateCacheEntryRequest.toJson(request, { useProtoFieldName: true, emitDefaultValues: false, }); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/json", data); return promise.then((data) => cache_1.CreateCacheEntryResponse.fromJson(data, { ignoreUnknownFields: true, })); } FinalizeCacheEntryUpload(request) { const data = cache_1.FinalizeCacheEntryUploadRequest.toJson(request, { useProtoFieldName: true, emitDefaultValues: false, }); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/json", data); return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromJson(data, { ignoreUnknownFields: true, })); } GetCacheEntryDownloadURL(request) { const data = cache_1.GetCacheEntryDownloadURLRequest.toJson(request, { useProtoFieldName: true, emitDefaultValues: false, }); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/json", data); return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromJson(data, { ignoreUnknownFields: true, })); } DeleteCacheEntry(request) { const data = cache_1.DeleteCacheEntryRequest.toJson(request, { useProtoFieldName: true, emitDefaultValues: false, }); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/json", data); return promise.then((data) => cache_1.DeleteCacheEntryResponse.fromJson(data, { ignoreUnknownFields: true, })); } ListCacheEntries(request) { const data = cache_1.ListCacheEntriesRequest.toJson(request, { useProtoFieldName: true, emitDefaultValues: false, }); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/json", data); return promise.then((data) => cache_1.ListCacheEntriesResponse.fromJson(data, { ignoreUnknownFields: true, })); } LookupCacheEntry(request) { const data = cache_1.LookupCacheEntryRequest.toJson(request, { useProtoFieldName: true, emitDefaultValues: false, }); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/json", data); return promise.then((data) => cache_1.LookupCacheEntryResponse.fromJson(data, { ignoreUnknownFields: true, })); } } exports.CacheServiceClientJSON = CacheServiceClientJSON; class CacheServiceClientProtobuf { constructor(rpc) { this.rpc = rpc; this.CreateCacheEntry.bind(this); this.FinalizeCacheEntryUpload.bind(this); this.GetCacheEntryDownloadURL.bind(this); this.DeleteCacheEntry.bind(this); this.ListCacheEntries.bind(this); this.LookupCacheEntry.bind(this); } CreateCacheEntry(request) { const data = 
cache_1.CreateCacheEntryRequest.toBinary(request); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "CreateCacheEntry", "application/protobuf", data); return promise.then((data) => cache_1.CreateCacheEntryResponse.fromBinary(data)); } FinalizeCacheEntryUpload(request) { const data = cache_1.FinalizeCacheEntryUploadRequest.toBinary(request); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "FinalizeCacheEntryUpload", "application/protobuf", data); return promise.then((data) => cache_1.FinalizeCacheEntryUploadResponse.fromBinary(data)); } GetCacheEntryDownloadURL(request) { const data = cache_1.GetCacheEntryDownloadURLRequest.toBinary(request); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "GetCacheEntryDownloadURL", "application/protobuf", data); return promise.then((data) => cache_1.GetCacheEntryDownloadURLResponse.fromBinary(data)); } DeleteCacheEntry(request) { const data = cache_1.DeleteCacheEntryRequest.toBinary(request); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "DeleteCacheEntry", "application/protobuf", data); return promise.then((data) => cache_1.DeleteCacheEntryResponse.fromBinary(data)); } ListCacheEntries(request) { const data = cache_1.ListCacheEntriesRequest.toBinary(request); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "ListCacheEntries", "application/protobuf", data); return promise.then((data) => cache_1.ListCacheEntriesResponse.fromBinary(data)); } LookupCacheEntry(request) { const data = cache_1.LookupCacheEntryRequest.toBinary(request); const promise = this.rpc.request("github.actions.results.api.v1.CacheService", "LookupCacheEntry", "application/protobuf", data); return promise.then((data) => cache_1.LookupCacheEntryResponse.fromBinary(data)); } } exports.CacheServiceClientProtobuf = CacheServiceClientProtobuf; var CacheServiceMethod; (function (CacheServiceMethod) { CacheServiceMethod["CreateCacheEntry"] = "CreateCacheEntry"; CacheServiceMethod["FinalizeCacheEntryUpload"] = "FinalizeCacheEntryUpload"; CacheServiceMethod["GetCacheEntryDownloadURL"] = "GetCacheEntryDownloadURL"; CacheServiceMethod["DeleteCacheEntry"] = "DeleteCacheEntry"; CacheServiceMethod["ListCacheEntries"] = "ListCacheEntries"; CacheServiceMethod["LookupCacheEntry"] = "LookupCacheEntry"; })(CacheServiceMethod || (exports.CacheServiceMethod = CacheServiceMethod = {})); exports.CacheServiceMethodList = [ CacheServiceMethod.CreateCacheEntry, CacheServiceMethod.FinalizeCacheEntryUpload, CacheServiceMethod.GetCacheEntryDownloadURL, CacheServiceMethod.DeleteCacheEntry, CacheServiceMethod.ListCacheEntries, CacheServiceMethod.LookupCacheEntry, ]; function createCacheServiceServer(service) { return new twirp_ts_1.TwirpServer({ service, packageName: "github.actions.results.api.v1", serviceName: "CacheService", methodList: exports.CacheServiceMethodList, matchRoute: matchCacheServiceRoute, }); } exports.createCacheServiceServer = createCacheServiceServer; function matchCacheServiceRoute(method, events) { switch (method) { case "CreateCacheEntry": return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () { ctx = Object.assign(Object.assign({}, ctx), { methodName: "CreateCacheEntry" }); yield events.onMatch(ctx); return handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors); }); case "FinalizeCacheEntryUpload": return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () 
{ ctx = Object.assign(Object.assign({}, ctx), { methodName: "FinalizeCacheEntryUpload" }); yield events.onMatch(ctx); return handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors); }); case "GetCacheEntryDownloadURL": return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () { ctx = Object.assign(Object.assign({}, ctx), { methodName: "GetCacheEntryDownloadURL" }); yield events.onMatch(ctx); return handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors); }); case "DeleteCacheEntry": return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () { ctx = Object.assign(Object.assign({}, ctx), { methodName: "DeleteCacheEntry" }); yield events.onMatch(ctx); return handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors); }); case "ListCacheEntries": return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () { ctx = Object.assign(Object.assign({}, ctx), { methodName: "ListCacheEntries" }); yield events.onMatch(ctx); return handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors); }); case "LookupCacheEntry": return (ctx, service, data, interceptors) => __awaiter(this, void 0, void 0, function* () { ctx = Object.assign(Object.assign({}, ctx), { methodName: "LookupCacheEntry" }); yield events.onMatch(ctx); return handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors); }); default: events.onNotFound(); const msg = `no handler found`; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } } function handleCacheServiceCreateCacheEntryRequest(ctx, service, data, interceptors) { switch (ctx.contentType) { case twirp_ts_1.TwirpContentType.JSON: return handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors); case twirp_ts_1.TwirpContentType.Protobuf: return handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors); default: const msg = "unexpected Content-Type"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } } function handleCacheServiceFinalizeCacheEntryUploadRequest(ctx, service, data, interceptors) { switch (ctx.contentType) { case twirp_ts_1.TwirpContentType.JSON: return handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors); case twirp_ts_1.TwirpContentType.Protobuf: return handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors); default: const msg = "unexpected Content-Type"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } } function handleCacheServiceGetCacheEntryDownloadURLRequest(ctx, service, data, interceptors) { switch (ctx.contentType) { case twirp_ts_1.TwirpContentType.JSON: return handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors); case twirp_ts_1.TwirpContentType.Protobuf: return handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors); default: const msg = "unexpected Content-Type"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } } function handleCacheServiceDeleteCacheEntryRequest(ctx, service, data, interceptors) { switch (ctx.contentType) { case twirp_ts_1.TwirpContentType.JSON: return handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors); case twirp_ts_1.TwirpContentType.Protobuf: return handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors); default: const msg = "unexpected Content-Type"; 
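/*
 * Illustrative sketch only (comment, not executed): each route dispatches on
 * ctx.contentType, so one route serves both the JSON and Protobuf handlers. A
 * server might be wired up roughly like this; myCacheService is a hypothetical
 * object implementing the six CacheService methods:
 *
 *   const server = createCacheServiceServer(myCacheService);
 *   // server is a twirp_ts TwirpServer routing
 *   // github.actions.results.api.v1.CacheService methods by Content-Type.
 */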
throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } } function handleCacheServiceListCacheEntriesRequest(ctx, service, data, interceptors) { switch (ctx.contentType) { case twirp_ts_1.TwirpContentType.JSON: return handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors); case twirp_ts_1.TwirpContentType.Protobuf: return handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors); default: const msg = "unexpected Content-Type"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } } function handleCacheServiceLookupCacheEntryRequest(ctx, service, data, interceptors) { switch (ctx.contentType) { case twirp_ts_1.TwirpContentType.JSON: return handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors); case twirp_ts_1.TwirpContentType.Protobuf: return handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors); default: const msg = "unexpected Content-Type"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.BadRoute, msg); } } function handleCacheServiceCreateCacheEntryJSON(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { const body = JSON.parse(data.toString() || "{}"); request = cache_1.CreateCacheEntryRequest.fromJson(body, { ignoreUnknownFields: true, }); } catch (e) { if (e instanceof Error) { const msg = "the json request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.CreateCacheEntry(ctx, inputReq); }); } else { response = yield service.CreateCacheEntry(ctx, request); } return JSON.stringify(cache_1.CreateCacheEntryResponse.toJson(response, { useProtoFieldName: true, emitDefaultValues: false, })); }); } function handleCacheServiceFinalizeCacheEntryUploadJSON(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { const body = JSON.parse(data.toString() || "{}"); request = cache_1.FinalizeCacheEntryUploadRequest.fromJson(body, { ignoreUnknownFields: true, }); } catch (e) { if (e instanceof Error) { const msg = "the json request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.FinalizeCacheEntryUpload(ctx, inputReq); }); } else { response = yield service.FinalizeCacheEntryUpload(ctx, request); } return JSON.stringify(cache_1.FinalizeCacheEntryUploadResponse.toJson(response, { useProtoFieldName: true, emitDefaultValues: false, })); }); } function handleCacheServiceGetCacheEntryDownloadURLJSON(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { const body = JSON.parse(data.toString() || "{}"); request = cache_1.GetCacheEntryDownloadURLRequest.fromJson(body, { ignoreUnknownFields: true, }); } catch (e) { if (e instanceof Error) { const msg = "the json request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const 
interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.GetCacheEntryDownloadURL(ctx, inputReq); }); } else { response = yield service.GetCacheEntryDownloadURL(ctx, request); } return JSON.stringify(cache_1.GetCacheEntryDownloadURLResponse.toJson(response, { useProtoFieldName: true, emitDefaultValues: false, })); }); } function handleCacheServiceDeleteCacheEntryJSON(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { const body = JSON.parse(data.toString() || "{}"); request = cache_1.DeleteCacheEntryRequest.fromJson(body, { ignoreUnknownFields: true, }); } catch (e) { if (e instanceof Error) { const msg = "the json request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.DeleteCacheEntry(ctx, inputReq); }); } else { response = yield service.DeleteCacheEntry(ctx, request); } return JSON.stringify(cache_1.DeleteCacheEntryResponse.toJson(response, { useProtoFieldName: true, emitDefaultValues: false, })); }); } function handleCacheServiceListCacheEntriesJSON(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { const body = JSON.parse(data.toString() || "{}"); request = cache_1.ListCacheEntriesRequest.fromJson(body, { ignoreUnknownFields: true, }); } catch (e) { if (e instanceof Error) { const msg = "the json request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.ListCacheEntries(ctx, inputReq); }); } else { response = yield service.ListCacheEntries(ctx, request); } return JSON.stringify(cache_1.ListCacheEntriesResponse.toJson(response, { useProtoFieldName: true, emitDefaultValues: false, })); }); } function handleCacheServiceLookupCacheEntryJSON(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { const body = JSON.parse(data.toString() || "{}"); request = cache_1.LookupCacheEntryRequest.fromJson(body, { ignoreUnknownFields: true, }); } catch (e) { if (e instanceof Error) { const msg = "the json request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.LookupCacheEntry(ctx, inputReq); }); } else { response = yield service.LookupCacheEntry(ctx, request); } return JSON.stringify(cache_1.LookupCacheEntryResponse.toJson(response, { useProtoFieldName: true, emitDefaultValues: false, })); }); } function handleCacheServiceCreateCacheEntryProtobuf(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { request = cache_1.CreateCacheEntryRequest.fromBinary(data); } catch (e) { if (e instanceof Error) { const msg = "the protobuf request could not be 
decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.CreateCacheEntry(ctx, inputReq); }); } else { response = yield service.CreateCacheEntry(ctx, request); } return Buffer.from(cache_1.CreateCacheEntryResponse.toBinary(response)); }); } function handleCacheServiceFinalizeCacheEntryUploadProtobuf(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { request = cache_1.FinalizeCacheEntryUploadRequest.fromBinary(data); } catch (e) { if (e instanceof Error) { const msg = "the protobuf request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.FinalizeCacheEntryUpload(ctx, inputReq); }); } else { response = yield service.FinalizeCacheEntryUpload(ctx, request); } return Buffer.from(cache_1.FinalizeCacheEntryUploadResponse.toBinary(response)); }); } function handleCacheServiceGetCacheEntryDownloadURLProtobuf(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { request = cache_1.GetCacheEntryDownloadURLRequest.fromBinary(data); } catch (e) { if (e instanceof Error) { const msg = "the protobuf request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.GetCacheEntryDownloadURL(ctx, inputReq); }); } else { response = yield service.GetCacheEntryDownloadURL(ctx, request); } return Buffer.from(cache_1.GetCacheEntryDownloadURLResponse.toBinary(response)); }); } function handleCacheServiceDeleteCacheEntryProtobuf(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { request = cache_1.DeleteCacheEntryRequest.fromBinary(data); } catch (e) { if (e instanceof Error) { const msg = "the protobuf request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.DeleteCacheEntry(ctx, inputReq); }); } else { response = yield service.DeleteCacheEntry(ctx, request); } return Buffer.from(cache_1.DeleteCacheEntryResponse.toBinary(response)); }); } function handleCacheServiceListCacheEntriesProtobuf(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { request = cache_1.ListCacheEntriesRequest.fromBinary(data); } catch (e) { if (e instanceof Error) { const msg = "the protobuf request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); 
response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.ListCacheEntries(ctx, inputReq); }); } else { response = yield service.ListCacheEntries(ctx, request); } return Buffer.from(cache_1.ListCacheEntriesResponse.toBinary(response)); }); } function handleCacheServiceLookupCacheEntryProtobuf(ctx, service, data, interceptors) { return __awaiter(this, void 0, void 0, function* () { let request; let response; try { request = cache_1.LookupCacheEntryRequest.fromBinary(data); } catch (e) { if (e instanceof Error) { const msg = "the protobuf request could not be decoded"; throw new twirp_ts_1.TwirpError(twirp_ts_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } if (interceptors && interceptors.length > 0) { const interceptor = (0, twirp_ts_1.chainInterceptors)(...interceptors); response = yield interceptor(ctx, request, (ctx, inputReq) => { return service.LookupCacheEntry(ctx, inputReq); }); } else { response = yield service.LookupCacheEntry(ctx, request); } return Buffer.from(cache_1.LookupCacheEntryResponse.toBinary(response)); }); } //# sourceMappingURL=cache.twirp.js.map /***/ }), /***/ 55893: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.CacheEntry = void 0; const runtime_1 = __nccwpck_require__(68886); const runtime_2 = __nccwpck_require__(68886); const runtime_3 = __nccwpck_require__(68886); const runtime_4 = __nccwpck_require__(68886); const runtime_5 = __nccwpck_require__(68886); const timestamp_1 = __nccwpck_require__(28200); // @generated message type with reflection information, may provide speed optimized methods class CacheEntry$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.entities.v1.CacheEntry", [ { no: 1, name: "key", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 2, name: "hash", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 3, name: "size_bytes", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, { no: 4, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 5, name: "version", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 6, name: "created_at", kind: "message", T: () => timestamp_1.Timestamp }, { no: 7, name: "last_accessed_at", kind: "message", T: () => timestamp_1.Timestamp }, { no: 8, name: "expires_at", kind: "message", T: () => timestamp_1.Timestamp } ]); } create(value) { const message = { key: "", hash: "", sizeBytes: "0", scope: "", version: "" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? 
target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* string key */ 1: message.key = reader.string(); break; case /* string hash */ 2: message.hash = reader.string(); break; case /* int64 size_bytes */ 3: message.sizeBytes = reader.int64().toString(); break; case /* string scope */ 4: message.scope = reader.string(); break; case /* string version */ 5: message.version = reader.string(); break; case /* google.protobuf.Timestamp created_at */ 6: message.createdAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.createdAt); break; case /* google.protobuf.Timestamp last_accessed_at */ 7: message.lastAccessedAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.lastAccessedAt); break; case /* google.protobuf.Timestamp expires_at */ 8: message.expiresAt = timestamp_1.Timestamp.internalBinaryRead(reader, reader.uint32(), options, message.expiresAt); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* string key = 1; */ if (message.key !== "") writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.key); /* string hash = 2; */ if (message.hash !== "") writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.hash); /* int64 size_bytes = 3; */ if (message.sizeBytes !== "0") writer.tag(3, runtime_1.WireType.Varint).int64(message.sizeBytes); /* string scope = 4; */ if (message.scope !== "") writer.tag(4, runtime_1.WireType.LengthDelimited).string(message.scope); /* string version = 5; */ if (message.version !== "") writer.tag(5, runtime_1.WireType.LengthDelimited).string(message.version); /* google.protobuf.Timestamp created_at = 6; */ if (message.createdAt) timestamp_1.Timestamp.internalBinaryWrite(message.createdAt, writer.tag(6, runtime_1.WireType.LengthDelimited).fork(), options).join(); /* google.protobuf.Timestamp last_accessed_at = 7; */ if (message.lastAccessedAt) timestamp_1.Timestamp.internalBinaryWrite(message.lastAccessedAt, writer.tag(7, runtime_1.WireType.LengthDelimited).fork(), options).join(); /* google.protobuf.Timestamp expires_at = 8; */ if (message.expiresAt) timestamp_1.Timestamp.internalBinaryWrite(message.expiresAt, writer.tag(8, runtime_1.WireType.LengthDelimited).fork(), options).join(); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheEntry */ exports.CacheEntry = new CacheEntry$Type(); //# sourceMappingURL=cacheentry.js.map /***/ }), /***/ 89444: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.CacheMetadata = void 0; const runtime_1 = __nccwpck_require__(68886); const runtime_2 = __nccwpck_require__(68886); const runtime_3 = __nccwpck_require__(68886); const runtime_4 = __nccwpck_require__(68886); const runtime_5 = __nccwpck_require__(68886); const cachescope_1 = __nccwpck_require__(29425); // @generated message type with reflection information, may provide speed optimized methods class CacheMetadata$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.entities.v1.CacheMetadata", [ { no: 1, name: "repository_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }, { no: 2, name: "scope", kind: "message", repeat: 1 /*RepeatType.PACKED*/, T: () => cachescope_1.CacheScope } ]); } create(value) { const message = { repositoryId: "0", scope: [] }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* int64 repository_id */ 1: message.repositoryId = reader.int64().toString(); break; case /* repeated github.actions.results.entities.v1.CacheScope scope */ 2: message.scope.push(cachescope_1.CacheScope.internalBinaryRead(reader, reader.uint32(), options)); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* int64 repository_id = 1; */ if (message.repositoryId !== "0") writer.tag(1, runtime_1.WireType.Varint).int64(message.repositoryId); /* repeated github.actions.results.entities.v1.CacheScope scope = 2; */ for (let i = 0; i < message.scope.length; i++) cachescope_1.CacheScope.internalBinaryWrite(message.scope[i], writer.tag(2, runtime_1.WireType.LengthDelimited).fork(), options).join(); let u = options.writeUnknownFields; if (u !== false) (u == true ? 
runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheMetadata */ exports.CacheMetadata = new CacheMetadata$Type(); //# sourceMappingURL=cachemetadata.js.map /***/ }), /***/ 29425: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.CacheScope = void 0; const runtime_1 = __nccwpck_require__(68886); const runtime_2 = __nccwpck_require__(68886); const runtime_3 = __nccwpck_require__(68886); const runtime_4 = __nccwpck_require__(68886); const runtime_5 = __nccwpck_require__(68886); // @generated message type with reflection information, may provide speed optimized methods class CacheScope$Type extends runtime_5.MessageType { constructor() { super("github.actions.results.entities.v1.CacheScope", [ { no: 1, name: "scope", kind: "scalar", T: 9 /*ScalarType.STRING*/ }, { no: 2, name: "permission", kind: "scalar", T: 3 /*ScalarType.INT64*/ } ]); } create(value) { const message = { scope: "", permission: "0" }; globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this }); if (value !== undefined) (0, runtime_3.reflectionMergePartial)(this, message, value); return message; } internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(), end = reader.pos + length; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case /* string scope */ 1: message.scope = reader.string(); break; case /* int64 permission */ 2: message.permission = reader.int64().toString(); break; default: let u = options.readUnknownField; if (u === "throw") throw new globalThis.Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? runtime_2.UnknownFieldHandler.onRead : u)(this.typeName, message, fieldNo, wireType, d); } } return message; } internalBinaryWrite(message, writer, options) { /* string scope = 1; */ if (message.scope !== "") writer.tag(1, runtime_1.WireType.LengthDelimited).string(message.scope); /* int64 permission = 2; */ if (message.permission !== "0") writer.tag(2, runtime_1.WireType.Varint).int64(message.permission); let u = options.writeUnknownFields; if (u !== false) (u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer); return writer; } } /** * @generated MessageType for protobuf message github.actions.results.entities.v1.CacheScope */ exports.CacheScope = new CacheScope$Type(); //# sourceMappingURL=cachescope.js.map /***/ }), /***/ 73171: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.saveCache = exports.reserveCache = exports.downloadCache = exports.getCacheEntry = void 0; const core = __importStar(__nccwpck_require__(37484)); const http_client_1 = __nccwpck_require__(54844); const auth_1 = __nccwpck_require__(44552); const fs = __importStar(__nccwpck_require__(79896)); const url_1 = __nccwpck_require__(87016); const utils = __importStar(__nccwpck_require__(98299)); const uploadUtils_1 = __nccwpck_require__(35268); const downloadUtils_1 = __nccwpck_require__(75067); const options_1 = __nccwpck_require__(98356); const requestUtils_1 = __nccwpck_require__(32846); const config_1 = __nccwpck_require__(17606); const user_agent_1 = __nccwpck_require__(41899); function getCacheApiUrl(resource) { const baseUrl = (0, config_1.getCacheServiceURL)(); if (!baseUrl) { throw new Error('Cache Service Url not found, unable to restore cache.'); } const url = `${baseUrl}_apis/artifactcache/${resource}`; core.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type, apiVersion) { return `${type};api-version=${apiVersion}`; } function getRequestOptions() { const requestOptions = { headers: { Accept: createAcceptHeader('application/json', '6.0-preview.1') } }; return requestOptions; } function createHttpClient() { const token = process.env['ACTIONS_RUNTIME_TOKEN'] || ''; const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient((0, user_agent_1.getUserAgentString)(), [bearerCredentialHandler], getRequestOptions()); } function getCacheEntry(keys, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? 
void 0 : options.enableCrossOsArchive); const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); // Cache not found if (response.statusCode === 204) { // List cache for primary key only if cache miss occurs if (core.isDebug()) { yield printCachesListForDiagnostics(keys[0], httpClient, version); } return null; } if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; if (!cacheDownloadUrl) { // Cache achiveLocation not found. This should never happen, and hence bail out. throw new Error('Cache not found.'); } core.setSecret(cacheDownloadUrl); core.debug(`Cache Result:`); core.debug(JSON.stringify(cacheResult)); return cacheResult; }); } exports.getCacheEntry = getCacheEntry; function printCachesListForDiagnostics(key, httpClient, version) { return __awaiter(this, void 0, void 0, function* () { const resource = `caches?key=${encodeURIComponent(key)}`; const response = yield (0, requestUtils_1.retryTypedResponse)('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); if (response.statusCode === 200) { const cacheListResult = response.result; const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; if (totalCount && totalCount > 0) { core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); } } } }); } function downloadCache(archiveLocation, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { const archiveUrl = new url_1.URL(archiveLocation); const downloadOptions = (0, options_1.getDownloadOptions)(options); if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) { if (downloadOptions.useAzureSdk) { // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability. yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions); } else if (downloadOptions.concurrentBlobDownloads) { // Use concurrent implementation with HttpClient to work around blob SDK issue yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions); } else { // Otherwise, download using the Actions http-client. 
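/*
 * Download strategy summary: the Azure SDK and concurrent-download branches above
 * are taken only for *.blob.core.windows.net hosts, and only when the matching
 * download option (useAzureSdk or concurrentBlobDownloads) is enabled; every other
 * archive location, and the fallback below, uses the plain Actions http-client.
 */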
yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } } else { yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath); } }); } exports.downloadCache = downloadCache; // Reserve Cache function reserveCache(key, paths, options) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = utils.getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive); const reserveCacheRequest = { key, version, cacheSize: options === null || options === void 0 ? void 0 : options.cacheSize }; const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('caches'), reserveCacheRequest); })); return response; }); } exports.reserveCache = reserveCache; function getContentRange(start, end) { // Format: `bytes start-end/filesize // start and end are inclusive // filesize can be * // For a 200 byte chunk starting at byte 0: // Content-Range: bytes 0-199/* return `bytes ${start}-${end}/*`; } function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const additionalHeaders = { 'Content-Type': 'application/octet-stream', 'Content-Range': getContentRange(start, end) }; const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () { return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders); })); if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) { throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`); } }); } function uploadFile(httpClient, cacheId, archivePath, options) { return __awaiter(this, void 0, void 0, function* () { // Upload Chunks const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`); const fd = fs.openSync(archivePath, 'r'); const uploadOptions = (0, options_1.getUploadOptions)(options); const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency); const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize); const parallelUploads = [...new Array(concurrency).keys()]; core.debug('Awaiting all uploads'); let offset = 0; try { yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { while (offset < fileSize) { const chunkSize = Math.min(fileSize - offset, maxChunkSize); const start = offset; const end = offset + chunkSize - 1; offset += maxChunkSize; yield uploadChunk(httpClient, resourceUrl, () => fs .createReadStream(archivePath, { fd, start, end, autoClose: false }) .on('error', error => { throw new Error(`Cache upload failed because file read failed with ${error.message}`); }), start, end); } }))); } finally { fs.closeSync(fd); } return; }); } function commitCache(httpClient, cacheId, filesize) { return __awaiter(this, void 0, void 0, function* () { const commitCacheRequest = { size: filesize }; return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => 
__awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); })); }); } function saveCache(cacheId, archivePath, signedUploadURL, options) { return __awaiter(this, void 0, void 0, function* () { const uploadOptions = (0, options_1.getUploadOptions)(options); if (uploadOptions.useAzureSdk) { // Use Azure storage SDK to upload caches directly to Azure if (!signedUploadURL) { throw new Error('Azure Storage SDK can only be used when a signed URL is provided.'); } yield (0, uploadUtils_1.uploadCacheArchiveSDK)(signedUploadURL, archivePath, options); } else { const httpClient = createHttpClient(); core.debug('Upload cache'); yield uploadFile(httpClient, cacheId, archivePath, options); // Commit Cache core.debug('Commiting cache'); const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize); if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } core.info('Cache saved successfully'); } }); } exports.saveCache = saveCache; //# sourceMappingURL=cacheHttpClient.js.map /***/ }), /***/ 98299: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues = (this && this.__asyncValues) || function (o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m = o[Symbol.asyncIterator], i; return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getRuntimeToken = exports.getCacheVersion = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0; const core = __importStar(__nccwpck_require__(37484)); const exec = __importStar(__nccwpck_require__(95236)); const glob = __importStar(__nccwpck_require__(39688)); const io = __importStar(__nccwpck_require__(94994)); const crypto = __importStar(__nccwpck_require__(76982)); const fs = __importStar(__nccwpck_require__(79896)); const path = __importStar(__nccwpck_require__(16928)); const semver = __importStar(__nccwpck_require__(39318)); const util = __importStar(__nccwpck_require__(39023)); const constants_1 = __nccwpck_require__(58287); const versionSalt = '1.0'; // From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 function createTempDirectory() { return __awaiter(this, void 0, void 0, function* () { const IS_WINDOWS = process.platform === 'win32'; let tempDirectory = process.env['RUNNER_TEMP'] || ''; if (!tempDirectory) { let baseLocation; if (IS_WINDOWS) { // On Windows use the USERPROFILE env variable baseLocation = process.env['USERPROFILE'] || 'C:\\'; } else { if (process.platform === 'darwin') { baseLocation = '/Users'; } else { baseLocation = '/home'; } } tempDirectory = path.join(baseLocation, 'actions', 'temp'); } const dest = path.join(tempDirectory, crypto.randomUUID()); yield io.mkdirP(dest); return dest; }); } exports.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { return fs.statSync(filePath).size; } exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { var _a, e_1, _b, _c; var _d; return __awaiter(this, void 0, void 0, function* () { const paths = []; const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd(); const globber = yield glob.create(patterns.join('\n'), { implicitDescendants: false }); try { for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { _c = _g.value; _e = false; const file = _c; const relativeFile = path .relative(workspace, file) .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); core.debug(`Matched: ${relativeFile}`); // Paths are made relative so the tar entries are all relative to the root of the workspace. 
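/*
 * Illustrative example (hypothetical paths, not taken from a real run): with
 * GITHUB_WORKSPACE=/home/runner/work/repo/repo and a glob match of
 * /home/runner/work/repo/repo/node_modules, relativeFile becomes "node_modules";
 * the path.sep replacement above normalizes Windows back-slashes to forward
 * slashes before the entry is pushed onto the resolved path list.
 */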
if (relativeFile === '') { // path.relative returns empty string if workspace and file are equal paths.push('.'); } else { paths.push(`${relativeFile}`); } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); } finally { if (e_1) throw e_1.error; } } return paths; }); } exports.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter(this, void 0, void 0, function* () { return util.promisify(fs.unlink)(filePath); }); } exports.unlinkFile = unlinkFile; function getVersion(app, additionalArgs = []) { return __awaiter(this, void 0, void 0, function* () { let versionOutput = ''; additionalArgs.push('--version'); core.debug(`Checking ${app} ${additionalArgs.join(' ')}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, silent: true, listeners: { stdout: (data) => (versionOutput += data.toString()), stderr: (data) => (versionOutput += data.toString()) } }); } catch (err) { core.debug(err.message); } versionOutput = versionOutput.trim(); core.debug(versionOutput); return versionOutput; }); } // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { const versionOutput = yield getVersion('zstd', ['--quiet']); const version = semver.clean(versionOutput); core.debug(`zstd version: ${version}`); if (versionOutput === '') { return constants_1.CompressionMethod.Gzip; } else { return constants_1.CompressionMethod.ZstdWithoutLong; } }); } exports.getCompressionMethod = getCompressionMethod; function getCacheFileName(compressionMethod) { return compressionMethod === constants_1.CompressionMethod.Gzip ? constants_1.CacheFilename.Gzip : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion('tar'); return versionOutput.toLowerCase().includes('gnu tar') ? 
io.which('tar') : ''; }); } exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); } return value; } exports.assertDefined = assertDefined; function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) { // don't pass changes upstream const components = paths.slice(); // Add compression method to cache version to restore // compressed cache as per compression method if (compressionMethod) { components.push(compressionMethod); } // Only check for windows platforms if enableCrossOsArchive is false if (process.platform === 'win32' && !enableCrossOsArchive) { components.push('windows-only'); } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); return crypto.createHash('sha256').update(components.join('|')).digest('hex'); } exports.getCacheVersion = getCacheVersion; function getRuntimeToken() { const token = process.env['ACTIONS_RUNTIME_TOKEN']; if (!token) { throw new Error('Unable to get the ACTIONS_RUNTIME_TOKEN env variable'); } return token; } exports.getRuntimeToken = getRuntimeToken; //# sourceMappingURL=cacheUtils.js.map /***/ }), /***/ 17606: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getCacheServiceURL = exports.getCacheServiceVersion = exports.isGhes = void 0; function isGhes() { const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); const hostname = ghUrl.hostname.trimEnd().toUpperCase(); const isGitHubHost = hostname === 'GITHUB.COM'; const isGheHost = hostname.endsWith('.GHE.COM'); const isLocalHost = hostname.endsWith('.LOCALHOST'); return !isGitHubHost && !isGheHost && !isLocalHost; } exports.isGhes = isGhes; function getCacheServiceVersion() { // Cache service v2 is not supported on GHES. We will default to // cache service v1 even if the feature flag was enabled by user. if (isGhes()) return 'v1'; return process.env['ACTIONS_CACHE_SERVICE_V2'] ? 'v2' : 'v1'; } exports.getCacheServiceVersion = getCacheServiceVersion; function getCacheServiceURL() { const version = getCacheServiceVersion(); // Based on the version of the cache service, we will determine which // URL to use. switch (version) { case 'v1': return (process.env['ACTIONS_CACHE_URL'] || process.env['ACTIONS_RESULTS_URL'] || ''); case 'v2': return process.env['ACTIONS_RESULTS_URL'] || ''; default: throw new Error(`Unsupported cache service version: ${version}`); } } exports.getCacheServiceURL = getCacheServiceURL; //# sourceMappingURL=config.js.map /***/ }), /***/ 58287: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.CacheFileSizeLimit = exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0; var CacheFilename; (function (CacheFilename) { CacheFilename["Gzip"] = "cache.tgz"; CacheFilename["Zstd"] = "cache.tzst"; })(CacheFilename || (exports.CacheFilename = CacheFilename = {})); var CompressionMethod; (function (CompressionMethod) { CompressionMethod["Gzip"] = "gzip"; // Long range mode was added to zstd in v1.3.2. 
// This enum is for earlier version of zstd that does not have --long support CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {})); var ArchiveToolType; (function (ArchiveToolType) { ArchiveToolType["GNU"] = "gnu"; ArchiveToolType["BSD"] = "bsd"; })(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. exports.DefaultRetryDelay = 5000; // Socket timeout in milliseconds during download. If no traffic is received // over the socket during this period, the socket is destroyed and the download // is aborted. exports.SocketTimeout = 5000; // The default path of GNUtar on hosted Windows runners exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; // The default path of BSDtar on hosted Windows runners exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; exports.TarFilename = 'cache.tar'; exports.ManifestFilename = 'manifest.txt'; exports.CacheFileSizeLimit = 10 * Math.pow(1024, 3); // 10GiB per repository //# sourceMappingURL=constants.js.map /***/ }), /***/ 75067: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; const core = __importStar(__nccwpck_require__(37484)); const http_client_1 = __nccwpck_require__(54844); const storage_blob_1 = __nccwpck_require__(1012); const buffer = __importStar(__nccwpck_require__(20181)); const fs = __importStar(__nccwpck_require__(79896)); const stream = __importStar(__nccwpck_require__(2203)); const util = __importStar(__nccwpck_require__(39023)); const utils = __importStar(__nccwpck_require__(98299)); const constants_1 = __nccwpck_require__(58287); const requestUtils_1 = __nccwpck_require__(32846); const abort_controller_1 = __nccwpck_require__(68110); /** * Pipes the body of a HTTP response to a stream * * @param response the HTTP response * @param output the writable stream */ function pipeResponseToStream(response, output) { return __awaiter(this, void 0, void 0, function* () { const pipeline = util.promisify(stream.pipeline); yield pipeline(response.message, output); }); } /** * Class for tracking the download state and displaying stats. */ class DownloadProgress { constructor(contentLength) { this.contentLength = contentLength; this.segmentIndex = 0; this.segmentSize = 0; this.segmentOffset = 0; this.receivedBytes = 0; this.displayedComplete = false; this.startTime = Date.now(); } /** * Progress to the next segment. Only call this method when the previous segment * is complete. * * @param segmentSize the length of the next segment */ nextSegment(segmentSize) { this.segmentOffset = this.segmentOffset + this.segmentSize; this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. * * @param receivedBytes the number of bytes received */ setReceivedBytes(receivedBytes) { this.receivedBytes = receivedBytes; } /** * Returns the total number of bytes transferred. */ getTransferredBytes() { return this.segmentOffset + this.receivedBytes; } /** * Returns true if the download is complete. */ isDone() { return this.getTransferredBytes() === this.contentLength; } /** * Prints the current download stats. Once the download completes, this will print one * last line and then stop. */ display() { if (this.displayedComplete) { return; } const transferredBytes = this.segmentOffset + this.receivedBytes; const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1000)).toFixed(1); core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } } /** * Returns a function used to handle TransferProgressEvents. */ onProgress() { return (progress) => { this.setReceivedBytes(progress.loadedBytes); }; } /** * Starts the timer that displays the stats. 
* * @param delayInMs the delay between each write */ startDisplayTimer(delayInMs = 1000) { const displayCallback = () => { this.display(); if (!this.isDone()) { this.timeoutHandle = setTimeout(displayCallback, delayInMs); } }; this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** * Stops the timer that displays the stats. As this typically indicates the download * is complete, this will display one last line, unless the last line has already * been written. */ stopDisplayTimer() { if (this.timeoutHandle) { clearTimeout(this.timeoutHandle); this.timeoutHandle = undefined; } this.display(); } } exports.DownloadProgress = DownloadProgress; /** * Download the cache using the Actions toolkit http-client * * @param archiveLocation the URL for the cache * @param archivePath the local path where the cache is saved */ function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const writeStream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient('actions/cache'); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); // Abort download if no traffic received over the socket. downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); // Validate download size. const contentLengthHeader = downloadResponse.message.headers['content-length']; if (contentLengthHeader) { const expectedLength = parseInt(contentLengthHeader); const actualLength = utils.getArchiveFileSizeInBytes(archivePath); if (actualLength !== expectedLength) { throw new Error(`Incomplete download. 
Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { core.debug('Unable to validate download, no Content-Length header'); } }); } exports.downloadCacheHttpClient = downloadCacheHttpClient; /** * Download the cache using the Actions toolkit http-client concurrently * * @param archiveLocation the URL for the cache * @param archivePath the local path where the cache is saved */ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) { var _a; return __awaiter(this, void 0, void 0, function* () { const archiveDescriptor = yield fs.promises.open(archivePath, 'w'); const httpClient = new http_client_1.HttpClient('actions/cache', undefined, { socketTimeout: options.timeoutInMs, keepAlive: true }); try { const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); })); const lengthHeader = res.message.headers['content-length']; if (lengthHeader === undefined || lengthHeader === null) { throw new Error('Content-Length not found on blob response'); } const length = parseInt(lengthHeader); if (Number.isNaN(length)) { throw new Error(`Could not interpret Content-Length: ${length}`); } const downloads = []; const blockSize = 4 * 1024 * 1024; for (let offset = 0; offset < length; offset += blockSize) { const count = Math.min(blockSize, length - offset); downloads.push({ offset, promiseGetter: () => __awaiter(this, void 0, void 0, function* () { return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count); }) }); } // reverse to use .pop instead of .shift downloads.reverse(); let actives = 0; let bytesDownloaded = 0; const progress = new DownloadProgress(length); progress.startDisplayTimer(); const progressFn = progress.onProgress(); const activeDownloads = []; let nextDownload; const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () { const segment = yield Promise.race(Object.values(activeDownloads)); yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset); actives--; delete activeDownloads[segment.offset]; bytesDownloaded += segment.count; progressFn({ loadedBytes: bytesDownloaded }); }); while ((nextDownload = downloads.pop())) { activeDownloads[nextDownload.offset] = nextDownload.promiseGetter(); actives++; if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? 
_a : 10)) { yield waitAndWrite(); } } while (actives > 0) { yield waitAndWrite(); } } finally { httpClient.dispose(); yield archiveDescriptor.close(); } }); } exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent; function downloadSegmentRetry(httpClient, archiveLocation, offset, count) { return __awaiter(this, void 0, void 0, function* () { const retries = 5; let failures = 0; while (true) { try { const timeout = 30000; const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count)); if (typeof result === 'string') { throw new Error('downloadSegmentRetry failed due to timeout'); } return result; } catch (err) { if (failures >= retries) { throw err; } failures++; } } }); } function downloadSegment(httpClient, archiveLocation, offset, count) { return __awaiter(this, void 0, void 0, function* () { const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.get(archiveLocation, { Range: `bytes=${offset}-${offset + count - 1}` }); })); if (!partRes.readBodyBuffer) { throw new Error('Expected HttpClientResponse to implement readBodyBuffer'); } return { offset, count, buffer: yield partRes.readBodyBuffer() }; }); } /** * Download the cache using the Azure Storage SDK. Only call this method if the * URL points to an Azure Storage endpoint. * * @param archiveLocation the URL for the cache * @param archivePath the local path where the cache is saved * @param options the download options with the defaults set */ function downloadCacheStorageSDK(archiveLocation, archivePath, options) { var _a; return __awaiter(this, void 0, void 0, function* () { const client = new storage_blob_1.BlockBlobClient(archiveLocation, undefined, { retryOptions: { // Override the timeout used when downloading each 4 MB chunk // The default is 2 min / MB, which is way too slow tryTimeoutInMs: options.timeoutInMs } }); const properties = yield client.getProperties(); const contentLength = (_a = properties.contentLength) !== null && _a !== void 0 ? _a : -1; if (contentLength < 0) { // We should never hit this condition, but just in case fall back to downloading the // file as one large stream core.debug('Unable to determine content length, downloading file with http-client...'); yield downloadCacheHttpClient(archiveLocation, archivePath); } else { // Use downloadToBuffer for faster downloads, since internally it splits the // file into 4 MB chunks which can then be parallelized and retried independently // // If the file exceeds the buffer maximum length (~1 GB on 32-bit systems and ~2 GB // on 64-bit systems), split the download into multiple segments // ~2 GB = 2147483647, beyond this, we start getting out of range error. So, capping it accordingly. 
// Updated segment size to 128MB = 134217728 bytes, to complete a segment faster and fail fast const maxSegmentSize = Math.min(134217728, buffer.constants.MAX_LENGTH); const downloadProgress = new DownloadProgress(contentLength); const fd = fs.openSync(archivePath, 'w'); try { downloadProgress.startDisplayTimer(); const controller = new abort_controller_1.AbortController(); const abortSignal = controller.signal; while (!downloadProgress.isDone()) { const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize; const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart); downloadProgress.nextSegment(segmentSize); const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, { abortSignal, concurrency: options.downloadConcurrency, onProgress: downloadProgress.onProgress() })); if (result === 'timeout') { controller.abort(); throw new Error('Aborting cache download as the download time exceeded the timeout.'); } else if (Buffer.isBuffer(result)) { fs.writeFileSync(fd, result); } } } finally { downloadProgress.stopDisplayTimer(); fs.closeSync(fd); } } }); } exports.downloadCacheStorageSDK = downloadCacheStorageSDK; const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () { let timeoutHandle; const timeoutPromise = new Promise(resolve => { timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs); }); return Promise.race([promise, timeoutPromise]).then(result => { clearTimeout(timeoutHandle); return result; }); }); //# sourceMappingURL=downloadUtils.js.map /***/ }), /***/ 32846: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0; const core = __importStar(__nccwpck_require__(37484)); const http_client_1 = __nccwpck_require__(54844); const constants_1 = __nccwpck_require__(58287); function isSuccessStatusCode(statusCode) { if (!statusCode) { return false; } return statusCode >= 200 && statusCode < 300; } exports.isSuccessStatusCode = isSuccessStatusCode; function isServerErrorStatusCode(statusCode) { if (!statusCode) { return true; } return statusCode >= 500; } exports.isServerErrorStatusCode = isServerErrorStatusCode; function isRetryableStatusCode(statusCode) { if (!statusCode) { return false; } const retryableStatusCodes = [ http_client_1.HttpCodes.BadGateway, http_client_1.HttpCodes.ServiceUnavailable, http_client_1.HttpCodes.GatewayTimeout ]; return retryableStatusCodes.includes(statusCode); } exports.isRetryableStatusCode = isRetryableStatusCode; function sleep(milliseconds) { return __awaiter(this, void 0, void 0, function* () { return new Promise(resolve => setTimeout(resolve, milliseconds)); }); } function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) { return __awaiter(this, void 0, void 0, function* () { let errorMessage = ''; let attempt = 1; while (attempt <= maxAttempts) { let response = undefined; let statusCode = undefined; let isRetryable = false; try { response = yield method(); } catch (error) { if (onError) { response = onError(error); } isRetryable = true; errorMessage = error.message; } if (response) { statusCode = getStatusCode(response); if (!isServerErrorStatusCode(statusCode)) { return response; } } if (statusCode) { isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { core.debug(`${name} - Error is not retryable`); break; } yield sleep(delay); attempt++; } throw Error(`${name} failed: ${errorMessage}`); }); } exports.retry = retry; function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { return __awaiter(this, void 0, void 0, function* () { return yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, // If the error object contains the statusCode property, extract it and return // an TypedResponse so it can be processed by the retry logic. 
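/*
 * Only HttpClientError instances are converted into a typed response by the
 * callback below; any other thrown error makes it return undefined, so the retry
 * loop keeps the raw error message and treats that attempt as retryable.
 */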
(error) => { if (error instanceof http_client_1.HttpClientError) { return { statusCode: error.statusCode, result: null, headers: {}, error }; } else { return undefined; } }); }); } exports.retryTypedResponse = retryTypedResponse; function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { return __awaiter(this, void 0, void 0, function* () { return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); }); } exports.retryHttpClientResponse = retryHttpClientResponse; //# sourceMappingURL=requestUtils.js.map /***/ }), /***/ 96819: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.internalCacheTwirpClient = void 0; const core_1 = __nccwpck_require__(37484); const user_agent_1 = __nccwpck_require__(41899); const errors_1 = __nccwpck_require__(50263); const config_1 = __nccwpck_require__(17606); const cacheUtils_1 = __nccwpck_require__(98299); const auth_1 = __nccwpck_require__(44552); const http_client_1 = __nccwpck_require__(54844); const cache_twirp_1 = __nccwpck_require__(80564); /** * This class is a wrapper around the CacheServiceClientJSON class generated by Twirp. * * It adds retry logic to the request method, which is not present in the generated client. * * This class is used to interact with cache service v2. */ class CacheServiceClient { constructor(userAgent, maxAttempts, baseRetryIntervalMilliseconds, retryMultiplier) { this.maxAttempts = 5; this.baseRetryIntervalMilliseconds = 3000; this.retryMultiplier = 1.5; const token = (0, cacheUtils_1.getRuntimeToken)(); this.baseUrl = (0, config_1.getCacheServiceURL)(); if (maxAttempts) { this.maxAttempts = maxAttempts; } if (baseRetryIntervalMilliseconds) { this.baseRetryIntervalMilliseconds = baseRetryIntervalMilliseconds; } if (retryMultiplier) { this.retryMultiplier = retryMultiplier; } this.httpClient = new http_client_1.HttpClient(userAgent, [ new auth_1.BearerCredentialHandler(token) ]); } // This function satisfies the Rpc interface. It is compatible with the JSON // JSON generated client. 
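/*
 * request() posts the JSON-encoded Twirp payload through retryableRequest(), which
 * retries up to maxAttempts (default 5) with exponential backoff: the delay starts
 * at baseRetryIntervalMilliseconds (default 3000 ms), grows by retryMultiplier
 * (default 1.5) per attempt, and is jittered between minTime and maxTime by
 * getExponentialRetryTimeMilliseconds(). UsageError and NetworkError are thrown
 * immediately and are never retried.
 */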
request(service, method, contentType, data) { return __awaiter(this, void 0, void 0, function* () { const url = new URL(`/twirp/${service}/${method}`, this.baseUrl).href; (0, core_1.debug)(`[Request] ${method} ${url}`); const headers = { 'Content-Type': contentType }; try { const { body } = yield this.retryableRequest(() => __awaiter(this, void 0, void 0, function* () { return this.httpClient.post(url, JSON.stringify(data), headers); })); return body; } catch (error) { throw new Error(`Failed to ${method}: ${error.message}`); } }); } retryableRequest(operation) { return __awaiter(this, void 0, void 0, function* () { let attempt = 0; let errorMessage = ''; let rawBody = ''; while (attempt < this.maxAttempts) { let isRetryable = false; try { const response = yield operation(); const statusCode = response.message.statusCode; rawBody = yield response.readBody(); (0, core_1.debug)(`[Response] - ${response.message.statusCode}`); (0, core_1.debug)(`Headers: ${JSON.stringify(response.message.headers, null, 2)}`); const body = JSON.parse(rawBody); (0, core_1.debug)(`Body: ${JSON.stringify(body, null, 2)}`); if (this.isSuccessStatusCode(statusCode)) { return { response, body }; } isRetryable = this.isRetryableHttpStatusCode(statusCode); errorMessage = `Failed request: (${statusCode}) ${response.message.statusMessage}`; if (body.msg) { if (errors_1.UsageError.isUsageErrorMessage(body.msg)) { throw new errors_1.UsageError(); } errorMessage = `${errorMessage}: ${body.msg}`; } } catch (error) { if (error instanceof SyntaxError) { (0, core_1.debug)(`Raw Body: ${rawBody}`); } if (error instanceof errors_1.UsageError) { throw error; } if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) { throw new errors_1.NetworkError(error === null || error === void 0 ? void 0 : error.code); } isRetryable = true; errorMessage = error.message; } if (!isRetryable) { throw new Error(`Received non-retryable error: ${errorMessage}`); } if (attempt + 1 === this.maxAttempts) { throw new Error(`Failed to make request after ${this.maxAttempts} attempts: ${errorMessage}`); } const retryTimeMilliseconds = this.getExponentialRetryTimeMilliseconds(attempt); (0, core_1.info)(`Attempt ${attempt + 1} of ${this.maxAttempts} failed with error: ${errorMessage}. 
Retrying request in ${retryTimeMilliseconds} ms...`); yield this.sleep(retryTimeMilliseconds); attempt++; } throw new Error(`Request failed`); }); } isSuccessStatusCode(statusCode) { if (!statusCode) return false; return statusCode >= 200 && statusCode < 300; } isRetryableHttpStatusCode(statusCode) { if (!statusCode) return false; const retryableStatusCodes = [ http_client_1.HttpCodes.BadGateway, http_client_1.HttpCodes.GatewayTimeout, http_client_1.HttpCodes.InternalServerError, http_client_1.HttpCodes.ServiceUnavailable, http_client_1.HttpCodes.TooManyRequests ]; return retryableStatusCodes.includes(statusCode); } sleep(milliseconds) { return __awaiter(this, void 0, void 0, function* () { return new Promise(resolve => setTimeout(resolve, milliseconds)); }); } getExponentialRetryTimeMilliseconds(attempt) { if (attempt < 0) { throw new Error('attempt should be a positive integer'); } if (attempt === 0) { return this.baseRetryIntervalMilliseconds; } const minTime = this.baseRetryIntervalMilliseconds * Math.pow(this.retryMultiplier, attempt); const maxTime = minTime * this.retryMultiplier; // returns a random number between minTime and maxTime (exclusive) return Math.trunc(Math.random() * (maxTime - minTime) + minTime); } } function internalCacheTwirpClient(options) { const client = new CacheServiceClient((0, user_agent_1.getUserAgentString)(), options === null || options === void 0 ? void 0 : options.maxAttempts, options === null || options === void 0 ? void 0 : options.retryIntervalMs, options === null || options === void 0 ? void 0 : options.retryMultiplier); return new cache_twirp_1.CacheServiceClientJSON(client); } exports.internalCacheTwirpClient = internalCacheTwirpClient; //# sourceMappingURL=cacheTwirpClient.js.map /***/ }), /***/ 50263: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.UsageError = exports.NetworkError = exports.GHESNotSupportedError = exports.CacheNotFoundError = exports.InvalidResponseError = exports.FilesNotFoundError = void 0; class FilesNotFoundError extends Error { constructor(files = []) { let message = 'No files were found to upload'; if (files.length > 0) { message += `: ${files.join(', ')}`; } super(message); this.files = files; this.name = 'FilesNotFoundError'; } } exports.FilesNotFoundError = FilesNotFoundError; class InvalidResponseError extends Error { constructor(message) { super(message); this.name = 'InvalidResponseError'; } } exports.InvalidResponseError = InvalidResponseError; class CacheNotFoundError extends Error { constructor(message = 'Cache not found') { super(message); this.name = 'CacheNotFoundError'; } } exports.CacheNotFoundError = CacheNotFoundError; class GHESNotSupportedError extends Error { constructor(message = '@actions/cache v4.1.4+, actions/cache/save@v4+ and actions/cache/restore@v4+ are not currently supported on GHES.') { super(message); this.name = 'GHESNotSupportedError'; } } exports.GHESNotSupportedError = GHESNotSupportedError; class NetworkError extends Error { constructor(code) { const message = `Unable to make request: ${code}\nIf you are using self-hosted runners, please make sure your runner has access to all GitHub endpoints: https://docs.github.com/en/actions/hosting-your-own-runners/managing-self-hosted-runners/about-self-hosted-runners#communication-between-self-hosted-runners-and-github`; super(message); this.code = code; this.name = 'NetworkError'; } } exports.NetworkError = NetworkError; NetworkError.isNetworkErrorCode = (code) 
=> { if (!code) return false; return [ 'ECONNRESET', 'ENOTFOUND', 'ETIMEDOUT', 'ECONNREFUSED', 'EHOSTUNREACH' ].includes(code); }; class UsageError extends Error { constructor() { const message = `Cache storage quota has been hit. Unable to upload any new cache entries. Usage is recalculated every 6-12 hours.\nMore info on storage limits: https://docs.github.com/en/billing/managing-billing-for-github-actions/about-billing-for-github-actions#calculating-minute-and-storage-spending`; super(message); this.name = 'UsageError'; } } exports.UsageError = UsageError; UsageError.isUsageErrorMessage = (msg) => { if (!msg) return false; return msg.includes('insufficient usage'); }; //# sourceMappingURL=errors.js.map /***/ }), /***/ 41899: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getUserAgentString = void 0; // eslint-disable-next-line @typescript-eslint/no-var-requires, @typescript-eslint/no-require-imports const packageJson = __nccwpck_require__(64012); /** * Ensure that this User Agent String is used in all HTTP calls so that we can monitor telemetry between different versions of this package */ function getUserAgentString() { return `@actions/cache-${packageJson.version}`; } exports.getUserAgentString = getUserAgentString; //# sourceMappingURL=user-agent.js.map /***/ }), /***/ 95321: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createTar = exports.extractTar = exports.listTar = void 0; const exec_1 = __nccwpck_require__(95236); const io = __importStar(__nccwpck_require__(94994)); const fs_1 = __nccwpck_require__(79896); const path = __importStar(__nccwpck_require__(16928)); const utils = __importStar(__nccwpck_require__(98299)); const constants_1 = __nccwpck_require__(58287); const IS_WINDOWS = process.platform === 'win32'; // Returns tar path and type: BSD or GNU function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { const gnuTar = yield utils.getGnuTarPathOnWindows(); const systemTar = constants_1.SystemTarPathOnWindows; if (gnuTar) { // Use GNU tar as the default on Windows return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if ((0, fs_1.existsSync)(systemTar)) { return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } case 'darwin': { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // Fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else { return { path: yield io.which('tar', true), type: constants_1.ArchiveToolType.BSD }; } } default: break; } // Default assumption is that GNU tar is present in the path return { path: yield io.which('tar', true), type: constants_1.ArchiveToolType.GNU }; }); } // Return arguments for tar as per tarPath, compressionMethod, method type and OS function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { const args = [`"${tarPath.path}"`]; const cacheFileName = utils.getCacheFileName(compressionMethod); const tarFile = 'cache.tar'; const workingDirectory = getWorkingDirectory(); // Specific args for the BSD tar on Windows workaround const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; // Method specific args switch (type) { case 'create': args.push('--posix', '-cf', BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); break; case 'extract': args.push('-xf', BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); break; case 'list': args.push('-tf', BSD_TAR_ZSTD ?
tarFile : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); break; } // Platform specific args if (tarPath.type === constants_1.ArchiveToolType.GNU) { switch (process.platform) { case 'win32': args.push('--force-local'); break; case 'darwin': args.push('--delay-directory-restore'); break; } } return args; }); } // Returns commands to run tar and compression program function getCommands(compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { let args; const tarPath = yield getTarPath(); const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); const compressionArgs = type !== 'create' ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; if (BSD_TAR_ZSTD && type !== 'create') { args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; } else { args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; } if (BSD_TAR_ZSTD) { return args; } return [args.join(' ')]; }); } function getWorkingDirectory() { var _a; return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method function getDecompressionProgram(tarPath, compressionMethod, archivePath) { return __awaiter(this, void 0, void 0, function* () { // -d: Decompress. // unzstd is equivalent to 'zstd -d' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // Using 30 here because we also support 32-bit self-hosted runners. const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ 'zstd -d --long=30 --force -o', constants_1.TarFilename, archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : [ '--use-compress-program', IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' ]; case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ 'zstd -d --force -o', constants_1.TarFilename, archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; default: return ['-z']; } }); } // Used for creating the archive // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. // zstdmt is equivalent to 'zstd -T0' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // Using 30 here because we also support 32-bit self-hosted runners. // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. function getCompressionProgram(tarPath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { const cacheFileName = utils.getCacheFileName(compressionMethod); const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? 
[ 'zstd -T0 --long=30 --force -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename ] : [ '--use-compress-program', IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' ]; case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ 'zstd -T0 --force -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename ] : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; default: return ['-z']; } }); } // Executes all commands as separate processes function execCommands(commands, cwd) { return __awaiter(this, void 0, void 0, function* () { for (const command of commands) { try { yield (0, exec_1.exec)(command, undefined, { cwd, env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' }) }); } catch (error) { throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); } } }); } // List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { const commands = yield getCommands(compressionMethod, 'list', archivePath); yield execCommands(commands); }); } exports.listTar = listTar; // Extract a tar function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); const commands = yield getCommands(compressionMethod, 'extract', archivePath); yield execCommands(commands); }); } exports.extractTar = extractTar; // Create a tar function createTar(archiveFolder, sourceDirectories, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits (0, fs_1.writeFileSync)(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); const commands = yield getCommands(compressionMethod, 'create'); yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; //# sourceMappingURL=tar.js.map /***/ }), /***/ 35268: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.uploadCacheArchiveSDK = exports.UploadProgress = void 0; const core = __importStar(__nccwpck_require__(37484)); const storage_blob_1 = __nccwpck_require__(1012); const errors_1 = __nccwpck_require__(50263); /** * Class for tracking the upload state and displaying stats. */ class UploadProgress { constructor(contentLength) { this.contentLength = contentLength; this.sentBytes = 0; this.displayedComplete = false; this.startTime = Date.now(); } /** * Sets the number of bytes sent * * @param sentBytes the number of bytes sent */ setSentBytes(sentBytes) { this.sentBytes = sentBytes; } /** * Returns the total number of bytes transferred. */ getTransferredBytes() { return this.sentBytes; } /** * Returns true if the upload is complete. */ isDone() { return this.getTransferredBytes() === this.contentLength; } /** * Prints the current upload stats. Once the upload completes, this will print one * last line and then stop. */ display() { if (this.displayedComplete) { return; } const transferredBytes = this.sentBytes; const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const uploadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1000)).toFixed(1); core.info(`Sent ${transferredBytes} of ${this.contentLength} (${percentage}%), ${uploadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } } /** * Returns a function used to handle TransferProgressEvents. */ onProgress() { return (progress) => { this.setSentBytes(progress.loadedBytes); }; } /** * Starts the timer that displays the stats. * * @param delayInMs the delay between each write */ startDisplayTimer(delayInMs = 1000) { const displayCallback = () => { this.display(); if (!this.isDone()) { this.timeoutHandle = setTimeout(displayCallback, delayInMs); } }; this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** * Stops the timer that displays the stats. As this typically indicates the upload * is complete, this will display one last line, unless the last line has already * been written. */ stopDisplayTimer() { if (this.timeoutHandle) { clearTimeout(this.timeoutHandle); this.timeoutHandle = undefined; } this.display(); } } exports.UploadProgress = UploadProgress; /** * Uploads a cache archive directly to Azure Blob Storage using the Azure SDK. * This function will display progress information to the console. Concurrency of the * upload is determined by the calling functions. * * @param signedUploadURL * @param archivePath * @param options * @returns */ function uploadCacheArchiveSDK(signedUploadURL, archivePath, options) { var _a; return __awaiter(this, void 0, void 0, function* () { const blobClient = new storage_blob_1.BlobClient(signedUploadURL); const blockBlobClient = blobClient.getBlockBlobClient(); const uploadProgress = new UploadProgress((_a = options === null || options === void 0 ? void 0 : options.archiveSizeBytes) !== null && _a !== void 0 ? 
_a : 0); // Specify data transfer options const uploadOptions = { blockSize: options === null || options === void 0 ? void 0 : options.uploadChunkSize, concurrency: options === null || options === void 0 ? void 0 : options.uploadConcurrency, maxSingleShotSize: 128 * 1024 * 1024, onProgress: uploadProgress.onProgress() }; try { uploadProgress.startDisplayTimer(); core.debug(`BlobClient: ${blobClient.name}:${blobClient.accountName}:${blobClient.containerName}`); const response = yield blockBlobClient.uploadFile(archivePath, uploadOptions); // TODO: better management of non-retryable errors if (response._response.status >= 400) { throw new errors_1.InvalidResponseError(`uploadCacheArchiveSDK: upload failed with status code ${response._response.status}`); } return response; } catch (error) { core.warning(`uploadCacheArchiveSDK: internal error uploading cache archive: ${error.message}`); throw error; } finally { uploadProgress.stopDisplayTimer(); } }); } exports.uploadCacheArchiveSDK = uploadCacheArchiveSDK; //# sourceMappingURL=uploadUtils.js.map /***/ }), /***/ 98356: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getDownloadOptions = exports.getUploadOptions = void 0; const core = __importStar(__nccwpck_require__(37484)); /** * Returns a copy of the upload options with defaults filled in. * * @param copy the original upload options */ function getUploadOptions(copy) { // Defaults if not overridden const result = { useAzureSdk: false, uploadConcurrency: 4, uploadChunkSize: 32 * 1024 * 1024 }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { result.useAzureSdk = copy.useAzureSdk; } if (typeof copy.uploadConcurrency === 'number') { result.uploadConcurrency = copy.uploadConcurrency; } if (typeof copy.uploadChunkSize === 'number') { result.uploadChunkSize = copy.uploadChunkSize; } } /** * Add env var overrides */ // Cap the uploadConcurrency at 32 result.uploadConcurrency = !isNaN(Number(process.env['CACHE_UPLOAD_CONCURRENCY'])) ? Math.min(32, Number(process.env['CACHE_UPLOAD_CONCURRENCY'])) : result.uploadConcurrency; // Cap the uploadChunkSize at 128MiB result.uploadChunkSize = !isNaN(Number(process.env['CACHE_UPLOAD_CHUNK_SIZE'])) ?
Math.min(128 * 1024 * 1024, Number(process.env['CACHE_UPLOAD_CHUNK_SIZE']) * 1024 * 1024) : result.uploadChunkSize; core.debug(`Use Azure SDK: ${result.useAzureSdk}`); core.debug(`Upload concurrency: ${result.uploadConcurrency}`); core.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports.getUploadOptions = getUploadOptions; /** * Returns a copy of the download options with defaults filled in. * * @param copy the original download options */ function getDownloadOptions(copy) { const result = { useAzureSdk: false, concurrentBlobDownloads: true, downloadConcurrency: 8, timeoutInMs: 30000, segmentTimeoutInMs: 600000, lookupOnly: false }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { result.useAzureSdk = copy.useAzureSdk; } if (typeof copy.concurrentBlobDownloads === 'boolean') { result.concurrentBlobDownloads = copy.concurrentBlobDownloads; } if (typeof copy.downloadConcurrency === 'number') { result.downloadConcurrency = copy.downloadConcurrency; } if (typeof copy.timeoutInMs === 'number') { result.timeoutInMs = copy.timeoutInMs; } if (typeof copy.segmentTimeoutInMs === 'number') { result.segmentTimeoutInMs = copy.segmentTimeoutInMs; } if (typeof copy.lookupOnly === 'boolean') { result.lookupOnly = copy.lookupOnly; } } const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000; } core.debug(`Use Azure SDK: ${result.useAzureSdk}`); core.debug(`Download concurrency: ${result.downloadConcurrency}`); core.debug(`Request timeout (ms): ${result.timeoutInMs}`); core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); core.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports.getDownloadOptions = getDownloadOptions; //# sourceMappingURL=options.js.map /***/ }), /***/ 39688: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.create = void 0; const internal_globber_1 = __nccwpck_require__(57698); /** * Constructs a globber * * @param patterns Patterns separated by newlines * @param options Glob options */ function create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { return yield internal_globber_1.DefaultGlobber.create(patterns, options); }); } exports.create = create; //# sourceMappingURL=glob.js.map /***/ }), /***/ 48462: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOptions = void 0; const core = __importStar(__nccwpck_require__(37484)); /** * Returns a copy with defaults filled in. */ function getOptions(copy) { const result = { followSymbolicLinks: true, implicitDescendants: true, omitBrokenSymbolicLinks: true }; if (copy) { if (typeof copy.followSymbolicLinks === 'boolean') { result.followSymbolicLinks = copy.followSymbolicLinks; core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === 'boolean') { result.implicitDescendants = copy.implicitDescendants; core.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; } exports.getOptions = getOptions; //# sourceMappingURL=internal-glob-options-helper.js.map /***/ }), /***/ 57698: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues = (this && this.__asyncValues) || function (o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m = o[Symbol.asyncIterator], i; return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var g = generator.apply(thisArg, _arguments || []), i, q = []; return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } function fulfill(value) { resume("next", value); } function reject(value) { resume("throw", value); } function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DefaultGlobber = void 0; const core = __importStar(__nccwpck_require__(37484)); const fs = __importStar(__nccwpck_require__(79896)); const globOptionsHelper = __importStar(__nccwpck_require__(48462)); const path = __importStar(__nccwpck_require__(16928)); const patternHelper = __importStar(__nccwpck_require__(30637)); const internal_match_kind_1 = __nccwpck_require__(49222); const internal_pattern_1 = __nccwpck_require__(8188); const internal_search_state_1 = __nccwpck_require__(768); const IS_WINDOWS = process.platform === 'win32'; class DefaultGlobber { constructor(options) { this.patterns = []; this.searchPaths = []; this.options = globOptionsHelper.getOptions(options); } getSearchPaths() { // Return a copy return this.searchPaths.slice(); } glob() { var e_1, _a; return __awaiter(this, void 0, void 0, function* () { const result = []; try { for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { const itemPath = _c.value; result.push(itemPath); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); } finally { if (e_1) throw e_1.error; } } return result; }); } globGenerator() { return __asyncGenerator(this, arguments, function* globGenerator_1() { // Fill in defaults options const options = globOptionsHelper.getOptions(this.options); // Implicit descendants? const patterns = []; for (const pattern of this.patterns) { patterns.push(pattern); if (options.implicitDescendants && (pattern.trailingSeparator || pattern.segments[pattern.segments.length - 1] !== '**')) { patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); } } // Push the search paths const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { core.debug(`Search path '${searchPath}'`); // Exists? try { // Intentionally using lstat. Detection for broken symlink // will be performed later (if following symlinks). 
yield __await(fs.promises.lstat(searchPath)); } catch (err) { if (err.code === 'ENOENT') { continue; } throw err; } stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); } // Search const traversalChain = []; // used to detect cycles while (stack.length) { // Pop const item = stack.pop(); // Match? const match = patternHelper.match(patterns, item.path); const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); if (!match && !partialMatch) { continue; } // Stat const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) // Broken symlink, or symlink cycle detected, or no longer exists ); // Broken symlink, or symlink cycle detected, or no longer exists if (!stats) { continue; } // Directory if (stats.isDirectory()) { // Matched if (match & internal_match_kind_1.MatchKind.Directory) { yield yield __await(item.path); } // Descend? else if (!partialMatch) { continue; } // Push the child items in reverse const childLevel = item.level + 1; const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } // File else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await(item.path); } } }); } /** * Constructs a DefaultGlobber */ static create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { const result = new DefaultGlobber(options); if (IS_WINDOWS) { patterns = patterns.replace(/\r\n/g, '\n'); patterns = patterns.replace(/\r/g, '\n'); } const lines = patterns.split('\n').map(x => x.trim()); for (const line of lines) { // Empty or comment if (!line || line.startsWith('#')) { continue; } // Pattern else { result.patterns.push(new internal_pattern_1.Pattern(line)); } } result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); return result; }); } static stat(item, options, traversalChain) { return __awaiter(this, void 0, void 0, function* () { // Note: // `stat` returns info about the target of a symlink (or symlink chain) // `lstat` returns info about a symlink itself let stats; if (options.followSymbolicLinks) { try { // Use `stat` (following symlinks) stats = yield fs.promises.stat(item.path); } catch (err) { if (err.code === 'ENOENT') { if (options.omitBrokenSymbolicLinks) { core.debug(`Broken symlink '${item.path}'`); return undefined; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); } throw err; } } else { // Use `lstat` (not following symlinks) stats = yield fs.promises.lstat(item.path); } // Note, isDirectory() returns false for the lstat of a symlink if (stats.isDirectory() && options.followSymbolicLinks) { // Get the realpath const realPath = yield fs.promises.realpath(item.path); // Fixup the traversal chain to match the item level while (traversalChain.length >= item.level) { traversalChain.pop(); } // Test for a cycle if (traversalChain.some((x) => x === realPath)) { core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return undefined; } // Update the traversal chain traversalChain.push(realPath); } return stats; }); } } exports.DefaultGlobber = DefaultGlobber; //# sourceMappingURL=internal-globber.js.map /***/ }), /***/ 49222: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MatchKind = void 0; /** * Indicates whether a pattern matches a path */ var MatchKind; (function (MatchKind) { /** Not matched */ MatchKind[MatchKind["None"] = 0] = "None"; /** Matched if the path is a directory */ MatchKind[MatchKind["Directory"] = 1] = "Directory"; /** Matched if the path is a regular file */ MatchKind[MatchKind["File"] = 2] = "File"; /** Matched */ MatchKind[MatchKind["All"] = 3] = "All"; })(MatchKind = exports.MatchKind || (exports.MatchKind = {})); //# sourceMappingURL=internal-match-kind.js.map /***/ }), /***/ 51256: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; const path = __importStar(__nccwpck_require__(16928)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const IS_WINDOWS = process.platform === 'win32'; /** * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. * * For example, on Linux/macOS: * - `/ => /` * - `/hello => /` * * For example, on Windows: * - `C:\ => C:\` * - `C:\hello => C:\` * - `C: => C:` * - `C:hello => C:` * - `\ => \` * - `\hello => \` * - `\\hello => \\hello` * - `\\hello\world => \\hello\world` */ function dirname(p) { // Normalize slashes and trim unnecessary trailing slash p = safeTrimTrailingSeparator(p); // Windows UNC root, e.g. 
\\hello or \\hello\world if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } // Get dirname let result = path.dirname(p); // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } return result; } exports.dirname = dirname; /** * Roots the path if not already rooted. On Windows, relative roots like `\` * or `C:` are expanded based on the current working directory. */ function ensureAbsoluteRoot(root, itemPath) { assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); // Already rooted if (hasAbsoluteRoot(itemPath)) { return itemPath; } // Windows if (IS_WINDOWS) { // Check for itemPath like C: or C:foo if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { let cwd = process.cwd(); assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); // Drive letter matches cwd? Expand to cwd if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { // Drive only, e.g. C: if (itemPath.length === 2) { // Preserve specified drive letter case (upper or lower) return `${itemPath[0]}:\\${cwd.substr(3)}`; } // Drive + path, e.g. C:foo else { if (!cwd.endsWith('\\')) { cwd += '\\'; } // Preserve specified drive letter case (upper or lower) return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; } } // Different drive else { return `${itemPath[0]}:\\${itemPath.substr(2)}`; } } // Check for itemPath like \ or \foo else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { const cwd = process.cwd(); assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); return `${cwd[0]}:\\${itemPath.substr(1)}`; } } assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); // Otherwise ensure root ends with a separator if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { // Intentionally empty } else { // Append separator root += path.sep; } return root + itemPath; } exports.ensureAbsoluteRoot = ensureAbsoluteRoot; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\\hello\share` and `C:\hello` (and using alternate separator). */ function hasAbsoluteRoot(itemPath) { assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); // Normalize separators itemPath = normalizeSeparators(itemPath); // Windows if (IS_WINDOWS) { // E.g. \\hello\share or C:\hello return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); } // E.g. /hello return itemPath.startsWith('/'); } exports.hasAbsoluteRoot = hasAbsoluteRoot; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). */ function hasRoot(itemPath) { assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); // Normalize separators itemPath = normalizeSeparators(itemPath); // Windows if (IS_WINDOWS) { // E.g. \ or \hello or \\hello // E.g. C: or C:\hello return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); } // E.g. 
/hello return itemPath.startsWith('/'); } exports.hasRoot = hasRoot; /** * Removes redundant slashes and converts `/` to `\` on Windows */ function normalizeSeparators(p) { p = p || ''; // Windows if (IS_WINDOWS) { // Convert slashes on Windows p = p.replace(/\//g, '\\'); // Remove redundant slashes const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC } // Remove redundant slashes return p.replace(/\/\/+/g, '/'); } exports.normalizeSeparators = normalizeSeparators; /** * Normalizes the path separators and trims the trailing separator (when safe). * For example, `/foo/ => /foo` but `/ => /` */ function safeTrimTrailingSeparator(p) { // Short-circuit if empty if (!p) { return ''; } // Normalize separators p = normalizeSeparators(p); // No trailing slash if (!p.endsWith(path.sep)) { return p; } // Check '/' on Linux/macOS and '\' on Windows if (p === path.sep) { return p; } // On Windows check if drive root. E.g. C:\ if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { return p; } // Otherwise trim trailing slash return p.substr(0, p.length - 1); } exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; //# sourceMappingURL=internal-path-helper.js.map /***/ }), /***/ 70279: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Path = void 0; const path = __importStar(__nccwpck_require__(16928)); const pathHelper = __importStar(__nccwpck_require__(51256)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const IS_WINDOWS = process.platform === 'win32'; /** * Helper class for parsing paths into segments */ class Path { /** * Constructs a Path * @param itemPath Path or array of segments */ constructor(itemPath) { this.segments = []; // String if (typeof itemPath === 'string') { assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); // Not rooted if (!pathHelper.hasRoot(itemPath)) { this.segments = itemPath.split(path.sep); } // Rooted else { // Add all segments, while not at the root let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { // Add the segment const basename = path.basename(remaining); this.segments.unshift(basename); // Truncate the last segment remaining = dir; dir = pathHelper.dirname(remaining); } // Remainder is the root this.segments.unshift(remaining); } } // Array else { // Must not be empty assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); // Each segment for (let i = 0; i < itemPath.length; i++) { let segment = itemPath[i]; // Must not be empty assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); // Normalize slashes segment = pathHelper.normalizeSeparators(itemPath[i]); // Root segment if (i === 0 && pathHelper.hasRoot(segment)) { segment = pathHelper.safeTrimTrailingSeparator(segment); assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } // All other segments else { // Must not contain slash assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } } } /** * Converts the path to it's string representation */ toString() { // First segment let result = this.segments[0]; // All others let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { result += path.sep; } result += this.segments[i]; } return result; } } exports.Path = Path; //# sourceMappingURL=internal-path.js.map /***/ }), /***/ 30637: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.partialMatch = exports.match = exports.getSearchPaths = void 0; const pathHelper = __importStar(__nccwpck_require__(51256)); const internal_match_kind_1 = __nccwpck_require__(49222); const IS_WINDOWS = process.platform === 'win32'; /** * Given an array of patterns, returns an array of paths to search. * Duplicates and paths under other included paths are filtered out. */ function getSearchPaths(patterns) { // Ignore negate patterns patterns = patterns.filter(x => !x.negate); // Create a map of all search paths const searchPathMap = {}; for (const pattern of patterns) { const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; searchPathMap[key] = 'candidate'; } const result = []; for (const pattern of patterns) { // Check if already included const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; if (searchPathMap[key] === 'included') { continue; } // Check for an ancestor search path let foundAncestor = false; let tempKey = key; let parent = pathHelper.dirname(tempKey); while (parent !== tempKey) { if (searchPathMap[parent]) { foundAncestor = true; break; } tempKey = parent; parent = pathHelper.dirname(tempKey); } // Include the search pattern in the result if (!foundAncestor) { result.push(pattern.searchPath); searchPathMap[key] = 'included'; } } return result; } exports.getSearchPaths = getSearchPaths; /** * Matches the patterns against the path */ function match(patterns, itemPath) { let result = internal_match_kind_1.MatchKind.None; for (const pattern of patterns) { if (pattern.negate) { result &= ~pattern.match(itemPath); } else { result |= pattern.match(itemPath); } } return result; } exports.match = match; /** * Checks whether to descend further into the directory */ function partialMatch(patterns, itemPath) { return patterns.some(x => !x.negate && x.partialMatch(itemPath)); } exports.partialMatch = partialMatch; //# sourceMappingURL=internal-pattern-helper.js.map /***/ }), /***/ 8188: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Pattern = void 0; const os = __importStar(__nccwpck_require__(70857)); const path = __importStar(__nccwpck_require__(16928)); const pathHelper = __importStar(__nccwpck_require__(51256)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const minimatch_1 = __nccwpck_require__(43772); const internal_match_kind_1 = __nccwpck_require__(49222); const internal_path_1 = __nccwpck_require__(70279); const IS_WINDOWS = process.platform === 'win32'; class Pattern { constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { /** * Indicates whether matches should be excluded from the result set */ this.negate = false; // Pattern overload let pattern; if (typeof patternOrNegate === 'string') { pattern = patternOrNegate.trim(); } // Segments overload else { // Convert to pattern segments = segments || []; assert_1.default(segments.length, `Parameter 'segments' must not empty`); const root = Pattern.getLiteral(segments[0]); assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); pattern = new internal_path_1.Path(segments).toString().trim(); if (patternOrNegate) { pattern = `!${pattern}`; } } // Negate while (pattern.startsWith('!')) { this.negate = !this.negate; pattern = pattern.substr(1).trim(); } // Normalize slashes and ensures absolute root pattern = Pattern.fixupPattern(pattern, homedir); // Segments this.segments = new internal_path_1.Path(pattern).segments; // Trailing slash indicates the pattern should only match directories, not regular files this.trailingSeparator = pathHelper .normalizeSeparators(pattern) .endsWith(path.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); // Search path (literal path prior to the first glob segment) let foundGlob = false; const searchSegments = this.segments .map(x => Pattern.getLiteral(x)) .filter(x => !foundGlob && !(foundGlob = x === '')); this.searchPath = new internal_path_1.Path(searchSegments).toString(); // Root RegExp (required when determining partial match) this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); this.isImplicitPattern = isImplicitPattern; // Create minimatch const minimatchOptions = { dot: true, nobrace: true, nocase: IS_WINDOWS, nocomment: true, noext: true, nonegate: true }; pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); } /** * Matches the pattern against the specified path */ match(itemPath) { // Last segment is globstar? if (this.segments[this.segments.length - 1] === '**') { // Normalize slashes itemPath = pathHelper.normalizeSeparators(itemPath); // Append a trailing slash. Otherwise Minimatch will not match the directory immediately // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { // Note, this is safe because the constructor ensures the pattern has an absolute root. // For example, formats like C: and C:foo on Windows are resolved to an absolute root. itemPath = `${itemPath}${path.sep}`; } } else { // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); } // Match if (this.minimatch.match(itemPath)) { return this.trailingSeparator ? 
internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; } return internal_match_kind_1.MatchKind.None; } /** * Indicates whether the pattern may match descendants of the specified path */ partialMatch(itemPath) { // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); // matchOne does not handle root path correctly if (pathHelper.dirname(itemPath) === itemPath) { return this.rootRegExp.test(itemPath); } return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); } /** * Escapes glob patterns within a path */ static globEscape(s) { return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment .replace(/\?/g, '[?]') // escape '?' .replace(/\*/g, '[*]'); // escape '*' } /** * Normalizes slashes and ensures absolute root */ static fixupPattern(pattern, homedir) { // Empty assert_1.default(pattern, 'pattern cannot be empty'); // Must not contain `.` segment, unless first segment // Must not contain `..` segment const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); // Normalize slashes pattern = pathHelper.normalizeSeparators(pattern); // Replace leading `.` segment if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); } // Replace leading `~` segment else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { homedir = homedir || os.homedir(); assert_1.default(homedir, 'Unable to determine HOME directory'); assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); pattern = Pattern.globEscape(homedir) + pattern.substr(1); } // Replace relative drive root, e.g. pattern is C: or C:foo else if (IS_WINDOWS && (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); if (pattern.length > 2 && !root.endsWith('\\')) { root += '\\'; } pattern = Pattern.globEscape(root) + pattern.substr(2); } // Replace relative root, e.g. pattern is \ or \foo else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); if (!root.endsWith('\\')) { root += '\\'; } pattern = Pattern.globEscape(root) + pattern.substr(1); } // Otherwise ensure absolute root else { pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); } return pathHelper.normalizeSeparators(pattern); } /** * Attempts to unescape a pattern segment to create a literal path segment. * Otherwise returns empty string. 
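* Illustrative examples, based on the implementation below (not exhaustive):
* 'hello' => 'hello'; '[a]' => 'a' (a single-character set collapses to its literal); 'he*lo', 'f?o' and '[ab]' => '' (cannot be reduced to a literal segment).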
*/ static getLiteral(segment) { let literal = ''; for (let i = 0; i < segment.length; i++) { const c = segment[i]; // Escape if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { literal += segment[++i]; continue; } // Wildcard else if (c === '*' || c === '?') { return ''; } // Character set else if (c === '[' && i + 1 < segment.length) { let set = ''; let closed = -1; for (let i2 = i + 1; i2 < segment.length; i2++) { const c2 = segment[i2]; // Escape if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { set += segment[++i2]; continue; } // Closed else if (c2 === ']') { closed = i2; break; } // Otherwise else { set += c2; } } // Closed? if (closed >= 0) { // Cannot convert if (set.length > 1) { return ''; } // Convert to literal if (set) { literal += set; i = closed; continue; } } // Otherwise fall thru } // Append literal += c; } return literal; } /** * Escapes regexp special characters * https://javascript.info/regexp-escaping */ static regExpEscape(s) { return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); } } exports.Pattern = Pattern; //# sourceMappingURL=internal-pattern.js.map /***/ }), /***/ 768: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.SearchState = void 0; class SearchState { constructor(path, level) { this.path = path; this.level = level; } } exports.SearchState = SearchState; //# sourceMappingURL=internal-search-state.js.map /***/ }), /***/ 44914: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.issue = exports.issueCommand = void 0; const os = __importStar(__nccwpck_require__(70857)); const utils_1 = __nccwpck_require__(30302); /** * Commands * * Command Format: * ::name key=value,key=value::message * * Examples: * ::warning::This is the message * ::set-env name=MY_VAR::some value */ function issueCommand(command, properties, message) { const cmd = new Command(command, properties, message); process.stdout.write(cmd.toString() + os.EOL); } exports.issueCommand = issueCommand; function issue(name, message = '') { issueCommand(name, {}, message); } exports.issue = issue; const CMD_STRING = '::'; class Command { constructor(command, properties, message) { if (!command) { command = 'missing.command'; } this.command = command; this.properties = properties; this.message = message; } toString() { let cmdStr = CMD_STRING + this.command; if (this.properties && Object.keys(this.properties).length > 0) { cmdStr += ' '; let first = true; for (const key in this.properties) { if (this.properties.hasOwnProperty(key)) { const val = this.properties[key]; if (val) { if (first) { first = false; } else { cmdStr += ','; } cmdStr += `${key}=${escapeProperty(val)}`; } } } } cmdStr += `${CMD_STRING}${escapeData(this.message)}`; return cmdStr; } } function escapeData(s) { return (0, utils_1.toCommandValue)(s) .replace(/%/g, '%25') .replace(/\r/g, '%0D') .replace(/\n/g, '%0A'); } function escapeProperty(s) { return (0, utils_1.toCommandValue)(s) .replace(/%/g, '%25') .replace(/\r/g, '%0D') .replace(/\n/g, '%0A') .replace(/:/g, '%3A') .replace(/,/g, '%2C'); } //# sourceMappingURL=command.js.map /***/ }), /***/ 37484: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.platform = exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = exports.markdownSummary = exports.summary = exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0; const command_1 = __nccwpck_require__(44914); const file_command_1 = __nccwpck_require__(24753); const utils_1 = __nccwpck_require__(30302); const os = __importStar(__nccwpck_require__(70857)); const path = __importStar(__nccwpck_require__(16928)); const oidc_utils_1 = __nccwpck_require__(35306); /** * The code to exit an action */ var ExitCode; (function (ExitCode) { /** * A code indicating that the action was successful */ ExitCode[ExitCode["Success"] = 0] = "Success"; /** * A code indicating that the action was a failure */ ExitCode[ExitCode["Failure"] = 1] = "Failure"; })(ExitCode || (exports.ExitCode = ExitCode = {})); //----------------------------------------------------------------------- // Variables //----------------------------------------------------------------------- /** * Sets env variable for this action and future actions in the job * @param name the name of the variable to set * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function exportVariable(name, val) { const convertedVal = (0, utils_1.toCommandValue)(val); process.env[name] = convertedVal; const filePath = process.env['GITHUB_ENV'] || ''; if (filePath) { return (0, file_command_1.issueFileCommand)('ENV', (0, file_command_1.prepareKeyValueMessage)(name, val)); } (0, command_1.issueCommand)('set-env', { name }, convertedVal); } exports.exportVariable = exportVariable; /** * Registers a secret which will get masked from logs * @param secret value of the secret */ function setSecret(secret) { (0, command_1.issueCommand)('add-mask', {}, secret); } exports.setSecret = setSecret; /** * Prepends inputPath to the PATH (for this action and future actions) * @param inputPath */ function addPath(inputPath) { const filePath = process.env['GITHUB_PATH'] || ''; if (filePath) { (0, file_command_1.issueFileCommand)('PATH', inputPath); } else { (0, command_1.issueCommand)('add-path', {}, inputPath); } process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; } exports.addPath = addPath; /** * Gets the value of an input. * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed. * Returns an empty string if the value is not defined. * * @param name name of the input to get * @param options optional. See InputOptions. 
* @returns string */ function getInput(name, options) { const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; if (options && options.required && !val) { throw new Error(`Input required and not supplied: ${name}`); } if (options && options.trimWhitespace === false) { return val; } return val.trim(); } exports.getInput = getInput; /** * Gets the values of an multiline input. Each value is also trimmed. * * @param name name of the input to get * @param options optional. See InputOptions. * @returns string[] * */ function getMultilineInput(name, options) { const inputs = getInput(name, options) .split('\n') .filter(x => x !== ''); if (options && options.trimWhitespace === false) { return inputs; } return inputs.map(input => input.trim()); } exports.getMultilineInput = getMultilineInput; /** * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification. * Support boolean input list: `true | True | TRUE | false | False | FALSE` . * The return value is also in boolean type. * ref: https://yaml.org/spec/1.2/spec.html#id2804923 * * @param name name of the input to get * @param options optional. See InputOptions. * @returns boolean */ function getBooleanInput(name, options) { const trueValue = ['true', 'True', 'TRUE']; const falseValue = ['false', 'False', 'FALSE']; const val = getInput(name, options); if (trueValue.includes(val)) return true; if (falseValue.includes(val)) return false; throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` + `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``); } exports.getBooleanInput = getBooleanInput; /** * Sets the value of an output. * * @param name name of the output to set * @param value value to store. Non-string values will be converted to a string via JSON.stringify */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function setOutput(name, value) { const filePath = process.env['GITHUB_OUTPUT'] || ''; if (filePath) { return (0, file_command_1.issueFileCommand)('OUTPUT', (0, file_command_1.prepareKeyValueMessage)(name, value)); } process.stdout.write(os.EOL); (0, command_1.issueCommand)('set-output', { name }, (0, utils_1.toCommandValue)(value)); } exports.setOutput = setOutput; /** * Enables or disables the echoing of commands into stdout for the rest of the step. * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. * */ function setCommandEcho(enabled) { (0, command_1.issue)('echo', enabled ? 'on' : 'off'); } exports.setCommandEcho = setCommandEcho; //----------------------------------------------------------------------- // Results //----------------------------------------------------------------------- /** * Sets the action status to failed. * When the action exits it will be with an exit code of 1 * @param message add error issue message */ function setFailed(message) { process.exitCode = ExitCode.Failure; error(message); } exports.setFailed = setFailed; //----------------------------------------------------------------------- // Logging Commands //----------------------------------------------------------------------- /** * Gets whether Actions Step Debug is on or not */ function isDebug() { return process.env['RUNNER_DEBUG'] === '1'; } exports.isDebug = isDebug; /** * Writes debug message to user log * @param message debug message */ function debug(message) { (0, command_1.issueCommand)('debug', {}, message); } exports.debug = debug; /** * Adds an error issue * @param message error issue message. 
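// Editor's note: illustrative usage sketch; the input/output names below are
// hypothetical. getInput reads INPUT_<NAME> from the environment (spaces
// mapped to underscores, name upper-cased) and trims unless trimWhitespace is
// false; getBooleanInput accepts only the YAML 1.2 core-schema spellings of
// true/false; setOutput prefers the GITHUB_OUTPUT file command when that
// environment variable is set.
const core = require('@actions/core');
const key = core.getInput('key', { required: true });       // reads process.env.INPUT_KEY
const strict = core.getBooleanInput('fail-on-cache-miss');  // true/True/TRUE/false/False/FALSE
core.setOutput('cache-hit', key.length > 0 && !strict);     // non-strings go through JSON.stringify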
Errors will be converted to string via toString() * @param properties optional properties to add to the annotation. */ function error(message, properties = {}) { (0, command_1.issueCommand)('error', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports.error = error; /** * Adds a warning issue * @param message warning issue message. Errors will be converted to string via toString() * @param properties optional properties to add to the annotation. */ function warning(message, properties = {}) { (0, command_1.issueCommand)('warning', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports.warning = warning; /** * Adds a notice issue * @param message notice issue message. Errors will be converted to string via toString() * @param properties optional properties to add to the annotation. */ function notice(message, properties = {}) { (0, command_1.issueCommand)('notice', (0, utils_1.toCommandProperties)(properties), message instanceof Error ? message.toString() : message); } exports.notice = notice; /** * Writes info to log with console.log. * @param message info message */ function info(message) { process.stdout.write(message + os.EOL); } exports.info = info; /** * Begin an output group. * * Output until the next `groupEnd` will be foldable in this group * * @param name The name of the output group */ function startGroup(name) { (0, command_1.issue)('group', name); } exports.startGroup = startGroup; /** * End an output group. */ function endGroup() { (0, command_1.issue)('endgroup'); } exports.endGroup = endGroup; /** * Wrap an asynchronous function call in a group. * * Returns the same type as the function itself. * * @param name The name of the group * @param fn The function to wrap in the group */ function group(name, fn) { return __awaiter(this, void 0, void 0, function* () { startGroup(name); let result; try { result = yield fn(); } finally { endGroup(); } return result; }); } exports.group = group; //----------------------------------------------------------------------- // Wrapper action state //----------------------------------------------------------------------- /** * Saves state for current action, the state can only be retrieved by this action's post job execution. * * @param name name of the state to store * @param value value to store. Non-string values will be converted to a string via JSON.stringify */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function saveState(name, value) { const filePath = process.env['GITHUB_STATE'] || ''; if (filePath) { return (0, file_command_1.issueFileCommand)('STATE', (0, file_command_1.prepareKeyValueMessage)(name, value)); } (0, command_1.issueCommand)('save-state', { name }, (0, utils_1.toCommandValue)(value)); } exports.saveState = saveState; /** * Gets the value of an state set by this action's main execution. 
* * @param name name of the state to get * @returns string */ function getState(name) { return process.env[`STATE_${name}`] || ''; } exports.getState = getState; function getIDToken(aud) { return __awaiter(this, void 0, void 0, function* () { return yield oidc_utils_1.OidcClient.getIDToken(aud); }); } exports.getIDToken = getIDToken; /** * Summary exports */ var summary_1 = __nccwpck_require__(71847); Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } })); /** * @deprecated use core.summary */ var summary_2 = __nccwpck_require__(71847); Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } })); /** * Path exports */ var path_utils_1 = __nccwpck_require__(31976); Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } })); Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } })); Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } })); /** * Platform utilities exports */ exports.platform = __importStar(__nccwpck_require__(18968)); //# sourceMappingURL=core.js.map /***/ }), /***/ 24753: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // For internal use, subject to change. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.prepareKeyValueMessage = exports.issueFileCommand = void 0; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ const crypto = __importStar(__nccwpck_require__(76982)); const fs = __importStar(__nccwpck_require__(79896)); const os = __importStar(__nccwpck_require__(70857)); const utils_1 = __nccwpck_require__(30302); function issueFileCommand(command, message) { const filePath = process.env[`GITHUB_${command}`]; if (!filePath) { throw new Error(`Unable to find environment variable for file command ${command}`); } if (!fs.existsSync(filePath)) { throw new Error(`Missing file at path: ${filePath}`); } fs.appendFileSync(filePath, `${(0, utils_1.toCommandValue)(message)}${os.EOL}`, { encoding: 'utf8' }); } exports.issueFileCommand = issueFileCommand; function prepareKeyValueMessage(key, value) { const delimiter = `ghadelimiter_${crypto.randomUUID()}`; const convertedValue = (0, utils_1.toCommandValue)(value); // These should realistically never happen, but just in case someone finds a // way to exploit uuid generation let's not allow keys or values that contain // the delimiter. if (key.includes(delimiter)) { throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`); } if (convertedValue.includes(delimiter)) { throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`); } return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`; } exports.prepareKeyValueMessage = prepareKeyValueMessage; //# sourceMappingURL=file-command.js.map /***/ }), /***/ 35306: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
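// Editor's note: illustrative sketch, not part of the bundle, mirroring
// prepareKeyValueMessage above (minus its delimiter-collision checks). Values
// are appended to $GITHUB_OUTPUT / $GITHUB_ENV / $GITHUB_STATE as a heredoc
// block bounded by a random delimiter, so multi-line values stay intact.
const crypto = require('crypto');
const os = require('os');
function sketchKeyValueMessage(key, value) {
  const delimiter = `ghadelimiter_${crypto.randomUUID()}`;
  const converted = typeof value === 'string' ? value : JSON.stringify(value);
  return `${key}<<${delimiter}${os.EOL}${converted}${os.EOL}${delimiter}`;
}
console.log(sketchKeyValueMessage('result', { ok: true }));
// result<<ghadelimiter_<uuid>
// {"ok":true}
// ghadelimiter_<uuid>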
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.OidcClient = void 0; const http_client_1 = __nccwpck_require__(54844); const auth_1 = __nccwpck_require__(44552); const core_1 = __nccwpck_require__(37484); class OidcClient { static createHttpClient(allowRetry = true, maxRetry = 10) { const requestOptions = { allowRetries: allowRetry, maxRetries: maxRetry }; return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions); } static getRequestToken() { const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN']; if (!token) { throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable'); } return token; } static getIDTokenUrl() { const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL']; if (!runtimeUrl) { throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable'); } return runtimeUrl; } static getCall(id_token_url) { var _a; return __awaiter(this, void 0, void 0, function* () { const httpclient = OidcClient.createHttpClient(); const res = yield httpclient .getJson(id_token_url) .catch(error => { throw new Error(`Failed to get ID Token. \n Error Code : ${error.statusCode}\n Error Message: ${error.message}`); }); const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value; if (!id_token) { throw new Error('Response json body do not have ID Token field'); } return id_token; }); } static getIDToken(audience) { return __awaiter(this, void 0, void 0, function* () { try { // New ID Token is requested from action service let id_token_url = OidcClient.getIDTokenUrl(); if (audience) { const encodedAudience = encodeURIComponent(audience); id_token_url = `${id_token_url}&audience=${encodedAudience}`; } (0, core_1.debug)(`ID token url is ${id_token_url}`); const id_token = yield OidcClient.getCall(id_token_url); (0, core_1.setSecret)(id_token); return id_token; } catch (error) { throw new Error(`Error message: ${error.message}`); } }); } } exports.OidcClient = OidcClient; //# sourceMappingURL=oidc-utils.js.map /***/ }), /***/ 31976: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
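// Editor's note: illustrative usage sketch; the helper name and the audience
// value are examples only. getIDToken above requires the
// ACTIONS_ID_TOKEN_REQUEST_URL / ACTIONS_ID_TOKEN_REQUEST_TOKEN variables
// (which the runner only provides when the workflow grants
// `permissions: id-token: write`) and masks the returned JWT via setSecret
// before handing it back.
const core = require('@actions/core');
async function fetchOidcToken() {
  const token = await core.getIDToken('sts.amazonaws.com'); // audience argument is optional
  core.debug(`received an ID token of length ${token.length}`); // never log the token itself
  return token;
}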
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0; const path = __importStar(__nccwpck_require__(16928)); /** * toPosixPath converts the given path to the posix form. On Windows, \\ will be * replaced with /. * * @param pth. Path to transform. * @return string Posix path. */ function toPosixPath(pth) { return pth.replace(/[\\]/g, '/'); } exports.toPosixPath = toPosixPath; /** * toWin32Path converts the given path to the win32 form. On Linux, / will be * replaced with \\. * * @param pth. Path to transform. * @return string Win32 path. */ function toWin32Path(pth) { return pth.replace(/[/]/g, '\\'); } exports.toWin32Path = toWin32Path; /** * toPlatformPath converts the given path to a platform-specific path. It does * this by replacing instances of / and \ with the platform-specific path * separator. * * @param pth The path to platformize. * @return string The platform-specific path. */ function toPlatformPath(pth) { return pth.replace(/[/\\]/g, path.sep); } exports.toPlatformPath = toPlatformPath; //# sourceMappingURL=path-utils.js.map /***/ }), /***/ 18968: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
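// Editor's note: illustrative sketch, not part of the bundle. The path helpers
// above are plain string replacements; only toPlatformPath consults the host
// OS (via path.sep):
const core = require('@actions/core');
core.toPosixPath('C:\\work\\repo\\dist');  // -> "C:/work/repo/dist"
core.toWin32Path('/home/runner/work');     // -> "\home\runner\work"
core.toPlatformPath('src/lib\\util.ts');   // -> "src/lib/util.ts" on Linux/macOS, "src\lib\util.ts" on Windows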
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getDetails = exports.isLinux = exports.isMacOS = exports.isWindows = exports.arch = exports.platform = void 0; const os_1 = __importDefault(__nccwpck_require__(70857)); const exec = __importStar(__nccwpck_require__(95236)); const getWindowsInfo = () => __awaiter(void 0, void 0, void 0, function* () { const { stdout: version } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Version"', undefined, { silent: true }); const { stdout: name } = yield exec.getExecOutput('powershell -command "(Get-CimInstance -ClassName Win32_OperatingSystem).Caption"', undefined, { silent: true }); return { name: name.trim(), version: version.trim() }; }); const getMacOsInfo = () => __awaiter(void 0, void 0, void 0, function* () { var _a, _b, _c, _d; const { stdout } = yield exec.getExecOutput('sw_vers', undefined, { silent: true }); const version = (_b = (_a = stdout.match(/ProductVersion:\s*(.+)/)) === null || _a === void 0 ? void 0 : _a[1]) !== null && _b !== void 0 ? _b : ''; const name = (_d = (_c = stdout.match(/ProductName:\s*(.+)/)) === null || _c === void 0 ? void 0 : _c[1]) !== null && _d !== void 0 ? _d : ''; return { name, version }; }); const getLinuxInfo = () => __awaiter(void 0, void 0, void 0, function* () { const { stdout } = yield exec.getExecOutput('lsb_release', ['-i', '-r', '-s'], { silent: true }); const [name, version] = stdout.trim().split('\n'); return { name, version }; }); exports.platform = os_1.default.platform(); exports.arch = os_1.default.arch(); exports.isWindows = exports.platform === 'win32'; exports.isMacOS = exports.platform === 'darwin'; exports.isLinux = exports.platform === 'linux'; function getDetails() { return __awaiter(this, void 0, void 0, function* () { return Object.assign(Object.assign({}, (yield (exports.isWindows ? getWindowsInfo() : exports.isMacOS ? getMacOsInfo() : getLinuxInfo()))), { platform: exports.platform, arch: exports.arch, isWindows: exports.isWindows, isMacOS: exports.isMacOS, isLinux: exports.isLinux }); }); } exports.getDetails = getDetails; //# sourceMappingURL=platform.js.map /***/ }), /***/ 71847: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0; const os_1 = __nccwpck_require__(70857); const fs_1 = __nccwpck_require__(79896); const { access, appendFile, writeFile } = fs_1.promises; exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY'; exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary'; class Summary { constructor() { this._buffer = ''; } /** * Finds the summary file path from the environment, rejects if env var is not found or file does not exist * Also checks r/w permissions. * * @returns step summary file path */ filePath() { return __awaiter(this, void 0, void 0, function* () { if (this._filePath) { return this._filePath; } const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR]; if (!pathFromEnv) { throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`); } try { yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK); } catch (_a) { throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`); } this._filePath = pathFromEnv; return this._filePath; }); } /** * Wraps content in an HTML tag, adding any HTML attributes * * @param {string} tag HTML tag to wrap * @param {string | null} content content within the tag * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add * * @returns {string} content wrapped in HTML element */ wrap(tag, content, attrs = {}) { const htmlAttrs = Object.entries(attrs) .map(([key, value]) => ` ${key}="${value}"`) .join(''); if (!content) { return `<${tag}${htmlAttrs}>`; } return `<${tag}${htmlAttrs}>${content}`; } /** * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default. * * @param {SummaryWriteOptions} [options] (optional) options for write operation * * @returns {Promise} summary instance */ write(options) { return __awaiter(this, void 0, void 0, function* () { const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite); const filePath = yield this.filePath(); const writeFunc = overwrite ? writeFile : appendFile; yield writeFunc(filePath, this._buffer, { encoding: 'utf8' }); return this.emptyBuffer(); }); } /** * Clears the summary buffer and wipes the summary file * * @returns {Summary} summary instance */ clear() { return __awaiter(this, void 0, void 0, function* () { return this.emptyBuffer().write({ overwrite: true }); }); } /** * Returns the current summary buffer as a string * * @returns {string} string of summary buffer */ stringify() { return this._buffer; } /** * If the summary buffer is empty * * @returns {boolen} true if the buffer is empty */ isEmptyBuffer() { return this._buffer.length === 0; } /** * Resets the summary buffer without writing to summary file * * @returns {Summary} summary instance */ emptyBuffer() { this._buffer = ''; return this; } /** * Adds raw text to the summary buffer * * @param {string} text content to add * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false) * * @returns {Summary} summary instance */ addRaw(text, addEOL = false) { this._buffer += text; return addEOL ? 
this.addEOL() : this; } /** * Adds the operating system-specific end-of-line marker to the buffer * * @returns {Summary} summary instance */ addEOL() { return this.addRaw(os_1.EOL); } /** * Adds an HTML codeblock to the summary buffer * * @param {string} code content to render within fenced code block * @param {string} lang (optional) language to syntax highlight code * * @returns {Summary} summary instance */ addCodeBlock(code, lang) { const attrs = Object.assign({}, (lang && { lang })); const element = this.wrap('pre', this.wrap('code', code), attrs); return this.addRaw(element).addEOL(); } /** * Adds an HTML list to the summary buffer * * @param {string[]} items list of items to render * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false) * * @returns {Summary} summary instance */ addList(items, ordered = false) { const tag = ordered ? 'ol' : 'ul'; const listItems = items.map(item => this.wrap('li', item)).join(''); const element = this.wrap(tag, listItems); return this.addRaw(element).addEOL(); } /** * Adds an HTML table to the summary buffer * * @param {SummaryTableCell[]} rows table rows * * @returns {Summary} summary instance */ addTable(rows) { const tableBody = rows .map(row => { const cells = row .map(cell => { if (typeof cell === 'string') { return this.wrap('td', cell); } const { header, data, colspan, rowspan } = cell; const tag = header ? 'th' : 'td'; const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan })); return this.wrap(tag, data, attrs); }) .join(''); return this.wrap('tr', cells); }) .join(''); const element = this.wrap('table', tableBody); return this.addRaw(element).addEOL(); } /** * Adds a collapsable HTML details element to the summary buffer * * @param {string} label text for the closed state * @param {string} content collapsable content * * @returns {Summary} summary instance */ addDetails(label, content) { const element = this.wrap('details', this.wrap('summary', label) + content); return this.addRaw(element).addEOL(); } /** * Adds an HTML image tag to the summary buffer * * @param {string} src path to the image you to embed * @param {string} alt text description of the image * @param {SummaryImageOptions} options (optional) addition image attributes * * @returns {Summary} summary instance */ addImage(src, alt, options) { const { width, height } = options || {}; const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height })); const element = this.wrap('img', null, Object.assign({ src, alt }, attrs)); return this.addRaw(element).addEOL(); } /** * Adds an HTML section heading element * * @param {string} text heading text * @param {number | string} [level=1] (optional) the heading level, default: 1 * * @returns {Summary} summary instance */ addHeading(text, level) { const tag = `h${level}`; const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag) ? tag : 'h1'; const element = this.wrap(allowedTag, text); return this.addRaw(element).addEOL(); } /** * Adds an HTML thematic break (
<hr>) to the summary buffer * * @returns {Summary} summary instance */ addSeparator() { const element = this.wrap('hr', null); return this.addRaw(element).addEOL(); } /** * Adds an HTML line break (<br>
) to the summary buffer * * @returns {Summary} summary instance */ addBreak() { const element = this.wrap('br', null); return this.addRaw(element).addEOL(); } /** * Adds an HTML blockquote to the summary buffer * * @param {string} text quote text * @param {string} cite (optional) citation url * * @returns {Summary} summary instance */ addQuote(text, cite) { const attrs = Object.assign({}, (cite && { cite })); const element = this.wrap('blockquote', text, attrs); return this.addRaw(element).addEOL(); } /** * Adds an HTML anchor tag to the summary buffer * * @param {string} text link text/content * @param {string} href hyperlink * * @returns {Summary} summary instance */ addLink(text, href) { const element = this.wrap('a', text, { href }); return this.addRaw(element).addEOL(); } } const _summary = new Summary(); /** * @deprecated use `core.summary` */ exports.markdownSummary = _summary; exports.summary = _summary; //# sourceMappingURL=summary.js.map /***/ }), /***/ 30302: /***/ ((__unused_webpack_module, exports) => { "use strict"; // We use any as a valid input type /* eslint-disable @typescript-eslint/no-explicit-any */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.toCommandProperties = exports.toCommandValue = void 0; /** * Sanitizes an input into a string so it can be passed into issueCommand safely * @param input input to sanitize into a string */ function toCommandValue(input) { if (input === null || input === undefined) { return ''; } else if (typeof input === 'string' || input instanceof String) { return input; } return JSON.stringify(input); } exports.toCommandValue = toCommandValue; /** * * @param annotationProperties * @returns The command properties to send with the actual annotation command * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646 */ function toCommandProperties(annotationProperties) { if (!Object.keys(annotationProperties).length) { return {}; } return { title: annotationProperties.title, file: annotationProperties.file, line: annotationProperties.startLine, endLine: annotationProperties.endLine, col: annotationProperties.startColumn, endColumn: annotationProperties.endColumn }; } exports.toCommandProperties = toCommandProperties; //# sourceMappingURL=utils.js.map /***/ }), /***/ 95236: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
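// Editor's note: illustrative usage sketch (hypothetical helper name). The
// Summary builder above only appends HTML to an in-memory buffer; nothing is
// written to the GITHUB_STEP_SUMMARY file until write() is awaited.
const core = require('@actions/core');
async function writeJobSummary(restoredKey) {
  await core.summary
    .addHeading('Cache restore', 2)
    .addTable([
      [{ data: 'Key', header: true }, { data: 'Result', header: true }],
      ['primary', restoredKey ? 'hit' : 'miss'],
    ])
    .addSeparator()
    .addLink('Job summary docs', 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary')
    .write();
}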
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getExecOutput = exports.exec = void 0; const string_decoder_1 = __nccwpck_require__(13193); const tr = __importStar(__nccwpck_require__(6665)); /** * Exec a command. * Output will be streamed to the live console. * Returns promise with return code * * @param commandLine command to execute (can include additional args). Must be correctly escaped. * @param args optional arguments for tool. Escaping is handled by the lib. * @param options optional exec options. See ExecOptions * @returns Promise exit code */ function exec(commandLine, args, options) { return __awaiter(this, void 0, void 0, function* () { const commandArgs = tr.argStringToArray(commandLine); if (commandArgs.length === 0) { throw new Error(`Parameter 'commandLine' cannot be null or empty.`); } // Path to tool to execute should be first arg const toolPath = commandArgs[0]; args = commandArgs.slice(1).concat(args || []); const runner = new tr.ToolRunner(toolPath, args, options); return runner.exec(); }); } exports.exec = exec; /** * Exec a command and get the output. * Output will be streamed to the live console. * Returns promise with the exit code and collected stdout and stderr * * @param commandLine command to execute (can include additional args). Must be correctly escaped. * @param args optional arguments for tool. Escaping is handled by the lib. * @param options optional exec options. See ExecOptions * @returns Promise exit code, stdout, and stderr */ function getExecOutput(commandLine, args, options) { var _a, _b; return __awaiter(this, void 0, void 0, function* () { let stdout = ''; let stderr = ''; //Using string decoder covers the case where a mult-byte character is split const stdoutDecoder = new string_decoder_1.StringDecoder('utf8'); const stderrDecoder = new string_decoder_1.StringDecoder('utf8'); const originalStdoutListener = (_a = options === null || options === void 0 ? void 0 : options.listeners) === null || _a === void 0 ? void 0 : _a.stdout; const originalStdErrListener = (_b = options === null || options === void 0 ? void 0 : options.listeners) === null || _b === void 0 ? void 0 : _b.stderr; const stdErrListener = (data) => { stderr += stderrDecoder.write(data); if (originalStdErrListener) { originalStdErrListener(data); } }; const stdOutListener = (data) => { stdout += stdoutDecoder.write(data); if (originalStdoutListener) { originalStdoutListener(data); } }; const listeners = Object.assign(Object.assign({}, options === null || options === void 0 ? 
void 0 : options.listeners), { stdout: stdOutListener, stderr: stdErrListener }); const exitCode = yield exec(commandLine, args, Object.assign(Object.assign({}, options), { listeners })); //flush any remaining characters stdout += stdoutDecoder.end(); stderr += stderrDecoder.end(); return { exitCode, stdout, stderr }; }); } exports.getExecOutput = getExecOutput; //# sourceMappingURL=exec.js.map /***/ }), /***/ 6665: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.argStringToArray = exports.ToolRunner = void 0; const os = __importStar(__nccwpck_require__(70857)); const events = __importStar(__nccwpck_require__(24434)); const child = __importStar(__nccwpck_require__(35317)); const path = __importStar(__nccwpck_require__(16928)); const io = __importStar(__nccwpck_require__(94994)); const ioUtil = __importStar(__nccwpck_require__(75207)); const timers_1 = __nccwpck_require__(53557); /* eslint-disable @typescript-eslint/unbound-method */ const IS_WINDOWS = process.platform === 'win32'; /* * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. */ class ToolRunner extends events.EventEmitter { constructor(toolPath, args, options) { super(); if (!toolPath) { throw new Error("Parameter 'toolPath' cannot be null or empty."); } this.toolPath = toolPath; this.args = args || []; this.options = options || {}; } _debug(message) { if (this.options.listeners && this.options.listeners.debug) { this.options.listeners.debug(message); } } _getCommandString(options, noPrefix) { const toolPath = this._getSpawnFileName(); const args = this._getSpawnArgs(options); let cmd = noPrefix ? 
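// Editor's note: illustrative usage sketch (hypothetical helper name).
// getExecOutput above layers stdout/stderr capture (via StringDecoder, so
// multi-byte characters split across chunks are reassembled) on top of
// exec(), which delegates argument handling to the ToolRunner class defined
// in this module:
const exec = require('@actions/exec');
async function currentCommit() {
  const { exitCode, stdout, stderr } = await exec.getExecOutput(
    'git', ['rev-parse', 'HEAD'],
    { silent: true, ignoreReturnCode: true }
  );
  if (exitCode !== 0) throw new Error(`git rev-parse failed: ${stderr}`);
  return stdout.trim();
}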
'' : '[command]'; // omit prefix when piped to a second tool if (IS_WINDOWS) { // Windows + cmd file if (this._isCmdFile()) { cmd += toolPath; for (const a of args) { cmd += ` ${a}`; } } // Windows + verbatim else if (options.windowsVerbatimArguments) { cmd += `"${toolPath}"`; for (const a of args) { cmd += ` ${a}`; } } // Windows (regular) else { cmd += this._windowsQuoteCmdArg(toolPath); for (const a of args) { cmd += ` ${this._windowsQuoteCmdArg(a)}`; } } } else { // OSX/Linux - this can likely be improved with some form of quoting. // creating processes on Unix is fundamentally different than Windows. // on Unix, execvp() takes an arg array. cmd += toolPath; for (const a of args) { cmd += ` ${a}`; } } return cmd; } _processLineBuffer(data, strBuffer, onLine) { try { let s = strBuffer + data.toString(); let n = s.indexOf(os.EOL); while (n > -1) { const line = s.substring(0, n); onLine(line); // the rest of the string ... s = s.substring(n + os.EOL.length); n = s.indexOf(os.EOL); } return s; } catch (err) { // streaming lines to console is best effort. Don't fail a build. this._debug(`error processing line. Failed with error ${err}`); return ''; } } _getSpawnFileName() { if (IS_WINDOWS) { if (this._isCmdFile()) { return process.env['COMSPEC'] || 'cmd.exe'; } } return this.toolPath; } _getSpawnArgs(options) { if (IS_WINDOWS) { if (this._isCmdFile()) { let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; for (const a of this.args) { argline += ' '; argline += options.windowsVerbatimArguments ? a : this._windowsQuoteCmdArg(a); } argline += '"'; return [argline]; } } return this.args; } _endsWith(str, end) { return str.endsWith(end); } _isCmdFile() { const upperToolPath = this.toolPath.toUpperCase(); return (this._endsWith(upperToolPath, '.CMD') || this._endsWith(upperToolPath, '.BAT')); } _windowsQuoteCmdArg(arg) { // for .exe, apply the normal quoting rules that libuv applies if (!this._isCmdFile()) { return this._uvQuoteCmdArg(arg); } // otherwise apply quoting rules specific to the cmd.exe command line parser. // the libuv rules are generic and are not designed specifically for cmd.exe // command line parser. // // for a detailed description of the cmd.exe command line parser, refer to // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 // need quotes for empty arg if (!arg) { return '""'; } // determine whether the arg needs to be quoted const cmdSpecialChars = [ ' ', '\t', '&', '(', ')', '[', ']', '{', '}', '^', '=', ';', '!', "'", '+', ',', '`', '~', '|', '<', '>', '"' ]; let needsQuotes = false; for (const char of arg) { if (cmdSpecialChars.some(x => x === char)) { needsQuotes = true; break; } } // short-circuit if quotes not needed if (!needsQuotes) { return arg; } // the following quoting rules are very similar to the rules that by libuv applies. // // 1) wrap the string in quotes // // 2) double-up quotes - i.e. " => "" // // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately // doesn't work well with a cmd.exe command line. // // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. // for example, the command line: // foo.exe "myarg:""my val""" // is parsed by a .NET console app into an arg array: // [ "myarg:\"my val\"" ] // which is the same end result when applying libuv quoting rules. 
although the actual // command line from libuv quoting rules would look like: // foo.exe "myarg:\"my val\"" // // 3) double-up slashes that precede a quote, // e.g. hello \world => "hello \world" // hello\"world => "hello\\""world" // hello\\"world => "hello\\\\""world" // hello world\ => "hello world\\" // // technically this is not required for a cmd.exe command line, or the batch argument parser. // the reasons for including this as a .cmd quoting rule are: // // a) this is optimized for the scenario where the argument is passed from the .cmd file to an // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. // // b) it's what we've been doing previously (by deferring to node default behavior) and we // haven't heard any complaints about that aspect. // // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be // escaped when used on the command line directly - even though within a .cmd file % can be escaped // by using %%. // // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. // // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args // to an external program. // // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. // % can be escaped within a .cmd file. let reverse = '"'; let quoteHit = true; for (let i = arg.length; i > 0; i--) { // walk the string in reverse reverse += arg[i - 1]; if (quoteHit && arg[i - 1] === '\\') { reverse += '\\'; // double the slash } else if (arg[i - 1] === '"') { quoteHit = true; reverse += '"'; // double the quote } else { quoteHit = false; } } reverse += '"'; return reverse .split('') .reverse() .join(''); } _uvQuoteCmdArg(arg) { // Tool runner wraps child_process.spawn() and needs to apply the same quoting as // Node in certain cases where the undocumented spawn option windowsVerbatimArguments // is used. // // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), // pasting copyright notice from Node within this function: // // Copyright Joyent, Inc. and other Node contributors. All rights reserved. // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to // deal in the Software without restriction, including without limitation the // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or // sell copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS // IN THE SOFTWARE. if (!arg) { // Need double quotation for empty argument return '""'; } if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { // No quotation needed return arg; } if (!arg.includes('"') && !arg.includes('\\')) { // No embedded double quotes or backslashes, so I can just wrap // quote marks around the whole thing. return `"${arg}"`; } // Expected input/output: // input : hello"world // output: "hello\"world" // input : hello""world // output: "hello\"\"world" // input : hello\world // output: hello\world // input : hello\\world // output: hello\\world // input : hello\"world // output: "hello\\\"world" // input : hello\\"world // output: "hello\\\\\"world" // input : hello world\ // output: "hello world\\" - note the comment in libuv actually reads "hello world\" // but it appears the comment is wrong, it should be "hello world\\" let reverse = '"'; let quoteHit = true; for (let i = arg.length; i > 0; i--) { // walk the string in reverse reverse += arg[i - 1]; if (quoteHit && arg[i - 1] === '\\') { reverse += '\\'; } else if (arg[i - 1] === '"') { quoteHit = true; reverse += '\\'; } else { quoteHit = false; } } reverse += '"'; return reverse .split('') .reverse() .join(''); } _cloneExecOptions(options) { options = options || {}; const result = { cwd: options.cwd || process.cwd(), env: options.env || process.env, silent: options.silent || false, windowsVerbatimArguments: options.windowsVerbatimArguments || false, failOnStdErr: options.failOnStdErr || false, ignoreReturnCode: options.ignoreReturnCode || false, delay: options.delay || 10000 }; result.outStream = options.outStream || process.stdout; result.errStream = options.errStream || process.stderr; return result; } _getSpawnOptions(options, toolPath) { options = options || {}; const result = {}; result.cwd = options.cwd; result.env = options.env; result['windowsVerbatimArguments'] = options.windowsVerbatimArguments || this._isCmdFile(); if (options.windowsVerbatimArguments) { result.argv0 = `"${toolPath}"`; } return result; } /** * Exec a tool. * Output will be streamed to the live console. * Returns promise with return code * * @param tool path to tool to exec * @param options optional exec options. 
See ExecOptions * @returns number */ exec() { return __awaiter(this, void 0, void 0, function* () { // root the tool path if it is unrooted and contains relative pathing if (!ioUtil.isRooted(this.toolPath) && (this.toolPath.includes('/') || (IS_WINDOWS && this.toolPath.includes('\\')))) { // prefer options.cwd if it is specified, however options.cwd may also need to be rooted this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); } // if the tool is only a file name, then resolve it from the PATH // otherwise verify it exists (add extension on Windows if necessary) this.toolPath = yield io.which(this.toolPath, true); return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { this._debug(`exec tool: ${this.toolPath}`); this._debug('arguments:'); for (const arg of this.args) { this._debug(` ${arg}`); } const optionsNonNull = this._cloneExecOptions(this.options); if (!optionsNonNull.silent && optionsNonNull.outStream) { optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); } const state = new ExecState(optionsNonNull, this.toolPath); state.on('debug', (message) => { this._debug(message); }); if (this.options.cwd && !(yield ioUtil.exists(this.options.cwd))) { return reject(new Error(`The cwd: ${this.options.cwd} does not exist!`)); } const fileName = this._getSpawnFileName(); const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); let stdbuffer = ''; if (cp.stdout) { cp.stdout.on('data', (data) => { if (this.options.listeners && this.options.listeners.stdout) { this.options.listeners.stdout(data); } if (!optionsNonNull.silent && optionsNonNull.outStream) { optionsNonNull.outStream.write(data); } stdbuffer = this._processLineBuffer(data, stdbuffer, (line) => { if (this.options.listeners && this.options.listeners.stdline) { this.options.listeners.stdline(line); } }); }); } let errbuffer = ''; if (cp.stderr) { cp.stderr.on('data', (data) => { state.processStderr = true; if (this.options.listeners && this.options.listeners.stderr) { this.options.listeners.stderr(data); } if (!optionsNonNull.silent && optionsNonNull.errStream && optionsNonNull.outStream) { const s = optionsNonNull.failOnStdErr ? optionsNonNull.errStream : optionsNonNull.outStream; s.write(data); } errbuffer = this._processLineBuffer(data, errbuffer, (line) => { if (this.options.listeners && this.options.listeners.errline) { this.options.listeners.errline(line); } }); }); } cp.on('error', (err) => { state.processError = err.message; state.processExited = true; state.processClosed = true; state.CheckComplete(); }); cp.on('exit', (code) => { state.processExitCode = code; state.processExited = true; this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); state.CheckComplete(); }); cp.on('close', (code) => { state.processExitCode = code; state.processExited = true; state.processClosed = true; this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); state.CheckComplete(); }); state.on('done', (error, exitCode) => { if (stdbuffer.length > 0) { this.emit('stdline', stdbuffer); } if (errbuffer.length > 0) { this.emit('errline', errbuffer); } cp.removeAllListeners(); if (error) { reject(error); } else { resolve(exitCode); } }); if (this.options.input) { if (!cp.stdin) { throw new Error('child process missing stdin'); } cp.stdin.end(this.options.input); } })); }); } } exports.ToolRunner = ToolRunner; /** * Convert an arg string to an array of args. 
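// Editor's note: illustrative sketch, not part of the bundle. The
// argStringToArray helper documented here splits on unquoted spaces and only
// treats double quotes (and backslash-escaped quotes inside quotes) as
// special. Importing it from the un-bundled @actions/exec package's
// lib/toolrunner module is an assumption:
const { argStringToArray } = require('@actions/exec/lib/toolrunner');
console.log(argStringToArray('node --eval "console.log(1 + 1)"'));
// -> [ 'node', '--eval', 'console.log(1 + 1)' ]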
Handles escaping * * @param argString string of arguments * @returns string[] array of arguments */ function argStringToArray(argString) { const args = []; let inQuotes = false; let escaped = false; let arg = ''; function append(c) { // we only escape double quotes. if (escaped && c !== '"') { arg += '\\'; } arg += c; escaped = false; } for (let i = 0; i < argString.length; i++) { const c = argString.charAt(i); if (c === '"') { if (!escaped) { inQuotes = !inQuotes; } else { append(c); } continue; } if (c === '\\' && escaped) { append(c); continue; } if (c === '\\' && inQuotes) { escaped = true; continue; } if (c === ' ' && !inQuotes) { if (arg.length > 0) { args.push(arg); arg = ''; } continue; } append(c); } if (arg.length > 0) { args.push(arg.trim()); } return args; } exports.argStringToArray = argStringToArray; class ExecState extends events.EventEmitter { constructor(options, toolPath) { super(); this.processClosed = false; // tracks whether the process has exited and stdio is closed this.processError = ''; this.processExitCode = 0; this.processExited = false; // tracks whether the process has exited this.processStderr = false; // tracks whether stderr was written to this.delay = 10000; // 10 seconds this.done = false; this.timeout = null; if (!toolPath) { throw new Error('toolPath must not be empty'); } this.options = options; this.toolPath = toolPath; if (options.delay) { this.delay = options.delay; } } CheckComplete() { if (this.done) { return; } if (this.processClosed) { this._setResult(); } else if (this.processExited) { this.timeout = timers_1.setTimeout(ExecState.HandleTimeout, this.delay, this); } } _debug(message) { this.emit('debug', message); } _setResult() { // determine whether there is an error let error; if (this.processExited) { if (this.processError) { error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); } else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); } else if (this.processStderr && this.options.failOnStdErr) { error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); } } // clear the timeout if (this.timeout) { clearTimeout(this.timeout); this.timeout = null; } this.done = true; this.emit('done', error, this.processExitCode); } static HandleTimeout(state) { if (state.done) { return; } if (!state.processClosed && state.processExited) { const message = `The STDIO streams did not close within ${state.delay / 1000} seconds of the exit event from process '${state.toolPath}'. 
This may indicate a child process inherited the STDIO streams and has not yet exited.`; state._debug(message); } state._setResult(); } } //# sourceMappingURL=toolrunner.js.map /***/ }), /***/ 51648: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Context = void 0; const fs_1 = __nccwpck_require__(79896); const os_1 = __nccwpck_require__(70857); class Context { /** * Hydrate the context from the environment */ constructor() { var _a, _b, _c; this.payload = {}; if (process.env.GITHUB_EVENT_PATH) { if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); } else { const path = process.env.GITHUB_EVENT_PATH; process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); } } this.eventName = process.env.GITHUB_EVENT_NAME; this.sha = process.env.GITHUB_SHA; this.ref = process.env.GITHUB_REF; this.workflow = process.env.GITHUB_WORKFLOW; this.action = process.env.GITHUB_ACTION; this.actor = process.env.GITHUB_ACTOR; this.job = process.env.GITHUB_JOB; this.runAttempt = parseInt(process.env.GITHUB_RUN_ATTEMPT, 10); this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; } get issue() { const payload = this.payload; return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); } get repo() { if (process.env.GITHUB_REPOSITORY) { const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); return { owner, repo }; } if (this.payload.repository) { return { owner: this.payload.repository.owner.login, repo: this.payload.repository.name }; } throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); } } exports.Context = Context; //# sourceMappingURL=context.js.map /***/ }), /***/ 93228: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
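// Editor's note: illustrative usage sketch, not part of the bundle. The
// Context class above hydrates itself from GITHUB_* environment variables and
// the webhook payload file at GITHUB_EVENT_PATH, so inside a workflow run:
const core = require('@actions/core');
const github = require('@actions/github');
const { owner, repo } = github.context.repo; // parsed from GITHUB_REPOSITORY ("owner/repo")
core.info(`run #${github.context.runNumber} of ${github.context.workflow} on ${owner}/${repo}`);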
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOctokit = exports.context = void 0; const Context = __importStar(__nccwpck_require__(51648)); const utils_1 = __nccwpck_require__(38006); exports.context = new Context.Context(); /** * Returns a hydrated octokit ready to use for GitHub Actions * * @param token the repo PAT or GITHUB_TOKEN * @param options other options to set */ function getOctokit(token, options, ...additionalPlugins) { const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); } exports.getOctokit = getOctokit; //# sourceMappingURL=github.js.map /***/ }), /***/ 65156: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; const httpClient = __importStar(__nccwpck_require__(54844)); const undici_1 = __nccwpck_require__(46752); function getAuthString(token, options) { if (!token && !options.auth) { throw new Error('Parameter token or opts.auth is required'); } else if (token && options.auth) { throw new Error('Parameters token and opts.auth may not both be specified'); } return typeof options.auth === 'string' ? 
options.auth : `token ${token}`; } exports.getAuthString = getAuthString; function getProxyAgent(destinationUrl) { const hc = new httpClient.HttpClient(); return hc.getAgent(destinationUrl); } exports.getProxyAgent = getProxyAgent; function getProxyAgentDispatcher(destinationUrl) { const hc = new httpClient.HttpClient(); return hc.getAgentDispatcher(destinationUrl); } exports.getProxyAgentDispatcher = getProxyAgentDispatcher; function getProxyFetch(destinationUrl) { const httpDispatcher = getProxyAgentDispatcher(destinationUrl); const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); }); return proxyFetch; } exports.getProxyFetch = getProxyFetch; function getApiBaseUrl() { return process.env['GITHUB_API_URL'] || 'https://api.github.com'; } exports.getApiBaseUrl = getApiBaseUrl; //# sourceMappingURL=utils.js.map /***/ }), /***/ 38006: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; const Context = __importStar(__nccwpck_require__(51648)); const Utils = __importStar(__nccwpck_require__(65156)); // octokit + plugins const core_1 = __nccwpck_require__(61897); const plugin_rest_endpoint_methods_1 = __nccwpck_require__(84935); const plugin_paginate_rest_1 = __nccwpck_require__(38082); exports.context = new Context.Context(); const baseUrl = Utils.getApiBaseUrl(); exports.defaults = { baseUrl, request: { agent: Utils.getProxyAgent(baseUrl), fetch: Utils.getProxyFetch(baseUrl) } }; exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); /** * Convience function to correctly format Octokit Options to pass into the constructor. 
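 * Illustrative note (example only, not from the upstream docs): the returned options object is
 * normally passed straight to the exported GitHub constructor, e.g.
 * `new GitHub(getOctokitOptions(token, { userAgent: 'my-action' }))`, where `userAgent` is just an
 * assumed example option.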
* * @param token the repo PAT or GITHUB_TOKEN * @param options other options to set */ function getOctokitOptions(token, options) { const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller // Auth const auth = Utils.getAuthString(token, opts); if (auth) { opts.auth = auth; } return opts; } exports.getOctokitOptions = getOctokitOptions; //# sourceMappingURL=utils.js.map /***/ }), /***/ 47206: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.hashFiles = exports.create = void 0; const internal_globber_1 = __nccwpck_require__(10103); const internal_hash_files_1 = __nccwpck_require__(73608); /** * Constructs a globber * * @param patterns Patterns separated by newlines * @param options Glob options */ function create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { return yield internal_globber_1.DefaultGlobber.create(patterns, options); }); } exports.create = create; /** * Computes the sha256 hash of a glob * * @param patterns Patterns separated by newlines * @param currentWorkspace Workspace used when matching files * @param options Glob options * @param verbose Enables verbose logging */ function hashFiles(patterns, currentWorkspace = '', options, verbose = false) { return __awaiter(this, void 0, void 0, function* () { let followSymbolicLinks = true; if (options && typeof options.followSymbolicLinks === 'boolean') { followSymbolicLinks = options.followSymbolicLinks; } const globber = yield create(patterns, { followSymbolicLinks }); return (0, internal_hash_files_1.hashFiles)(globber, currentWorkspace, verbose); }); } exports.hashFiles = hashFiles; //# sourceMappingURL=glob.js.map /***/ }), /***/ 18164: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOptions = void 0; const core = __importStar(__nccwpck_require__(37484)); /** * Returns a copy with defaults filled in. */ function getOptions(copy) { const result = { followSymbolicLinks: true, implicitDescendants: true, matchDirectories: true, omitBrokenSymbolicLinks: true, excludeHiddenFiles: false }; if (copy) { if (typeof copy.followSymbolicLinks === 'boolean') { result.followSymbolicLinks = copy.followSymbolicLinks; core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === 'boolean') { result.implicitDescendants = copy.implicitDescendants; core.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.matchDirectories === 'boolean') { result.matchDirectories = copy.matchDirectories; core.debug(`matchDirectories '${result.matchDirectories}'`); } if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } if (typeof copy.excludeHiddenFiles === 'boolean') { result.excludeHiddenFiles = copy.excludeHiddenFiles; core.debug(`excludeHiddenFiles '${result.excludeHiddenFiles}'`); } } return result; } exports.getOptions = getOptions; //# sourceMappingURL=internal-glob-options-helper.js.map /***/ }), /***/ 10103: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues = (this && this.__asyncValues) || function (o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m = o[Symbol.asyncIterator], i; return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var g = generator.apply(thisArg, _arguments || []), i, q = []; return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } function fulfill(value) { resume("next", value); } function reject(value) { resume("throw", value); } function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DefaultGlobber = void 0; const core = __importStar(__nccwpck_require__(37484)); const fs = __importStar(__nccwpck_require__(79896)); const globOptionsHelper = __importStar(__nccwpck_require__(18164)); const path = __importStar(__nccwpck_require__(16928)); const patternHelper = __importStar(__nccwpck_require__(98891)); const internal_match_kind_1 = __nccwpck_require__(62644); const internal_pattern_1 = __nccwpck_require__(25370); const internal_search_state_1 = __nccwpck_require__(79890); const IS_WINDOWS = process.platform === 'win32'; class DefaultGlobber { constructor(options) { this.patterns = []; this.searchPaths = []; this.options = globOptionsHelper.getOptions(options); } getSearchPaths() { // Return a copy return this.searchPaths.slice(); } glob() { var _a, e_1, _b, _c; return __awaiter(this, void 0, void 0, function* () { const result = []; try { for (var _d = true, _e = __asyncValues(this.globGenerator()), _f; _f = yield _e.next(), _a = _f.done, !_a; _d = true) { _c = _f.value; _d = false; const itemPath = _c; result.push(itemPath); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_d && !_a && (_b = _e.return)) yield _b.call(_e); } finally { if (e_1) throw e_1.error; } } return result; }); } globGenerator() { return __asyncGenerator(this, arguments, function* globGenerator_1() { // Fill in defaults options const options = globOptionsHelper.getOptions(this.options); // Implicit descendants? 
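/*
 * Illustrative sketch (comment only, not executed): when `implicitDescendants` is enabled, each
 * literal pattern gains an extra pattern below with a trailing globstar, so descendants of a
 * matched directory are yielded as well. The paths are assumed examples:
 *
 *   input pattern '/repo/src'      -> also searches the equivalent of '/repo/src' + '/' + '**'
 *   input pattern ending in '**'   -> left as-is (it already covers descendants)
 */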
const patterns = []; for (const pattern of this.patterns) { patterns.push(pattern); if (options.implicitDescendants && (pattern.trailingSeparator || pattern.segments[pattern.segments.length - 1] !== '**')) { patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); } } // Push the search paths const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { core.debug(`Search path '${searchPath}'`); // Exists? try { // Intentionally using lstat. Detection for broken symlink // will be performed later (if following symlinks). yield __await(fs.promises.lstat(searchPath)); } catch (err) { if (err.code === 'ENOENT') { continue; } throw err; } stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); } // Search const traversalChain = []; // used to detect cycles while (stack.length) { // Pop const item = stack.pop(); // Match? const match = patternHelper.match(patterns, item.path); const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); if (!match && !partialMatch) { continue; } // Stat const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) // Broken symlink, or symlink cycle detected, or no longer exists ); // Broken symlink, or symlink cycle detected, or no longer exists if (!stats) { continue; } // Hidden file or directory? if (options.excludeHiddenFiles && path.basename(item.path).match(/^\./)) { continue; } // Directory if (stats.isDirectory()) { // Matched if (match & internal_match_kind_1.MatchKind.Directory && options.matchDirectories) { yield yield __await(item.path); } // Descend? else if (!partialMatch) { continue; } // Push the child items in reverse const childLevel = item.level + 1; const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } // File else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await(item.path); } } }); } /** * Constructs a DefaultGlobber */ static create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { const result = new DefaultGlobber(options); if (IS_WINDOWS) { patterns = patterns.replace(/\r\n/g, '\n'); patterns = patterns.replace(/\r/g, '\n'); } const lines = patterns.split('\n').map(x => x.trim()); for (const line of lines) { // Empty or comment if (!line || line.startsWith('#')) { continue; } // Pattern else { result.patterns.push(new internal_pattern_1.Pattern(line)); } } result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); return result; }); } static stat(item, options, traversalChain) { return __awaiter(this, void 0, void 0, function* () { // Note: // `stat` returns info about the target of a symlink (or symlink chain) // `lstat` returns info about a symlink itself let stats; if (options.followSymbolicLinks) { try { // Use `stat` (following symlinks) stats = yield fs.promises.stat(item.path); } catch (err) { if (err.code === 'ENOENT') { if (options.omitBrokenSymbolicLinks) { core.debug(`Broken symlink '${item.path}'`); return undefined; } throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); } throw err; } } else { // Use `lstat` (not following symlinks) stats = yield fs.promises.lstat(item.path); } // Note, isDirectory() returns false for the lstat of a symlink if (stats.isDirectory() && options.followSymbolicLinks) { // Get the realpath const realPath = yield fs.promises.realpath(item.path); // Fixup the traversal chain to match the item level while (traversalChain.length >= item.level) { traversalChain.pop(); } // Test for a cycle if (traversalChain.some((x) => x === realPath)) { core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return undefined; } // Update the traversal chain traversalChain.push(realPath); } return stats; }); } } exports.DefaultGlobber = DefaultGlobber; //# sourceMappingURL=internal-globber.js.map /***/ }), /***/ 73608: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues = (this && this.__asyncValues) || function (o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m = o[Symbol.asyncIterator], i; return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.hashFiles = void 0; const crypto = __importStar(__nccwpck_require__(76982)); const core = __importStar(__nccwpck_require__(37484)); const fs = __importStar(__nccwpck_require__(79896)); const stream = __importStar(__nccwpck_require__(2203)); const util = __importStar(__nccwpck_require__(39023)); const path = __importStar(__nccwpck_require__(16928)); function hashFiles(globber, currentWorkspace, verbose = false) { var _a, e_1, _b, _c; var _d; return __awaiter(this, void 0, void 0, function* () { const writeDelegate = verbose ? core.info : core.debug; let hasMatch = false; const githubWorkspace = currentWorkspace ? currentWorkspace : (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd(); const result = crypto.createHash('sha256'); let count = 0; try { for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { _c = _g.value; _e = false; const file = _c; writeDelegate(file); if (!file.startsWith(`${githubWorkspace}${path.sep}`)) { writeDelegate(`Ignore '${file}' since it is not under GITHUB_WORKSPACE.`); continue; } if (fs.statSync(file).isDirectory()) { writeDelegate(`Skip directory '${file}'.`); continue; } const hash = crypto.createHash('sha256'); const pipeline = util.promisify(stream.pipeline); yield pipeline(fs.createReadStream(file), hash); result.write(hash.digest()); count++; if (!hasMatch) { hasMatch = true; } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); } finally { if (e_1) throw e_1.error; } } result.end(); if (hasMatch) { writeDelegate(`Found ${count} files to hash.`); return result.digest('hex'); } else { writeDelegate(`No matches found for glob`); return ''; } }); } exports.hashFiles = hashFiles; //# sourceMappingURL=internal-hash-files.js.map /***/ }), /***/ 62644: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MatchKind = void 0; /** * Indicates whether a pattern matches a path */ var MatchKind; (function (MatchKind) { /** Not matched */ MatchKind[MatchKind["None"] = 0] = "None"; /** Matched if the path is a directory */ MatchKind[MatchKind["Directory"] = 1] = "Directory"; /** Matched if the path is a regular file */ MatchKind[MatchKind["File"] = 2] = "File"; /** Matched */ MatchKind[MatchKind["All"] = 3] = "All"; })(MatchKind || (exports.MatchKind = MatchKind = {})); //# sourceMappingURL=internal-match-kind.js.map /***/ }), /***/ 84138: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; const path = __importStar(__nccwpck_require__(16928)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const IS_WINDOWS = process.platform === 'win32'; /** * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. * * For example, on Linux/macOS: * - `/ => /` * - `/hello => /` * * For example, on Windows: * - `C:\ => C:\` * - `C:\hello => C:\` * - `C: => C:` * - `C:hello => C:` * - `\ => \` * - `\hello => \` * - `\\hello => \\hello` * - `\\hello\world => \\hello\world` */ function dirname(p) { // Normalize slashes and trim unnecessary trailing slash p = safeTrimTrailingSeparator(p); // Windows UNC root, e.g. \\hello or \\hello\world if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } // Get dirname let result = path.dirname(p); // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } return result; } exports.dirname = dirname; /** * Roots the path if not already rooted. On Windows, relative roots like `\` * or `C:` are expanded based on the current working directory. */ function ensureAbsoluteRoot(root, itemPath) { (0, assert_1.default)(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); (0, assert_1.default)(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); // Already rooted if (hasAbsoluteRoot(itemPath)) { return itemPath; } // Windows if (IS_WINDOWS) { // Check for itemPath like C: or C:foo if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { let cwd = process.cwd(); (0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); // Drive letter matches cwd? Expand to cwd if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { // Drive only, e.g. C: if (itemPath.length === 2) { // Preserve specified drive letter case (upper or lower) return `${itemPath[0]}:\\${cwd.substr(3)}`; } // Drive + path, e.g. 
C:foo else { if (!cwd.endsWith('\\')) { cwd += '\\'; } // Preserve specified drive letter case (upper or lower) return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; } } // Different drive else { return `${itemPath[0]}:\\${itemPath.substr(2)}`; } } // Check for itemPath like \ or \foo else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { const cwd = process.cwd(); (0, assert_1.default)(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); return `${cwd[0]}:\\${itemPath.substr(1)}`; } } (0, assert_1.default)(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); // Otherwise ensure root ends with a separator if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { // Intentionally empty } else { // Append separator root += path.sep; } return root + itemPath; } exports.ensureAbsoluteRoot = ensureAbsoluteRoot; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\\hello\share` and `C:\hello` (and using alternate separator). */ function hasAbsoluteRoot(itemPath) { (0, assert_1.default)(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); // Normalize separators itemPath = normalizeSeparators(itemPath); // Windows if (IS_WINDOWS) { // E.g. \\hello\share or C:\hello return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); } // E.g. /hello return itemPath.startsWith('/'); } exports.hasAbsoluteRoot = hasAbsoluteRoot; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). */ function hasRoot(itemPath) { (0, assert_1.default)(itemPath, `isRooted parameter 'itemPath' must not be empty`); // Normalize separators itemPath = normalizeSeparators(itemPath); // Windows if (IS_WINDOWS) { // E.g. \ or \hello or \\hello // E.g. C: or C:\hello return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); } // E.g. /hello return itemPath.startsWith('/'); } exports.hasRoot = hasRoot; /** * Removes redundant slashes and converts `/` to `\` on Windows */ function normalizeSeparators(p) { p = p || ''; // Windows if (IS_WINDOWS) { // Convert slashes on Windows p = p.replace(/\//g, '\\'); // Remove redundant slashes const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC } // Remove redundant slashes return p.replace(/\/\/+/g, '/'); } exports.normalizeSeparators = normalizeSeparators; /** * Normalizes the path separators and trims the trailing separator (when safe). * For example, `/foo/ => /foo` but `/ => /` */ function safeTrimTrailingSeparator(p) { // Short-circuit if empty if (!p) { return ''; } // Normalize separators p = normalizeSeparators(p); // No trailing slash if (!p.endsWith(path.sep)) { return p; } // Check '/' on Linux/macOS and '\' on Windows if (p === path.sep) { return p; } // On Windows check if drive root. E.g. C:\ if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { return p; } // Otherwise trim trailing slash return p.substr(0, p.length - 1); } exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; //# sourceMappingURL=internal-path-helper.js.map /***/ }), /***/ 76617: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Path = void 0; const path = __importStar(__nccwpck_require__(16928)); const pathHelper = __importStar(__nccwpck_require__(84138)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const IS_WINDOWS = process.platform === 'win32'; /** * Helper class for parsing paths into segments */ class Path { /** * Constructs a Path * @param itemPath Path or array of segments */ constructor(itemPath) { this.segments = []; // String if (typeof itemPath === 'string') { (0, assert_1.default)(itemPath, `Parameter 'itemPath' must not be empty`); // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); // Not rooted if (!pathHelper.hasRoot(itemPath)) { this.segments = itemPath.split(path.sep); } // Rooted else { // Add all segments, while not at the root let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { // Add the segment const basename = path.basename(remaining); this.segments.unshift(basename); // Truncate the last segment remaining = dir; dir = pathHelper.dirname(remaining); } // Remainder is the root this.segments.unshift(remaining); } } // Array else { // Must not be empty (0, assert_1.default)(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); // Each segment for (let i = 0; i < itemPath.length; i++) { let segment = itemPath[i]; // Must not be empty (0, assert_1.default)(segment, `Parameter 'itemPath' must not contain any empty segments`); // Normalize slashes segment = pathHelper.normalizeSeparators(itemPath[i]); // Root segment if (i === 0 && pathHelper.hasRoot(segment)) { segment = pathHelper.safeTrimTrailingSeparator(segment); (0, assert_1.default)(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } // All other segments else { // Must not contain slash (0, assert_1.default)(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } } } /** * Converts the path to it's string representation */ toString() { // First segment let result = this.segments[0]; // All others let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { result += path.sep; } result += this.segments[i]; } return result; } } 
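/*
 * Illustrative sketch (comment only, not executed; example paths are assumed):
 *
 *   const p = new Path('/foo/bar/baz.txt');
 *   p.segments   // ['/', 'foo', 'bar', 'baz.txt'] on Linux/macOS
 *   p.toString() // '/foo/bar/baz.txt'
 *
 *   // On Windows a drive root stays a single leading segment:
 *   //   new Path('C:\\foo\\bar').segments -> ['C:\\', 'foo', 'bar']
 */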
exports.Path = Path; //# sourceMappingURL=internal-path.js.map /***/ }), /***/ 98891: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.partialMatch = exports.match = exports.getSearchPaths = void 0; const pathHelper = __importStar(__nccwpck_require__(84138)); const internal_match_kind_1 = __nccwpck_require__(62644); const IS_WINDOWS = process.platform === 'win32'; /** * Given an array of patterns, returns an array of paths to search. * Duplicates and paths under other included paths are filtered out. */ function getSearchPaths(patterns) { // Ignore negate patterns patterns = patterns.filter(x => !x.negate); // Create a map of all search paths const searchPathMap = {}; for (const pattern of patterns) { const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; searchPathMap[key] = 'candidate'; } const result = []; for (const pattern of patterns) { // Check if already included const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; if (searchPathMap[key] === 'included') { continue; } // Check for an ancestor search path let foundAncestor = false; let tempKey = key; let parent = pathHelper.dirname(tempKey); while (parent !== tempKey) { if (searchPathMap[parent]) { foundAncestor = true; break; } tempKey = parent; parent = pathHelper.dirname(tempKey); } // Include the search pattern in the result if (!foundAncestor) { result.push(pattern.searchPath); searchPathMap[key] = 'included'; } } return result; } exports.getSearchPaths = getSearchPaths; /** * Matches the patterns against the path */ function match(patterns, itemPath) { let result = internal_match_kind_1.MatchKind.None; for (const pattern of patterns) { if (pattern.negate) { result &= ~pattern.match(itemPath); } else { result |= pattern.match(itemPath); } } return result; } exports.match = match; /** * Checks whether to descend further into the directory */ function partialMatch(patterns, itemPath) { return patterns.some(x => !x.negate && x.partialMatch(itemPath)); } exports.partialMatch = partialMatch; //# sourceMappingURL=internal-pattern-helper.js.map /***/ }), /***/ 25370: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Pattern = void 0; const os = __importStar(__nccwpck_require__(70857)); const path = __importStar(__nccwpck_require__(16928)); const pathHelper = __importStar(__nccwpck_require__(84138)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const minimatch_1 = __nccwpck_require__(43772); const internal_match_kind_1 = __nccwpck_require__(62644); const internal_path_1 = __nccwpck_require__(76617); const IS_WINDOWS = process.platform === 'win32'; class Pattern { constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { /** * Indicates whether matches should be excluded from the result set */ this.negate = false; // Pattern overload let pattern; if (typeof patternOrNegate === 'string') { pattern = patternOrNegate.trim(); } // Segments overload else { // Convert to pattern segments = segments || []; (0, assert_1.default)(segments.length, `Parameter 'segments' must not empty`); const root = Pattern.getLiteral(segments[0]); (0, assert_1.default)(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); pattern = new internal_path_1.Path(segments).toString().trim(); if (patternOrNegate) { pattern = `!${pattern}`; } } // Negate while (pattern.startsWith('!')) { this.negate = !this.negate; pattern = pattern.substr(1).trim(); } // Normalize slashes and ensures absolute root pattern = Pattern.fixupPattern(pattern, homedir); // Segments this.segments = new internal_path_1.Path(pattern).segments; // Trailing slash indicates the pattern should only match directories, not regular files this.trailingSeparator = pathHelper .normalizeSeparators(pattern) .endsWith(path.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); // Search path (literal path prior to the first glob segment) let foundGlob = false; const searchSegments = this.segments .map(x => Pattern.getLiteral(x)) .filter(x => !foundGlob && !(foundGlob = x === '')); this.searchPath = new internal_path_1.Path(searchSegments).toString(); // Root RegExp (required when determining partial match) this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); this.isImplicitPattern = isImplicitPattern; // Create minimatch const minimatchOptions = { dot: true, nobrace: true, nocase: IS_WINDOWS, nocomment: true, noext: true, nonegate: true }; pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); } /** * Matches the pattern against the specified path */ match(itemPath) { // Last segment is globstar? 
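/*
 * Illustrative sketch (comment only, not executed; patterns and paths are assumed Linux/macOS examples):
 *
 *   new Pattern('/foo/').match('/foo')            -> MatchKind.Directory (trailing separator limits the match to directories)
 *   new Pattern('/foo/*.txt').match('/foo/a.txt') -> MatchKind.All
 *   new Pattern('/bar/').match('/foo')            -> MatchKind.None
 *   new Pattern('/foo/**').match('/foo')          -> MatchKind.All (globstar; see the trailing-slash note just below)
 */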
if (this.segments[this.segments.length - 1] === '**') { // Normalize slashes itemPath = pathHelper.normalizeSeparators(itemPath); // Append a trailing slash. Otherwise Minimatch will not match the directory immediately // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns // false for `/foo` but returns true for `/foo/`. Append a trailing slash to handle that quirk. if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { // Note, this is safe because the constructor ensures the pattern has an absolute root. // For example, formats like C: and C:foo on Windows are resolved to an absolute root. itemPath = `${itemPath}${path.sep}`; } } else { // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); } // Match if (this.minimatch.match(itemPath)) { return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; } return internal_match_kind_1.MatchKind.None; } /** * Indicates whether the pattern may match descendants of the specified path */ partialMatch(itemPath) { // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); // matchOne does not handle root path correctly if (pathHelper.dirname(itemPath) === itemPath) { return this.rootRegExp.test(itemPath); } return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); } /** * Escapes glob patterns within a path */ static globEscape(s) { return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment .replace(/\?/g, '[?]') // escape '?' .replace(/\*/g, '[*]'); // escape '*' } /** * Normalizes slashes and ensures absolute root */ static fixupPattern(pattern, homedir) { // Empty (0, assert_1.default)(pattern, 'pattern cannot be empty'); // Must not contain `.` segment, unless first segment // Must not contain `..` segment const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); (0, assert_1.default)(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r (0, assert_1.default)(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); // Normalize slashes pattern = pathHelper.normalizeSeparators(pattern); // Replace leading `.` segment if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); } // Replace leading `~` segment else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { homedir = homedir || os.homedir(); (0, assert_1.default)(homedir, 'Unable to determine HOME directory'); (0, assert_1.default)(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); pattern = Pattern.globEscape(homedir) + pattern.substr(1); } // Replace relative drive root, e.g. pattern is C: or C:foo else if (IS_WINDOWS && (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); if (pattern.length > 2 && !root.endsWith('\\')) { root += '\\'; } pattern = Pattern.globEscape(root) + pattern.substr(2); } // Replace relative root, e.g. 
pattern is \ or \foo else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); if (!root.endsWith('\\')) { root += '\\'; } pattern = Pattern.globEscape(root) + pattern.substr(1); } // Otherwise ensure absolute root else { pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); } return pathHelper.normalizeSeparators(pattern); } /** * Attempts to unescape a pattern segment to create a literal path segment. * Otherwise returns empty string. */ static getLiteral(segment) { let literal = ''; for (let i = 0; i < segment.length; i++) { const c = segment[i]; // Escape if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { literal += segment[++i]; continue; } // Wildcard else if (c === '*' || c === '?') { return ''; } // Character set else if (c === '[' && i + 1 < segment.length) { let set = ''; let closed = -1; for (let i2 = i + 1; i2 < segment.length; i2++) { const c2 = segment[i2]; // Escape if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { set += segment[++i2]; continue; } // Closed else if (c2 === ']') { closed = i2; break; } // Otherwise else { set += c2; } } // Closed? if (closed >= 0) { // Cannot convert if (set.length > 1) { return ''; } // Convert to literal if (set) { literal += set; i = closed; continue; } } // Otherwise fall thru } // Append literal += c; } return literal; } /** * Escapes regexp special characters * https://javascript.info/regexp-escaping */ static regExpEscape(s) { return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); } } exports.Pattern = Pattern; //# sourceMappingURL=internal-pattern.js.map /***/ }), /***/ 79890: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.SearchState = void 0; class SearchState { constructor(path, level) { this.path = path; this.level = level; } } exports.SearchState = SearchState; //# sourceMappingURL=internal-search-state.js.map /***/ }), /***/ 44552: /***/ (function(__unused_webpack_module, exports) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0; class BasicCredentialHandler { constructor(username, password) { this.username = username; this.password = password; } prepareRequest(options) { if (!options.headers) { throw Error('The request has no headers'); } options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`; } // This handler cannot handle 401 canHandleAuthentication() { return false; } handleAuthentication() { return __awaiter(this, void 0, void 0, function* () { throw new Error('not implemented'); }); } } exports.BasicCredentialHandler = BasicCredentialHandler; class BearerCredentialHandler { constructor(token) { this.token = token; } // currently implements pre-authorization // TODO: support preAuth = false where it hooks on 401 prepareRequest(options) { if (!options.headers) { throw Error('The request has no headers'); } options.headers['Authorization'] = `Bearer ${this.token}`; } // This handler cannot handle 401 canHandleAuthentication() { return false; } handleAuthentication() { return __awaiter(this, void 0, void 0, function* () { throw new Error('not implemented'); }); } } exports.BearerCredentialHandler = BearerCredentialHandler; class PersonalAccessTokenCredentialHandler { constructor(token) { this.token = token; } // currently implements pre-authorization // TODO: support preAuth = false where it hooks on 401 prepareRequest(options) { if (!options.headers) { throw Error('The request has no headers'); } options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`; } // This handler cannot handle 401 canHandleAuthentication() { return false; } handleAuthentication() { return __awaiter(this, void 0, void 0, function* () { throw new Error('not implemented'); }); } } exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; //# sourceMappingURL=auth.js.map /***/ }), /***/ 54844: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; /* eslint-disable @typescript-eslint/no-explicit-any */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; const http = __importStar(__nccwpck_require__(58611)); const https = __importStar(__nccwpck_require__(65692)); const pm = __importStar(__nccwpck_require__(54988)); const tunnel = __importStar(__nccwpck_require__(20770)); const undici_1 = __nccwpck_require__(46752); var HttpCodes; (function (HttpCodes) { HttpCodes[HttpCodes["OK"] = 200] = "OK"; HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; })(HttpCodes || (exports.HttpCodes = HttpCodes = {})); var Headers; (function (Headers) { Headers["Accept"] = "accept"; Headers["ContentType"] = "content-type"; })(Headers || (exports.Headers = Headers = {})); var MediaTypes; (function (MediaTypes) { MediaTypes["ApplicationJson"] = "application/json"; })(MediaTypes || (exports.MediaTypes = MediaTypes = {})); /** * Returns the proxy URL, depending upon the supplied url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ function getProxyUrl(serverUrl) { const proxyUrl = pm.getProxyUrl(new URL(serverUrl)); return proxyUrl ? 
proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; const HttpRedirectCodes = [ HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect ]; const HttpResponseRetryCodes = [ HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout ]; const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; const ExponentialBackoffCeiling = 10; const ExponentialBackoffTimeSlice = 5; class HttpClientError extends Error { constructor(message, statusCode) { super(message); this.name = 'HttpClientError'; this.statusCode = statusCode; Object.setPrototypeOf(this, HttpClientError.prototype); } } exports.HttpClientError = HttpClientError; class HttpClientResponse { constructor(message) { this.message = message; } readBody() { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { let output = Buffer.alloc(0); this.message.on('data', (chunk) => { output = Buffer.concat([output, chunk]); }); this.message.on('end', () => { resolve(output.toString()); }); })); }); } readBodyBuffer() { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { const chunks = []; this.message.on('data', (chunk) => { chunks.push(chunk); }); this.message.on('end', () => { resolve(Buffer.concat(chunks)); }); })); }); } } exports.HttpClientResponse = HttpClientResponse; function isHttps(requestUrl) { const parsedUrl = new URL(requestUrl); return parsedUrl.protocol === 'https:'; } exports.isHttps = isHttps; class HttpClient { constructor(userAgent, handlers, requestOptions) { this._ignoreSslError = false; this._allowRedirects = true; this._allowRedirectDowngrade = false; this._maxRedirects = 50; this._allowRetries = false; this._maxRetries = 1; this._keepAlive = false; this._disposed = false; this.userAgent = userAgent; this.handlers = handlers || []; this.requestOptions = requestOptions; if (requestOptions) { if (requestOptions.ignoreSslError != null) { this._ignoreSslError = requestOptions.ignoreSslError; } this._socketTimeout = requestOptions.socketTimeout; if (requestOptions.allowRedirects != null) { this._allowRedirects = requestOptions.allowRedirects; } if (requestOptions.allowRedirectDowngrade != null) { this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; } if (requestOptions.maxRedirects != null) { this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); } if (requestOptions.keepAlive != null) { this._keepAlive = requestOptions.keepAlive; } if (requestOptions.allowRetries != null) { this._allowRetries = requestOptions.allowRetries; } if (requestOptions.maxRetries != null) { this._maxRetries = requestOptions.maxRetries; } } } options(requestUrl, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); }); } get(requestUrl, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('GET', requestUrl, null, additionalHeaders || {}); }); } del(requestUrl, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('DELETE', requestUrl, null, additionalHeaders || {}); }); } post(requestUrl, data, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('POST', requestUrl, data, additionalHeaders || {}); }); } patch(requestUrl, data, additionalHeaders) { return 
__awaiter(this, void 0, void 0, function* () { return this.request('PATCH', requestUrl, data, additionalHeaders || {}); }); } put(requestUrl, data, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('PUT', requestUrl, data, additionalHeaders || {}); }); } head(requestUrl, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request('HEAD', requestUrl, null, additionalHeaders || {}); }); } sendStream(verb, requestUrl, stream, additionalHeaders) { return __awaiter(this, void 0, void 0, function* () { return this.request(verb, requestUrl, stream, additionalHeaders); }); } /** * Gets a typed object from an endpoint * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise */ getJson(requestUrl, additionalHeaders = {}) { return __awaiter(this, void 0, void 0, function* () { additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); const res = yield this.get(requestUrl, additionalHeaders); return this._processResponse(res, this.requestOptions); }); } postJson(requestUrl, obj, additionalHeaders = {}) { return __awaiter(this, void 0, void 0, function* () { const data = JSON.stringify(obj, null, 2); additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); const res = yield this.post(requestUrl, data, additionalHeaders); return this._processResponse(res, this.requestOptions); }); } putJson(requestUrl, obj, additionalHeaders = {}) { return __awaiter(this, void 0, void 0, function* () { const data = JSON.stringify(obj, null, 2); additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); const res = yield this.put(requestUrl, data, additionalHeaders); return this._processResponse(res, this.requestOptions); }); } patchJson(requestUrl, obj, additionalHeaders = {}) { return __awaiter(this, void 0, void 0, function* () { const data = JSON.stringify(obj, null, 2); additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); const res = yield this.patch(requestUrl, data, additionalHeaders); return this._processResponse(res, this.requestOptions); }); } /** * Makes a raw http request. * All other methods such as get, post, patch, and request ultimately call this. * Prefer get, del, post and patch */ request(verb, requestUrl, data, headers) { return __awaiter(this, void 0, void 0, function* () { if (this._disposed) { throw new Error('Client has already been disposed.'); } const parsedUrl = new URL(requestUrl); let info = this._prepareRequest(verb, parsedUrl, headers); // Only perform retries on reads since writes may not be idempotent. const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb) ? 
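// Retry sketch (illustrative): as the code below shows, retries apply only to the verbs in
// RetryableHttpVerbs and only when the response status is in HttpResponseRetryCodes (502/503/504).
/*
const { HttpClient } = require('@actions/http-client');
// With these options a GET is attempted at most maxRetries + 1 = 3 times, backing off between tries.
const client = new HttpClient('my-tool/1.0.0', [], { allowRetries: true, maxRetries: 2 });
*/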
this._maxRetries + 1 : 1; let numTries = 0; let response; do { response = yield this.requestRaw(info, data); // Check if it's an authentication challenge if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; for (const handler of this.handlers) { if (handler.canHandleAuthentication(response)) { authenticationHandler = handler; break; } } if (authenticationHandler) { return authenticationHandler.handleAuthentication(this, info, data); } else { // We have received an unauthorized response but have no handlers to handle it. // Let the response return to the caller. return response; } } let redirectsRemaining = this._maxRedirects; while (response.message.statusCode && HttpRedirectCodes.includes(response.message.statusCode) && this._allowRedirects && redirectsRemaining > 0) { const redirectUrl = response.message.headers['location']; if (!redirectUrl) { // if there's no location to redirect to, we won't break; } const parsedRedirectUrl = new URL(redirectUrl); if (parsedUrl.protocol === 'https:' && parsedUrl.protocol !== parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) { throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); } // we need to finish reading the response before reassigning response // which will leak the open socket. yield response.readBody(); // strip authorization header if redirected to a different hostname if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { for (const header in headers) { // header names are case insensitive if (header.toLowerCase() === 'authorization') { delete headers[header]; } } } // let's make the request with the new redirectUrl info = this._prepareRequest(verb, parsedRedirectUrl, headers); response = yield this.requestRaw(info, data); redirectsRemaining--; } if (!response.message.statusCode || !HttpResponseRetryCodes.includes(response.message.statusCode)) { // If not a retry code, return immediately instead of retrying return response; } numTries += 1; if (numTries < maxTries) { yield response.readBody(); yield this._performExponentialBackoff(numTries); } } while (numTries < maxTries); return response; }); } /** * Needs to be called if keepAlive is set to true in request options. */ dispose() { if (this._agent) { this._agent.destroy(); } this._disposed = true; } /** * Raw request. * @param info * @param data */ requestRaw(info, data) { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve, reject) => { function callbackForResult(err, res) { if (err) { reject(err); } else if (!res) { // If `err` is not passed, then `res` must be passed. reject(new Error('Unknown error')); } else { resolve(res); } } this.requestRawWithCallback(info, data, callbackForResult); }); }); } /** * Raw request with callback. 
* @param info * @param data * @param onResult */ requestRawWithCallback(info, data, onResult) { if (typeof data === 'string') { if (!info.options.headers) { info.options.headers = {}; } info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } let callbackCalled = false; function handleResult(err, res) { if (!callbackCalled) { callbackCalled = true; onResult(err, res); } } const req = info.httpModule.request(info.options, (msg) => { const res = new HttpClientResponse(msg); handleResult(undefined, res); }); let socket; req.on('socket', sock => { socket = sock; }); // If we ever get disconnected, we want the socket to timeout eventually req.setTimeout(this._socketTimeout || 3 * 60000, () => { if (socket) { socket.end(); } handleResult(new Error(`Request timeout: ${info.options.path}`)); }); req.on('error', function (err) { // err has statusCode property // res should have headers handleResult(err); }); if (data && typeof data === 'string') { req.write(data, 'utf8'); } if (data && typeof data !== 'string') { data.on('close', function () { req.end(); }); data.pipe(req); } else { req.end(); } } /** * Gets an http agent. This function is useful when you need an http agent that handles * routing through a proxy server - depending upon the url and proxy environment variables. * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com */ getAgent(serverUrl) { const parsedUrl = new URL(serverUrl); return this._getAgent(parsedUrl); } getAgentDispatcher(serverUrl) { const parsedUrl = new URL(serverUrl); const proxyUrl = pm.getProxyUrl(parsedUrl); const useProxy = proxyUrl && proxyUrl.hostname; if (!useProxy) { return; } return this._getProxyAgentDispatcher(parsedUrl, proxyUrl); } _prepareRequest(method, requestUrl, headers) { const info = {}; info.parsedUrl = requestUrl; const usingSsl = info.parsedUrl.protocol === 'https:'; info.httpModule = usingSsl ? https : http; const defaultPort = usingSsl ? 443 : 80; info.options = {}; info.options.host = info.parsedUrl.hostname; info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort; info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); info.options.method = method; info.options.headers = this._mergeHeaders(headers); if (this.userAgent != null) { info.options.headers['user-agent'] = this.userAgent; } info.options.agent = this._getAgent(info.parsedUrl); // gives handlers an opportunity to participate if (this.handlers) { for (const handler of this.handlers) { handler.prepareRequest(info.options); } } return info; } _mergeHeaders(headers) { if (this.requestOptions && this.requestOptions.headers) { return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {})); } return lowercaseKeys(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { let clientHeader; if (this.requestOptions && this.requestOptions.headers) { clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; } return additionalHeaders[header] || clientHeader || _default; } _getAgent(parsedUrl) { let agent; const proxyUrl = pm.getProxyUrl(parsedUrl); const useProxy = proxyUrl && proxyUrl.hostname; if (this._keepAlive && useProxy) { agent = this._proxyAgent; } if (!useProxy) { agent = this._agent; } // if agent is already assigned use that agent. 
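// Proxy sketch (illustrative; 'proxy.internal:8080' is a made-up host): _getAgent below consults
// pm.getProxyUrl, which reads the standard proxy environment variables, so callers only set env vars.
/*
process.env['https_proxy'] = 'http://proxy.internal:8080';
const { HttpClient } = require('@actions/http-client');
const client = new HttpClient('my-tool/1.0.0');
// For an HTTPS target behind that proxy this returns a tunnelling agent rather than a plain https.Agent.
const agent = client.getAgent('https://api.github.com');
*/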
if (agent) { return agent; } const usingSsl = parsedUrl.protocol === 'https:'; let maxSockets = 100; if (this.requestOptions) { maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; } // This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis. if (proxyUrl && proxyUrl.hostname) { const agentOptions = { maxSockets, keepAlive: this._keepAlive, proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && { proxyAuth: `${proxyUrl.username}:${proxyUrl.password}` })), { host: proxyUrl.hostname, port: proxyUrl.port }) }; let tunnelAgent; const overHttps = proxyUrl.protocol === 'https:'; if (usingSsl) { tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; } else { tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; } agent = tunnelAgent(agentOptions); this._proxyAgent = agent; } // if tunneling agent isn't assigned create a new agent if (!agent) { const options = { keepAlive: this._keepAlive, maxSockets }; agent = usingSsl ? new https.Agent(options) : new http.Agent(options); this._agent = agent; } if (usingSsl && this._ignoreSslError) { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options // we have to cast it to any and change it directly agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false }); } return agent; } _getProxyAgentDispatcher(parsedUrl, proxyUrl) { let proxyAgent; if (this._keepAlive) { proxyAgent = this._proxyAgentDispatcher; } // if agent is already assigned use that agent. if (proxyAgent) { return proxyAgent; } const usingSsl = parsedUrl.protocol === 'https:'; proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 
0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}` }))); this._proxyAgentDispatcher = proxyAgent; if (usingSsl && this._ignoreSslError) { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options // we have to cast it to any and change it directly proxyAgent.options = Object.assign(proxyAgent.options.requestTls || {}, { rejectUnauthorized: false }); } return proxyAgent; } _performExponentialBackoff(retryNumber) { return __awaiter(this, void 0, void 0, function* () { retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); return new Promise(resolve => setTimeout(() => resolve(), ms)); }); } _processResponse(res, options) { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { const statusCode = res.message.statusCode || 0; const response = { statusCode, result: null, headers: {} }; // not found leads to null obj returned if (statusCode === HttpCodes.NotFound) { resolve(response); } // get the result from the body function dateTimeDeserializer(key, value) { if (typeof value === 'string') { const a = new Date(value); if (!isNaN(a.valueOf())) { return a; } } return value; } let obj; let contents; try { contents = yield res.readBody(); if (contents && contents.length > 0) { if (options && options.deserializeDates) { obj = JSON.parse(contents, dateTimeDeserializer); } else { obj = JSON.parse(contents); } response.result = obj; } response.headers = res.message.headers; } catch (err) { // Invalid resource (contents not json); leaving result obj null } // note that 3xx redirects are handled by the http layer. 
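// Backoff sketch: _performExponentialBackoff above waits ExponentialBackoffTimeSlice * 2^retryNumber
// milliseconds, with retryNumber capped at ExponentialBackoffCeiling (10).
/*
const backoffMs = n => 5 * Math.pow(2, Math.min(10, n));   // mirrors the formula above
[1, 2, 3, 10].map(backoffMs);                              // [10, 20, 40, 5120] milliseconds
*/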
if (statusCode > 299) { let msg; // if exception/error in body, attempt to get better error if (obj && obj.message) { msg = obj.message; } else if (contents && contents.length > 0) { // it may be the case that the exception is in the body message as string msg = contents; } else { msg = `Failed request: (${statusCode})`; } const err = new HttpClientError(msg, statusCode); err.result = response.result; reject(err); } else { resolve(response); } })); }); } } exports.HttpClient = HttpClient; const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); //# sourceMappingURL=index.js.map /***/ }), /***/ 54988: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.checkBypass = exports.getProxyUrl = void 0; function getProxyUrl(reqUrl) { const usingSsl = reqUrl.protocol === 'https:'; if (checkBypass(reqUrl)) { return undefined; } const proxyVar = (() => { if (usingSsl) { return process.env['https_proxy'] || process.env['HTTPS_PROXY']; } else { return process.env['http_proxy'] || process.env['HTTP_PROXY']; } })(); if (proxyVar) { try { return new DecodedURL(proxyVar); } catch (_a) { if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) return new DecodedURL(`http://${proxyVar}`); } } else { return undefined; } } exports.getProxyUrl = getProxyUrl; function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } const reqHost = reqUrl.hostname; if (isLoopbackAddress(reqHost)) { return true; } const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; } // Determine the request port let reqPort; if (reqUrl.port) { reqPort = Number(reqUrl.port); } else if (reqUrl.protocol === 'http:') { reqPort = 80; } else if (reqUrl.protocol === 'https:') { reqPort = 443; } // Format the request hostname and hostname with port const upperReqHosts = [reqUrl.hostname.toUpperCase()]; if (typeof reqPort === 'number') { upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); } // Compare request host against noproxy for (const upperNoProxyItem of noProxy .split(',') .map(x => x.trim().toUpperCase()) .filter(x => x)) { if (upperNoProxyItem === '*' || upperReqHosts.some(x => x === upperNoProxyItem || x.endsWith(`.${upperNoProxyItem}`) || (upperNoProxyItem.startsWith('.') && x.endsWith(`${upperNoProxyItem}`)))) { return true; } } return false; } exports.checkBypass = checkBypass; function isLoopbackAddress(host) { const hostLower = host.toLowerCase(); return (hostLower === 'localhost' || hostLower.startsWith('127.') || hostLower.startsWith('[::1]') || hostLower.startsWith('[0:0:0:0:0:0:0:1]')); } class DecodedURL extends URL { constructor(url, base) { super(url, base); this._decodedUsername = decodeURIComponent(super.username); this._decodedPassword = decodeURIComponent(super.password); } get username() { return this._decodedUsername; } get password() { return this._decodedPassword; } } //# sourceMappingURL=proxy.js.map /***/ }), /***/ 75207: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
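// NO_PROXY sketch for checkBypass above (illustrative host names):
/*
process.env['NO_PROXY'] = 'example.com,.internal';
checkBypass(new URL('https://example.com'));      // true  - exact host match
checkBypass(new URL('https://api.example.com'));  // true  - subdomain of a listed host
checkBypass(new URL('https://svc.internal'));     // true  - matches the leading-dot entry
checkBypass(new URL('https://example.org'));      // false - not listed, proxy will be used
checkBypass(new URL('http://127.0.0.1:8080'));    // true  - loopback always bypasses the proxy
*/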
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; const fs = __importStar(__nccwpck_require__(79896)); const path = __importStar(__nccwpck_require__(16928)); _a = fs.promises // export const {open} = 'fs' , exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; // export const {open} = 'fs' exports.IS_WINDOWS = process.platform === 'win32'; // See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 exports.UV_FS_O_EXLOCK = 0x10000000; exports.READONLY = fs.constants.O_RDONLY; function exists(fsPath) { return __awaiter(this, void 0, void 0, function* () { try { yield exports.stat(fsPath); } catch (err) { if (err.code === 'ENOENT') { return false; } throw err; } return true; }); } exports.exists = exists; function isDirectory(fsPath, useStat = false) { return __awaiter(this, void 0, void 0, function* () { const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); return stats.isDirectory(); }); } exports.isDirectory = isDirectory; /** * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). */ function isRooted(p) { p = normalizeSeparators(p); if (!p) { throw new Error('isRooted() parameter "p" cannot be empty'); } if (exports.IS_WINDOWS) { return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello ); // e.g. C: or C:\hello } return p.startsWith('/'); } exports.isRooted = isRooted; /** * Best effort attempt to determine whether a file exists and is executable. * @param filePath file path to check * @param extensions additional file extensions to try * @return if file exists and is executable, returns the file path. 
otherwise empty string. */ function tryGetExecutablePath(filePath, extensions) { return __awaiter(this, void 0, void 0, function* () { let stats = undefined; try { // test file exists stats = yield exports.stat(filePath); } catch (err) { if (err.code !== 'ENOENT') { // eslint-disable-next-line no-console console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); } } if (stats && stats.isFile()) { if (exports.IS_WINDOWS) { // on Windows, test for valid extension const upperExt = path.extname(filePath).toUpperCase(); if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { return filePath; } } else { if (isUnixExecutable(stats)) { return filePath; } } } // try each extension const originalFilePath = filePath; for (const extension of extensions) { filePath = originalFilePath + extension; stats = undefined; try { stats = yield exports.stat(filePath); } catch (err) { if (err.code !== 'ENOENT') { // eslint-disable-next-line no-console console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); } } if (stats && stats.isFile()) { if (exports.IS_WINDOWS) { // preserve the case of the actual file (since an extension was appended) try { const directory = path.dirname(filePath); const upperName = path.basename(filePath).toUpperCase(); for (const actualName of yield exports.readdir(directory)) { if (upperName === actualName.toUpperCase()) { filePath = path.join(directory, actualName); break; } } } catch (err) { // eslint-disable-next-line no-console console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); } return filePath; } else { if (isUnixExecutable(stats)) { return filePath; } } } } return ''; }); } exports.tryGetExecutablePath = tryGetExecutablePath; function normalizeSeparators(p) { p = p || ''; if (exports.IS_WINDOWS) { // convert slashes on Windows p = p.replace(/\//g, '\\'); // remove redundant slashes return p.replace(/\\\\+/g, '\\'); } // remove redundant slashes return p.replace(/\/\/+/g, '/'); } // on Mac/Linux, test the execute bit // R W X R W X R W X // 256 128 64 32 16 8 4 2 1 function isUnixExecutable(stats) { return ((stats.mode & 1) > 0 || ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || ((stats.mode & 64) > 0 && stats.uid === process.getuid())); } // Get the path of cmd.exe in windows function getCmdPath() { var _a; return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; } exports.getCmdPath = getCmdPath; //# sourceMappingURL=io-util.js.map /***/ }), /***/ 94994: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
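// Execute-bit sketch for isUnixExecutable above: bit 0o100 (64) is owner-execute, 0o010 (8) is
// group-execute and 0o001 (1) is other-execute, so:
/*
// mode 0o755 (rwxr-xr-x): (mode & 1) === 1, executable for any caller
// mode 0o644 (rw-r--r--): all three execute bits are 0, so tryGetExecutablePath skips the file
*/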
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.findInPath = exports.which = exports.mkdirP = exports.rmRF = exports.mv = exports.cp = void 0; const assert_1 = __nccwpck_require__(42613); const path = __importStar(__nccwpck_require__(16928)); const ioUtil = __importStar(__nccwpck_require__(75207)); /** * Copies a file or folder. * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js * * @param source source path * @param dest destination path * @param options optional. See CopyOptions. */ function cp(source, dest, options = {}) { return __awaiter(this, void 0, void 0, function* () { const { force, recursive, copySourceDirectory } = readCopyOptions(options); const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; // Dest is an existing file, but not forcing if (destStat && destStat.isFile() && !force) { return; } // If dest is an existing directory, should copy inside. const newDest = destStat && destStat.isDirectory() && copySourceDirectory ? path.join(dest, path.basename(source)) : dest; if (!(yield ioUtil.exists(source))) { throw new Error(`no such file or directory: ${source}`); } const sourceStat = yield ioUtil.stat(source); if (sourceStat.isDirectory()) { if (!recursive) { throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); } else { yield cpDirRecursive(source, newDest, 0, force); } } else { if (path.relative(source, newDest) === '') { // a file cannot be copied to itself throw new Error(`'${newDest}' and '${source}' are the same file`); } yield copyFile(source, newDest, force); } }); } exports.cp = cp; /** * Moves a path. * * @param source source path * @param dest destination path * @param options optional. See MoveOptions. 
*/ function mv(source, dest, options = {}) { return __awaiter(this, void 0, void 0, function* () { if (yield ioUtil.exists(dest)) { let destExists = true; if (yield ioUtil.isDirectory(dest)) { // If dest is directory copy src into dest dest = path.join(dest, path.basename(source)); destExists = yield ioUtil.exists(dest); } if (destExists) { if (options.force == null || options.force) { yield rmRF(dest); } else { throw new Error('Destination already exists'); } } } yield mkdirP(path.dirname(dest)); yield ioUtil.rename(source, dest); }); } exports.mv = mv; /** * Remove a path recursively with force * * @param inputPath path to remove */ function rmRF(inputPath) { return __awaiter(this, void 0, void 0, function* () { if (ioUtil.IS_WINDOWS) { // Check for invalid characters // https://docs.microsoft.com/en-us/windows/win32/fileio/naming-a-file if (/[*"<>|]/.test(inputPath)) { throw new Error('File path must not contain `*`, `"`, `<`, `>` or `|` on Windows'); } } try { // note if path does not exist, error is silent yield ioUtil.rm(inputPath, { force: true, maxRetries: 3, recursive: true, retryDelay: 300 }); } catch (err) { throw new Error(`File was unable to be removed ${err}`); } }); } exports.rmRF = rmRF; /** * Make a directory. Creates the full path with folders in between * Will throw if it fails * * @param fsPath path to create * @returns Promise */ function mkdirP(fsPath) { return __awaiter(this, void 0, void 0, function* () { assert_1.ok(fsPath, 'a path argument must be provided'); yield ioUtil.mkdir(fsPath, { recursive: true }); }); } exports.mkdirP = mkdirP; /** * Returns path of a tool had the tool actually been invoked. Resolves via paths. * If you check and the tool does not exist, it will throw. * * @param tool name of the tool * @param check whether to check if tool exists * @returns Promise path to tool */ function which(tool, check) { return __awaiter(this, void 0, void 0, function* () { if (!tool) { throw new Error("parameter 'tool' is required"); } // recursive when check=true if (check) { const result = yield which(tool, false); if (!result) { if (ioUtil.IS_WINDOWS) { throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); } else { throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); } } return result; } const matches = yield findInPath(tool); if (matches && matches.length > 0) { return matches[0]; } return ''; }); } exports.which = which; /** * Returns a list of all occurrences of the given tool on the system path. * * @returns Promise the paths of the tool */ function findInPath(tool) { return __awaiter(this, void 0, void 0, function* () { if (!tool) { throw new Error("parameter 'tool' is required"); } // build the list of extensions to try const extensions = []; if (ioUtil.IS_WINDOWS && process.env['PATHEXT']) { for (const extension of process.env['PATHEXT'].split(path.delimiter)) { if (extension) { extensions.push(extension); } } } // if it's rooted, return it if exists. otherwise return empty. 
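// Usage sketch (illustrative; assumes this module is consumed as the '@actions/io' npm package):
/*
const io = require('@actions/io');

async function demo() {
    const gitPath = await io.which('git', true);   // throws if git cannot be found on PATH
    await io.mkdirP('/tmp/build/output');          // creates intermediate directories as needed
    await io.rmRF('/tmp/build/output');            // recursive, forced removal; silent if the path is missing
}
*/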
if (ioUtil.isRooted(tool)) { const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); if (filePath) { return [filePath]; } return []; } // if any path separators, return empty if (tool.includes(path.sep)) { return []; } // build the list of directories // // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, // it feels like we should not do this. Checking the current directory seems like more of a use // case of a shell, and the which() function exposed by the toolkit should strive for consistency // across platforms. const directories = []; if (process.env.PATH) { for (const p of process.env.PATH.split(path.delimiter)) { if (p) { directories.push(p); } } } // find all matches const matches = []; for (const directory of directories) { const filePath = yield ioUtil.tryGetExecutablePath(path.join(directory, tool), extensions); if (filePath) { matches.push(filePath); } } return matches; }); } exports.findInPath = findInPath; function readCopyOptions(options) { const force = options.force == null ? true : options.force; const recursive = Boolean(options.recursive); const copySourceDirectory = options.copySourceDirectory == null ? true : Boolean(options.copySourceDirectory); return { force, recursive, copySourceDirectory }; } function cpDirRecursive(sourceDir, destDir, currentDepth, force) { return __awaiter(this, void 0, void 0, function* () { // Ensure there is not a run away recursive copy if (currentDepth >= 255) return; currentDepth++; yield mkdirP(destDir); const files = yield ioUtil.readdir(sourceDir); for (const fileName of files) { const srcFile = `${sourceDir}/${fileName}`; const destFile = `${destDir}/${fileName}`; const srcFileStat = yield ioUtil.lstat(srcFile); if (srcFileStat.isDirectory()) { // Recurse yield cpDirRecursive(srcFile, destFile, currentDepth, force); } else { yield copyFile(srcFile, destFile, force); } } // Change the mode for the newly created directory yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); }); } // Buffered file copy function copyFile(srcFile, destFile, force) { return __awaiter(this, void 0, void 0, function* () { if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { // unlink/re-link it try { yield ioUtil.lstat(destFile); yield ioUtil.unlink(destFile); } catch (e) { // Try to override file permission if (e.code === 'EPERM') { yield ioUtil.chmod(destFile, '0666'); yield ioUtil.unlink(destFile); } // other errors = it doesn't exist, no work to do } // Copy over symlink const symlinkFull = yield ioUtil.readlink(srcFile); yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); } else if (!(yield ioUtil.exists(destFile)) || force) { yield ioUtil.copyFile(srcFile, destFile); } }); } //# sourceMappingURL=io.js.map /***/ }), /***/ 22343: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
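// Option-default sketch: readCopyOptions above defaults force to true, recursive to false and
// copySourceDirectory to true, so (illustrative paths):
/*
const io = require('@actions/io');

async function demo() {
    await io.cp('a.txt', 'b.txt');                          // overwrites b.txt (force defaults to true)
    await io.cp('srcDir', 'destDir', { recursive: true });  // directories require recursive: true
    await io.mv('a.txt', 'destDir');                        // becomes destDir/a.txt, replacing it unless force is false
}
*/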
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.deleteCache = exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0; const core = __importStar(__nccwpck_require__(37484)); const path = __importStar(__nccwpck_require__(16928)); const utils = __importStar(__nccwpck_require__(40500)); const cacheHttpClient = __importStar(__nccwpck_require__(31834)); const tar_1 = __nccwpck_require__(47356); const options_1 = __nccwpck_require__(3347); const requestUtils_1 = __nccwpck_require__(85077); const downloadUtils_1 = __nccwpck_require__(87574); class ValidationError extends Error { constructor(message) { super(message); this.name = 'ValidationError'; Object.setPrototypeOf(this, ValidationError.prototype); } } exports.ValidationError = ValidationError; class ReserveCacheError extends Error { constructor(message) { super(message); this.name = 'ReserveCacheError'; Object.setPrototypeOf(this, ReserveCacheError.prototype); } } exports.ReserveCacheError = ReserveCacheError; function checkPaths(paths) { if (!paths || paths.length === 0) { throw new ValidationError(`Path Validation Error: At least one directory or file path is required`); } } function checkKey(key) { if (key.length > 512) { throw new ValidationError(`Key Validation Error: ${key} cannot be larger than 512 characters.`); } const regex = /^[^,]*$/; if (!regex.test(key)) { throw new ValidationError(`Key Validation Error: ${key} cannot contain commas.`); } } /** * isFeatureAvailable to check the presence of Actions cache service * * @returns boolean return true if Actions cache service feature is available, otherwise false */ function isFeatureAvailable() { return !!process.env['WARPBUILD_RUNNER_VERIFICATION_TOKEN']; } exports.isFeatureAvailable = isFeatureAvailable; /** * Restores cache from keys * * @param paths a list of file paths to restore from the cache * @param primaryKey an explicit key for restoring the cache * @param restoreKeys an optional ordered list of keys to use for restoring the cache if no cache hit occurred for key * @param downloadOptions cache download options * @param enableCrossOsArchive an optional boolean enabled to restore on windows any cache created on any platform * @param enableCrossArchArchive an optional boolean enabled to restore cache created on any arch * @returns string returns the key for the cache hit, otherwise returns undefined */ function 
restoreCache(paths, primaryKey, restoreKeys, options, enableCrossOsArchive = false, enableCrossArchArchive = false) { var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(primaryKey); restoreKeys = restoreKeys !== null && restoreKeys !== void 0 ? restoreKeys : []; core.debug('Resolved Restore Keys:'); core.debug(JSON.stringify(restoreKeys)); if (restoreKeys.length > 9) { throw new ValidationError(`Key Validation Error: Keys are limited to a maximum of 10.`); } for (const key of restoreKeys) { checkKey(key); } const compressionMethod = yield utils.getCompressionMethod(); let archivePath = ''; try { // path are needed to compute version const cacheEntry = yield cacheHttpClient.getCacheEntry(primaryKey, restoreKeys, paths, { compressionMethod, enableCrossOsArchive, enableCrossArchArchive }); if (!cacheEntry) { // Internal Error return undefined; } archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); const cacheKey = (_b = (_a = cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cache_entry) === null || _a === void 0 ? void 0 : _a.cache_user_given_key) !== null && _b !== void 0 ? _b : primaryKey; switch (cacheEntry.provider) { case 's3': case 'r2': { if (!((_c = cacheEntry.s3) === null || _c === void 0 ? void 0 : _c.pre_signed_url)) { return undefined; } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { core.info('Lookup only - skipping download'); return cacheKey; } try { yield cacheHttpClient.downloadCache('s3', (_d = cacheEntry.s3) === null || _d === void 0 ? void 0 : _d.pre_signed_url, archivePath); } catch (error) { core.info('Cache Miss. Failed to download cache.'); return undefined; } if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); core.info('Cache restored successfully'); break; } case 'gcs': { if (!((_e = cacheEntry.gcs) === null || _e === void 0 ? void 0 : _e.cache_key)) { return undefined; } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { core.info('Lookup only - skipping download'); return cacheKey; } const archiveLocation = `gs://${(_f = cacheEntry.gcs) === null || _f === void 0 ? void 0 : _f.bucket_name}/${(_g = cacheEntry.gcs) === null || _g === void 0 ? void 0 : _g.cache_key}`; // For GCS, we do a streaming download which means that we extract the archive while we are downloading it. let readStream; let downloadCommandPipe; if ((_h = cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.gcs) === null || _h === void 0 ? void 0 : _h.pre_signed_url) { downloadCommandPipe = (0, downloadUtils_1.getDownloadCommandPipeForWget)((_j = cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.gcs) === null || _j === void 0 ? void 0 : _j.pre_signed_url); } else { readStream = cacheHttpClient.downloadCacheStreaming('gcs', archiveLocation, (_m = (_l = (_k = cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.gcs) === null || _k === void 0 ? void 0 : _k.short_lived_token) === null || _l === void 0 ? void 0 : _l.access_token) !== null && _m !== void 0 ? 
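// Restore sketch for restoreCache in this module (hypothetical key names; the validation above
// accepts the primary key plus at most 9 restore keys):
/*
const hit = await restoreCache(
    ['node_modules'],          // paths to restore
    'npm-linux-x64-abc123',    // primary key
    ['npm-linux-x64-'],        // restore key prefixes
    { lookupOnly: false },     // lookupOnly: true probes for a hit without downloading
    false,                     // enableCrossOsArchive
    false                      // enableCrossArchArchive
);
// hit is the matched cache key, or undefined on a miss (non-validation errors are only warned about).
*/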
_m : ''); if (!readStream) { return undefined; } } try { yield (0, tar_1.extractStreamingTar)(readStream, archivePath, compressionMethod, downloadCommandPipe); } catch (error) { core.debug(`Failed to download cache: ${error}`); core.info(`Streaming download failed. Likely a cloud provider issue. Retrying with multipart download`); // Wait 1 second yield new Promise(resolve => setTimeout(resolve, 1000)); // Try to download the cache using the non-streaming method try { yield cacheHttpClient.downloadCache(cacheEntry.provider, archiveLocation, archivePath, (_q = (_p = (_o = cacheEntry.gcs) === null || _o === void 0 ? void 0 : _o.short_lived_token) === null || _p === void 0 ? void 0 : _p.access_token) !== null && _q !== void 0 ? _q : ''); } catch (error) { core.debug(`Failed to download cache: ${error}`); core.info(`Multipart download failed. Likely a cloud provider issue. Retrying with basic download`); // Wait 1 second yield new Promise(resolve => setTimeout(resolve, 1000)); // Try to download the cache using the basic method try { yield cacheHttpClient.downloadCacheSingleThread(cacheEntry.provider, archiveLocation, archivePath, (_t = (_s = (_r = cacheEntry.gcs) === null || _r === void 0 ? void 0 : _r.short_lived_token) === null || _s === void 0 ? void 0 : _s.access_token) !== null && _t !== void 0 ? _t : ''); } catch (error) { core.info('Cache Miss. Failed to download cache.'); return undefined; } } if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); } core.info('Cache restored successfully'); break; } case 'azure_blob': { if (!((_u = cacheEntry.azure_blob) === null || _u === void 0 ? void 0 : _u.pre_signed_url)) { return undefined; } if (options === null || options === void 0 ? void 0 : options.lookupOnly) { core.info('Lookup only - skipping download'); return cacheKey; } try { yield cacheHttpClient.downloadCache('azure_blob', (_v = cacheEntry.azure_blob) === null || _v === void 0 ? void 0 : _v.pre_signed_url, archivePath); } catch (error) { core.info('Cache Miss. 
Failed to download cache.'); return undefined; } if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); yield (0, tar_1.extractTar)(archivePath, compressionMethod); core.info('Cache restored successfully'); break; } } return cacheKey; } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) { throw error; } else { // Suppress all non-validation cache related errors because caching should be optional core.warning(`Failed to restore: ${error.message}`); } } finally { // Try to delete the archive to save space try { yield utils.unlinkFile(archivePath); } catch (error) { core.debug(`Failed to delete archive: ${error}`); } } return undefined; }); } exports.restoreCache = restoreCache; /** * Saves a list of files with the specified key * * @param paths a list of file paths to be cached * @param key an explicit key for restoring the cache * @param enableCrossOsArchive an optional boolean enabled to save cache on windows which could be restored on any platform * @param enableCrossArchArchive an optional boolean enabled to save cache on any arch which could be restored on any arch * @returns string returns cacheId if the cache was saved successfully and throws an error if save fails */ function saveCache(paths, key, enableCrossOsArchive = false, enableCrossArchArchive = false) { var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t, _u, _v, _w, _x, _y, _z, _0, _1; return __awaiter(this, void 0, void 0, function* () { checkPaths(paths); checkKey(key); const compressionMethod = yield utils.getCompressionMethod(); const cachePaths = yield utils.resolvePaths(paths); let cacheKey = ''; core.debug('Cache Paths:'); core.debug(`${JSON.stringify(cachePaths)}`); if (cachePaths.length === 0) { throw new Error(`Path Validation Error: Path(s) specified in the action for caching do(es) not exist, hence no cache is being saved.`); } const archiveFolder = yield utils.createTempDirectory(); const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod)); core.debug(`Archive Path: ${archivePath}`); try { yield (0, tar_1.createTar)(archiveFolder, cachePaths, compressionMethod); if (core.isDebug()) { yield (0, tar_1.listTar)(archivePath, compressionMethod); } const fileSizeLimit = 1000 * 1024 * 1024 * 1024; // 1000GB per repo limit const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath); core.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit) { throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 1000GB limit, not saving cache.`); } const cacheVersion = cacheHttpClient.getCacheVersion(paths, compressionMethod, enableCrossOsArchive, enableCrossArchArchive); core.debug('Reserving Cache'); // Calculate number of chunks required. This is only required if backend is S3 as Google Cloud SDK will do it for us const uploadOptions = (0, options_1.getUploadOptions)(); const maxChunkSize = (_a = uploadOptions === null || uploadOptions === void 0 ? void 0 : uploadOptions.uploadChunkSize) !== null && _a !== void 0 ? 
_a : 32 * 1024 * 1024; // Default 32MB const numberOfChunks = Math.max(Math.floor(archiveFileSize / maxChunkSize), 1); const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, numberOfChunks, cacheVersion); if (!(0, requestUtils_1.isSuccessStatusCode)(reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode)) { core.debug(`Failed to reserve cache: ${reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.statusCode}`); core.debug(`Reserve Cache Request: ${JSON.stringify({ key, numberOfChunks, cacheVersion })}`); throw new Error((_c = (_b = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _b === void 0 ? void 0 : _b.message) !== null && _c !== void 0 ? _c : `Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the data cap limit, not saving cache.`); } switch ((_d = reserveCacheResponse.result) === null || _d === void 0 ? void 0 : _d.provider) { case 's3': case 'r2': core.debug(`Saving Cache to S3`); cacheKey = yield cacheHttpClient.saveCache('s3', key, cacheVersion, archivePath, (_g = (_f = (_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _e === void 0 ? void 0 : _e.s3) === null || _f === void 0 ? void 0 : _f.upload_id) !== null && _g !== void 0 ? _g : '', (_k = (_j = (_h = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _h === void 0 ? void 0 : _h.s3) === null || _j === void 0 ? void 0 : _j.upload_key) !== null && _k !== void 0 ? _k : '', numberOfChunks, (_o = (_m = (_l = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _l === void 0 ? void 0 : _l.s3) === null || _m === void 0 ? void 0 : _m.pre_signed_urls) !== null && _o !== void 0 ? _o : []); break; case 'gcs': core.debug(`Saving Cache to GCS`); cacheKey = yield cacheHttpClient.saveCache('gcs', key, cacheVersion, archivePath, // S3 Params are undefined for GCS undefined, undefined, undefined, undefined, (_s = (_r = (_q = (_p = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _p === void 0 ? void 0 : _p.gcs) === null || _q === void 0 ? void 0 : _q.short_lived_token) === null || _r === void 0 ? void 0 : _r.access_token) !== null && _s !== void 0 ? _s : '', (_v = (_u = (_t = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _t === void 0 ? void 0 : _t.gcs) === null || _u === void 0 ? void 0 : _u.bucket_name) !== null && _v !== void 0 ? _v : '', (_y = (_x = (_w = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _w === void 0 ? void 0 : _w.gcs) === null || _x === void 0 ? void 0 : _x.cache_key) !== null && _y !== void 0 ? _y : ''); break; case 'azure_blob': core.debug(`Saving Cache to Azure Blob`); cacheKey = yield cacheHttpClient.saveCache('azure_blob', key, cacheVersion, archivePath, // S3 Params are undefined for GCS undefined, undefined, undefined, undefined, // GCS Params are undefined for Azure Blob undefined, undefined, undefined, (_1 = (_0 = (_z = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.result) === null || _z === void 0 ? 
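// Chunking sketch: with the 32 MB default above, a 100 MB archive reserves
// Math.max(Math.floor(104857600 / 33554432), 1) === 3 chunks; anything at or below 32 MB is 1 chunk.
/*
const savedKey = await saveCache(['node_modules'], 'npm-linux-x64-abc123');   // hypothetical key
// savedKey stays '' when the save is skipped or fails; validation errors are re-thrown and other
// failures are only logged by the catch block below.
*/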
void 0 : _z.azure_blob) === null || _0 === void 0 ? void 0 : _0.pre_signed_url) !== null && _1 !== void 0 ? _1 : ''); } } catch (error) { const typedError = error; if (typedError.name === ValidationError.name) { throw error; } else if (typedError.name === ReserveCacheError.name) { core.info(`Failed to save: ${typedError.message}`); } else { core.warning(`Failed to save: ${typedError.message}`); } } finally { // Try to delete the archive to save space try { yield utils.unlinkFile(archivePath); } catch (error) { core.debug(`Failed to delete archive: ${error}`); } } return cacheKey; }); } exports.saveCache = saveCache; /** * Deletes an entire cache by cache key. * @param key The cache keys */ function deleteCache(paths, key, enableCrossOsArchive = false, enableCrossArchArchive = false) { return __awaiter(this, void 0, void 0, function* () { checkKey(key); core.debug('Deleting Cache'); core.debug(`Cache Key: ${key}`); const compressionMethod = yield utils.getCompressionMethod(); const cacheVersion = cacheHttpClient.getCacheVersion(paths, compressionMethod, enableCrossOsArchive, enableCrossArchArchive); try { yield cacheHttpClient.deleteCache(key, cacheVersion); } catch (error) { core.warning(`Failed to delete cache: ${error}`); } }); } exports.deleteCache = deleteCache; //# sourceMappingURL=cache.js.map /***/ }), /***/ 31834: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.deleteCache = exports.saveCache = exports.reserveCache = exports.downloadCacheStreaming = exports.downloadCacheSingleThread = exports.downloadCache = exports.getCacheEntry = exports.getCacheVersion = void 0; const core = __importStar(__nccwpck_require__(37484)); const github = __importStar(__nccwpck_require__(93228)); const http_client_1 = __nccwpck_require__(54844); const auth_1 = __nccwpck_require__(44552); const crypto = __importStar(__nccwpck_require__(76982)); const utils = __importStar(__nccwpck_require__(40500)); const os_1 = __importDefault(__nccwpck_require__(70857)); const downloadUtils_1 = __nccwpck_require__(87574); const requestUtils_1 = __nccwpck_require__(85077); const storage_1 = __nccwpck_require__(28525); const uploadUtils_1 = __nccwpck_require__(89149); const google_auth_library_1 = __nccwpck_require__(492); const storage_blob_1 = __nccwpck_require__(1012); const versionSalt = '1.0'; function getCacheApiUrl(resource) { var _a; const baseUrl = (_a = process.env['WARPBUILD_CACHE_URL']) !== null && _a !== void 0 ? _a : 'https://cache.warpbuild.com'; if (!baseUrl) { throw new Error('Cache Service Url not found, unable to restore cache.'); } const url = `${baseUrl}/v1/${resource}`; core.debug(`Resource Url: ${url}`); return url; } function createAcceptHeader(type, apiVersion) { return `${type};api-version=${apiVersion}`; } function getVCSRepository() { var _a; const vcsRepository = (_a = process.env['GITHUB_REPOSITORY']) !== null && _a !== void 0 ? _a : ''; return vcsRepository; } function getVCSRef() { var _a; const vcsBranch = (_a = process.env['GITHUB_REF']) !== null && _a !== void 0 ? _a : ''; return vcsBranch; } function getAnnotations() { var _a, _b, _c, _d, _e, _f, _g, _h; const annotations = { GITHUB_WORKFLOW: (_a = process.env['GITHUB_WORKFLOW']) !== null && _a !== void 0 ? _a : '', GITHUB_RUN_ID: (_b = process.env['GITHUB_RUN_ID']) !== null && _b !== void 0 ? _b : '', GITHUB_RUN_ATTEMPT: (_c = process.env['GITHUB_RUN_ATTEMPT']) !== null && _c !== void 0 ? _c : '', GITHUB_JOB: (_d = process.env['GITHUB_JOB']) !== null && _d !== void 0 ? _d : '', GITHUB_REPOSITORY: (_e = process.env['GITHUB_REPOSITORY']) !== null && _e !== void 0 ? _e : '', GITHUB_REF: (_f = process.env['GITHUB_REF']) !== null && _f !== void 0 ? _f : '', GITHUB_ACTION: (_g = process.env['GITHUB_ACTION']) !== null && _g !== void 0 ? _g : '', RUNNER_NAME: (_h = process.env['RUNNER_NAME']) !== null && _h !== void 0 ? _h : '' }; return annotations; } function getRequestOptions() { const requestOptions = { headers: { Accept: createAcceptHeader('application/json', 'v1') } }; return requestOptions; } function createHttpClient() { var _a; const token = (_a = process.env['WARPBUILD_RUNNER_VERIFICATION_TOKEN']) !== null && _a !== void 0 ? 
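// Endpoint sketch for getCacheApiUrl above: with WARPBUILD_CACHE_URL unset the base URL falls back
// to https://cache.warpbuild.com, and authentication uses WARPBUILD_RUNNER_VERIFICATION_TOKEN as a
// bearer token (see createHttpClient).
/*
getCacheApiUrl('cache/get');       // 'https://cache.warpbuild.com/v1/cache/get'
getCacheApiUrl('cache/reserve');   // 'https://cache.warpbuild.com/v1/cache/reserve'
*/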
_a : ''; const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient('warp/cache', [bearerCredentialHandler], getRequestOptions()); } function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false, enableCrossArchArchive = false) { const components = paths; // Add compression method to cache version to restore // compressed cache as per compression method if (compressionMethod) { components.push(compressionMethod); } // Only check for windows platforms if enableCrossOsArchive is false if (process.platform === 'win32' && !enableCrossOsArchive) { components.push('windows-only'); } // Check for mac platforms if enableCrossOsArchive is false if (process.platform === 'darwin' && !enableCrossOsArchive) { components.push('mac-only'); } // Add architecture to cache version if (!enableCrossArchArchive) { components.push(process.arch); } // Add salt to cache version to support breaking changes in cache entry components.push(versionSalt); return crypto.createHash('sha256').update(components.join('|')).digest('hex'); } exports.getCacheVersion = getCacheVersion; function getCacheEntry(key, restoreKeys, paths, options) { var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive, options === null || options === void 0 ? void 0 : options.enableCrossArchArchive); const restoreBranches = new Set(); const restoreRepos = new Set(); switch (github.context.eventName) { case 'pull_request': { const pullPayload = github.context.payload; // Adds PR head branch and base branch to restoreBranches restoreBranches.add(`refs/heads/${(_b = (_a = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _a === void 0 ? void 0 : _a.head) === null || _b === void 0 ? void 0 : _b.ref}`); restoreBranches.add(`refs/heads/${(_d = (_c = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _c === void 0 ? void 0 : _c.base) === null || _d === void 0 ? void 0 : _d.ref}`); // Adds default branch to restoreBranches restoreBranches.add(`refs/heads/${(_e = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.repository) === null || _e === void 0 ? void 0 : _e.default_branch}`); // If head points to a different repository, add it to restoreRepos. We allow restores from head repos as well. if (((_h = (_g = (_f = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _f === void 0 ? void 0 : _f.head) === null || _g === void 0 ? void 0 : _g.repo) === null || _h === void 0 ? void 0 : _h.name) !== ((_j = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.repository) === null || _j === void 0 ? void 0 : _j.name)) { restoreRepos.add((_m = (_l = (_k = pullPayload === null || pullPayload === void 0 ? void 0 : pullPayload.pull_request) === null || _k === void 0 ? void 0 : _k.head) === null || _l === void 0 ? void 0 : _l.repo) === null || _m === void 0 ? void 0 : _m.name); } } break; default: const payload = (_o = github === null || github === void 0 ? void 0 : github.context) === null || _o === void 0 ? 
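// Version sketch for getCacheVersion above: on linux/x64 with cross-OS and cross-arch archives
// disabled, paths ['node_modules'] with 'zstd' compression hash the string
// 'node_modules|zstd|x64|1.0' (note that components aliases and mutates the caller's paths array).
/*
const version = getCacheVersion(['node_modules'], 'zstd');
// equivalent to:
require('crypto').createHash('sha256').update('node_modules|zstd|x64|1.0').digest('hex');
*/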
void 0 : _o.payload; // Default branch is not in the complete format // Ref: https://docs.github.com/en/developers/webhooks-and-events/webhooks/webhook-events-and-payloads#push if ((_p = payload === null || payload === void 0 ? void 0 : payload.repository) === null || _p === void 0 ? void 0 : _p.default_branch) { restoreBranches.add(`refs/heads/${(_q = payload === null || payload === void 0 ? void 0 : payload.repository) === null || _q === void 0 ? void 0 : _q.default_branch}`); } break; } const getCacheRequest = { cache_key: key, restore_keys: restoreKeys, cache_version: version, vcs_repository: getVCSRepository(), vcs_ref: getVCSRef(), annotations: getAnnotations(), restore_branches: Array.from(restoreBranches), restore_repos: Array.from(restoreRepos) }; const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('cache/get'), getCacheRequest); })); if (response.statusCode === 204) { // TODO: List cache for primary key only if cache miss occurs // if (core.isDebug()) { // await printCachesListForDiagnostics(keys[0], httpClient, version) // } return null; } if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; core.debug(`Cache Result:`); core.debug(JSON.stringify(cacheResult)); return cacheResult; }); } exports.getCacheEntry = getCacheEntry; /* async function printCachesListForDiagnostics( key: string, httpClient: HttpClient, version: string ): Promise { const resource = `caches?key=${encodeURIComponent(key)}` const response = await retryTypedResponse('listCache', async () => httpClient.getJson(getCacheApiUrl(resource)) ) if (response.statusCode === 200) { const cacheListResult = response.result const totalCount = cacheListResult?.totalCount if (totalCount && totalCount > 0) { core.debug( `No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:` ) for (const cacheEntry of cacheListResult?.artifactCaches ?? []) { core.debug( `Cache Key: ${cacheEntry?.cache_key}, Cache Version: ${cacheEntry?.cache_version}` ) } } } } */ function downloadCache(provider, archiveLocation, archivePath, gcsToken) { return __awaiter(this, void 0, void 0, function* () { switch (provider) { case 's3': { const numberOfConnections = 2 + os_1.default.cpus().length; yield (0, downloadUtils_1.downloadCacheMultiConnection)(archiveLocation, archivePath, Math.min(numberOfConnections, 30)); } break; case 'gcs': { if (!gcsToken) { throw new Error('Unable to download cache from GCS. 
GCP token is not provided.'); } const oauth2Client = new google_auth_library_1.OAuth2Client(); oauth2Client.setCredentials({ access_token: gcsToken }); const storage = new storage_1.Storage({ authClient: oauth2Client }); yield (0, downloadUtils_1.downloadCacheMultipartGCP)(storage, archiveLocation, archivePath); break; } case 'azure_blob': { const blockBlobClient = new storage_blob_1.BlockBlobClient(archiveLocation); yield blockBlobClient.downloadToFile(archivePath); break; } } }); } exports.downloadCache = downloadCache; function downloadCacheSingleThread(provider, archiveLocation, archivePath, gcsToken) { return __awaiter(this, void 0, void 0, function* () { switch (provider) { case 's3': break; case 'gcs': { if (!gcsToken) { throw new Error('Unable to download cache from GCS. GCP token is not provided.'); } const oauth2Client = new google_auth_library_1.OAuth2Client(); oauth2Client.setCredentials({ access_token: gcsToken }); const storage = new storage_1.Storage({ authClient: oauth2Client, retryOptions: { autoRetry: false, maxRetries: 1 } }); yield (0, downloadUtils_1.downloadCacheGCP)(storage, archiveLocation, archivePath); break; } } }); } exports.downloadCacheSingleThread = downloadCacheSingleThread; function downloadCacheStreaming(provider, archiveLocation, gcsToken) { switch (provider) { case 's3': return undefined; case 'gcs': { if (!gcsToken) { throw new Error('Unable to download cache from GCS. GCP token is not provided.'); } const oauth2Client = new google_auth_library_1.OAuth2Client(); oauth2Client.setCredentials({ access_token: gcsToken }); const storage = new storage_1.Storage({ authClient: oauth2Client }); return (0, downloadUtils_1.downloadCacheStreamingGCP)(storage, archiveLocation); } default: return undefined; } } exports.downloadCacheStreaming = downloadCacheStreaming; function reserveCache(cacheKey, numberOfChunks, cacheVersion) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const reserveCacheRequest = { cache_key: cacheKey, cache_version: cacheVersion, number_of_chunks: numberOfChunks, content_type: 'application/zstd', vcs_repository: getVCSRepository(), vcs_ref: getVCSRef(), annotations: getAnnotations() }; const response = yield (0, requestUtils_1.retryTypedResponse)('reserveCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('cache/reserve'), reserveCacheRequest); })); return response; }); } exports.reserveCache = reserveCache; function commitCache(cacheKey, cacheVersion, uploadKey, uploadID, parts) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); if (!parts) { parts = []; } const commitCacheRequest = { cache_key: cacheKey, cache_version: cacheVersion, upload_key: uploadKey, upload_id: uploadID, parts, vcs_type: 'github', vcs_repository: getVCSRepository(), vcs_ref: getVCSRef(), annotations: getAnnotations() }; return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl(`cache/commit`), commitCacheRequest); })); }); } function saveCache(provider, cacheKey, cacheVersion, archivePath, S3UploadId, S3UploadKey, S3NumberOfChunks, S3PreSignedURLs, GCSAuthToken, GCSBucketName, GCSObjectName, AzureSASURL) { var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p, _q, _r, _s, _t; return __awaiter(this, void 0, void 0, function* () { const cacheSize = utils.getArchiveFileSizeInBytes(archivePath); core.info(`Cache Size: 
~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`); let commitCacheResponse = { headers: {}, statusCode: 0, result: null }; let cacheKeyResponse = ''; switch (provider) { case 's3': { if (!S3NumberOfChunks || !S3PreSignedURLs || !S3UploadId || !S3UploadKey) { core.debug(`S3 params are not set. Number of Chunks: ${S3NumberOfChunks}, PreSigned URLs: ${S3PreSignedURLs}, Upload ID: ${S3UploadId}, Upload Key: ${S3UploadKey}`); throw new Error('Unable to upload cache to S3. One of the following required parameters is missing: numberOfChunks, preSignedURLs, uploadId, uploadKey.'); } // Number of chunks should match the number of pre-signed URLs if (S3NumberOfChunks !== S3PreSignedURLs.length) { throw new Error(`Number of chunks (${S3NumberOfChunks}) should match the number of pre-signed URLs (${S3PreSignedURLs.length}).`); } core.debug('Uploading cache'); const completedParts = yield (0, uploadUtils_1.uploadFileToS3)(S3PreSignedURLs, archivePath); // Sort parts in ascending order by partNumber completedParts.sort((a, b) => a.PartNumber - b.PartNumber); core.debug('Committing cache'); commitCacheResponse = yield commitCache(cacheKey, cacheVersion, S3UploadKey, S3UploadId, completedParts); cacheKeyResponse = (_f = (_c = (_b = (_a = commitCacheResponse.result) === null || _a === void 0 ? void 0 : _a.cache_entry) === null || _b === void 0 ? void 0 : _b.cache_user_given_key) !== null && _c !== void 0 ? _c : (_e = (_d = commitCacheResponse.result) === null || _d === void 0 ? void 0 : _d.s3) === null || _e === void 0 ? void 0 : _e.cache_key) !== null && _f !== void 0 ? _f : ''; break; } case 'gcs': { if (!GCSBucketName || !GCSObjectName || !GCSAuthToken) { throw new Error('Unable to upload cache to GCS. One of the following required parameters is missing: GCSBucketName, GCSObjectName, GCSAuthToken.'); } core.debug('Uploading cache'); const oauth2Client = new google_auth_library_1.OAuth2Client(); oauth2Client.setCredentials({ access_token: GCSAuthToken }); const storage = new storage_1.Storage({ authClient: oauth2Client }); yield (0, uploadUtils_1.multiPartUploadToGCS)(storage, archivePath, GCSBucketName, GCSObjectName); core.debug('Committing cache'); commitCacheResponse = yield commitCache(cacheKey, cacheVersion); cacheKeyResponse = (_m = (_j = (_h = (_g = commitCacheResponse.result) === null || _g === void 0 ? void 0 : _g.cache_entry) === null || _h === void 0 ? void 0 : _h.cache_user_given_key) !== null && _j !== void 0 ? _j : (_l = (_k = commitCacheResponse.result) === null || _k === void 0 ? void 0 : _k.gcs) === null || _l === void 0 ? void 0 : _l.cache_key) !== null && _m !== void 0 ? _m : ''; break; } case 'azure_blob': { if (!AzureSASURL) { core.debug(`Azure SAS URL is not set. SAS URL: ${AzureSASURL}`); throw new Error('Unable to upload cache to Azure Blob. SAS URL is not provided.'); } core.debug('Uploading cache'); const blockBlobClient = new storage_blob_1.BlockBlobClient(AzureSASURL); yield blockBlobClient.uploadFile(archivePath, { maxSingleShotSize: 10 * 1024 * 1024 // 10 MB }); core.debug('Committing cache'); commitCacheResponse = yield commitCache(cacheKey, cacheVersion); cacheKeyResponse = (_t = (_q = (_p = (_o = commitCacheResponse.result) === null || _o === void 0 ? void 0 : _o.cache_entry) === null || _p === void 0 ? void 0 : _p.cache_user_given_key) !== null && _q !== void 0 ? _q : (_s = (_r = commitCacheResponse.result) === null || _r === void 0 ? void 0 : _r.azure_blob) === null || _s === void 0 ? void 0 : _s.cache_key) !== null && _t !== void 0 ? 
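// Each provider branch resolves the returned cache key the same way: prefer result.cache_entry.cache_user_given_key,
// fall back to the provider-specific cache_key (s3, gcs, or azure_blob), and default to an empty string.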
_t : ''; break; } } if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) { throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } core.info('Cache saved successfully'); return cacheKeyResponse; }); } exports.saveCache = saveCache; function deleteCache(cacheKey, cacheVersion) { return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const deleteCacheRequest = { cache_key: cacheKey, cache_version: cacheVersion, vcs_repository: getVCSRepository(), vcs_ref: getVCSRef(), annotations: getAnnotations() }; const response = yield (0, requestUtils_1.retryTypedResponse)('deleteCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.postJson(getCacheApiUrl('cache/delete'), deleteCacheRequest); })); if (!(0, requestUtils_1.isSuccessStatusCode)(response.statusCode)) { throw new Error(`Cache service responded with ${response.statusCode}`); } }); } exports.deleteCache = deleteCache; //# sourceMappingURL=cacheHttpClient.js.map /***/ }), /***/ 40500: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues = (this && this.__asyncValues) || function (o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m = o[Symbol.asyncIterator], i; return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.retrieveGCSBucketAndObjectName = exports.streamToBuffer = exports.isGhes = exports.assertDefined = exports.getGnuTarPathOnWindows = exports.getCacheFileName = exports.getCompressionMethod = exports.unlinkFile = exports.resolvePaths = exports.getArchiveFileSizeInBytes = exports.createTempDirectory = void 0; const core = __importStar(__nccwpck_require__(37484)); const exec = __importStar(__nccwpck_require__(95236)); const glob = __importStar(__nccwpck_require__(97507)); const io = __importStar(__nccwpck_require__(94994)); const fs = __importStar(__nccwpck_require__(79896)); const path = __importStar(__nccwpck_require__(16928)); const semver = __importStar(__nccwpck_require__(39318)); const util = __importStar(__nccwpck_require__(39023)); const uuid_1 = __nccwpck_require__(87723); const constants_1 = __nccwpck_require__(31974); // From https://github.com/actions/toolkit/blob/main/packages/tool-cache/src/tool-cache.ts#L23 function createTempDirectory() { return __awaiter(this, void 0, void 0, function* () { const IS_WINDOWS = process.platform === 'win32'; let tempDirectory = process.env['RUNNER_TEMP'] || ''; if (!tempDirectory) { let baseLocation; if (IS_WINDOWS) { // On Windows use the USERPROFILE env variable baseLocation = process.env['USERPROFILE'] || 'C:\\'; } else { if (process.platform === 'darwin') { baseLocation = '/Users'; } else { baseLocation = '/home'; } } tempDirectory = path.join(baseLocation, 'actions', 'temp'); } const dest = path.join(tempDirectory, (0, uuid_1.v4)()); yield io.mkdirP(dest); return dest; }); } exports.createTempDirectory = createTempDirectory; function getArchiveFileSizeInBytes(filePath) { return fs.statSync(filePath).size; } exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes; function resolvePaths(patterns) { var _a, e_1, _b, _c; var _d; return __awaiter(this, void 0, void 0, function* () { const paths = []; const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd(); const globber = yield glob.create(patterns.join('\n'), { implicitDescendants: false }); try { for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a; _e = true) { _c = _g.value; _e = false; const file = _c; const relativeFile = path .relative(workspace, file) .replace(new RegExp(`\\${path.sep}`, 'g'), '/'); core.debug(`Matched: ${relativeFile}`); // Paths are made relative so the tar entries are all relative to the root of the workspace. 
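// Illustrative example (paths here are assumptions, not taken from the bundle): with
// GITHUB_WORKSPACE=/home/runner/work/repo/repo, a glob match at /home/runner/work/repo/repo/node_modules/.cache
// resolves to "node_modules/.cache"; on Windows, backslash separators are rewritten to "/" so tar entries stay portable.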
if (relativeFile === '') { // path.relative returns empty string if workspace and file are equal paths.push('.'); } else { paths.push(`${relativeFile}`); } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_e && !_a && (_b = _f.return)) yield _b.call(_f); } finally { if (e_1) throw e_1.error; } } return paths; }); } exports.resolvePaths = resolvePaths; function unlinkFile(filePath) { return __awaiter(this, void 0, void 0, function* () { return util.promisify(fs.unlink)(filePath); }); } exports.unlinkFile = unlinkFile; function getVersion(app, additionalArgs = []) { return __awaiter(this, void 0, void 0, function* () { let versionOutput = ''; additionalArgs.push('--version'); core.debug(`Checking ${app} ${additionalArgs.join(' ')}`); try { yield exec.exec(`${app}`, additionalArgs, { ignoreReturnCode: true, silent: true, listeners: { stdout: (data) => (versionOutput += data.toString()), stderr: (data) => (versionOutput += data.toString()) } }); } catch (err) { core.debug(err.message); } versionOutput = versionOutput.trim(); core.debug(versionOutput); return versionOutput; }); } // Use zstandard if possible to maximize cache performance function getCompressionMethod() { return __awaiter(this, void 0, void 0, function* () { const versionOutput = yield getVersion('zstd', ['--quiet']); const version = semver.clean(versionOutput); core.debug(`zstd version: ${version}`); if (versionOutput === '') { return constants_1.CompressionMethod.Gzip; } else { return constants_1.CompressionMethod.ZstdWithoutLong; } }); } exports.getCompressionMethod = getCompressionMethod; function getCacheFileName(compressionMethod) { return compressionMethod === constants_1.CompressionMethod.Gzip ? constants_1.CacheFilename.Gzip : constants_1.CacheFilename.Zstd; } exports.getCacheFileName = getCacheFileName; function getGnuTarPathOnWindows() { return __awaiter(this, void 0, void 0, function* () { if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { return constants_1.GnuTarPathOnWindows; } const versionOutput = yield getVersion('tar'); return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; }); } exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; function assertDefined(name, value) { if (value === undefined) { throw Error(`Expected ${name} but value was undefiend`); } return value; } exports.assertDefined = assertDefined; function isGhes() { const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; } exports.isGhes = isGhes; function streamToBuffer(stream) { return new Promise((resolve, reject) => { const buffer = []; stream.on('data', (chunk) => buffer.push(chunk)); stream.on('error', reject); stream.on('end', () => resolve(Buffer.concat(buffer))); }); } exports.streamToBuffer = streamToBuffer; /* * Retrieve the bucket name and object name from the GCS URL * @param gcsURL - The URL for the cache in the format gs:/// */ function retrieveGCSBucketAndObjectName(gcsURL) { const bucketName = gcsURL.split('/')[2]; if (!bucketName || bucketName.length < 2) { throw new Error(`Invalid GCS URL: ${gcsURL}. Should be in the format gs:///`); } const objectName = gcsURL.split('/').slice(3).join('/'); if (!objectName || objectName.length < 1) { throw new Error(`Invalid GCS URL: ${gcsURL}. 
Should be in the format gs:///`); } return { bucketName, objectName }; } exports.retrieveGCSBucketAndObjectName = retrieveGCSBucketAndObjectName; //# sourceMappingURL=cacheUtils.js.map /***/ }), /***/ 31974: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ManifestFilename = exports.TarFilename = exports.SystemTarPathOnWindows = exports.GnuTarPathOnWindows = exports.SocketTimeout = exports.DefaultRetryDelay = exports.DefaultRetryAttempts = exports.ArchiveToolType = exports.CompressionMethod = exports.CacheFilename = void 0; var CacheFilename; (function (CacheFilename) { CacheFilename["Gzip"] = "cache.tgz"; CacheFilename["Zstd"] = "cache.tzst"; })(CacheFilename || (exports.CacheFilename = CacheFilename = {})); var CompressionMethod; (function (CompressionMethod) { CompressionMethod["Gzip"] = "gzip"; // Long range mode was added to zstd in v1.3.2. // This enum is for earlier version of zstd that does not have --long support CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; CompressionMethod["Zstd"] = "zstd"; })(CompressionMethod || (exports.CompressionMethod = CompressionMethod = {})); var ArchiveToolType; (function (ArchiveToolType) { ArchiveToolType["GNU"] = "gnu"; ArchiveToolType["BSD"] = "bsd"; })(ArchiveToolType || (exports.ArchiveToolType = ArchiveToolType = {})); // The default number of retry attempts. exports.DefaultRetryAttempts = 2; // The default delay in milliseconds between retry attempts. exports.DefaultRetryDelay = 5000; // Socket timeout in milliseconds during download. If no traffic is received // over the socket during this period, the socket is destroyed and the download // is aborted. exports.SocketTimeout = 5000; // The default path of GNUtar on hosted Windows runners exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; // The default path of BSDtar on hosted Windows runners exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; exports.TarFilename = 'cache.tar'; exports.ManifestFilename = 'manifest.txt'; //# sourceMappingURL=constants.js.map /***/ }), /***/ 87574: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getDownloadCommandPipeForWget = exports.downloadCacheStreamingGCP = exports.downloadCacheGCP = exports.downloadCacheMultipartGCP = exports.downloadCacheMultiConnection = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0; const core = __importStar(__nccwpck_require__(37484)); const http_client_1 = __nccwpck_require__(54844); const fs = __importStar(__nccwpck_require__(79896)); const stream = __importStar(__nccwpck_require__(2203)); const util = __importStar(__nccwpck_require__(39023)); const utils = __importStar(__nccwpck_require__(40500)); const constants_1 = __nccwpck_require__(31974); const requestUtils_1 = __nccwpck_require__(85077); const storage_1 = __nccwpck_require__(28525); const child_process_1 = __nccwpck_require__(35317); /** * Pipes the body of a HTTP response to a stream * * @param response the HTTP response * @param output the writable stream */ function pipeResponseToStream(response, output, progress) { return __awaiter(this, void 0, void 0, function* () { const pipeline = util.promisify(stream.pipeline); yield pipeline(response.message, new stream.Transform({ transform(chunk, encoding, callback) { if (progress) { progress.setReceivedBytes(progress.getTransferredBytes() + chunk.length); } this.push(chunk); callback(); } }), output); }); } /** * Class for tracking the download state and displaying stats. */ class DownloadProgress { constructor(contentLength) { this.contentLength = contentLength; this.segmentIndex = 0; this.segmentSize = 0; this.segmentOffset = 0; this.receivedBytes = 0; this.displayedComplete = false; this.startTime = Date.now(); } /** * Progress to the next segment. Only call this method when the previous segment * is complete. * * @param segmentSize the length of the next segment */ nextSegment(segmentSize) { this.segmentOffset = this.segmentOffset + this.segmentSize; this.segmentIndex = this.segmentIndex + 1; this.segmentSize = segmentSize; this.receivedBytes = 0; core.debug(`Downloading segment at offset ${this.segmentOffset} with length ${this.segmentSize}...`); } /** * Sets the number of bytes received for the current segment. * * @param receivedBytes the number of bytes received */ setReceivedBytes(receivedBytes) { this.receivedBytes = receivedBytes; } /** * Returns the total number of bytes transferred. */ getTransferredBytes() { return this.segmentOffset + this.receivedBytes; } /** * Returns true if the download is complete. */ isDone() { return this.getTransferredBytes() === this.contentLength; } /** * Prints the current download stats. Once the download completes, this will print one * last line and then stop. 
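* Example output line (illustrative values): "Received 52428800 of 104857600 (50.0%), 25.0 MBs/sec".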
*/ display() { if (this.displayedComplete) { return; } const transferredBytes = this.segmentOffset + this.receivedBytes; const percentage = (100 * (transferredBytes / this.contentLength)).toFixed(1); const elapsedTime = Date.now() - this.startTime; const downloadSpeed = (transferredBytes / (1024 * 1024) / (elapsedTime / 1000)).toFixed(1); core.info(`Received ${transferredBytes} of ${this.contentLength} (${percentage}%), ${downloadSpeed} MBs/sec`); if (this.isDone()) { this.displayedComplete = true; } } /** * Returns a function used to handle TransferProgressEvents. */ onProgress() { return (progress) => { this.setReceivedBytes(progress.loadedBytes); }; } /** * Starts the timer that displays the stats. * * @param delayInMs the delay between each write */ startDisplayTimer(delayInMs = 1000) { const displayCallback = () => { this.display(); if (!this.isDone()) { this.timeoutHandle = setTimeout(displayCallback, delayInMs); } }; this.timeoutHandle = setTimeout(displayCallback, delayInMs); } /** * Stops the timer that displays the stats. As this typically indicates the download * is complete, this will display one last line, unless the last line has already * been written. */ stopDisplayTimer() { if (this.timeoutHandle) { clearTimeout(this.timeoutHandle); this.timeoutHandle = undefined; } this.display(); } } exports.DownloadProgress = DownloadProgress; /** * Download the cache using the Actions toolkit http-client * * @param archiveLocation the URL for the cache * @param archivePath the local path where the cache is saved */ function downloadCacheHttpClient(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const writeStream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient('actions/cache'); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); })); // Abort download if no traffic received over the socket. downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); }); yield pipeResponseToStream(downloadResponse, writeStream); // Validate download size. const contentLengthHeader = downloadResponse.message.headers['content-length']; if (contentLengthHeader) { const expectedLength = parseInt(contentLengthHeader); const actualLength = utils.getArchiveFileSizeInBytes(archivePath); if (actualLength !== expectedLength) { throw new Error(`Incomplete download. 
Expected file size: ${expectedLength}, actual file size: ${actualLength}`); } } else { core.debug('Unable to validate download, no Content-Length header'); } }); } exports.downloadCacheHttpClient = downloadCacheHttpClient; /** * Download the cache using the Actions toolkit http-client with multiple connections * * @param archiveLocation the URL for the cache * @param archivePath the local path where the cache is saved * @param connections number of connections to use */ function downloadCacheMultiConnection(archiveLocation, archivePath, connections) { return __awaiter(this, void 0, void 0, function* () { let fileHandle = null; let downloadProgress = null; try { fileHandle = yield fs.promises.open(archivePath, 'w+'); const httpClient = new http_client_1.HttpClient('actions/cache'); //Request 1 byte to get total content size const metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation, { Range: 'bytes=0-1' }); })); const contentRange = metadataResponse.message.headers['content-range']; if (!contentRange) { console.log(yield metadataResponse.readBody()); throw new Error('Range request not supported by server'); } const match = RegExp(/bytes \d+-\d+\/(\d+)/).exec(contentRange); if (!match) { throw new Error('Content-Range header in server response not in correct format'); } const totalLength = parseInt(match[1]); yield fileHandle.truncate(totalLength); yield fileHandle.sync(); downloadProgress = new DownloadProgress(totalLength); downloadProgress.startDisplayTimer(); const segmentSize = Math.ceil(totalLength / connections); const promises = []; for (let i = 0; i < connections; i++) { promises.push((() => __awaiter(this, void 0, void 0, function* () { const rangeStart = i * segmentSize; const rangeEnd = Math.min((i + 1) * segmentSize - 1, totalLength - 1); const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation, { Range: `bytes=${rangeStart}-${rangeEnd}` }); })); const writeStream = fs.createWriteStream(archiveLocation, { fd: fileHandle.fd, autoClose: false, start: rangeStart }); yield pipeResponseToStream(downloadResponse, writeStream, downloadProgress); }))()); } yield Promise.all(promises); } finally { downloadProgress === null || downloadProgress === void 0 ? void 0 : downloadProgress.stopDisplayTimer(); yield (fileHandle === null || fileHandle === void 0 ? 
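// Worked example for the range math above (illustrative numbers): with totalLength = 104857600 bytes and
// connections = 8, segmentSize = Math.ceil(104857600 / 8) = 13107200, so the first request uses
// "Range: bytes=0-13107199", the second "Range: bytes=13107200-26214399", and the last segment is clamped to totalLength - 1.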
void 0 : fileHandle.close()); } }); } exports.downloadCacheMultiConnection = downloadCacheMultiConnection; /** * Download cache in multipart using the Gcloud SDK * * @param archiveLocation the URL for the cache */ function downloadCacheMultipartGCP(storage, archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { try { const { bucketName, objectName } = utils.retrieveGCSBucketAndObjectName(archiveLocation); const transferManager = new storage_1.TransferManager(storage.bucket(bucketName)); yield transferManager.downloadFileInChunks(objectName, { destination: archivePath, noReturnData: true, validation: 'crc32c' }); } catch (error) { core.debug(`Failed to download cache: ${error}`); throw error; } }); } exports.downloadCacheMultipartGCP = downloadCacheMultipartGCP; function downloadCacheGCP(storage, archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { try { const timeoutDuration = 300000; // 5 minutes const timeoutPromise = new Promise((_, reject) => setTimeout(() => reject(new Error('Download timed out')), timeoutDuration)); const { bucketName, objectName } = utils.retrieveGCSBucketAndObjectName(archiveLocation); const downloadPromise = storage .bucket(bucketName) .file(objectName) .download({ destination: archivePath, validation: 'crc32c' }); try { yield Promise.race([downloadPromise, timeoutPromise]); core.debug(`Download completed for bucket: ${bucketName}, object: ${objectName}`); } catch (error) { core.debug(`Failed to download cache: ${error}`); throw error; } } catch (error) { core.debug(`Failed to download cache: ${error}`); throw error; } }); } exports.downloadCacheGCP = downloadCacheGCP; /** * Download the cache to a provider writable stream using GCloud SDK * * @param archiveLocation the URL for the cache */ function downloadCacheStreamingGCP(storage, archiveLocation) { try { // The archiveLocation for GCP will be in the format of gs:/// const { bucketName, objectName } = utils.retrieveGCSBucketAndObjectName(archiveLocation); storage .bucket(bucketName) .file(objectName) .getMetadata() .then(data => { var _a; core.info(`File size: ${(_a = data[0]) === null || _a === void 0 ? void 0 : _a.size} bytes`); }); return storage.bucket(bucketName).file(objectName).createReadStream(); } catch (error) { core.debug(`Failed to download cache: ${error}`); throw error; } } exports.downloadCacheStreamingGCP = downloadCacheStreamingGCP; function getDownloadCommandPipeForWget(url) { return (0, child_process_1.spawn)('wget', ['-qO', '-', url]); } exports.getDownloadCommandPipeForWget = getDownloadCommandPipeForWget; //# sourceMappingURL=downloadUtils.js.map /***/ }), /***/ 85077: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.retryHttpClientResponse = exports.retryTypedResponse = exports.retry = exports.isRetryableStatusCode = exports.isServerErrorStatusCode = exports.isSuccessStatusCode = void 0; const core = __importStar(__nccwpck_require__(37484)); const http_client_1 = __nccwpck_require__(54844); const constants_1 = __nccwpck_require__(31974); function isSuccessStatusCode(statusCode) { if (!statusCode) { return false; } return statusCode >= 200 && statusCode < 300; } exports.isSuccessStatusCode = isSuccessStatusCode; function isServerErrorStatusCode(statusCode) { if (!statusCode) { return true; } return statusCode >= 500; } exports.isServerErrorStatusCode = isServerErrorStatusCode; function isRetryableStatusCode(statusCode) { if (!statusCode) { return false; } const retryableStatusCodes = [ http_client_1.HttpCodes.BadGateway, http_client_1.HttpCodes.ServiceUnavailable, http_client_1.HttpCodes.GatewayTimeout ]; return retryableStatusCodes.includes(statusCode); } exports.isRetryableStatusCode = isRetryableStatusCode; function sleep(milliseconds) { return __awaiter(this, void 0, void 0, function* () { return new Promise(resolve => setTimeout(resolve, milliseconds)); }); } function retry(name, method, getStatusCode, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay, onError = undefined) { return __awaiter(this, void 0, void 0, function* () { let errorMessage = ''; let attempt = 1; while (attempt <= maxAttempts) { let response = undefined; let statusCode = undefined; let isRetryable = false; try { response = yield method(); } catch (error) { if (onError) { response = onError(error); } isRetryable = true; errorMessage = error.message; } if (response) { statusCode = getStatusCode(response); if (!isServerErrorStatusCode(statusCode)) { return response; } } if (statusCode) { isRetryable = isRetryableStatusCode(statusCode); errorMessage = `Cache service responded with ${statusCode}`; } core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); if (!isRetryable) { core.debug(`${name} - Error is not retryable`); break; } yield sleep(delay); attempt++; } throw Error(`${name} failed: ${errorMessage}`); }); } exports.retry = retry; function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { return __awaiter(this, void 0, void 0, function* () { return 
yield retry(name, method, (response) => response.statusCode, maxAttempts, delay, // If the error object contains the statusCode property, extract it and return // an TypedResponse so it can be processed by the retry logic. (error) => { var _a; if (error instanceof http_client_1.HttpClientError) { return { statusCode: error.statusCode, result: (_a = error.result) !== null && _a !== void 0 ? _a : null, headers: {}, error }; } else { return undefined; } }); }); } exports.retryTypedResponse = retryTypedResponse; function retryHttpClientResponse(name, method, maxAttempts = constants_1.DefaultRetryAttempts, delay = constants_1.DefaultRetryDelay) { return __awaiter(this, void 0, void 0, function* () { return yield retry(name, method, (response) => response.message.statusCode, maxAttempts, delay); }); } exports.retryHttpClientResponse = retryHttpClientResponse; //# sourceMappingURL=requestUtils.js.map /***/ }), /***/ 47356: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createTar = exports.extractStreamingTar = exports.extractTar = exports.listTar = void 0; const exec_1 = __nccwpck_require__(95236); const io = __importStar(__nccwpck_require__(94994)); const fs_1 = __nccwpck_require__(79896); const path = __importStar(__nccwpck_require__(16928)); const utils = __importStar(__nccwpck_require__(40500)); const constants_1 = __nccwpck_require__(31974); const child_process_1 = __nccwpck_require__(35317); const IS_WINDOWS = process.platform === 'win32'; var TAR_MODE; (function (TAR_MODE) { TAR_MODE["CREATE"] = "create"; TAR_MODE["EXTRACT"] = "extract"; TAR_MODE["EXTRACT_STREAM"] = "extractStream"; TAR_MODE["LIST"] = "list"; })(TAR_MODE || (TAR_MODE = {})); // Returns tar path and type: BSD or GNU function getTarPath() { return __awaiter(this, void 0, void 0, function* () { switch (process.platform) { case 'win32': { const gnuTar = yield utils.getGnuTarPathOnWindows(); const systemTar = constants_1.SystemTarPathOnWindows; if (gnuTar) { // Use GNUtar as default on windows return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else if ((0, fs_1.existsSync)(systemTar)) { return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; } break; } case 'darwin': { const gnuTar = yield io.which('gtar', false); if (gnuTar) { // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527 return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; } else { return { path: yield io.which('tar', true), type: constants_1.ArchiveToolType.BSD }; } } default: break; } // Default assumption is GNU tar is present in path return { path: yield io.which('tar', true), type: constants_1.ArchiveToolType.GNU }; }); } // Return arguments for tar as per tarPath, compressionMethod, method type and os function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { const args = [`"${tarPath.path}"`]; const cacheFileName = utils.getCacheFileName(compressionMethod); const tarFile = 'cache.tar'; const workingDirectory = getWorkingDirectory(); // Speficic args for BSD tar on windows for workaround const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; // Method specific args switch (type) { case TAR_MODE.CREATE: args.push('--posix', '-cf', BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD ? tarFile : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); break; case TAR_MODE.EXTRACT: args.push('-xf', BSD_TAR_ZSTD ? tarFile : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); break; case TAR_MODE.EXTRACT_STREAM: args.push('-xf', '-', '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); break; case TAR_MODE.LIST: args.push('-tf', BSD_TAR_ZSTD ? 
tarFile : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); break; } // Platform specific args if (tarPath.type === constants_1.ArchiveToolType.GNU) { switch (process.platform) { case 'win32': args.push('--force-local'); break; case 'darwin': args.push('--delay-directory-restore'); break; } } return args; }); } // Returns commands to run tar and compression program function getCommands(compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { let args; const tarPath = yield getTarPath(); const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); const compressionArgs = type !== TAR_MODE.CREATE ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) : yield getCompressionProgram(tarPath, compressionMethod); const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; if (BSD_TAR_ZSTD && type !== TAR_MODE.CREATE) { args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; } else { args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; } if (BSD_TAR_ZSTD) { return args; } return [args.join(' ')]; }); } /* * Returns command pipes to stream data to tar and compression program. * Only supports tar and zstd at the moment * @returns Array of ChildProcessWithoutNullStreams. Pipe to the processes in the order they are returned */ function getCommandPipes(compressionMethod, type, archivePath = '') { return __awaiter(this, void 0, void 0, function* () { const spawnedProcesses = []; const tarPath = yield getTarPath(); const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); // Remove tar executable from tarArgs tarArgs.shift(); let zstdInfo = type !== TAR_MODE.CREATE ? yield getDecompressionProgramStream(tarPath, compressionMethod) : yield getCompressionProgramStream(tarPath, compressionMethod); const zstdProcess = (0, child_process_1.spawn)(zstdInfo.command, zstdInfo.args); spawnedProcesses.push(zstdProcess); const tarProcess = (0, child_process_1.spawn)(tarPath.path, tarArgs); spawnedProcesses.push(tarProcess); return spawnedProcesses; }); } function getWorkingDirectory() { var _a; return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); } // Common function for extractTar and listTar to get the compression method function getDecompressionProgram(tarPath, compressionMethod, archivePath) { return __awaiter(this, void 0, void 0, function* () { // -d: Decompress. // unzstd is equivalent to 'zstd -d' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // Using 30 here because we also support 32-bit self-hosted runners. const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ 'zstd -d --long=30 --force -o', constants_1.TarFilename, archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : [ '--use-compress-program', IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' ]; case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ 'zstd -d --force -o', constants_1.TarFilename, archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') ] : ['--use-compress-program', IS_WINDOWS ? 
'"zstd -d"' : 'unzstd']; default: return ['-z']; } }); } // Alternative to getDecompressionProgram which returns zstd that command that can be piped into function getDecompressionProgramStream(tarPath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? { command: 'zstd', args: ['-d', '--long=30', '--force', '--stdout'] } : { command: IS_WINDOWS ? 'zstd' : 'unzstd', args: IS_WINDOWS ? ['-d', '--long=30', '--stdout', '-T0'] : ['--long=30', '--stdout', '-T0'] }; case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? { command: 'zstd', args: ['-d', '--force', '--stdout'] } : { command: IS_WINDOWS ? 'zstd' : 'unzstd', args: ['-d', '--stdout', '-T0'] }; default: // Assuming gzip is the default method if none specified return { command: 'gzip', args: ['-d'] }; } }); } // Used for creating the archive // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. // zstdmt is equivalent to 'zstd -T0' // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. // Using 30 here because we also support 32-bit self-hosted runners. // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd. function getCompressionProgram(tarPath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { const cacheFileName = utils.getCacheFileName(compressionMethod); const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? [ 'zstd -T0 --long=30 --force -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename ] : [ '--use-compress-program', IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' ]; case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? [ 'zstd -T0 --force -o', cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), constants_1.TarFilename ] : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; default: return ['-z']; } }); } function getCompressionProgramStream(tarPath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && compressionMethod !== constants_1.CompressionMethod.Gzip && IS_WINDOWS; switch (compressionMethod) { case constants_1.CompressionMethod.Zstd: return BSD_TAR_ZSTD ? { command: 'zstd', args: ['-T0', '--long=30', '--force', '--stdout'] } : { command: IS_WINDOWS ? 'zstd' : 'zstdmt', args: IS_WINDOWS ? ['-T0', '--long=30', '--stdout', '-T0'] : ['--long=30', '--stdout', '-T0'] }; case constants_1.CompressionMethod.ZstdWithoutLong: return BSD_TAR_ZSTD ? { command: 'zstd', args: ['-T0', '--force', '--stdout'] } : { command: IS_WINDOWS ? 
'zstd' : 'zstdmt', args: ['-T0', '--stdout'] }; default: // Assuming gzip is the default method if none specified return { command: 'gzip', args: [] }; } }); } // Executes all commands as separate processes function execCommands(commands, cwd) { return __awaiter(this, void 0, void 0, function* () { for (const command of commands) { try { yield (0, exec_1.exec)(command, undefined, { cwd, env: Object.assign(Object.assign({}, process.env), { MSYS: 'winsymlinks:nativestrict' }) }); } catch (error) { throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); } } }); } // List the contents of a tar function listTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { const commands = yield getCommands(compressionMethod, TAR_MODE.LIST, archivePath); yield execCommands(commands); }); } exports.listTar = listTar; function extractTar(archivePath, compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); const commands = yield getCommands(compressionMethod, TAR_MODE.EXTRACT, archivePath); yield execCommands(commands); }); } exports.extractTar = extractTar; /* * NOTE: Currently tested only on archives created using tar and zstd */ function extractStreamingTar(stream, archivePath, compressionMethod, downloadCommandPipe) { return __awaiter(this, void 0, void 0, function* () { const workingDirectory = getWorkingDirectory(); yield io.mkdirP(workingDirectory); const commandPipes = yield getCommandPipes(compressionMethod, TAR_MODE.EXTRACT_STREAM, archivePath); if (downloadCommandPipe) { commandPipes.unshift(downloadCommandPipe); } if (commandPipes.length < 2) { throw new Error('At least two processes should be present as the archive is compressed at least twice.'); } return new Promise((resolve, reject) => { const handleStreamError = (stream, commandName) => { stream.on('error', error => { reject(new Error(`Error in ${commandName}: ${error.message}`)); }); }; // Attach error handlers and pipe the streams commandPipes.forEach(commandPipe => { handleStreamError(commandPipe.stdin, commandPipe.spawnfile); handleStreamError(commandPipe.stdout, commandPipe.spawnfile); handleStreamError(commandPipe.stderr, commandPipe.spawnfile); commandPipe.stderr.on('data', data => { reject(new Error(`Error in ${commandPipe.spawnfile}: ${data.toString()}`)); }); }); if (stream) { stream.pipe(commandPipes[0].stdin).on('error', error => { reject(new Error(`Error piping to ${commandPipes[0].spawnfile}: ${error.message}`)); }); } for (let i = 0; i < commandPipes.length - 1; i++) { commandPipes[i].stdout .pipe(commandPipes[i + 1].stdin) .on('error', error => { reject(new Error(`Error piping between ${commandPipes[i].spawnfile} and ${commandPipes[i + 1].spawnfile}: ${error.message}`)); }); } const lastCommand = commandPipes[commandPipes.length - 1]; lastCommand.stderr.on('data', data => { console.error(`Error in ${lastCommand.spawnfile}:`, data.toString()); reject(new Error(`Error in ${lastCommand.spawnfile}: ${data.toString()}`)); }); lastCommand.on('close', code => { if (code === 0) { resolve(); } else { reject(new Error(`Last command exited with code ${code}`)); } }); lastCommand.on('error', error => { reject(new Error(`Error in ${lastCommand.spawnfile}: ${error.message}`)); }); }); }); } exports.extractStreamingTar = extractStreamingTar; function createTar(archiveFolder, sourceDirectories, 
compressionMethod) { return __awaiter(this, void 0, void 0, function* () { // Write source directories to manifest.txt to avoid command length limits (0, fs_1.writeFileSync)(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); const commands = yield getCommands(compressionMethod, TAR_MODE.CREATE); yield execCommands(commands, archiveFolder); }); } exports.createTar = createTar; //# sourceMappingURL=tar.js.map /***/ }), /***/ 89149: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.multiPartUploadToGCS = exports.uploadFileToS3 = void 0; const core = __importStar(__nccwpck_require__(37484)); const utils = __importStar(__nccwpck_require__(40500)); const os = __importStar(__nccwpck_require__(70857)); const fs_1 = __importDefault(__nccwpck_require__(79896)); const axios_1 = __importDefault(__nccwpck_require__(87269)); const storage_1 = __nccwpck_require__(28525); function getContentRange(start, end) { // Format: `bytes start-end/filesize // start and end are inclusive // filesize can be * // For a 200 byte chunk starting at byte 0: // Content-Range: bytes 0-199/* return `bytes ${start}-${end}/*`; } function uploadChunk(resourceUrl, openStream, partNumber, start, end) { var _a, _b, _c; return __awaiter(this, void 0, void 0, function* () { core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); // Manually convert the readable stream to a buffer. 
S3 doesn't allow stream as input const chunks = yield utils.streamToBuffer(openStream()); try { // HACK: Using axios here as S3 API doesn't allow readable stream as input and Github's HTTP client is not able to send buffer as body const response = yield axios_1.default.request({ method: 'PUT', url: resourceUrl, headers: { 'Content-Type': 'application/octet-stream' }, data: chunks }); return { ETag: (_a = response.headers.etag) !== null && _a !== void 0 ? _a : '', PartNumber: partNumber }; } catch (error) { core.debug(JSON.stringify(error)); core.debug(JSON.stringify((_b = error.response) === null || _b === void 0 ? void 0 : _b.data)); throw new Error(`Cache service responded with ${(_c = error.response) === null || _c === void 0 ? void 0 : _c.status} during upload chunk.`); } }); } function uploadFileToS3(preSignedURLs, archivePath) { return __awaiter(this, void 0, void 0, function* () { const fileSize = utils.getArchiveFileSizeInBytes(archivePath); const numberOfChunks = preSignedURLs.length; let concurrency = 4; // Adjust concurrency based on the number of cpu cores if (os.cpus().length > 4) { concurrency = 8; } const fd = fs_1.default.openSync(archivePath, 'r'); core.debug(`Awaiting all uploads with concurrency limit of ${concurrency}`); let offset = 0; const completedParts = []; try { for (let i = 0; i < numberOfChunks; i += concurrency) { const batch = preSignedURLs .slice(i, i + concurrency) .map((presignedURL, index) => __awaiter(this, void 0, void 0, function* () { const chunkIndex = i + index; const chunkSize = Math.ceil(fileSize / numberOfChunks); const start = offset; const end = offset + chunkSize - 1; offset += chunkSize; return uploadChunk(presignedURL, () => fs_1.default .createReadStream(archivePath, { fd, start, end, autoClose: false }) .on('error', error => { core.debug(JSON.stringify(error)); throw new Error(`Cache upload failed because file read failed with ${error.message}`); }), chunkIndex + 1, start, end); })); const batchResults = yield Promise.all(batch); completedParts.push(...batchResults); } return completedParts; } finally { fs_1.default.closeSync(fd); } }); } exports.uploadFileToS3 = uploadFileToS3; /* * Uploads the cache to GCS * @param localArchivePath - The path to the cache archive * @param bucketName - The name of the bucket in GCS * @param objectName - The name of the object in GCS */ function multiPartUploadToGCS(storage, localArchivePath, bucketName, objectName) { return __awaiter(this, void 0, void 0, function* () { try { const transferManager = new storage_1.TransferManager(storage.bucket(bucketName)); yield transferManager.uploadFileInChunks(localArchivePath, { uploadName: objectName }); } catch (error) { core.debug(JSON.stringify(error)); throw new Error(`Failed to upload to GCS: ${error}`); } }); } exports.multiPartUploadToGCS = multiPartUploadToGCS; //# sourceMappingURL=uploadUtils.js.map /***/ }), /***/ 3347: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
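// Editor's note (illustrative sketch, not part of the original bundle): uploadFileToS3 above
// splits the archive into numberOfChunks equal slices and uploads them in batches of
// `concurrency` (4 by default, 8 when the machine has more than 4 CPU cores). A worked
// example, assuming a 100 MB archive and 8 pre-signed URLs:
//
//   // chunkSize = Math.ceil(104857600 / 8) = 13107200 bytes
//   // chunk i covers bytes [i * 13107200, (i + 1) * 13107200 - 1], read via createReadStream
//   // batches: chunks 0-3 uploaded concurrently, then chunks 4-7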
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getDownloadOptions = exports.getUploadOptions = void 0; const core = __importStar(__nccwpck_require__(37484)); /** * Returns a copy of the upload options with defaults filled in. * * @param copy the original upload options */ function getUploadOptions(copy) { const result = { uploadConcurrency: 4, uploadChunkSize: 32 * 1024 * 1024 }; if (copy) { if (typeof copy.uploadConcurrency === 'number') { result.uploadConcurrency = copy.uploadConcurrency; } if (typeof copy.uploadChunkSize === 'number') { result.uploadChunkSize = copy.uploadChunkSize; } } core.debug(`Upload concurrency: ${result.uploadConcurrency}`); core.debug(`Upload chunk size: ${result.uploadChunkSize}`); return result; } exports.getUploadOptions = getUploadOptions; /** * Returns a copy of the download options with defaults filled in. * * @param copy the original download options */ function getDownloadOptions(copy) { const result = { useAzureSdk: false, concurrentBlobDownloads: true, downloadConcurrency: 8, timeoutInMs: 30000, segmentTimeoutInMs: 600000, lookupOnly: false }; if (copy) { if (typeof copy.useAzureSdk === 'boolean') { result.useAzureSdk = copy.useAzureSdk; } if (typeof copy.concurrentBlobDownloads === 'boolean') { result.concurrentBlobDownloads = copy.concurrentBlobDownloads; } if (typeof copy.downloadConcurrency === 'number') { result.downloadConcurrency = copy.downloadConcurrency; } if (typeof copy.timeoutInMs === 'number') { result.timeoutInMs = copy.timeoutInMs; } if (typeof copy.segmentTimeoutInMs === 'number') { result.segmentTimeoutInMs = copy.segmentTimeoutInMs; } if (typeof copy.lookupOnly === 'boolean') { result.lookupOnly = copy.lookupOnly; } } const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']; if (segmentDownloadTimeoutMins && !isNaN(Number(segmentDownloadTimeoutMins)) && isFinite(Number(segmentDownloadTimeoutMins))) { result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000; } core.debug(`Use Azure SDK: ${result.useAzureSdk}`); core.debug(`Download concurrency: ${result.downloadConcurrency}`); core.debug(`Request timeout (ms): ${result.timeoutInMs}`); core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`); core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`); core.debug(`Lookup only: ${result.lookupOnly}`); return result; } exports.getDownloadOptions = getDownloadOptions; //# sourceMappingURL=options.js.map /***/ }), /***/ 97507: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
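// Editor's note (illustrative sketch, not part of the original bundle): getDownloadOptions
// above fills in defaults (useAzureSdk: false, concurrentBlobDownloads: true,
// downloadConcurrency: 8, timeoutInMs: 30000, segmentTimeoutInMs: 600000, lookupOnly: false)
// and lets the SEGMENT_DOWNLOAD_TIMEOUT_MINS environment variable override the segment
// timeout. For example (values hypothetical):
//
//   // process.env.SEGMENT_DOWNLOAD_TIMEOUT_MINS = '15'
//   // getDownloadOptions({ downloadConcurrency: 16 })
//   //   -> { ..., downloadConcurrency: 16, segmentTimeoutInMs: 15 * 60 * 1000 = 900000 }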
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.create = void 0; const internal_globber_1 = __nccwpck_require__(80946); /** * Constructs a globber * * @param patterns Patterns separated by newlines * @param options Glob options */ function create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { return yield internal_globber_1.DefaultGlobber.create(patterns, options); }); } exports.create = create; //# sourceMappingURL=glob.js.map /***/ }), /***/ 28305: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOptions = void 0; const core = __importStar(__nccwpck_require__(37484)); /** * Returns a copy with defaults filled in. */ function getOptions(copy) { const result = { followSymbolicLinks: true, implicitDescendants: true, omitBrokenSymbolicLinks: true }; if (copy) { if (typeof copy.followSymbolicLinks === 'boolean') { result.followSymbolicLinks = copy.followSymbolicLinks; core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); } if (typeof copy.implicitDescendants === 'boolean') { result.implicitDescendants = copy.implicitDescendants; core.debug(`implicitDescendants '${result.implicitDescendants}'`); } if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); } } return result; } exports.getOptions = getOptions; //# sourceMappingURL=internal-glob-options-helper.js.map /***/ }), /***/ 80946: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
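// Editor's note (illustrative sketch, not part of the original bundle): `create` above takes
// newline-separated patterns and resolves them through DefaultGlobber. A hedged usage sketch
// (the paths are hypothetical):
//
//   // const globber = await create('dist/**\n!dist/**/*.map');
//   // const files = await globber.glob();   // absolute paths matching the patterns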
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __asyncValues = (this && this.__asyncValues) || function (o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m = o[Symbol.asyncIterator], i; return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var g = generator.apply(thisArg, _arguments || []), i, q = []; return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } function fulfill(value) { resume("next", value); } function reject(value) { resume("throw", value); } function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DefaultGlobber = void 0; const core = __importStar(__nccwpck_require__(37484)); const fs = __importStar(__nccwpck_require__(79896)); const globOptionsHelper = __importStar(__nccwpck_require__(28305)); const path = __importStar(__nccwpck_require__(16928)); const patternHelper = __importStar(__nccwpck_require__(40080)); const internal_match_kind_1 = __nccwpck_require__(57307); const internal_pattern_1 = __nccwpck_require__(32607); const internal_search_state_1 = __nccwpck_require__(58633); const IS_WINDOWS = process.platform === 'win32'; class DefaultGlobber { constructor(options) { this.patterns = []; this.searchPaths = []; this.options = globOptionsHelper.getOptions(options); } getSearchPaths() { // Return a copy return this.searchPaths.slice(); } glob() { var e_1, _a; return __awaiter(this, void 0, void 0, function* () { const result = []; try { for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { const itemPath = _c.value; result.push(itemPath); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); } finally { if (e_1) throw e_1.error; } } return result; }); } globGenerator() { return __asyncGenerator(this, arguments, function* globGenerator_1() { // Fill in defaults options const options = globOptionsHelper.getOptions(this.options); // Implicit descendants? const patterns = []; for (const pattern of this.patterns) { patterns.push(pattern); if (options.implicitDescendants && (pattern.trailingSeparator || pattern.segments[pattern.segments.length - 1] !== '**')) { patterns.push(new internal_pattern_1.Pattern(pattern.negate, true, pattern.segments.concat('**'))); } } // Push the search paths const stack = []; for (const searchPath of patternHelper.getSearchPaths(patterns)) { core.debug(`Search path '${searchPath}'`); // Exists? try { // Intentionally using lstat. Detection for broken symlink // will be performed later (if following symlinks). yield __await(fs.promises.lstat(searchPath)); } catch (err) { if (err.code === 'ENOENT') { continue; } throw err; } stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); } // Search const traversalChain = []; // used to detect cycles while (stack.length) { // Pop const item = stack.pop(); // Match? const match = patternHelper.match(patterns, item.path); const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); if (!match && !partialMatch) { continue; } // Stat const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) // Broken symlink, or symlink cycle detected, or no longer exists ); // Broken symlink, or symlink cycle detected, or no longer exists if (!stats) { continue; } // Directory if (stats.isDirectory()) { // Matched if (match & internal_match_kind_1.MatchKind.Directory) { yield yield __await(item.path); } // Descend? 
else if (!partialMatch) { continue; } // Push the child items in reverse const childLevel = item.level + 1; const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); stack.push(...childItems.reverse()); } // File else if (match & internal_match_kind_1.MatchKind.File) { yield yield __await(item.path); } } }); } /** * Constructs a DefaultGlobber */ static create(patterns, options) { return __awaiter(this, void 0, void 0, function* () { const result = new DefaultGlobber(options); if (IS_WINDOWS) { patterns = patterns.replace(/\r\n/g, '\n'); patterns = patterns.replace(/\r/g, '\n'); } const lines = patterns.split('\n').map(x => x.trim()); for (const line of lines) { // Empty or comment if (!line || line.startsWith('#')) { continue; } // Pattern else { result.patterns.push(new internal_pattern_1.Pattern(line)); } } result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); return result; }); } static stat(item, options, traversalChain) { return __awaiter(this, void 0, void 0, function* () { // Note: // `stat` returns info about the target of a symlink (or symlink chain) // `lstat` returns info about a symlink itself let stats; if (options.followSymbolicLinks) { try { // Use `stat` (following symlinks) stats = yield fs.promises.stat(item.path); } catch (err) { if (err.code === 'ENOENT') { if (options.omitBrokenSymbolicLinks) { core.debug(`Broken symlink '${item.path}'`); return undefined; } throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); } throw err; } } else { // Use `lstat` (not following symlinks) stats = yield fs.promises.lstat(item.path); } // Note, isDirectory() returns false for the lstat of a symlink if (stats.isDirectory() && options.followSymbolicLinks) { // Get the realpath const realPath = yield fs.promises.realpath(item.path); // Fixup the traversal chain to match the item level while (traversalChain.length >= item.level) { traversalChain.pop(); } // Test for a cycle if (traversalChain.some((x) => x === realPath)) { core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); return undefined; } // Update the traversal chain traversalChain.push(realPath); } return stats; }); } } exports.DefaultGlobber = DefaultGlobber; //# sourceMappingURL=internal-globber.js.map /***/ }), /***/ 57307: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MatchKind = void 0; /** * Indicates whether a pattern matches a path */ var MatchKind; (function (MatchKind) { /** Not matched */ MatchKind[MatchKind["None"] = 0] = "None"; /** Matched if the path is a directory */ MatchKind[MatchKind["Directory"] = 1] = "Directory"; /** Matched if the path is a regular file */ MatchKind[MatchKind["File"] = 2] = "File"; /** Matched */ MatchKind[MatchKind["All"] = 3] = "All"; })(MatchKind = exports.MatchKind || (exports.MatchKind = {})); //# sourceMappingURL=internal-match-kind.js.map /***/ }), /***/ 78195: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? 
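// Editor's note (illustrative sketch, not part of the original bundle): MatchKind above is a
// bit mask, which is why the globber tests matches with `&` rather than `===`:
//
//   // MatchKind.All === (MatchKind.Directory | MatchKind.File) === 3
//   // (match & MatchKind.Directory) !== 0  -> the patterns allow this directory path
//   // (match & MatchKind.File) !== 0       -> the patterns allow this regular file path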
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.safeTrimTrailingSeparator = exports.normalizeSeparators = exports.hasRoot = exports.hasAbsoluteRoot = exports.ensureAbsoluteRoot = exports.dirname = void 0; const path = __importStar(__nccwpck_require__(16928)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const IS_WINDOWS = process.platform === 'win32'; /** * Similar to path.dirname except normalizes the path separators and slightly better handling for Windows UNC paths. * * For example, on Linux/macOS: * - `/ => /` * - `/hello => /` * * For example, on Windows: * - `C:\ => C:\` * - `C:\hello => C:\` * - `C: => C:` * - `C:hello => C:` * - `\ => \` * - `\hello => \` * - `\\hello => \\hello` * - `\\hello\world => \\hello\world` */ function dirname(p) { // Normalize slashes and trim unnecessary trailing slash p = safeTrimTrailingSeparator(p); // Windows UNC root, e.g. \\hello or \\hello\world if (IS_WINDOWS && /^\\\\[^\\]+(\\[^\\]+)?$/.test(p)) { return p; } // Get dirname let result = path.dirname(p); // Trim trailing slash for Windows UNC root, e.g. \\hello\world\ if (IS_WINDOWS && /^\\\\[^\\]+\\[^\\]+\\$/.test(result)) { result = safeTrimTrailingSeparator(result); } return result; } exports.dirname = dirname; /** * Roots the path if not already rooted. On Windows, relative roots like `\` * or `C:` are expanded based on the current working directory. */ function ensureAbsoluteRoot(root, itemPath) { assert_1.default(root, `ensureAbsoluteRoot parameter 'root' must not be empty`); assert_1.default(itemPath, `ensureAbsoluteRoot parameter 'itemPath' must not be empty`); // Already rooted if (hasAbsoluteRoot(itemPath)) { return itemPath; } // Windows if (IS_WINDOWS) { // Check for itemPath like C: or C:foo if (itemPath.match(/^[A-Z]:[^\\/]|^[A-Z]:$/i)) { let cwd = process.cwd(); assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); // Drive letter matches cwd? Expand to cwd if (itemPath[0].toUpperCase() === cwd[0].toUpperCase()) { // Drive only, e.g. C: if (itemPath.length === 2) { // Preserve specified drive letter case (upper or lower) return `${itemPath[0]}:\\${cwd.substr(3)}`; } // Drive + path, e.g. 
C:foo else { if (!cwd.endsWith('\\')) { cwd += '\\'; } // Preserve specified drive letter case (upper or lower) return `${itemPath[0]}:\\${cwd.substr(3)}${itemPath.substr(2)}`; } } // Different drive else { return `${itemPath[0]}:\\${itemPath.substr(2)}`; } } // Check for itemPath like \ or \foo else if (normalizeSeparators(itemPath).match(/^\\$|^\\[^\\]/)) { const cwd = process.cwd(); assert_1.default(cwd.match(/^[A-Z]:\\/i), `Expected current directory to start with an absolute drive root. Actual '${cwd}'`); return `${cwd[0]}:\\${itemPath.substr(1)}`; } } assert_1.default(hasAbsoluteRoot(root), `ensureAbsoluteRoot parameter 'root' must have an absolute root`); // Otherwise ensure root ends with a separator if (root.endsWith('/') || (IS_WINDOWS && root.endsWith('\\'))) { // Intentionally empty } else { // Append separator root += path.sep; } return root + itemPath; } exports.ensureAbsoluteRoot = ensureAbsoluteRoot; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\\hello\share` and `C:\hello` (and using alternate separator). */ function hasAbsoluteRoot(itemPath) { assert_1.default(itemPath, `hasAbsoluteRoot parameter 'itemPath' must not be empty`); // Normalize separators itemPath = normalizeSeparators(itemPath); // Windows if (IS_WINDOWS) { // E.g. \\hello\share or C:\hello return itemPath.startsWith('\\\\') || /^[A-Z]:\\/i.test(itemPath); } // E.g. /hello return itemPath.startsWith('/'); } exports.hasAbsoluteRoot = hasAbsoluteRoot; /** * On Linux/macOS, true if path starts with `/`. On Windows, true for paths like: * `\`, `\hello`, `\\hello\share`, `C:`, and `C:\hello` (and using alternate separator). */ function hasRoot(itemPath) { assert_1.default(itemPath, `isRooted parameter 'itemPath' must not be empty`); // Normalize separators itemPath = normalizeSeparators(itemPath); // Windows if (IS_WINDOWS) { // E.g. \ or \hello or \\hello // E.g. C: or C:\hello return itemPath.startsWith('\\') || /^[A-Z]:/i.test(itemPath); } // E.g. /hello return itemPath.startsWith('/'); } exports.hasRoot = hasRoot; /** * Removes redundant slashes and converts `/` to `\` on Windows */ function normalizeSeparators(p) { p = p || ''; // Windows if (IS_WINDOWS) { // Convert slashes on Windows p = p.replace(/\//g, '\\'); // Remove redundant slashes const isUnc = /^\\\\+[^\\]/.test(p); // e.g. \\hello return (isUnc ? '\\' : '') + p.replace(/\\\\+/g, '\\'); // preserve leading \\ for UNC } // Remove redundant slashes return p.replace(/\/\/+/g, '/'); } exports.normalizeSeparators = normalizeSeparators; /** * Normalizes the path separators and trims the trailing separator (when safe). * For example, `/foo/ => /foo` but `/ => /` */ function safeTrimTrailingSeparator(p) { // Short-circuit if empty if (!p) { return ''; } // Normalize separators p = normalizeSeparators(p); // No trailing slash if (!p.endsWith(path.sep)) { return p; } // Check '/' on Linux/macOS and '\' on Windows if (p === path.sep) { return p; } // On Windows check if drive root. E.g. C:\ if (IS_WINDOWS && /^[A-Z]:\\$/i.test(p)) { return p; } // Otherwise trim trailing slash return p.substr(0, p.length - 1); } exports.safeTrimTrailingSeparator = safeTrimTrailingSeparator; //# sourceMappingURL=internal-path-helper.js.map /***/ }), /***/ 75106: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? 
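// Editor's note (illustrative sketch, not part of the original bundle): worked examples for
// the two helpers above, following their documented behavior:
//
//   // normalizeSeparators('C://foo//bar')  -> 'C:\foo\bar' on Windows, 'C:/foo/bar' elsewhere
//   // normalizeSeparators('//unc//share')  -> '\\unc\share' on Windows (leading UNC \\ kept)
//   // safeTrimTrailingSeparator('/foo/')   -> '/foo'
//   // safeTrimTrailingSeparator('/')       -> '/'    (the root is never trimmed)
//   // safeTrimTrailingSeparator('C:\')     -> 'C:\'  on Windows (drive root kept)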
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Path = void 0; const path = __importStar(__nccwpck_require__(16928)); const pathHelper = __importStar(__nccwpck_require__(78195)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const IS_WINDOWS = process.platform === 'win32'; /** * Helper class for parsing paths into segments */ class Path { /** * Constructs a Path * @param itemPath Path or array of segments */ constructor(itemPath) { this.segments = []; // String if (typeof itemPath === 'string') { assert_1.default(itemPath, `Parameter 'itemPath' must not be empty`); // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); // Not rooted if (!pathHelper.hasRoot(itemPath)) { this.segments = itemPath.split(path.sep); } // Rooted else { // Add all segments, while not at the root let remaining = itemPath; let dir = pathHelper.dirname(remaining); while (dir !== remaining) { // Add the segment const basename = path.basename(remaining); this.segments.unshift(basename); // Truncate the last segment remaining = dir; dir = pathHelper.dirname(remaining); } // Remainder is the root this.segments.unshift(remaining); } } // Array else { // Must not be empty assert_1.default(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); // Each segment for (let i = 0; i < itemPath.length; i++) { let segment = itemPath[i]; // Must not be empty assert_1.default(segment, `Parameter 'itemPath' must not contain any empty segments`); // Normalize slashes segment = pathHelper.normalizeSeparators(itemPath[i]); // Root segment if (i === 0 && pathHelper.hasRoot(segment)) { segment = pathHelper.safeTrimTrailingSeparator(segment); assert_1.default(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); this.segments.push(segment); } // All other segments else { // Must not contain slash assert_1.default(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); this.segments.push(segment); } } } } /** * Converts the path to it's string representation */ toString() { // First segment let result = this.segments[0]; // All others let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); for (let i = 1; i < this.segments.length; i++) { if (skipSlash) { skipSlash = false; } else { result += path.sep; } result += this.segments[i]; } return result; } } exports.Path = Path; //# sourceMappingURL=internal-path.js.map /***/ }), /***/ 40080: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = 
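// Editor's note (illustrative sketch, not part of the original bundle): the Path class above
// parses a rooted path into a root segment plus one entry per component. On Linux/macOS:
//
//   // new Path('/foo/bar/baz').segments      -> ['/', 'foo', 'bar', 'baz']
//   // new Path(['/', 'foo', 'bar']).toString() -> '/foo/bar'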
(this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.partialMatch = exports.match = exports.getSearchPaths = void 0; const pathHelper = __importStar(__nccwpck_require__(78195)); const internal_match_kind_1 = __nccwpck_require__(57307); const IS_WINDOWS = process.platform === 'win32'; /** * Given an array of patterns, returns an array of paths to search. * Duplicates and paths under other included paths are filtered out. */ function getSearchPaths(patterns) { // Ignore negate patterns patterns = patterns.filter(x => !x.negate); // Create a map of all search paths const searchPathMap = {}; for (const pattern of patterns) { const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; searchPathMap[key] = 'candidate'; } const result = []; for (const pattern of patterns) { // Check if already included const key = IS_WINDOWS ? pattern.searchPath.toUpperCase() : pattern.searchPath; if (searchPathMap[key] === 'included') { continue; } // Check for an ancestor search path let foundAncestor = false; let tempKey = key; let parent = pathHelper.dirname(tempKey); while (parent !== tempKey) { if (searchPathMap[parent]) { foundAncestor = true; break; } tempKey = parent; parent = pathHelper.dirname(tempKey); } // Include the search pattern in the result if (!foundAncestor) { result.push(pattern.searchPath); searchPathMap[key] = 'included'; } } return result; } exports.getSearchPaths = getSearchPaths; /** * Matches the patterns against the path */ function match(patterns, itemPath) { let result = internal_match_kind_1.MatchKind.None; for (const pattern of patterns) { if (pattern.negate) { result &= ~pattern.match(itemPath); } else { result |= pattern.match(itemPath); } } return result; } exports.match = match; /** * Checks whether to descend further into the directory */ function partialMatch(patterns, itemPath) { return patterns.some(x => !x.negate && x.partialMatch(itemPath)); } exports.partialMatch = partialMatch; //# sourceMappingURL=internal-pattern-helper.js.map /***/ }), /***/ 32607: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
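// Editor's note (illustrative sketch, not part of the original bundle): match above folds the
// patterns left to right, OR-ing positive matches and AND-NOT-ing negated ones, so a later
// negative pattern can strip what an earlier positive pattern granted. Roughly, assuming both
// patterns resolve under the same working directory:
//
//   // patterns: 'dist/**' then '!dist/**/*.map'
//   // 'dist/app.js'     -> All (3)
//   // 'dist/app.js.map' -> All & ~All = None (0)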
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Pattern = void 0; const os = __importStar(__nccwpck_require__(70857)); const path = __importStar(__nccwpck_require__(16928)); const pathHelper = __importStar(__nccwpck_require__(78195)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const minimatch_1 = __nccwpck_require__(43772); const internal_match_kind_1 = __nccwpck_require__(57307); const internal_path_1 = __nccwpck_require__(75106); const IS_WINDOWS = process.platform === 'win32'; class Pattern { constructor(patternOrNegate, isImplicitPattern = false, segments, homedir) { /** * Indicates whether matches should be excluded from the result set */ this.negate = false; // Pattern overload let pattern; if (typeof patternOrNegate === 'string') { pattern = patternOrNegate.trim(); } // Segments overload else { // Convert to pattern segments = segments || []; assert_1.default(segments.length, `Parameter 'segments' must not empty`); const root = Pattern.getLiteral(segments[0]); assert_1.default(root && pathHelper.hasAbsoluteRoot(root), `Parameter 'segments' first element must be a root path`); pattern = new internal_path_1.Path(segments).toString().trim(); if (patternOrNegate) { pattern = `!${pattern}`; } } // Negate while (pattern.startsWith('!')) { this.negate = !this.negate; pattern = pattern.substr(1).trim(); } // Normalize slashes and ensures absolute root pattern = Pattern.fixupPattern(pattern, homedir); // Segments this.segments = new internal_path_1.Path(pattern).segments; // Trailing slash indicates the pattern should only match directories, not regular files this.trailingSeparator = pathHelper .normalizeSeparators(pattern) .endsWith(path.sep); pattern = pathHelper.safeTrimTrailingSeparator(pattern); // Search path (literal path prior to the first glob segment) let foundGlob = false; const searchSegments = this.segments .map(x => Pattern.getLiteral(x)) .filter(x => !foundGlob && !(foundGlob = x === '')); this.searchPath = new internal_path_1.Path(searchSegments).toString(); // Root RegExp (required when determining partial match) this.rootRegExp = new RegExp(Pattern.regExpEscape(searchSegments[0]), IS_WINDOWS ? 'i' : ''); this.isImplicitPattern = isImplicitPattern; // Create minimatch const minimatchOptions = { dot: true, nobrace: true, nocase: IS_WINDOWS, nocomment: true, noext: true, nonegate: true }; pattern = IS_WINDOWS ? pattern.replace(/\\/g, '/') : pattern; this.minimatch = new minimatch_1.Minimatch(pattern, minimatchOptions); } /** * Matches the pattern against the specified path */ match(itemPath) { // Last segment is globstar? if (this.segments[this.segments.length - 1] === '**') { // Normalize slashes itemPath = pathHelper.normalizeSeparators(itemPath); // Append a trailing slash. Otherwise Minimatch will not match the directory immediately // preceding the globstar. For example, given the pattern `/foo/**`, Minimatch returns // false for `/foo` but returns true for `/foo/`. 
Append a trailing slash to handle that quirk. if (!itemPath.endsWith(path.sep) && this.isImplicitPattern === false) { // Note, this is safe because the constructor ensures the pattern has an absolute root. // For example, formats like C: and C:foo on Windows are resolved to an absolute root. itemPath = `${itemPath}${path.sep}`; } } else { // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); } // Match if (this.minimatch.match(itemPath)) { return this.trailingSeparator ? internal_match_kind_1.MatchKind.Directory : internal_match_kind_1.MatchKind.All; } return internal_match_kind_1.MatchKind.None; } /** * Indicates whether the pattern may match descendants of the specified path */ partialMatch(itemPath) { // Normalize slashes and trim unnecessary trailing slash itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); // matchOne does not handle root path correctly if (pathHelper.dirname(itemPath) === itemPath) { return this.rootRegExp.test(itemPath); } return this.minimatch.matchOne(itemPath.split(IS_WINDOWS ? /\\+/ : /\/+/), this.minimatch.set[0], true); } /** * Escapes glob patterns within a path */ static globEscape(s) { return (IS_WINDOWS ? s : s.replace(/\\/g, '\\\\')) // escape '\' on Linux/macOS .replace(/(\[)(?=[^/]+\])/g, '[[]') // escape '[' when ']' follows within the path segment .replace(/\?/g, '[?]') // escape '?' .replace(/\*/g, '[*]'); // escape '*' } /** * Normalizes slashes and ensures absolute root */ static fixupPattern(pattern, homedir) { // Empty assert_1.default(pattern, 'pattern cannot be empty'); // Must not contain `.` segment, unless first segment // Must not contain `..` segment const literalSegments = new internal_path_1.Path(pattern).segments.map(x => Pattern.getLiteral(x)); assert_1.default(literalSegments.every((x, i) => (x !== '.' || i === 0) && x !== '..'), `Invalid pattern '${pattern}'. Relative pathing '.' and '..' is not allowed.`); // Must not contain globs in root, e.g. Windows UNC path \\foo\b*r assert_1.default(!pathHelper.hasRoot(pattern) || literalSegments[0], `Invalid pattern '${pattern}'. Root segment must not contain globs.`); // Normalize slashes pattern = pathHelper.normalizeSeparators(pattern); // Replace leading `.` segment if (pattern === '.' || pattern.startsWith(`.${path.sep}`)) { pattern = Pattern.globEscape(process.cwd()) + pattern.substr(1); } // Replace leading `~` segment else if (pattern === '~' || pattern.startsWith(`~${path.sep}`)) { homedir = homedir || os.homedir(); assert_1.default(homedir, 'Unable to determine HOME directory'); assert_1.default(pathHelper.hasAbsoluteRoot(homedir), `Expected HOME directory to be a rooted path. Actual '${homedir}'`); pattern = Pattern.globEscape(homedir) + pattern.substr(1); } // Replace relative drive root, e.g. pattern is C: or C:foo else if (IS_WINDOWS && (pattern.match(/^[A-Z]:$/i) || pattern.match(/^[A-Z]:[^\\]/i))) { let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', pattern.substr(0, 2)); if (pattern.length > 2 && !root.endsWith('\\')) { root += '\\'; } pattern = Pattern.globEscape(root) + pattern.substr(2); } // Replace relative root, e.g. 
pattern is \ or \foo else if (IS_WINDOWS && (pattern === '\\' || pattern.match(/^\\[^\\]/))) { let root = pathHelper.ensureAbsoluteRoot('C:\\dummy-root', '\\'); if (!root.endsWith('\\')) { root += '\\'; } pattern = Pattern.globEscape(root) + pattern.substr(1); } // Otherwise ensure absolute root else { pattern = pathHelper.ensureAbsoluteRoot(Pattern.globEscape(process.cwd()), pattern); } return pathHelper.normalizeSeparators(pattern); } /** * Attempts to unescape a pattern segment to create a literal path segment. * Otherwise returns empty string. */ static getLiteral(segment) { let literal = ''; for (let i = 0; i < segment.length; i++) { const c = segment[i]; // Escape if (c === '\\' && !IS_WINDOWS && i + 1 < segment.length) { literal += segment[++i]; continue; } // Wildcard else if (c === '*' || c === '?') { return ''; } // Character set else if (c === '[' && i + 1 < segment.length) { let set = ''; let closed = -1; for (let i2 = i + 1; i2 < segment.length; i2++) { const c2 = segment[i2]; // Escape if (c2 === '\\' && !IS_WINDOWS && i2 + 1 < segment.length) { set += segment[++i2]; continue; } // Closed else if (c2 === ']') { closed = i2; break; } // Otherwise else { set += c2; } } // Closed? if (closed >= 0) { // Cannot convert if (set.length > 1) { return ''; } // Convert to literal if (set) { literal += set; i = closed; continue; } } // Otherwise fall thru } // Append literal += c; } return literal; } /** * Escapes regexp special characters * https://javascript.info/regexp-escaping */ static regExpEscape(s) { return s.replace(/[[\\^$.|?*+()]/g, '\\$&'); } } exports.Pattern = Pattern; //# sourceMappingURL=internal-pattern.js.map /***/ }), /***/ 58633: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.SearchState = void 0; class SearchState { constructor(path, level) { this.path = path; this.level = level; } } exports.SearchState = SearchState; //# sourceMappingURL=internal-search-state.js.map /***/ }), /***/ 68110: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. /// const listenersMap = new WeakMap(); const abortedMap = new WeakMap(); /** * An aborter instance implements AbortSignal interface, can abort HTTP requests. * * - Call AbortSignal.none to create a new AbortSignal instance that cannot be cancelled. * Use `AbortSignal.none` when you are required to pass a cancellation token but the operation * cannot or will not ever be cancelled. * * @example * Abort without timeout * ```ts * await doAsyncWork(AbortSignal.none); * ``` */ class AbortSignal { constructor() { /** * onabort event listener. */ this.onabort = null; listenersMap.set(this, []); abortedMap.set(this, false); } /** * Status of whether aborted or not. * * @readonly */ get aborted() { if (!abortedMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } return abortedMap.get(this); } /** * Creates a new AbortSignal instance that will never be aborted. * * @readonly */ static get none() { return new AbortSignal(); } /** * Added new "abort" event listener, only support "abort" event. 
* * @param _type - Only support "abort" event * @param listener - The listener to be added */ addEventListener( // tslint:disable-next-line:variable-name _type, listener) { if (!listenersMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } const listeners = listenersMap.get(this); listeners.push(listener); } /** * Remove "abort" event listener, only support "abort" event. * * @param _type - Only support "abort" event * @param listener - The listener to be removed */ removeEventListener( // tslint:disable-next-line:variable-name _type, listener) { if (!listenersMap.has(this)) { throw new TypeError("Expected `this` to be an instance of AbortSignal."); } const listeners = listenersMap.get(this); const index = listeners.indexOf(listener); if (index > -1) { listeners.splice(index, 1); } } /** * Dispatches a synthetic event to the AbortSignal. */ dispatchEvent(_event) { throw new Error("This is a stub dispatchEvent implementation that should not be used. It only exists for type-checking purposes."); } } /** * Helper to trigger an abort event immediately, the onabort and all abort event listeners will be triggered. * Will try to trigger abort event for all linked AbortSignal nodes. * * - If there is a timeout, the timer will be cancelled. * - If aborted is true, nothing will happen. * * @internal */ // eslint-disable-next-line @azure/azure-sdk/ts-use-interface-parameters function abortSignal(signal) { if (signal.aborted) { return; } if (signal.onabort) { signal.onabort.call(signal); } const listeners = listenersMap.get(signal); if (listeners) { // Create a copy of listeners so mutations to the array // (e.g. via removeListener calls) don't affect the listeners // we invoke. listeners.slice().forEach((listener) => { listener.call(signal, { type: "abort" }); }); } abortedMap.set(signal, true); } // Copyright (c) Microsoft Corporation. /** * This error is thrown when an asynchronous operation has been aborted. * Check for this error by testing the `name` that the name property of the * error matches `"AbortError"`. * * @example * ```ts * const controller = new AbortController(); * controller.abort(); * try { * doAsyncWork(controller.signal) * } catch (e) { * if (e.name === 'AbortError') { * // handle abort error here. * } * } * ``` */ class AbortError extends Error { constructor(message) { super(message); this.name = "AbortError"; } } /** * An AbortController provides an AbortSignal and the associated controls to signal * that an asynchronous operation should be aborted. 
* * @example * Abort an operation when another event fires * ```ts * const controller = new AbortController(); * const signal = controller.signal; * doAsyncWork(signal); * button.addEventListener('click', () => controller.abort()); * ``` * * @example * Share aborter cross multiple operations in 30s * ```ts * // Upload the same data to 2 different data centers at the same time, * // abort another when any of them is finished * const controller = AbortController.withTimeout(30 * 1000); * doAsyncWork(controller.signal).then(controller.abort); * doAsyncWork(controller.signal).then(controller.abort); *``` * * @example * Cascaded aborting * ```ts * // All operations can't take more than 30 seconds * const aborter = Aborter.timeout(30 * 1000); * * // Following 2 operations can't take more than 25 seconds * await doAsyncWork(aborter.withTimeout(25 * 1000)); * await doAsyncWork(aborter.withTimeout(25 * 1000)); * ``` */ class AbortController { // eslint-disable-next-line @typescript-eslint/explicit-module-boundary-types constructor(parentSignals) { this._signal = new AbortSignal(); if (!parentSignals) { return; } // coerce parentSignals into an array if (!Array.isArray(parentSignals)) { // eslint-disable-next-line prefer-rest-params parentSignals = arguments; } for (const parentSignal of parentSignals) { // if the parent signal has already had abort() called, // then call abort on this signal as well. if (parentSignal.aborted) { this.abort(); } else { // when the parent signal aborts, this signal should as well. parentSignal.addEventListener("abort", () => { this.abort(); }); } } } /** * The AbortSignal associated with this controller that will signal aborted * when the abort method is called on this controller. * * @readonly */ get signal() { return this._signal; } /** * Signal that any operations passed this controller's associated abort signal * to cancel any remaining work and throw an `AbortError`. */ abort() { abortSignal(this._signal); } /** * Creates a new AbortSignal instance that will abort after the provided ms. * @param ms - Elapsed time in milliseconds to trigger an abort. */ static timeout(ms) { const signal = new AbortSignal(); const timer = setTimeout(abortSignal, ms, signal); // Prevent the active Timer from keeping the Node.js event loop active. 
if (typeof timer.unref === "function") { timer.unref(); } return signal; } } exports.AbortController = AbortController; exports.AbortError = AbortError; exports.AbortSignal = AbortSignal; //# sourceMappingURL=index.js.map /***/ }), /***/ 1012: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); var coreRestPipeline = __nccwpck_require__(20778); var tslib = __nccwpck_require__(61860); var coreAuth = __nccwpck_require__(50417); var coreUtil = __nccwpck_require__(87779); var coreHttpCompat = __nccwpck_require__(61584); var coreClient = __nccwpck_require__(60160); var coreXml = __nccwpck_require__(78756); var logger$1 = __nccwpck_require__(26515); var abortController = __nccwpck_require__(24517); var crypto = __nccwpck_require__(76982); var coreTracing = __nccwpck_require__(20623); var stream = __nccwpck_require__(2203); var coreLro = __nccwpck_require__(91754); var events = __nccwpck_require__(24434); var fs = __nccwpck_require__(79896); var util = __nccwpck_require__(39023); var buffer = __nccwpck_require__(20181); function _interopNamespaceDefault(e) { var n = Object.create(null); if (e) { Object.keys(e).forEach(function (k) { if (k !== 'default') { var d = Object.getOwnPropertyDescriptor(e, k); Object.defineProperty(n, k, d.get ? d : { enumerable: true, get: function () { return e[k]; } }); } }); } n.default = e; return Object.freeze(n); } var coreHttpCompat__namespace = /*#__PURE__*/_interopNamespaceDefault(coreHttpCompat); var coreClient__namespace = /*#__PURE__*/_interopNamespaceDefault(coreClient); var fs__namespace = /*#__PURE__*/_interopNamespaceDefault(fs); var util__namespace = /*#__PURE__*/_interopNamespaceDefault(util); // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * The `@azure/logger` configuration for this package. */ const logger = logger$1.createClientLogger("storage-blob"); // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * The base class from which all request policies derive. */ class BaseRequestPolicy { /** * The main method to implement that manipulates a request/response. */ constructor( /** * The next policy in the pipeline. Each policy is responsible for executing the next one if the request is to continue through the pipeline. */ _nextPolicy, /** * The options that can be passed to a given request policy. */ _options) { this._nextPolicy = _nextPolicy; this._options = _options; } /** * Get whether or not a log with the provided log level should be logged. * @param logLevel - The log level of the log that will be logged. * @returns Whether or not a log with the provided log level should be logged. */ shouldLog(logLevel) { return this._options.shouldLog(logLevel); } /** * Attempt to log the provided message to the provided logger. If no logger was provided or if * the log level does not meat the logger's threshold, then nothing will be logged. * @param logLevel - The log level of this log. * @param message - The message of this log. */ log(logLevel, message) { this._options.log(logLevel, message); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
const SDK_VERSION = "12.26.0"; const SERVICE_VERSION = "2025-01-05"; const BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES = 256 * 1024 * 1024; // 256MB const BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES = 4000 * 1024 * 1024; // 4000MB const BLOCK_BLOB_MAX_BLOCKS = 50000; const DEFAULT_BLOCK_BUFFER_SIZE_BYTES = 8 * 1024 * 1024; // 8MB const DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES = 4 * 1024 * 1024; // 4MB const DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS = 5; const REQUEST_TIMEOUT = 100 * 1000; // In ms /** * The OAuth scope to use with Azure Storage. */ const StorageOAuthScopes = "https://storage.azure.com/.default"; const URLConstants = { Parameters: { FORCE_BROWSER_NO_CACHE: "_", SIGNATURE: "sig", SNAPSHOT: "snapshot", VERSIONID: "versionid", TIMEOUT: "timeout", }, }; const HTTPURLConnection = { HTTP_ACCEPTED: 202, HTTP_CONFLICT: 409, HTTP_NOT_FOUND: 404, HTTP_PRECON_FAILED: 412, HTTP_RANGE_NOT_SATISFIABLE: 416, }; const HeaderConstants = { AUTHORIZATION: "Authorization", AUTHORIZATION_SCHEME: "Bearer", CONTENT_ENCODING: "Content-Encoding", CONTENT_ID: "Content-ID", CONTENT_LANGUAGE: "Content-Language", CONTENT_LENGTH: "Content-Length", CONTENT_MD5: "Content-Md5", CONTENT_TRANSFER_ENCODING: "Content-Transfer-Encoding", CONTENT_TYPE: "Content-Type", COOKIE: "Cookie", DATE: "date", IF_MATCH: "if-match", IF_MODIFIED_SINCE: "if-modified-since", IF_NONE_MATCH: "if-none-match", IF_UNMODIFIED_SINCE: "if-unmodified-since", PREFIX_FOR_STORAGE: "x-ms-", RANGE: "Range", USER_AGENT: "User-Agent", X_MS_CLIENT_REQUEST_ID: "x-ms-client-request-id", X_MS_COPY_SOURCE: "x-ms-copy-source", X_MS_DATE: "x-ms-date", X_MS_ERROR_CODE: "x-ms-error-code", X_MS_VERSION: "x-ms-version", X_MS_CopySourceErrorCode: "x-ms-copy-source-error-code", }; const ETagNone = ""; const ETagAny = "*"; const SIZE_1_MB = 1 * 1024 * 1024; const BATCH_MAX_REQUEST = 256; const BATCH_MAX_PAYLOAD_IN_BYTES = 4 * SIZE_1_MB; const HTTP_LINE_ENDING = "\r\n"; const HTTP_VERSION_1_1 = "HTTP/1.1"; const EncryptionAlgorithmAES25 = "AES256"; const DevelopmentConnectionString = `DefaultEndpointsProtocol=http;AccountName=devstoreaccount1;AccountKey=Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==;BlobEndpoint=http://127.0.0.1:10000/devstoreaccount1;`; const StorageBlobLoggingAllowedHeaderNames = [ "Access-Control-Allow-Origin", "Cache-Control", "Content-Length", "Content-Type", "Date", "Request-Id", "traceparent", "Transfer-Encoding", "User-Agent", "x-ms-client-request-id", "x-ms-date", "x-ms-error-code", "x-ms-request-id", "x-ms-return-client-request-id", "x-ms-version", "Accept-Ranges", "Content-Disposition", "Content-Encoding", "Content-Language", "Content-MD5", "Content-Range", "ETag", "Last-Modified", "Server", "Vary", "x-ms-content-crc64", "x-ms-copy-action", "x-ms-copy-completion-time", "x-ms-copy-id", "x-ms-copy-progress", "x-ms-copy-status", "x-ms-has-immutability-policy", "x-ms-has-legal-hold", "x-ms-lease-state", "x-ms-lease-status", "x-ms-range", "x-ms-request-server-encrypted", "x-ms-server-encrypted", "x-ms-snapshot", "x-ms-source-range", "If-Match", "If-Modified-Since", "If-None-Match", "If-Unmodified-Since", "x-ms-access-tier", "x-ms-access-tier-change-time", "x-ms-access-tier-inferred", "x-ms-account-kind", "x-ms-archive-status", "x-ms-blob-append-offset", "x-ms-blob-cache-control", "x-ms-blob-committed-block-count", "x-ms-blob-condition-appendpos", "x-ms-blob-condition-maxsize", "x-ms-blob-content-disposition", "x-ms-blob-content-encoding", "x-ms-blob-content-language", "x-ms-blob-content-length", "x-ms-blob-content-md5", 
"x-ms-blob-content-type", "x-ms-blob-public-access", "x-ms-blob-sequence-number", "x-ms-blob-type", "x-ms-copy-destination-snapshot", "x-ms-creation-time", "x-ms-default-encryption-scope", "x-ms-delete-snapshots", "x-ms-delete-type-permanent", "x-ms-deny-encryption-scope-override", "x-ms-encryption-algorithm", "x-ms-if-sequence-number-eq", "x-ms-if-sequence-number-le", "x-ms-if-sequence-number-lt", "x-ms-incremental-copy", "x-ms-lease-action", "x-ms-lease-break-period", "x-ms-lease-duration", "x-ms-lease-id", "x-ms-lease-time", "x-ms-page-write", "x-ms-proposed-lease-id", "x-ms-range-get-content-md5", "x-ms-rehydrate-priority", "x-ms-sequence-number-action", "x-ms-sku-name", "x-ms-source-content-md5", "x-ms-source-if-match", "x-ms-source-if-modified-since", "x-ms-source-if-none-match", "x-ms-source-if-unmodified-since", "x-ms-tag-count", "x-ms-encryption-key-sha256", "x-ms-copy-source-error-code", "x-ms-copy-source-status-code", "x-ms-if-tags", "x-ms-source-if-tags", ]; const StorageBlobLoggingAllowedQueryParameters = [ "comp", "maxresults", "rscc", "rscd", "rsce", "rscl", "rsct", "se", "si", "sip", "sp", "spr", "sr", "srt", "ss", "st", "sv", "include", "marker", "prefix", "copyid", "restype", "blockid", "blocklisttype", "delimiter", "prevsnapshot", "ske", "skoid", "sks", "skt", "sktid", "skv", "snapshot", ]; const BlobUsesCustomerSpecifiedEncryptionMsg = "BlobUsesCustomerSpecifiedEncryption"; const BlobDoesNotUseCustomerSpecifiedEncryption = "BlobDoesNotUseCustomerSpecifiedEncryption"; /// List of ports used for path style addressing. /// Path style addressing means that storage account is put in URI's Path segment in instead of in host. const PathStylePorts = [ "10000", "10001", "10002", "10003", "10004", "10100", "10101", "10102", "10103", "10104", "11000", "11001", "11002", "11003", "11004", "11100", "11101", "11102", "11103", "11104", ]; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Reserved URL characters must be properly escaped for Storage services like Blob or File. * * ## URL encode and escape strategy for JS SDKs * * When customers pass a URL string into XxxClient classes constructor, the URL string may already be URL encoded or not. * But before sending to Azure Storage server, the URL must be encoded. However, it's hard for a SDK to guess whether the URL * string has been encoded or not. We have 2 potential strategies, and chose strategy two for the XxxClient constructors. * * ### Strategy One: Assume the customer URL string is not encoded, and always encode URL string in SDK. * * This is what legacy V2 SDK does, simple and works for most of the cases. * - When customer URL string is "http://account.blob.core.windows.net/con/b:", * SDK will encode it to "http://account.blob.core.windows.net/con/b%3A" and send to server. A blob named "b:" will be created. * - When customer URL string is "http://account.blob.core.windows.net/con/b%3A", * SDK will encode it to "http://account.blob.core.windows.net/con/b%253A" and send to server. A blob named "b%3A" will be created. * * But this strategy will make it not possible to create a blob with "?" in it's name. Because when customer URL string is * "http://account.blob.core.windows.net/con/blob?name", the "?name" will be treated as URL paramter instead of blob name. * If customer URL string is "http://account.blob.core.windows.net/con/blob%3Fname", a blob named "blob%3Fname" will be created. 
 * The V2 SDK doesn't have this issue because it doesn't allow the customer to pass in a full URL; it accepts a separate blob name and calls encodeURIComponent for it.
 * We cannot accept that the SDK is unable to create a blob whose name contains "?". So we implement strategy two:
 *
 * ### Strategy Two: SDK doesn't assume the URL has been encoded or not. It will just escape the special characters.
 *
 * This is what the V10 Blob Go SDK does. It accepts a URL type in Go and calls url.EscapedPath() to escape the special characters that are still unescaped.
 * - When the customer URL string is "http://account.blob.core.windows.net/con/b:",
 *   the SDK will escape ":" like "http://account.blob.core.windows.net/con/b%3A" and send it to the server. A blob named "b:" will be created.
 * - When the customer URL string is "http://account.blob.core.windows.net/con/b%3A",
 *   there are no special characters, so "http://account.blob.core.windows.net/con/b%3A" is sent to the server. A blob named "b:" will be created.
 * - When the customer URL string is "http://account.blob.core.windows.net/con/b%253A",
 *   there are no special characters, so "http://account.blob.core.windows.net/con/b%253A" is sent to the server. A blob named "b%3A" will be created.
 *
 * This strategy gives us the flexibility to create blob names with any special characters. But "%" will be treated as a special character: if the URL string
 * is not encoded, there shouldn't be a "%" in the URL string, otherwise the URL is not a valid URL.
 * If the customer needs to create a blob with "%" in its name, use "%25" instead of "%", just like the 3rd sample above.
 * The following URL strings are invalid:
 * - "http://account.blob.core.windows.net/con/b%"
 * - "http://account.blob.core.windows.net/con/b%2"
 * - "http://account.blob.core.windows.net/con/b%G"
 *
 * Another special character is "?"; use "%3F" to represent a blob name containing "?" in a URL string.
 *
 * ### Strategy for containerName, blobName or other specific XXXName parameters in methods such as `containerClient.getBlobClient(blobName)`
 *
 * We will apply strategy one and call encodeURIComponent for these parameters, like blobName, because what the customer passes in is a plain name instead of a URL.
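 *
 * A minimal illustration of Strategy Two, using the escapeURLPath helper defined below (the inputs and
 * expected results simply restate the rules above; the account, container and blob names are placeholders):
 *
 *   escapeURLPath("http://account.blob.core.windows.net/con/b:");     // "http://account.blob.core.windows.net/con/b%3A"
 *   escapeURLPath("http://account.blob.core.windows.net/con/b%3A");   // unchanged: "http://account.blob.core.windows.net/con/b%3A"
 *   escapeURLPath("http://account.blob.core.windows.net/con/b%253A"); // unchanged: "http://account.blob.core.windows.net/con/b%253A"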
* * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * @see https://docs.microsoft.com/en-us/rest/api/storageservices/naming-and-referencing-shares--directories--files--and-metadata * * @param url - */ function escapeURLPath(url) { const urlParsed = new URL(url); let path = urlParsed.pathname; path = path || "/"; path = escape(path); urlParsed.pathname = path; return urlParsed.toString(); } function getProxyUriFromDevConnString(connectionString) { // Development Connection String // https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string#connect-to-the-emulator-account-using-the-well-known-account-name-and-key let proxyUri = ""; if (connectionString.search("DevelopmentStorageProxyUri=") !== -1) { // CONNECTION_STRING=UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://myProxyUri const matchCredentials = connectionString.split(";"); for (const element of matchCredentials) { if (element.trim().startsWith("DevelopmentStorageProxyUri=")) { proxyUri = element.trim().match("DevelopmentStorageProxyUri=(.*)")[1]; } } } return proxyUri; } function getValueInConnString(connectionString, argument) { const elements = connectionString.split(";"); for (const element of elements) { if (element.trim().startsWith(argument)) { return element.trim().match(argument + "=(.*)")[1]; } } return ""; } /** * Extracts the parts of an Azure Storage account connection string. * * @param connectionString - Connection string. * @returns String key value pairs of the storage account's url and credentials. */ function extractConnectionStringParts(connectionString) { let proxyUri = ""; if (connectionString.startsWith("UseDevelopmentStorage=true")) { // Development connection string proxyUri = getProxyUriFromDevConnString(connectionString); connectionString = DevelopmentConnectionString; } // Matching BlobEndpoint in the Account connection string let blobEndpoint = getValueInConnString(connectionString, "BlobEndpoint"); // Slicing off '/' at the end if exists // (The methods that use `extractConnectionStringParts` expect the url to not have `/` at the end) blobEndpoint = blobEndpoint.endsWith("/") ? blobEndpoint.slice(0, -1) : blobEndpoint; if (connectionString.search("DefaultEndpointsProtocol=") !== -1 && connectionString.search("AccountKey=") !== -1) { // Account connection string let defaultEndpointsProtocol = ""; let accountName = ""; let accountKey = Buffer.from("accountKey", "base64"); let endpointSuffix = ""; // Get account name and key accountName = getValueInConnString(connectionString, "AccountName"); accountKey = Buffer.from(getValueInConnString(connectionString, "AccountKey"), "base64"); if (!blobEndpoint) { // BlobEndpoint is not present in the Account connection string // Can be obtained from `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}` defaultEndpointsProtocol = getValueInConnString(connectionString, "DefaultEndpointsProtocol"); const protocol = defaultEndpointsProtocol.toLowerCase(); if (protocol !== "https" && protocol !== "http") { throw new Error("Invalid DefaultEndpointsProtocol in the provided Connection String. 
Expecting 'https' or 'http'"); } endpointSuffix = getValueInConnString(connectionString, "EndpointSuffix"); if (!endpointSuffix) { throw new Error("Invalid EndpointSuffix in the provided Connection String"); } blobEndpoint = `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; } if (!accountName) { throw new Error("Invalid AccountName in the provided Connection String"); } else if (accountKey.length === 0) { throw new Error("Invalid AccountKey in the provided Connection String"); } return { kind: "AccountConnString", url: blobEndpoint, accountName, accountKey, proxyUri, }; } else { // SAS connection string let accountSas = getValueInConnString(connectionString, "SharedAccessSignature"); let accountName = getValueInConnString(connectionString, "AccountName"); // if accountName is empty, try to read it from BlobEndpoint if (!accountName) { accountName = getAccountNameFromUrl(blobEndpoint); } if (!blobEndpoint) { throw new Error("Invalid BlobEndpoint in the provided SAS Connection String"); } else if (!accountSas) { throw new Error("Invalid SharedAccessSignature in the provided SAS Connection String"); } // client constructors assume accountSas does *not* start with ? if (accountSas.startsWith("?")) { accountSas = accountSas.substring(1); } return { kind: "SASConnString", url: blobEndpoint, accountName, accountSas }; } } /** * Internal escape method implemented Strategy Two mentioned in escapeURL() description. * * @param text - */ function escape(text) { return encodeURIComponent(text) .replace(/%2F/g, "/") // Don't escape for "/" .replace(/'/g, "%27") // Escape for "'" .replace(/\+/g, "%20") .replace(/%25/g, "%"); // Revert encoded "%" } /** * Append a string to URL path. Will remove duplicated "/" in front of the string * when URL path ends with a "/". * * @param url - Source URL string * @param name - String to be appended to URL * @returns An updated URL string */ function appendToURLPath(url, name) { const urlParsed = new URL(url); let path = urlParsed.pathname; path = path ? (path.endsWith("/") ? `${path}${name}` : `${path}/${name}`) : name; urlParsed.pathname = path; return urlParsed.toString(); } /** * Set URL parameter name and value. If name exists in URL parameters, old value * will be replaced by name key. If not provide value, the parameter will be deleted. * * @param url - Source URL string * @param name - Parameter name * @param value - Parameter value * @returns An updated URL string */ function setURLParameter(url, name, value) { const urlParsed = new URL(url); const encodedName = encodeURIComponent(name); const encodedValue = value ? encodeURIComponent(value) : undefined; // mutating searchParams will change the encoding, so we have to do this ourselves const searchString = urlParsed.search === "" ? "?" : urlParsed.search; const searchPieces = []; for (const pair of searchString.slice(1).split("&")) { if (pair) { const [key] = pair.split("=", 2); if (key !== encodedName) { searchPieces.push(pair); } } } if (encodedValue) { searchPieces.push(`${encodedName}=${encodedValue}`); } urlParsed.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; return urlParsed.toString(); } /** * Get URL parameter by name. * * @param url - * @param name - */ function getURLParameter(url, name) { var _a; const urlParsed = new URL(url); return (_a = urlParsed.searchParams.get(name)) !== null && _a !== void 0 ? _a : undefined; } /** * Set URL host. 
* * @param url - Source URL string * @param host - New host string * @returns An updated URL string */ function setURLHost(url, host) { const urlParsed = new URL(url); urlParsed.hostname = host; return urlParsed.toString(); } /** * Get URL path from an URL string. * * @param url - Source URL string */ function getURLPath(url) { try { const urlParsed = new URL(url); return urlParsed.pathname; } catch (e) { return undefined; } } /** * Get URL scheme from an URL string. * * @param url - Source URL string */ function getURLScheme(url) { try { const urlParsed = new URL(url); return urlParsed.protocol.endsWith(":") ? urlParsed.protocol.slice(0, -1) : urlParsed.protocol; } catch (e) { return undefined; } } /** * Get URL path and query from an URL string. * * @param url - Source URL string */ function getURLPathAndQuery(url) { const urlParsed = new URL(url); const pathString = urlParsed.pathname; if (!pathString) { throw new RangeError("Invalid url without valid path."); } let queryString = urlParsed.search || ""; queryString = queryString.trim(); if (queryString !== "") { queryString = queryString.startsWith("?") ? queryString : `?${queryString}`; // Ensure query string start with '?' } return `${pathString}${queryString}`; } /** * Get URL query key value pairs from an URL string. * * @param url - */ function getURLQueries(url) { let queryString = new URL(url).search; if (!queryString) { return {}; } queryString = queryString.trim(); queryString = queryString.startsWith("?") ? queryString.substring(1) : queryString; let querySubStrings = queryString.split("&"); querySubStrings = querySubStrings.filter((value) => { const indexOfEqual = value.indexOf("="); const lastIndexOfEqual = value.lastIndexOf("="); return (indexOfEqual > 0 && indexOfEqual === lastIndexOfEqual && lastIndexOfEqual < value.length - 1); }); const queries = {}; for (const querySubString of querySubStrings) { const splitResults = querySubString.split("="); const key = splitResults[0]; const value = splitResults[1]; queries[key] = value; } return queries; } /** * Append a string to URL query. * * @param url - Source URL string. * @param queryParts - String to be appended to the URL query. * @returns An updated URL string. */ function appendToURLQuery(url, queryParts) { const urlParsed = new URL(url); let query = urlParsed.search; if (query) { query += "&" + queryParts; } else { query = queryParts; } urlParsed.search = query; return urlParsed.toString(); } /** * Rounds a date off to seconds. * * @param date - * @param withMilliseconds - If true, YYYY-MM-DDThh:mm:ss.fffffffZ will be returned; * If false, YYYY-MM-DDThh:mm:ssZ will be returned. * @returns Date string in ISO8061 format, with or without 7 milliseconds component */ function truncatedISO8061Date(date, withMilliseconds = true) { // Date.toISOString() will return like "2018-10-29T06:34:36.139Z" const dateString = date.toISOString(); return withMilliseconds ? dateString.substring(0, dateString.length - 1) + "0000" + "Z" : dateString.substring(0, dateString.length - 5) + "Z"; } /** * Base64 encode. * * @param content - */ function base64encode(content) { return !coreUtil.isNode ? btoa(content) : Buffer.from(content).toString("base64"); } /** * Generate a 64 bytes base64 block ID string. 
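 *
 * A sketch with a hypothetical prefix: the block index is zero-padded so that prefix plus index is exactly
 * 48 characters, and the base64 encoding of that 48-character string is 64 bytes long:
 *
 *   generateBlockID("myprefix-", 5); // === base64encode("myprefix-" + "5".padStart(39, "0"))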
* * @param blockIndex - */ function generateBlockID(blockIDPrefix, blockIndex) { // To generate a 64 bytes base64 string, source string should be 48 const maxSourceStringLength = 48; // A blob can have a maximum of 100,000 uncommitted blocks at any given time const maxBlockIndexLength = 6; const maxAllowedBlockIDPrefixLength = maxSourceStringLength - maxBlockIndexLength; if (blockIDPrefix.length > maxAllowedBlockIDPrefixLength) { blockIDPrefix = blockIDPrefix.slice(0, maxAllowedBlockIDPrefixLength); } const res = blockIDPrefix + padStart(blockIndex.toString(), maxSourceStringLength - blockIDPrefix.length, "0"); return base64encode(res); } /** * Delay specified time interval. * * @param timeInMs - * @param aborter - * @param abortError - */ async function delay(timeInMs, aborter, abortError) { return new Promise((resolve, reject) => { /* eslint-disable-next-line prefer-const */ let timeout; const abortHandler = () => { if (timeout !== undefined) { clearTimeout(timeout); } reject(abortError); }; const resolveHandler = () => { if (aborter !== undefined) { aborter.removeEventListener("abort", abortHandler); } resolve(); }; timeout = setTimeout(resolveHandler, timeInMs); if (aborter !== undefined) { aborter.addEventListener("abort", abortHandler); } }); } /** * String.prototype.padStart() * * @param currentString - * @param targetLength - * @param padString - */ function padStart(currentString, targetLength, padString = " ") { // @ts-expect-error: TS doesn't know this code needs to run downlevel sometimes if (String.prototype.padStart) { return currentString.padStart(targetLength, padString); } padString = padString || " "; if (currentString.length > targetLength) { return currentString; } else { targetLength = targetLength - currentString.length; if (targetLength > padString.length) { padString += padString.repeat(targetLength / padString.length); } return padString.slice(0, targetLength) + currentString; } } /** * If two strings are equal when compared case insensitive. * * @param str1 - * @param str2 - */ function iEqual(str1, str2) { return str1.toLocaleLowerCase() === str2.toLocaleLowerCase(); } /** * Extracts account name from the url * @param url - url to extract the account name from * @returns with the account name */ function getAccountNameFromUrl(url) { const parsedUrl = new URL(url); let accountName; try { if (parsedUrl.hostname.split(".")[1] === "blob") { // `${defaultEndpointsProtocol}://${accountName}.blob.${endpointSuffix}`; accountName = parsedUrl.hostname.split(".")[0]; } else if (isIpEndpointStyle(parsedUrl)) { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/ // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/ // .getPath() -> /devstoreaccount1/ accountName = parsedUrl.pathname.split("/")[1]; } else { // Custom domain case: "https://customdomain.com/containername/blob". accountName = ""; } return accountName; } catch (error) { throw new Error("Unable to extract accountName with provided information."); } } function isIpEndpointStyle(parsedUrl) { const host = parsedUrl.host; // Case 1: Ipv6, use a broad regex to find out candidates whose host contains two ':'. // Case 2: localhost(:port) or host.docker.internal, use broad regex to match port part. // Case 3: Ipv4, use broad regex which just check if host contains Ipv4. // For valid host please refer to https://man7.org/linux/man-pages/man7/hostname.7.html. 
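    // Illustrative hosts that this check treats as IP/path-style endpoints (assumed examples, not an exhaustive list):
    // "127.0.0.1:10000", "[::1]:10000", "localhost:10000", "host.docker.internal:10001",
    // plus any host whose port is listed in PathStylePorts above.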
return (/^.*:.*:.*$|^(localhost|host.docker.internal)(:[0-9]+)?$|^(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])(\.(\d|[1-9]\d|1\d\d|2[0-4]\d|25[0-5])){3}(:[0-9]+)?$/.test(host) || (Boolean(parsedUrl.port) && PathStylePorts.includes(parsedUrl.port))); } /** * Convert Tags to encoded string. * * @param tags - */ function toBlobTagsString(tags) { if (tags === undefined) { return undefined; } const tagPairs = []; for (const key in tags) { if (Object.prototype.hasOwnProperty.call(tags, key)) { const value = tags[key]; tagPairs.push(`${encodeURIComponent(key)}=${encodeURIComponent(value)}`); } } return tagPairs.join("&"); } /** * Convert Tags type to BlobTags. * * @param tags - */ function toBlobTags(tags) { if (tags === undefined) { return undefined; } const res = { blobTagSet: [], }; for (const key in tags) { if (Object.prototype.hasOwnProperty.call(tags, key)) { const value = tags[key]; res.blobTagSet.push({ key, value, }); } } return res; } /** * Covert BlobTags to Tags type. * * @param tags - */ function toTags(tags) { if (tags === undefined) { return undefined; } const res = {}; for (const blobTag of tags.blobTagSet) { res[blobTag.key] = blobTag.value; } return res; } /** * Convert BlobQueryTextConfiguration to QuerySerialization type. * * @param textConfiguration - */ function toQuerySerialization(textConfiguration) { if (textConfiguration === undefined) { return undefined; } switch (textConfiguration.kind) { case "csv": return { format: { type: "delimited", delimitedTextConfiguration: { columnSeparator: textConfiguration.columnSeparator || ",", fieldQuote: textConfiguration.fieldQuote || "", recordSeparator: textConfiguration.recordSeparator, escapeChar: textConfiguration.escapeCharacter || "", headersPresent: textConfiguration.hasHeaders || false, }, }, }; case "json": return { format: { type: "json", jsonTextConfiguration: { recordSeparator: textConfiguration.recordSeparator, }, }, }; case "arrow": return { format: { type: "arrow", arrowConfiguration: { schema: textConfiguration.schema, }, }, }; case "parquet": return { format: { type: "parquet", }, }; default: throw Error("Invalid BlobQueryTextConfiguration."); } } function parseObjectReplicationRecord(objectReplicationRecord) { if (!objectReplicationRecord) { return undefined; } if ("policy-id" in objectReplicationRecord) { // If the dictionary contains a key with policy id, we are not required to do any parsing since // the policy id should already be stored in the ObjectReplicationDestinationPolicyId. return undefined; } const orProperties = []; for (const key in objectReplicationRecord) { const ids = key.split("_"); const policyPrefix = "or-"; if (ids[0].startsWith(policyPrefix)) { ids[0] = ids[0].substring(policyPrefix.length); } const rule = { ruleId: ids[1], replicationStatus: objectReplicationRecord[key], }; const policyIndex = orProperties.findIndex((policy) => policy.policyId === ids[0]); if (policyIndex > -1) { orProperties[policyIndex].rules.push(rule); } else { orProperties.push({ policyId: ids[0], rules: [rule], }); } } return orProperties; } function httpAuthorizationToString(httpAuthorization) { return httpAuthorization ? 
httpAuthorization.scheme + " " + httpAuthorization.value : undefined; } function BlobNameToString(name) { if (name.encoded) { return decodeURIComponent(name.content); } else { return name.content; } } function ConvertInternalResponseOfListBlobFlat(internalResponse) { return Object.assign(Object.assign({}, internalResponse), { segment: { blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); return blobItem; }), } }); } function ConvertInternalResponseOfListBlobHierarchy(internalResponse) { var _a; return Object.assign(Object.assign({}, internalResponse), { segment: { blobPrefixes: (_a = internalResponse.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); return blobPrefix; }), blobItems: internalResponse.segment.blobItems.map((blobItemInteral) => { const blobItem = Object.assign(Object.assign({}, blobItemInteral), { name: BlobNameToString(blobItemInteral.name) }); return blobItem; }), } }); } function* ExtractPageRangeInfoItems(getPageRangesSegment) { let pageRange = []; let clearRange = []; if (getPageRangesSegment.pageRange) pageRange = getPageRangesSegment.pageRange; if (getPageRangesSegment.clearRange) clearRange = getPageRangesSegment.clearRange; let pageRangeIndex = 0; let clearRangeIndex = 0; while (pageRangeIndex < pageRange.length && clearRangeIndex < clearRange.length) { if (pageRange[pageRangeIndex].start < clearRange[clearRangeIndex].start) { yield { start: pageRange[pageRangeIndex].start, end: pageRange[pageRangeIndex].end, isClear: false, }; ++pageRangeIndex; } else { yield { start: clearRange[clearRangeIndex].start, end: clearRange[clearRangeIndex].end, isClear: true, }; ++clearRangeIndex; } } for (; pageRangeIndex < pageRange.length; ++pageRangeIndex) { yield { start: pageRange[pageRangeIndex].start, end: pageRange[pageRangeIndex].end, isClear: false, }; } for (; clearRangeIndex < clearRange.length; ++clearRangeIndex) { yield { start: clearRange[clearRangeIndex].start, end: clearRange[clearRangeIndex].end, isClear: true, }; } } /** * Escape the blobName but keep path separator ('/'). */ function EscapePath(blobName) { const split = blobName.split("/"); for (let i = 0; i < split.length; i++) { split[i] = encodeURIComponent(split[i]); } return split.join("/"); } /** * A typesafe helper for ensuring that a given response object has * the original _response attached. * @param response - A response object from calling a client operation * @returns The same object, but with known _response property */ function assertResponse(response) { if (`_response` in response) { return response; } throw new TypeError(`Unexpected response object ${response}`); } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * RetryPolicy types. */ exports.StorageRetryPolicyType = void 0; (function (StorageRetryPolicyType) { /** * Exponential retry. Retry time delay grows exponentially. */ StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; /** * Linear retry. Retry time delay grows linearly. 
*/ StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; })(exports.StorageRetryPolicyType || (exports.StorageRetryPolicyType = {})); // Default values of StorageRetryOptions const DEFAULT_RETRY_OPTIONS$1 = { maxRetryDelayInMs: 120 * 1000, maxTries: 4, retryDelayInMs: 4 * 1000, retryPolicyType: exports.StorageRetryPolicyType.EXPONENTIAL, secondaryHost: "", tryTimeoutInMs: undefined, // Use server side default timeout strategy }; const RETRY_ABORT_ERROR$1 = new abortController.AbortError("The operation was aborted."); /** * Retry policy with exponential retry and linear retry implemented. */ class StorageRetryPolicy extends BaseRequestPolicy { /** * Creates an instance of RetryPolicy. * * @param nextPolicy - * @param options - * @param retryOptions - */ constructor(nextPolicy, options, retryOptions = DEFAULT_RETRY_OPTIONS$1) { super(nextPolicy, options); // Initialize retry options this.retryOptions = { retryPolicyType: retryOptions.retryPolicyType ? retryOptions.retryPolicyType : DEFAULT_RETRY_OPTIONS$1.retryPolicyType, maxTries: retryOptions.maxTries && retryOptions.maxTries >= 1 ? Math.floor(retryOptions.maxTries) : DEFAULT_RETRY_OPTIONS$1.maxTries, tryTimeoutInMs: retryOptions.tryTimeoutInMs && retryOptions.tryTimeoutInMs >= 0 ? retryOptions.tryTimeoutInMs : DEFAULT_RETRY_OPTIONS$1.tryTimeoutInMs, retryDelayInMs: retryOptions.retryDelayInMs && retryOptions.retryDelayInMs >= 0 ? Math.min(retryOptions.retryDelayInMs, retryOptions.maxRetryDelayInMs ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs) : DEFAULT_RETRY_OPTIONS$1.retryDelayInMs, maxRetryDelayInMs: retryOptions.maxRetryDelayInMs && retryOptions.maxRetryDelayInMs >= 0 ? retryOptions.maxRetryDelayInMs : DEFAULT_RETRY_OPTIONS$1.maxRetryDelayInMs, secondaryHost: retryOptions.secondaryHost ? retryOptions.secondaryHost : DEFAULT_RETRY_OPTIONS$1.secondaryHost, }; } /** * Sends request. * * @param request - */ async sendRequest(request) { return this.attemptSendRequest(request, false, 1); } /** * Decide and perform next retry. Won't mutate request parameter. * * @param request - * @param secondaryHas404 - If attempt was against the secondary & it returned a StatusNotFound (404), then * the resource was not found. This may be due to replication delay. So, in this * case, we'll never try the secondary again for this operation. * @param attempt - How many retries has been attempted to performed, starting from 1, which includes * the attempt will be performed by this method call. */ async attemptSendRequest(request, secondaryHas404, attempt) { const newRequest = request.clone(); const isPrimaryRetry = secondaryHas404 || !this.retryOptions.secondaryHost || !(request.method === "GET" || request.method === "HEAD" || request.method === "OPTIONS") || attempt % 2 === 1; if (!isPrimaryRetry) { newRequest.url = setURLHost(newRequest.url, this.retryOptions.secondaryHost); } // Set the server-side timeout query parameter "timeout=[seconds]" if (this.retryOptions.tryTimeoutInMs) { newRequest.url = setURLParameter(newRequest.url, URLConstants.Parameters.TIMEOUT, Math.floor(this.retryOptions.tryTimeoutInMs / 1000).toString()); } let response; try { logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? 
"Primary" : "Secondary"}`); response = await this._nextPolicy.sendRequest(newRequest); if (!this.shouldRetry(isPrimaryRetry, attempt, response)) { return response; } secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); } catch (err) { logger.error(`RetryPolicy: Caught error, message: ${err.message}, code: ${err.code}`); if (!this.shouldRetry(isPrimaryRetry, attempt, response, err)) { throw err; } } await this.delay(isPrimaryRetry, attempt, request.abortSignal); return this.attemptSendRequest(request, secondaryHas404, ++attempt); } /** * Decide whether to retry according to last HTTP response and retry counters. * * @param isPrimaryRetry - * @param attempt - * @param response - * @param err - */ shouldRetry(isPrimaryRetry, attempt, response, err) { if (attempt >= this.retryOptions.maxTries) { logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${this.retryOptions .maxTries}, no further try.`); return false; } // Handle network failures, you may need to customize the list when you implement // your own http client const retriableErrors = [ "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET", "ENOENT", "ENOTFOUND", "TIMEOUT", "EPIPE", "REQUEST_SEND_ERROR", // For default xhr based http client provided in ms-rest-js ]; if (err) { for (const retriableError of retriableErrors) { if (err.name.toUpperCase().includes(retriableError) || err.message.toUpperCase().includes(retriableError) || (err.code && err.code.toString().toUpperCase() === retriableError)) { logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); return true; } } } // If attempt was against the secondary & it returned a StatusNotFound (404), then // the resource was not found. This may be due to replication delay. So, in this // case, we'll never try the secondary again for this operation. if (response || err) { const statusCode = response ? response.status : err ? err.statusCode : 0; if (!isPrimaryRetry && statusCode === 404) { logger.info(`RetryPolicy: Secondary access with 404, will retry.`); return true; } // Server internal error or server timeout if (statusCode === 503 || statusCode === 500) { logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); return true; } } // [Copy source error code] Feature is pending on service side, skip retry on copy source error for now. // if (response) { // // Retry select Copy Source Error Codes. // if (response?.status >= 400) { // const copySourceError = response.headers.get(HeaderConstants.X_MS_CopySourceErrorCode); // if (copySourceError !== undefined) { // switch (copySourceError) { // case "InternalError": // case "OperationTimedOut": // case "ServerBusy": // return true; // } // } // } // } if ((err === null || err === void 0 ? void 0 : err.code) === "PARSE_ERROR" && (err === null || err === void 0 ? void 0 : err.message.startsWith(`Error "Error: Unclosed root tag`))) { logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); return true; } return false; } /** * Delay a calculated time between retries. 
* * @param isPrimaryRetry - * @param attempt - * @param abortSignal - */ async delay(isPrimaryRetry, attempt, abortSignal) { let delayTimeInMs = 0; if (isPrimaryRetry) { switch (this.retryOptions.retryPolicyType) { case exports.StorageRetryPolicyType.EXPONENTIAL: delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * this.retryOptions.retryDelayInMs, this.retryOptions.maxRetryDelayInMs); break; case exports.StorageRetryPolicyType.FIXED: delayTimeInMs = this.retryOptions.retryDelayInMs; break; } } else { delayTimeInMs = Math.random() * 1000; } logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); return delay(delayTimeInMs, abortSignal, RETRY_ABORT_ERROR$1); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * StorageRetryPolicyFactory is a factory class helping generating {@link StorageRetryPolicy} objects. */ class StorageRetryPolicyFactory { /** * Creates an instance of StorageRetryPolicyFactory. * @param retryOptions - */ constructor(retryOptions) { this.retryOptions = retryOptions; } /** * Creates a StorageRetryPolicy object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { return new StorageRetryPolicy(nextPolicy, options, this.retryOptions); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Credential policy used to sign HTTP(S) requests before sending. This is an * abstract class. */ class CredentialPolicy extends BaseRequestPolicy { /** * Sends out request. * * @param request - */ sendRequest(request) { return this._nextPolicy.sendRequest(this.signRequest(request)); } /** * Child classes must implement this method with request signing. This method * will be executed in {@link sendRequest}. * * @param request - */ signRequest(request) { // Child classes must override this method with request signing. This method // will be executed in sendRequest(). return request; } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /* * We need to imitate .Net culture-aware sorting, which is used in storage service. * Below tables contain sort-keys for en-US culture. 
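 * For example, compareHeader("x-ms-meta-abc", "x-ms-meta-abd") returns -1 because "c" has a smaller
 * level-0 weight than "d", while "-" carries weight 0 in table_lv0 and table_lv2 and therefore only
 * breaks ties at the final level (a reading of the tables below, not an exhaustive specification).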
*/ const table_lv0 = new Uint32Array([ 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x71c, 0x0, 0x71f, 0x721, 0x723, 0x725, 0x0, 0x0, 0x0, 0x72d, 0x803, 0x0, 0x0, 0x733, 0x0, 0xd03, 0xd1a, 0xd1c, 0xd1e, 0xd20, 0xd22, 0xd24, 0xd26, 0xd28, 0xd2a, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x0, 0x0, 0x743, 0x744, 0x748, 0xe02, 0xe09, 0xe0a, 0xe1a, 0xe21, 0xe23, 0xe25, 0xe2c, 0xe32, 0xe35, 0xe36, 0xe48, 0xe51, 0xe70, 0xe7c, 0xe7e, 0xe89, 0xe8a, 0xe91, 0xe99, 0xe9f, 0xea2, 0xea4, 0xea6, 0xea7, 0xea9, 0x0, 0x74c, 0x0, 0x750, 0x0, ]); const table_lv2 = new Uint32Array([ 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x12, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ]); const table_lv4 = new Uint32Array([ 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8012, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8212, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, 0x0, ]); function compareHeader(lhs, rhs) { if (isLessThan(lhs, rhs)) return -1; return 1; } function isLessThan(lhs, rhs) { const tables = [table_lv0, table_lv2, table_lv4]; let curr_level = 0; let i = 0; let j = 0; while (curr_level < tables.length) { if (curr_level === tables.length - 1 && i !== j) { return i > j; } const weight1 = i < lhs.length ? tables[curr_level][lhs[i].charCodeAt(0)] : 0x1; const weight2 = j < rhs.length ? tables[curr_level][rhs[j].charCodeAt(0)] : 0x1; if (weight1 === 0x1 && weight2 === 0x1) { i = 0; j = 0; ++curr_level; } else if (weight1 === weight2) { ++i; ++j; } else if (weight1 === 0) { ++i; } else if (weight2 === 0) { ++j; } else { return weight1 < weight2; } } return false; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * StorageSharedKeyCredentialPolicy is a policy used to sign HTTP request with a shared key. */ class StorageSharedKeyCredentialPolicy extends CredentialPolicy { /** * Creates an instance of StorageSharedKeyCredentialPolicy. * @param nextPolicy - * @param options - * @param factory - */ constructor(nextPolicy, options, factory) { super(nextPolicy, options); this.factory = factory; } /** * Signs request. 
* * @param request - */ signRequest(request) { request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); if (request.body && (typeof request.body === "string" || request.body !== undefined) && request.body.length > 0) { request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } const stringToSign = [ request.method.toUpperCase(), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), this.getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), this.getHeaderValueToSign(request, HeaderConstants.DATE), this.getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), this.getHeaderValueToSign(request, HeaderConstants.IF_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), this.getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), this.getHeaderValueToSign(request, HeaderConstants.RANGE), ].join("\n") + "\n" + this.getCanonicalizedHeadersString(request) + this.getCanonicalizedResourceString(request); const signature = this.factory.computeHMACSHA256(stringToSign); request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${this.factory.accountName}:${signature}`); // console.log(`[URL]:${request.url}`); // console.log(`[HEADERS]:${request.headers.toString()}`); // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); return request; } /** * Retrieve header value according to shared key sign rules. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key * * @param request - * @param headerName - */ getHeaderValueToSign(request, headerName) { const value = request.headers.get(headerName); if (!value) { return ""; } // When using version 2015-02-21 or later, if Content-Length is zero, then // set the Content-Length part of the StringToSign to an empty string. // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { return ""; } return value; } /** * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. * 2. Convert each HTTP header name to lowercase. * 3. Sort the headers lexicographically by header name, in ascending order. * Each header may appear only once in the string. * 4. Replace any linear whitespace in the header value with a single space. * 5. Trim any whitespace around the colon in the header. * 6. Finally, append a new-line character to each canonicalized header in the resulting list. * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. 
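 *
 * For example (an assumed request, shown only to illustrate the shape of the result), the headers
 * x-ms-version with value "2025-01-05" and x-ms-date with value "Tue, 01 Jan 2030 00:00:00 GMT"
 * canonicalize to "x-ms-date:Tue, 01 Jan 2030 00:00:00 GMT\nx-ms-version:2025-01-05\n".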
* * @param request - */ getCanonicalizedHeadersString(request) { let headersArray = request.headers.headersArray().filter((value) => { return value.name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE); }); headersArray.sort((a, b) => { return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); }); // Remove duplicate headers headersArray = headersArray.filter((value, index, array) => { if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { return false; } return true; }); let canonicalizedHeadersStringToSign = ""; headersArray.forEach((header) => { canonicalizedHeadersStringToSign += `${header.name .toLowerCase() .trimRight()}:${header.value.trimLeft()}\n`; }); return canonicalizedHeadersStringToSign; } /** * Retrieves the webResource canonicalized resource string. * * @param request - */ getCanonicalizedResourceString(request) { const path = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; canonicalizedResourceString += `/${this.factory.accountName}${path}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { const queryKeys = []; for (const key in queries) { if (Object.prototype.hasOwnProperty.call(queries, key)) { const lowercaseKey = key.toLowerCase(); lowercaseQueries[lowercaseKey] = queries[key]; queryKeys.push(lowercaseKey); } } queryKeys.sort(); for (const key of queryKeys) { canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } return canonicalizedResourceString; } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Credential is an abstract class for Azure Storage HTTP requests signing. This * class will host an credentialPolicyCreator factory which generates CredentialPolicy. */ class Credential { /** * Creates a RequestPolicy object. * * @param _nextPolicy - * @param _options - */ create(_nextPolicy, _options) { throw new Error("Method should be implemented in children classes."); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * StorageSharedKeyCredential for account key authorization of Azure Storage service. */ class StorageSharedKeyCredential extends Credential { /** * Creates an instance of StorageSharedKeyCredential. * @param accountName - * @param accountKey - */ constructor(accountName, accountKey) { super(); this.accountName = accountName; this.accountKey = Buffer.from(accountKey, "base64"); } /** * Creates a StorageSharedKeyCredentialPolicy object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { return new StorageSharedKeyCredentialPolicy(nextPolicy, options, this); } /** * Generates a hash signature for an HTTP request or for a SAS. * * @param stringToSign - */ computeHMACSHA256(stringToSign) { return crypto.createHmac("sha256", this.accountKey).update(stringToSign, "utf8").digest("base64"); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * AnonymousCredentialPolicy is used with HTTP(S) requests that read public resources * or for use with Shared Access Signatures (SAS). */ class AnonymousCredentialPolicy extends CredentialPolicy { /** * Creates an instance of AnonymousCredentialPolicy. * @param nextPolicy - * @param options - */ // The base class has a protected constructor. Adding a public one to enable constructing of this class. 
/* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ constructor(nextPolicy, options) { super(nextPolicy, options); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * AnonymousCredential provides a credentialPolicyCreator member used to create * AnonymousCredentialPolicy objects. AnonymousCredentialPolicy is used with * HTTP(S) requests that read public resources or for use with Shared Access * Signatures (SAS). */ class AnonymousCredential extends Credential { /** * Creates an {@link AnonymousCredentialPolicy} object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { return new AnonymousCredentialPolicy(nextPolicy, options); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. let _defaultHttpClient; function getCachedDefaultHttpClient() { if (!_defaultHttpClient) { _defaultHttpClient = coreRestPipeline.createDefaultHttpClient(); } return _defaultHttpClient; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * The programmatic identifier of the StorageBrowserPolicy. */ const storageBrowserPolicyName = "storageBrowserPolicy"; /** * storageBrowserPolicy is a policy used to prevent browsers from caching requests * and to remove cookies and explicit content-length headers. */ function storageBrowserPolicy() { return { name: storageBrowserPolicyName, async sendRequest(request, next) { if (coreUtil.isNode) { return next(request); } if (request.method === "GET" || request.method === "HEAD") { request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); } request.headers.delete(HeaderConstants.COOKIE); // According to XHR standards, content-length should be fully controlled by browsers request.headers.delete(HeaderConstants.CONTENT_LENGTH); return next(request); }, }; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Name of the {@link storageRetryPolicy} */ const storageRetryPolicyName = "storageRetryPolicy"; /** * RetryPolicy types. */ var StorageRetryPolicyType; (function (StorageRetryPolicyType) { /** * Exponential retry. Retry time delay grows exponentially. */ StorageRetryPolicyType[StorageRetryPolicyType["EXPONENTIAL"] = 0] = "EXPONENTIAL"; /** * Linear retry. Retry time delay grows linearly. */ StorageRetryPolicyType[StorageRetryPolicyType["FIXED"] = 1] = "FIXED"; })(StorageRetryPolicyType || (StorageRetryPolicyType = {})); // Default values of StorageRetryOptions const DEFAULT_RETRY_OPTIONS = { maxRetryDelayInMs: 120 * 1000, maxTries: 4, retryDelayInMs: 4 * 1000, retryPolicyType: StorageRetryPolicyType.EXPONENTIAL, secondaryHost: "", tryTimeoutInMs: undefined, // Use server side default timeout strategy }; const retriableErrors = [ "ETIMEDOUT", "ESOCKETTIMEDOUT", "ECONNREFUSED", "ECONNRESET", "ENOENT", "ENOTFOUND", "TIMEOUT", "EPIPE", "REQUEST_SEND_ERROR", ]; const RETRY_ABORT_ERROR = new abortController.AbortError("The operation was aborted."); /** * Retry policy with exponential retry and linear retry implemented. */ function storageRetryPolicy(options = {}) { var _a, _b, _c, _d, _e, _f; const retryPolicyType = (_a = options.retryPolicyType) !== null && _a !== void 0 ? _a : DEFAULT_RETRY_OPTIONS.retryPolicyType; const maxTries = (_b = options.maxTries) !== null && _b !== void 0 ? _b : DEFAULT_RETRY_OPTIONS.maxTries; const retryDelayInMs = (_c = options.retryDelayInMs) !== null && _c !== void 0 ? 
_c : DEFAULT_RETRY_OPTIONS.retryDelayInMs; const maxRetryDelayInMs = (_d = options.maxRetryDelayInMs) !== null && _d !== void 0 ? _d : DEFAULT_RETRY_OPTIONS.maxRetryDelayInMs; const secondaryHost = (_e = options.secondaryHost) !== null && _e !== void 0 ? _e : DEFAULT_RETRY_OPTIONS.secondaryHost; const tryTimeoutInMs = (_f = options.tryTimeoutInMs) !== null && _f !== void 0 ? _f : DEFAULT_RETRY_OPTIONS.tryTimeoutInMs; function shouldRetry({ isPrimaryRetry, attempt, response, error, }) { var _a, _b; if (attempt >= maxTries) { logger.info(`RetryPolicy: Attempt(s) ${attempt} >= maxTries ${maxTries}, no further try.`); return false; } if (error) { for (const retriableError of retriableErrors) { if (error.name.toUpperCase().includes(retriableError) || error.message.toUpperCase().includes(retriableError) || (error.code && error.code.toString().toUpperCase() === retriableError)) { logger.info(`RetryPolicy: Network error ${retriableError} found, will retry.`); return true; } } if ((error === null || error === void 0 ? void 0 : error.code) === "PARSE_ERROR" && (error === null || error === void 0 ? void 0 : error.message.startsWith(`Error "Error: Unclosed root tag`))) { logger.info("RetryPolicy: Incomplete XML response likely due to service timeout, will retry."); return true; } } // If attempt was against the secondary & it returned a StatusNotFound (404), then // the resource was not found. This may be due to replication delay. So, in this // case, we'll never try the secondary again for this operation. if (response || error) { const statusCode = (_b = (_a = response === null || response === void 0 ? void 0 : response.status) !== null && _a !== void 0 ? _a : error === null || error === void 0 ? void 0 : error.statusCode) !== null && _b !== void 0 ? _b : 0; if (!isPrimaryRetry && statusCode === 404) { logger.info(`RetryPolicy: Secondary access with 404, will retry.`); return true; } // Server internal error or server timeout if (statusCode === 503 || statusCode === 500) { logger.info(`RetryPolicy: Will retry for status code ${statusCode}.`); return true; } } // [Copy source error code] Feature is pending on service side, skip retry on copy source error for now. // if (response) { // // Retry select Copy Source Error Codes. // if (response?.status >= 400) { // const copySourceError = response.headers.get(HeaderConstants.X_MS_CopySourceErrorCode); // if (copySourceError !== undefined) { // switch (copySourceError) { // case "InternalError": // case "OperationTimedOut": // case "ServerBusy": // return true; // } // } // } // } return false; } function calculateDelay(isPrimaryRetry, attempt) { let delayTimeInMs = 0; if (isPrimaryRetry) { switch (retryPolicyType) { case StorageRetryPolicyType.EXPONENTIAL: delayTimeInMs = Math.min((Math.pow(2, attempt - 1) - 1) * retryDelayInMs, maxRetryDelayInMs); break; case StorageRetryPolicyType.FIXED: delayTimeInMs = retryDelayInMs; break; } } else { delayTimeInMs = Math.random() * 1000; } logger.info(`RetryPolicy: Delay for ${delayTimeInMs}ms`); return delayTimeInMs; } return { name: storageRetryPolicyName, async sendRequest(request, next) { // Set the server-side timeout query parameter "timeout=[seconds]" if (tryTimeoutInMs) { request.url = setURLParameter(request.url, URLConstants.Parameters.TIMEOUT, String(Math.floor(tryTimeoutInMs / 1000))); } const primaryUrl = request.url; const secondaryUrl = secondaryHost ? 
setURLHost(request.url, secondaryHost) : undefined; let secondaryHas404 = false; let attempt = 1; let retryAgain = true; let response; let error; while (retryAgain) { const isPrimaryRetry = secondaryHas404 || !secondaryUrl || !["GET", "HEAD", "OPTIONS"].includes(request.method) || attempt % 2 === 1; request.url = isPrimaryRetry ? primaryUrl : secondaryUrl; response = undefined; error = undefined; try { logger.info(`RetryPolicy: =====> Try=${attempt} ${isPrimaryRetry ? "Primary" : "Secondary"}`); response = await next(request); secondaryHas404 = secondaryHas404 || (!isPrimaryRetry && response.status === 404); } catch (e) { if (coreRestPipeline.isRestError(e)) { logger.error(`RetryPolicy: Caught error, message: ${e.message}, code: ${e.code}`); error = e; } else { logger.error(`RetryPolicy: Caught error, message: ${coreUtil.getErrorMessage(e)}`); throw e; } } retryAgain = shouldRetry({ isPrimaryRetry, attempt, response, error }); if (retryAgain) { await delay(calculateDelay(isPrimaryRetry, attempt), request.abortSignal, RETRY_ABORT_ERROR); } attempt++; } if (response) { return response; } throw error !== null && error !== void 0 ? error : new coreRestPipeline.RestError("RetryPolicy failed without known error."); }, }; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * The programmatic identifier of the storageSharedKeyCredentialPolicy. */ const storageSharedKeyCredentialPolicyName = "storageSharedKeyCredentialPolicy"; /** * storageSharedKeyCredentialPolicy handles signing requests using storage account keys. */ function storageSharedKeyCredentialPolicy(options) { function signRequest(request) { request.headers.set(HeaderConstants.X_MS_DATE, new Date().toUTCString()); if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) { request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } const stringToSign = [ request.method.toUpperCase(), getHeaderValueToSign(request, HeaderConstants.CONTENT_LANGUAGE), getHeaderValueToSign(request, HeaderConstants.CONTENT_ENCODING), getHeaderValueToSign(request, HeaderConstants.CONTENT_LENGTH), getHeaderValueToSign(request, HeaderConstants.CONTENT_MD5), getHeaderValueToSign(request, HeaderConstants.CONTENT_TYPE), getHeaderValueToSign(request, HeaderConstants.DATE), getHeaderValueToSign(request, HeaderConstants.IF_MODIFIED_SINCE), getHeaderValueToSign(request, HeaderConstants.IF_MATCH), getHeaderValueToSign(request, HeaderConstants.IF_NONE_MATCH), getHeaderValueToSign(request, HeaderConstants.IF_UNMODIFIED_SINCE), getHeaderValueToSign(request, HeaderConstants.RANGE), ].join("\n") + "\n" + getCanonicalizedHeadersString(request) + getCanonicalizedResourceString(request); const signature = crypto.createHmac("sha256", options.accountKey) .update(stringToSign, "utf8") .digest("base64"); request.headers.set(HeaderConstants.AUTHORIZATION, `SharedKey ${options.accountName}:${signature}`); // console.log(`[URL]:${request.url}`); // console.log(`[HEADERS]:${request.headers.toString()}`); // console.log(`[STRING TO SIGN]:${JSON.stringify(stringToSign)}`); // console.log(`[KEY]: ${request.headers.get(HeaderConstants.AUTHORIZATION)}`); } /** * Retrieve header value according to shared key sign rules. 
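 *
 * For example, a Content-Length header of "0" contributes an empty string to the string-to-sign
 * (per the 2015-02-21+ rule noted below), while a Content-Length of "1024" contributes "1024".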
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key */ function getHeaderValueToSign(request, headerName) { const value = request.headers.get(headerName); if (!value) { return ""; } // When using version 2015-02-21 or later, if Content-Length is zero, then // set the Content-Length part of the StringToSign to an empty string. // https://docs.microsoft.com/en-us/rest/api/storageservices/authenticate-with-shared-key if (headerName === HeaderConstants.CONTENT_LENGTH && value === "0") { return ""; } return value; } /** * To construct the CanonicalizedHeaders portion of the signature string, follow these steps: * 1. Retrieve all headers for the resource that begin with x-ms-, including the x-ms-date header. * 2. Convert each HTTP header name to lowercase. * 3. Sort the headers lexicographically by header name, in ascending order. * Each header may appear only once in the string. * 4. Replace any linear whitespace in the header value with a single space. * 5. Trim any whitespace around the colon in the header. * 6. Finally, append a new-line character to each canonicalized header in the resulting list. * Construct the CanonicalizedHeaders string by concatenating all headers in this list into a single string. * */ function getCanonicalizedHeadersString(request) { let headersArray = []; for (const [name, value] of request.headers) { if (name.toLowerCase().startsWith(HeaderConstants.PREFIX_FOR_STORAGE)) { headersArray.push({ name, value }); } } headersArray.sort((a, b) => { return compareHeader(a.name.toLowerCase(), b.name.toLowerCase()); }); // Remove duplicate headers headersArray = headersArray.filter((value, index, array) => { if (index > 0 && value.name.toLowerCase() === array[index - 1].name.toLowerCase()) { return false; } return true; }); let canonicalizedHeadersStringToSign = ""; headersArray.forEach((header) => { canonicalizedHeadersStringToSign += `${header.name .toLowerCase() .trimRight()}:${header.value.trimLeft()}\n`; }); return canonicalizedHeadersStringToSign; } function getCanonicalizedResourceString(request) { const path = getURLPath(request.url) || "/"; let canonicalizedResourceString = ""; canonicalizedResourceString += `/${options.accountName}${path}`; const queries = getURLQueries(request.url); const lowercaseQueries = {}; if (queries) { const queryKeys = []; for (const key in queries) { if (Object.prototype.hasOwnProperty.call(queries, key)) { const lowercaseKey = key.toLowerCase(); lowercaseQueries[lowercaseKey] = queries[key]; queryKeys.push(lowercaseKey); } } queryKeys.sort(); for (const key of queryKeys) { canonicalizedResourceString += `\n${key}:${decodeURIComponent(lowercaseQueries[key])}`; } } return canonicalizedResourceString; } return { name: storageSharedKeyCredentialPolicyName, async sendRequest(request, next) { signRequest(request); return next(request); }, }; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * StorageBrowserPolicy will handle differences between Node.js and browser runtime, including: * * 1. Browsers cache GET/HEAD requests by adding conditional headers such as 'IF_MODIFIED_SINCE'. * StorageBrowserPolicy is a policy used to add a timestamp query to GET/HEAD request URL * thus avoid the browser cache. * * 2. Remove cookie header for security * * 3. Remove content-length header to avoid browsers warning */ class StorageBrowserPolicy extends BaseRequestPolicy { /** * Creates an instance of StorageBrowserPolicy. 
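 *
 * In a browser, sendRequest below rewrites a GET such as "https://account.blob.core.windows.net/c/b" to
 * "https://account.blob.core.windows.net/c/b?_=1735689600000" (an illustrative timestamp) and removes the
 * Cookie and Content-Length headers; in Node.js the request is passed through unchanged.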
* @param nextPolicy - * @param options - */ // The base class has a protected constructor. Adding a public one to enable constructing of this class. /* eslint-disable-next-line @typescript-eslint/no-useless-constructor*/ constructor(nextPolicy, options) { super(nextPolicy, options); } /** * Sends out request. * * @param request - */ async sendRequest(request) { if (coreUtil.isNode) { return this._nextPolicy.sendRequest(request); } if (request.method.toUpperCase() === "GET" || request.method.toUpperCase() === "HEAD") { request.url = setURLParameter(request.url, URLConstants.Parameters.FORCE_BROWSER_NO_CACHE, new Date().getTime().toString()); } request.headers.remove(HeaderConstants.COOKIE); // According to XHR standards, content-length should be fully controlled by browsers request.headers.remove(HeaderConstants.CONTENT_LENGTH); return this._nextPolicy.sendRequest(request); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * StorageBrowserPolicyFactory is a factory class helping generating StorageBrowserPolicy objects. */ class StorageBrowserPolicyFactory { /** * Creates a StorageBrowserPolicyFactory object. * * @param nextPolicy - * @param options - */ create(nextPolicy, options) { return new StorageBrowserPolicy(nextPolicy, options); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * The programmatic identifier of the storageCorrectContentLengthPolicy. */ const storageCorrectContentLengthPolicyName = "StorageCorrectContentLengthPolicy"; /** * storageCorrectContentLengthPolicy to correctly set Content-Length header with request body length. */ function storageCorrectContentLengthPolicy() { function correctContentLength(request) { if (request.body && (typeof request.body === "string" || Buffer.isBuffer(request.body)) && request.body.length > 0) { request.headers.set(HeaderConstants.CONTENT_LENGTH, Buffer.byteLength(request.body)); } } return { name: storageCorrectContentLengthPolicyName, async sendRequest(request, next) { correctContentLength(request); return next(request); }, }; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * A helper to decide if a given argument satisfies the Pipeline contract * @param pipeline - An argument that may be a Pipeline * @returns true when the argument satisfies the Pipeline contract */ function isPipelineLike(pipeline) { if (!pipeline || typeof pipeline !== "object") { return false; } const castPipeline = pipeline; return (Array.isArray(castPipeline.factories) && typeof castPipeline.options === "object" && typeof castPipeline.toServiceClientOptions === "function"); } /** * A Pipeline class containing HTTP request policies. * You can create a default Pipeline by calling {@link newPipeline}. * Or you can create a Pipeline with your own policies by the constructor of Pipeline. * * Refer to {@link newPipeline} and provided policies before implementing your * customized Pipeline. */ class Pipeline { /** * Creates an instance of Pipeline. Customize HTTPClient by implementing IHttpClient interface. * * @param factories - * @param options - */ constructor(factories, options = {}) { this.factories = factories; this.options = options; } /** * Transfer Pipeline object to ServiceClientOptions object which is required by * ServiceClient constructor. * * @returns The ServiceClientOptions object from this Pipeline. 
*/ toServiceClientOptions() { return { httpClient: this.options.httpClient, requestPolicyFactories: this.factories, }; } } /** * Creates a new Pipeline object with Credential provided. * * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. * @param pipelineOptions - Optional. Options. * @returns A new Pipeline object. */ function newPipeline(credential, pipelineOptions = {}) { if (!credential) { credential = new AnonymousCredential(); } const pipeline = new Pipeline([], pipelineOptions); pipeline._credential = credential; return pipeline; } function processDownlevelPipeline(pipeline) { const knownFactoryFunctions = [ isAnonymousCredential, isStorageSharedKeyCredential, isCoreHttpBearerTokenFactory, isStorageBrowserPolicyFactory, isStorageRetryPolicyFactory, isStorageTelemetryPolicyFactory, isCoreHttpPolicyFactory, ]; if (pipeline.factories.length) { const novelFactories = pipeline.factories.filter((factory) => { return !knownFactoryFunctions.some((knownFactory) => knownFactory(factory)); }); if (novelFactories.length) { const hasInjector = novelFactories.some((factory) => isInjectorPolicyFactory(factory)); // if there are any left over, wrap in a requestPolicyFactoryPolicy return { wrappedPolicies: coreHttpCompat.createRequestPolicyFactoryPolicy(novelFactories), afterRetry: hasInjector, }; } } return undefined; } function getCoreClientOptions(pipeline) { var _a; const _b = pipeline.options, { httpClient: v1Client } = _b, restOptions = tslib.__rest(_b, ["httpClient"]); let httpClient = pipeline._coreHttpClient; if (!httpClient) { httpClient = v1Client ? coreHttpCompat.convertHttpClient(v1Client) : getCachedDefaultHttpClient(); pipeline._coreHttpClient = httpClient; } let corePipeline = pipeline._corePipeline; if (!corePipeline) { const packageDetails = `azsdk-js-azure-storage-blob/${SDK_VERSION}`; const userAgentPrefix = restOptions.userAgentOptions && restOptions.userAgentOptions.userAgentPrefix ? `${restOptions.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; corePipeline = coreClient.createClientPipeline(Object.assign(Object.assign({}, restOptions), { loggingOptions: { additionalAllowedHeaderNames: StorageBlobLoggingAllowedHeaderNames, additionalAllowedQueryParameters: StorageBlobLoggingAllowedQueryParameters, logger: logger.info, }, userAgentOptions: { userAgentPrefix, }, serializationOptions: { stringifyXML: coreXml.stringifyXML, serializerOptions: { xml: { // Use customized XML char key of "#" so we can deserialize metadata // with "_" key xmlCharKey: "#", }, }, }, deserializationOptions: { parseXML: coreXml.parseXML, serializerOptions: { xml: { // Use customized XML char key of "#" so we can deserialize metadata // with "_" key xmlCharKey: "#", }, }, } })); corePipeline.removePolicy({ phase: "Retry" }); corePipeline.removePolicy({ name: coreRestPipeline.decompressResponsePolicyName }); corePipeline.addPolicy(storageCorrectContentLengthPolicy()); corePipeline.addPolicy(storageRetryPolicy(restOptions.retryOptions), { phase: "Retry" }); corePipeline.addPolicy(storageBrowserPolicy()); const downlevelResults = processDownlevelPipeline(pipeline); if (downlevelResults) { corePipeline.addPolicy(downlevelResults.wrappedPolicies, downlevelResults.afterRetry ? 
{ afterPhase: "Retry" } : undefined); } const credential = getCredentialFromPipeline(pipeline); if (coreAuth.isTokenCredential(credential)) { corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ credential, scopes: (_a = restOptions.audience) !== null && _a !== void 0 ? _a : StorageOAuthScopes, challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, }), { phase: "Sign" }); } else if (credential instanceof StorageSharedKeyCredential) { corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ accountName: credential.accountName, accountKey: credential.accountKey, }), { phase: "Sign" }); } pipeline._corePipeline = corePipeline; } return Object.assign(Object.assign({}, restOptions), { allowInsecureConnection: true, httpClient, pipeline: corePipeline }); } function getCredentialFromPipeline(pipeline) { // see if we squirreled one away on the type itself if (pipeline._credential) { return pipeline._credential; } // if it came from another package, loop over the factories and look for one like before let credential = new AnonymousCredential(); for (const factory of pipeline.factories) { if (coreAuth.isTokenCredential(factory.credential)) { // Only works if the factory has been attached a "credential" property. // We do that in newPipeline() when using TokenCredential. credential = factory.credential; } else if (isStorageSharedKeyCredential(factory)) { return factory; } } return credential; } function isStorageSharedKeyCredential(factory) { if (factory instanceof StorageSharedKeyCredential) { return true; } return factory.constructor.name === "StorageSharedKeyCredential"; } function isAnonymousCredential(factory) { if (factory instanceof AnonymousCredential) { return true; } return factory.constructor.name === "AnonymousCredential"; } function isCoreHttpBearerTokenFactory(factory) { return coreAuth.isTokenCredential(factory.credential); } function isStorageBrowserPolicyFactory(factory) { if (factory instanceof StorageBrowserPolicyFactory) { return true; } return factory.constructor.name === "StorageBrowserPolicyFactory"; } function isStorageRetryPolicyFactory(factory) { if (factory instanceof StorageRetryPolicyFactory) { return true; } return factory.constructor.name === "StorageRetryPolicyFactory"; } function isStorageTelemetryPolicyFactory(factory) { return factory.constructor.name === "TelemetryPolicyFactory"; } function isInjectorPolicyFactory(factory) { return factory.constructor.name === "InjectorPolicyFactory"; } function isCoreHttpPolicyFactory(factory) { const knownPolicies = [ "GenerateClientRequestIdPolicy", "TracingPolicy", "LogPolicy", "ProxyPolicy", "DisableResponseDecompressionPolicy", "KeepAlivePolicy", "DeserializationPolicy", ]; const mockHttpClient = { sendRequest: async (request) => { return { request, headers: request.headers.clone(), status: 500, }; }, }; const mockRequestPolicyOptions = { log(_logLevel, _message) { /* do nothing */ }, shouldLog(_logLevel) { return false; }, }; const policyInstance = factory.create(mockHttpClient, mockRequestPolicyOptions); const policyName = policyInstance.constructor.name; // bundlers sometimes add a custom suffix to the class name to make it unique return knownPolicies.some((knownPolicyName) => { return policyName.startsWith(knownPolicyName); }); } /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. 
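 *
 * Editorial note (not generated output): the constants that follow are serialization "mappers"
 * consumed by the @azure/core-client serializer. Each mapper records the wire name
 * (serializedName / xmlName), the value type (type.name, e.g. "String", "Composite", "Sequence"),
 * and, for composite types, the per-field mapping under modelProperties; XML-specific flags such
 * as xmlIsWrapped, xmlElementName, and xmlIsAttribute control how the XML payloads are built and
 * parsed. A sketch of how such a mapper is typically consumed (assuming core-client's
 * createSerializer helper; not part of the bundled source):
 *
 *   const serializer = coreClient.createSerializer({ BlobServiceProperties }, true);
 *   const body = serializer.serialize(BlobServiceProperties, serviceProperties, "StorageServiceProperties");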
*/ const BlobServiceProperties = { serializedName: "BlobServiceProperties", xmlName: "StorageServiceProperties", type: { name: "Composite", className: "BlobServiceProperties", modelProperties: { blobAnalyticsLogging: { serializedName: "Logging", xmlName: "Logging", type: { name: "Composite", className: "Logging", }, }, hourMetrics: { serializedName: "HourMetrics", xmlName: "HourMetrics", type: { name: "Composite", className: "Metrics", }, }, minuteMetrics: { serializedName: "MinuteMetrics", xmlName: "MinuteMetrics", type: { name: "Composite", className: "Metrics", }, }, cors: { serializedName: "Cors", xmlName: "Cors", xmlIsWrapped: true, xmlElementName: "CorsRule", type: { name: "Sequence", element: { type: { name: "Composite", className: "CorsRule", }, }, }, }, defaultServiceVersion: { serializedName: "DefaultServiceVersion", xmlName: "DefaultServiceVersion", type: { name: "String", }, }, deleteRetentionPolicy: { serializedName: "DeleteRetentionPolicy", xmlName: "DeleteRetentionPolicy", type: { name: "Composite", className: "RetentionPolicy", }, }, staticWebsite: { serializedName: "StaticWebsite", xmlName: "StaticWebsite", type: { name: "Composite", className: "StaticWebsite", }, }, }, }, }; const Logging = { serializedName: "Logging", type: { name: "Composite", className: "Logging", modelProperties: { version: { serializedName: "Version", required: true, xmlName: "Version", type: { name: "String", }, }, deleteProperty: { serializedName: "Delete", required: true, xmlName: "Delete", type: { name: "Boolean", }, }, read: { serializedName: "Read", required: true, xmlName: "Read", type: { name: "Boolean", }, }, write: { serializedName: "Write", required: true, xmlName: "Write", type: { name: "Boolean", }, }, retentionPolicy: { serializedName: "RetentionPolicy", xmlName: "RetentionPolicy", type: { name: "Composite", className: "RetentionPolicy", }, }, }, }, }; const RetentionPolicy = { serializedName: "RetentionPolicy", type: { name: "Composite", className: "RetentionPolicy", modelProperties: { enabled: { serializedName: "Enabled", required: true, xmlName: "Enabled", type: { name: "Boolean", }, }, days: { constraints: { InclusiveMinimum: 1, }, serializedName: "Days", xmlName: "Days", type: { name: "Number", }, }, }, }, }; const Metrics = { serializedName: "Metrics", type: { name: "Composite", className: "Metrics", modelProperties: { version: { serializedName: "Version", xmlName: "Version", type: { name: "String", }, }, enabled: { serializedName: "Enabled", required: true, xmlName: "Enabled", type: { name: "Boolean", }, }, includeAPIs: { serializedName: "IncludeAPIs", xmlName: "IncludeAPIs", type: { name: "Boolean", }, }, retentionPolicy: { serializedName: "RetentionPolicy", xmlName: "RetentionPolicy", type: { name: "Composite", className: "RetentionPolicy", }, }, }, }, }; const CorsRule = { serializedName: "CorsRule", type: { name: "Composite", className: "CorsRule", modelProperties: { allowedOrigins: { serializedName: "AllowedOrigins", required: true, xmlName: "AllowedOrigins", type: { name: "String", }, }, allowedMethods: { serializedName: "AllowedMethods", required: true, xmlName: "AllowedMethods", type: { name: "String", }, }, allowedHeaders: { serializedName: "AllowedHeaders", required: true, xmlName: "AllowedHeaders", type: { name: "String", }, }, exposedHeaders: { serializedName: "ExposedHeaders", required: true, xmlName: "ExposedHeaders", type: { name: "String", }, }, maxAgeInSeconds: { constraints: { InclusiveMinimum: 0, }, serializedName: "MaxAgeInSeconds", required: true, xmlName: 
"MaxAgeInSeconds", type: { name: "Number", }, }, }, }, }; const StaticWebsite = { serializedName: "StaticWebsite", type: { name: "Composite", className: "StaticWebsite", modelProperties: { enabled: { serializedName: "Enabled", required: true, xmlName: "Enabled", type: { name: "Boolean", }, }, indexDocument: { serializedName: "IndexDocument", xmlName: "IndexDocument", type: { name: "String", }, }, errorDocument404Path: { serializedName: "ErrorDocument404Path", xmlName: "ErrorDocument404Path", type: { name: "String", }, }, defaultIndexDocumentPath: { serializedName: "DefaultIndexDocumentPath", xmlName: "DefaultIndexDocumentPath", type: { name: "String", }, }, }, }, }; const StorageError = { serializedName: "StorageError", type: { name: "Composite", className: "StorageError", modelProperties: { message: { serializedName: "Message", xmlName: "Message", type: { name: "String", }, }, code: { serializedName: "Code", xmlName: "Code", type: { name: "String", }, }, authenticationErrorDetail: { serializedName: "AuthenticationErrorDetail", xmlName: "AuthenticationErrorDetail", type: { name: "String", }, }, }, }, }; const BlobServiceStatistics = { serializedName: "BlobServiceStatistics", xmlName: "StorageServiceStats", type: { name: "Composite", className: "BlobServiceStatistics", modelProperties: { geoReplication: { serializedName: "GeoReplication", xmlName: "GeoReplication", type: { name: "Composite", className: "GeoReplication", }, }, }, }, }; const GeoReplication = { serializedName: "GeoReplication", type: { name: "Composite", className: "GeoReplication", modelProperties: { status: { serializedName: "Status", required: true, xmlName: "Status", type: { name: "Enum", allowedValues: ["live", "bootstrap", "unavailable"], }, }, lastSyncOn: { serializedName: "LastSyncTime", required: true, xmlName: "LastSyncTime", type: { name: "DateTimeRfc1123", }, }, }, }, }; const ListContainersSegmentResponse = { serializedName: "ListContainersSegmentResponse", xmlName: "EnumerationResults", type: { name: "Composite", className: "ListContainersSegmentResponse", modelProperties: { serviceEndpoint: { serializedName: "ServiceEndpoint", required: true, xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { name: "String", }, }, prefix: { serializedName: "Prefix", xmlName: "Prefix", type: { name: "String", }, }, marker: { serializedName: "Marker", xmlName: "Marker", type: { name: "String", }, }, maxPageSize: { serializedName: "MaxResults", xmlName: "MaxResults", type: { name: "Number", }, }, containerItems: { serializedName: "ContainerItems", required: true, xmlName: "Containers", xmlIsWrapped: true, xmlElementName: "Container", type: { name: "Sequence", element: { type: { name: "Composite", className: "ContainerItem", }, }, }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { name: "String", }, }, }, }, }; const ContainerItem = { serializedName: "ContainerItem", xmlName: "Container", type: { name: "Composite", className: "ContainerItem", modelProperties: { name: { serializedName: "Name", required: true, xmlName: "Name", type: { name: "String", }, }, deleted: { serializedName: "Deleted", xmlName: "Deleted", type: { name: "Boolean", }, }, version: { serializedName: "Version", xmlName: "Version", type: { name: "String", }, }, properties: { serializedName: "Properties", xmlName: "Properties", type: { name: "Composite", className: "ContainerProperties", }, }, metadata: { serializedName: "Metadata", xmlName: "Metadata", type: { name: "Dictionary", value: { type: { name: "String" } }, }, }, 
}, }, }; const ContainerProperties = { serializedName: "ContainerProperties", type: { name: "Composite", className: "ContainerProperties", modelProperties: { lastModified: { serializedName: "Last-Modified", required: true, xmlName: "Last-Modified", type: { name: "DateTimeRfc1123", }, }, etag: { serializedName: "Etag", required: true, xmlName: "Etag", type: { name: "String", }, }, leaseStatus: { serializedName: "LeaseStatus", xmlName: "LeaseStatus", type: { name: "Enum", allowedValues: ["locked", "unlocked"], }, }, leaseState: { serializedName: "LeaseState", xmlName: "LeaseState", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken", ], }, }, leaseDuration: { serializedName: "LeaseDuration", xmlName: "LeaseDuration", type: { name: "Enum", allowedValues: ["infinite", "fixed"], }, }, publicAccess: { serializedName: "PublicAccess", xmlName: "PublicAccess", type: { name: "Enum", allowedValues: ["container", "blob"], }, }, hasImmutabilityPolicy: { serializedName: "HasImmutabilityPolicy", xmlName: "HasImmutabilityPolicy", type: { name: "Boolean", }, }, hasLegalHold: { serializedName: "HasLegalHold", xmlName: "HasLegalHold", type: { name: "Boolean", }, }, defaultEncryptionScope: { serializedName: "DefaultEncryptionScope", xmlName: "DefaultEncryptionScope", type: { name: "String", }, }, preventEncryptionScopeOverride: { serializedName: "DenyEncryptionScopeOverride", xmlName: "DenyEncryptionScopeOverride", type: { name: "Boolean", }, }, deletedOn: { serializedName: "DeletedTime", xmlName: "DeletedTime", type: { name: "DateTimeRfc1123", }, }, remainingRetentionDays: { serializedName: "RemainingRetentionDays", xmlName: "RemainingRetentionDays", type: { name: "Number", }, }, isImmutableStorageWithVersioningEnabled: { serializedName: "ImmutableStorageWithVersioningEnabled", xmlName: "ImmutableStorageWithVersioningEnabled", type: { name: "Boolean", }, }, }, }, }; const KeyInfo = { serializedName: "KeyInfo", type: { name: "Composite", className: "KeyInfo", modelProperties: { startsOn: { serializedName: "Start", required: true, xmlName: "Start", type: { name: "String", }, }, expiresOn: { serializedName: "Expiry", required: true, xmlName: "Expiry", type: { name: "String", }, }, }, }, }; const UserDelegationKey = { serializedName: "UserDelegationKey", type: { name: "Composite", className: "UserDelegationKey", modelProperties: { signedObjectId: { serializedName: "SignedOid", required: true, xmlName: "SignedOid", type: { name: "String", }, }, signedTenantId: { serializedName: "SignedTid", required: true, xmlName: "SignedTid", type: { name: "String", }, }, signedStartsOn: { serializedName: "SignedStart", required: true, xmlName: "SignedStart", type: { name: "String", }, }, signedExpiresOn: { serializedName: "SignedExpiry", required: true, xmlName: "SignedExpiry", type: { name: "String", }, }, signedService: { serializedName: "SignedService", required: true, xmlName: "SignedService", type: { name: "String", }, }, signedVersion: { serializedName: "SignedVersion", required: true, xmlName: "SignedVersion", type: { name: "String", }, }, value: { serializedName: "Value", required: true, xmlName: "Value", type: { name: "String", }, }, }, }, }; const FilterBlobSegment = { serializedName: "FilterBlobSegment", xmlName: "EnumerationResults", type: { name: "Composite", className: "FilterBlobSegment", modelProperties: { serviceEndpoint: { serializedName: "ServiceEndpoint", required: true, xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { name: "String", }, }, where: { 
serializedName: "Where", required: true, xmlName: "Where", type: { name: "String", }, }, blobs: { serializedName: "Blobs", required: true, xmlName: "Blobs", xmlIsWrapped: true, xmlElementName: "Blob", type: { name: "Sequence", element: { type: { name: "Composite", className: "FilterBlobItem", }, }, }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { name: "String", }, }, }, }, }; const FilterBlobItem = { serializedName: "FilterBlobItem", xmlName: "Blob", type: { name: "Composite", className: "FilterBlobItem", modelProperties: { name: { serializedName: "Name", required: true, xmlName: "Name", type: { name: "String", }, }, containerName: { serializedName: "ContainerName", required: true, xmlName: "ContainerName", type: { name: "String", }, }, tags: { serializedName: "Tags", xmlName: "Tags", type: { name: "Composite", className: "BlobTags", }, }, }, }, }; const BlobTags = { serializedName: "BlobTags", xmlName: "Tags", type: { name: "Composite", className: "BlobTags", modelProperties: { blobTagSet: { serializedName: "BlobTagSet", required: true, xmlName: "TagSet", xmlIsWrapped: true, xmlElementName: "Tag", type: { name: "Sequence", element: { type: { name: "Composite", className: "BlobTag", }, }, }, }, }, }, }; const BlobTag = { serializedName: "BlobTag", xmlName: "Tag", type: { name: "Composite", className: "BlobTag", modelProperties: { key: { serializedName: "Key", required: true, xmlName: "Key", type: { name: "String", }, }, value: { serializedName: "Value", required: true, xmlName: "Value", type: { name: "String", }, }, }, }, }; const SignedIdentifier = { serializedName: "SignedIdentifier", xmlName: "SignedIdentifier", type: { name: "Composite", className: "SignedIdentifier", modelProperties: { id: { serializedName: "Id", required: true, xmlName: "Id", type: { name: "String", }, }, accessPolicy: { serializedName: "AccessPolicy", xmlName: "AccessPolicy", type: { name: "Composite", className: "AccessPolicy", }, }, }, }, }; const AccessPolicy = { serializedName: "AccessPolicy", type: { name: "Composite", className: "AccessPolicy", modelProperties: { startsOn: { serializedName: "Start", xmlName: "Start", type: { name: "String", }, }, expiresOn: { serializedName: "Expiry", xmlName: "Expiry", type: { name: "String", }, }, permissions: { serializedName: "Permission", xmlName: "Permission", type: { name: "String", }, }, }, }, }; const ListBlobsFlatSegmentResponse = { serializedName: "ListBlobsFlatSegmentResponse", xmlName: "EnumerationResults", type: { name: "Composite", className: "ListBlobsFlatSegmentResponse", modelProperties: { serviceEndpoint: { serializedName: "ServiceEndpoint", required: true, xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { name: "String", }, }, containerName: { serializedName: "ContainerName", required: true, xmlName: "ContainerName", xmlIsAttribute: true, type: { name: "String", }, }, prefix: { serializedName: "Prefix", xmlName: "Prefix", type: { name: "String", }, }, marker: { serializedName: "Marker", xmlName: "Marker", type: { name: "String", }, }, maxPageSize: { serializedName: "MaxResults", xmlName: "MaxResults", type: { name: "Number", }, }, segment: { serializedName: "Segment", xmlName: "Blobs", type: { name: "Composite", className: "BlobFlatListSegment", }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { name: "String", }, }, }, }, }; const BlobFlatListSegment = { serializedName: "BlobFlatListSegment", xmlName: "Blobs", type: { name: "Composite", className: "BlobFlatListSegment", 
modelProperties: { blobItems: { serializedName: "BlobItems", required: true, xmlName: "BlobItems", xmlElementName: "Blob", type: { name: "Sequence", element: { type: { name: "Composite", className: "BlobItemInternal", }, }, }, }, }, }, }; const BlobItemInternal = { serializedName: "BlobItemInternal", xmlName: "Blob", type: { name: "Composite", className: "BlobItemInternal", modelProperties: { name: { serializedName: "Name", xmlName: "Name", type: { name: "Composite", className: "BlobName", }, }, deleted: { serializedName: "Deleted", required: true, xmlName: "Deleted", type: { name: "Boolean", }, }, snapshot: { serializedName: "Snapshot", required: true, xmlName: "Snapshot", type: { name: "String", }, }, versionId: { serializedName: "VersionId", xmlName: "VersionId", type: { name: "String", }, }, isCurrentVersion: { serializedName: "IsCurrentVersion", xmlName: "IsCurrentVersion", type: { name: "Boolean", }, }, properties: { serializedName: "Properties", xmlName: "Properties", type: { name: "Composite", className: "BlobPropertiesInternal", }, }, metadata: { serializedName: "Metadata", xmlName: "Metadata", type: { name: "Dictionary", value: { type: { name: "String" } }, }, }, blobTags: { serializedName: "BlobTags", xmlName: "Tags", type: { name: "Composite", className: "BlobTags", }, }, objectReplicationMetadata: { serializedName: "ObjectReplicationMetadata", xmlName: "OrMetadata", type: { name: "Dictionary", value: { type: { name: "String" } }, }, }, hasVersionsOnly: { serializedName: "HasVersionsOnly", xmlName: "HasVersionsOnly", type: { name: "Boolean", }, }, }, }, }; const BlobName = { serializedName: "BlobName", type: { name: "Composite", className: "BlobName", modelProperties: { encoded: { serializedName: "Encoded", xmlName: "Encoded", xmlIsAttribute: true, type: { name: "Boolean", }, }, content: { serializedName: "content", xmlName: "content", xmlIsMsText: true, type: { name: "String", }, }, }, }, }; const BlobPropertiesInternal = { serializedName: "BlobPropertiesInternal", xmlName: "Properties", type: { name: "Composite", className: "BlobPropertiesInternal", modelProperties: { createdOn: { serializedName: "Creation-Time", xmlName: "Creation-Time", type: { name: "DateTimeRfc1123", }, }, lastModified: { serializedName: "Last-Modified", required: true, xmlName: "Last-Modified", type: { name: "DateTimeRfc1123", }, }, etag: { serializedName: "Etag", required: true, xmlName: "Etag", type: { name: "String", }, }, contentLength: { serializedName: "Content-Length", xmlName: "Content-Length", type: { name: "Number", }, }, contentType: { serializedName: "Content-Type", xmlName: "Content-Type", type: { name: "String", }, }, contentEncoding: { serializedName: "Content-Encoding", xmlName: "Content-Encoding", type: { name: "String", }, }, contentLanguage: { serializedName: "Content-Language", xmlName: "Content-Language", type: { name: "String", }, }, contentMD5: { serializedName: "Content-MD5", xmlName: "Content-MD5", type: { name: "ByteArray", }, }, contentDisposition: { serializedName: "Content-Disposition", xmlName: "Content-Disposition", type: { name: "String", }, }, cacheControl: { serializedName: "Cache-Control", xmlName: "Cache-Control", type: { name: "String", }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, blobType: { serializedName: "BlobType", xmlName: "BlobType", type: { name: "Enum", allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], }, }, leaseStatus: { serializedName: "LeaseStatus", 
xmlName: "LeaseStatus", type: { name: "Enum", allowedValues: ["locked", "unlocked"], }, }, leaseState: { serializedName: "LeaseState", xmlName: "LeaseState", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken", ], }, }, leaseDuration: { serializedName: "LeaseDuration", xmlName: "LeaseDuration", type: { name: "Enum", allowedValues: ["infinite", "fixed"], }, }, copyId: { serializedName: "CopyId", xmlName: "CopyId", type: { name: "String", }, }, copyStatus: { serializedName: "CopyStatus", xmlName: "CopyStatus", type: { name: "Enum", allowedValues: ["pending", "success", "aborted", "failed"], }, }, copySource: { serializedName: "CopySource", xmlName: "CopySource", type: { name: "String", }, }, copyProgress: { serializedName: "CopyProgress", xmlName: "CopyProgress", type: { name: "String", }, }, copyCompletedOn: { serializedName: "CopyCompletionTime", xmlName: "CopyCompletionTime", type: { name: "DateTimeRfc1123", }, }, copyStatusDescription: { serializedName: "CopyStatusDescription", xmlName: "CopyStatusDescription", type: { name: "String", }, }, serverEncrypted: { serializedName: "ServerEncrypted", xmlName: "ServerEncrypted", type: { name: "Boolean", }, }, incrementalCopy: { serializedName: "IncrementalCopy", xmlName: "IncrementalCopy", type: { name: "Boolean", }, }, destinationSnapshot: { serializedName: "DestinationSnapshot", xmlName: "DestinationSnapshot", type: { name: "String", }, }, deletedOn: { serializedName: "DeletedTime", xmlName: "DeletedTime", type: { name: "DateTimeRfc1123", }, }, remainingRetentionDays: { serializedName: "RemainingRetentionDays", xmlName: "RemainingRetentionDays", type: { name: "Number", }, }, accessTier: { serializedName: "AccessTier", xmlName: "AccessTier", type: { name: "Enum", allowedValues: [ "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", "Cold", ], }, }, accessTierInferred: { serializedName: "AccessTierInferred", xmlName: "AccessTierInferred", type: { name: "Boolean", }, }, archiveStatus: { serializedName: "ArchiveStatus", xmlName: "ArchiveStatus", type: { name: "Enum", allowedValues: [ "rehydrate-pending-to-hot", "rehydrate-pending-to-cool", "rehydrate-pending-to-cold", ], }, }, customerProvidedKeySha256: { serializedName: "CustomerProvidedKeySha256", xmlName: "CustomerProvidedKeySha256", type: { name: "String", }, }, encryptionScope: { serializedName: "EncryptionScope", xmlName: "EncryptionScope", type: { name: "String", }, }, accessTierChangedOn: { serializedName: "AccessTierChangeTime", xmlName: "AccessTierChangeTime", type: { name: "DateTimeRfc1123", }, }, tagCount: { serializedName: "TagCount", xmlName: "TagCount", type: { name: "Number", }, }, expiresOn: { serializedName: "Expiry-Time", xmlName: "Expiry-Time", type: { name: "DateTimeRfc1123", }, }, isSealed: { serializedName: "Sealed", xmlName: "Sealed", type: { name: "Boolean", }, }, rehydratePriority: { serializedName: "RehydratePriority", xmlName: "RehydratePriority", type: { name: "Enum", allowedValues: ["High", "Standard"], }, }, lastAccessedOn: { serializedName: "LastAccessTime", xmlName: "LastAccessTime", type: { name: "DateTimeRfc1123", }, }, immutabilityPolicyExpiresOn: { serializedName: "ImmutabilityPolicyUntilDate", xmlName: "ImmutabilityPolicyUntilDate", type: { name: "DateTimeRfc1123", }, }, immutabilityPolicyMode: { serializedName: "ImmutabilityPolicyMode", xmlName: "ImmutabilityPolicyMode", type: { name: "Enum", allowedValues: ["Mutable", "Unlocked", "Locked"], }, }, legalHold: { 
serializedName: "LegalHold", xmlName: "LegalHold", type: { name: "Boolean", }, }, }, }, }; const ListBlobsHierarchySegmentResponse = { serializedName: "ListBlobsHierarchySegmentResponse", xmlName: "EnumerationResults", type: { name: "Composite", className: "ListBlobsHierarchySegmentResponse", modelProperties: { serviceEndpoint: { serializedName: "ServiceEndpoint", required: true, xmlName: "ServiceEndpoint", xmlIsAttribute: true, type: { name: "String", }, }, containerName: { serializedName: "ContainerName", required: true, xmlName: "ContainerName", xmlIsAttribute: true, type: { name: "String", }, }, prefix: { serializedName: "Prefix", xmlName: "Prefix", type: { name: "String", }, }, marker: { serializedName: "Marker", xmlName: "Marker", type: { name: "String", }, }, maxPageSize: { serializedName: "MaxResults", xmlName: "MaxResults", type: { name: "Number", }, }, delimiter: { serializedName: "Delimiter", xmlName: "Delimiter", type: { name: "String", }, }, segment: { serializedName: "Segment", xmlName: "Blobs", type: { name: "Composite", className: "BlobHierarchyListSegment", }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { name: "String", }, }, }, }, }; const BlobHierarchyListSegment = { serializedName: "BlobHierarchyListSegment", xmlName: "Blobs", type: { name: "Composite", className: "BlobHierarchyListSegment", modelProperties: { blobPrefixes: { serializedName: "BlobPrefixes", xmlName: "BlobPrefixes", xmlElementName: "BlobPrefix", type: { name: "Sequence", element: { type: { name: "Composite", className: "BlobPrefix", }, }, }, }, blobItems: { serializedName: "BlobItems", required: true, xmlName: "BlobItems", xmlElementName: "Blob", type: { name: "Sequence", element: { type: { name: "Composite", className: "BlobItemInternal", }, }, }, }, }, }, }; const BlobPrefix = { serializedName: "BlobPrefix", type: { name: "Composite", className: "BlobPrefix", modelProperties: { name: { serializedName: "Name", xmlName: "Name", type: { name: "Composite", className: "BlobName", }, }, }, }, }; const BlockLookupList = { serializedName: "BlockLookupList", xmlName: "BlockList", type: { name: "Composite", className: "BlockLookupList", modelProperties: { committed: { serializedName: "Committed", xmlName: "Committed", xmlElementName: "Committed", type: { name: "Sequence", element: { type: { name: "String", }, }, }, }, uncommitted: { serializedName: "Uncommitted", xmlName: "Uncommitted", xmlElementName: "Uncommitted", type: { name: "Sequence", element: { type: { name: "String", }, }, }, }, latest: { serializedName: "Latest", xmlName: "Latest", xmlElementName: "Latest", type: { name: "Sequence", element: { type: { name: "String", }, }, }, }, }, }, }; const BlockList = { serializedName: "BlockList", type: { name: "Composite", className: "BlockList", modelProperties: { committedBlocks: { serializedName: "CommittedBlocks", xmlName: "CommittedBlocks", xmlIsWrapped: true, xmlElementName: "Block", type: { name: "Sequence", element: { type: { name: "Composite", className: "Block", }, }, }, }, uncommittedBlocks: { serializedName: "UncommittedBlocks", xmlName: "UncommittedBlocks", xmlIsWrapped: true, xmlElementName: "Block", type: { name: "Sequence", element: { type: { name: "Composite", className: "Block", }, }, }, }, }, }, }; const Block = { serializedName: "Block", type: { name: "Composite", className: "Block", modelProperties: { name: { serializedName: "Name", required: true, xmlName: "Name", type: { name: "String", }, }, size: { serializedName: "Size", required: true, xmlName: 
"Size", type: { name: "Number", }, }, }, }, }; const PageList = { serializedName: "PageList", type: { name: "Composite", className: "PageList", modelProperties: { pageRange: { serializedName: "PageRange", xmlName: "PageRange", xmlElementName: "PageRange", type: { name: "Sequence", element: { type: { name: "Composite", className: "PageRange", }, }, }, }, clearRange: { serializedName: "ClearRange", xmlName: "ClearRange", xmlElementName: "ClearRange", type: { name: "Sequence", element: { type: { name: "Composite", className: "ClearRange", }, }, }, }, continuationToken: { serializedName: "NextMarker", xmlName: "NextMarker", type: { name: "String", }, }, }, }, }; const PageRange = { serializedName: "PageRange", xmlName: "PageRange", type: { name: "Composite", className: "PageRange", modelProperties: { start: { serializedName: "Start", required: true, xmlName: "Start", type: { name: "Number", }, }, end: { serializedName: "End", required: true, xmlName: "End", type: { name: "Number", }, }, }, }, }; const ClearRange = { serializedName: "ClearRange", xmlName: "ClearRange", type: { name: "Composite", className: "ClearRange", modelProperties: { start: { serializedName: "Start", required: true, xmlName: "Start", type: { name: "Number", }, }, end: { serializedName: "End", required: true, xmlName: "End", type: { name: "Number", }, }, }, }, }; const QueryRequest = { serializedName: "QueryRequest", xmlName: "QueryRequest", type: { name: "Composite", className: "QueryRequest", modelProperties: { queryType: { serializedName: "QueryType", required: true, xmlName: "QueryType", type: { name: "String", }, }, expression: { serializedName: "Expression", required: true, xmlName: "Expression", type: { name: "String", }, }, inputSerialization: { serializedName: "InputSerialization", xmlName: "InputSerialization", type: { name: "Composite", className: "QuerySerialization", }, }, outputSerialization: { serializedName: "OutputSerialization", xmlName: "OutputSerialization", type: { name: "Composite", className: "QuerySerialization", }, }, }, }, }; const QuerySerialization = { serializedName: "QuerySerialization", type: { name: "Composite", className: "QuerySerialization", modelProperties: { format: { serializedName: "Format", xmlName: "Format", type: { name: "Composite", className: "QueryFormat", }, }, }, }, }; const QueryFormat = { serializedName: "QueryFormat", type: { name: "Composite", className: "QueryFormat", modelProperties: { type: { serializedName: "Type", required: true, xmlName: "Type", type: { name: "Enum", allowedValues: ["delimited", "json", "arrow", "parquet"], }, }, delimitedTextConfiguration: { serializedName: "DelimitedTextConfiguration", xmlName: "DelimitedTextConfiguration", type: { name: "Composite", className: "DelimitedTextConfiguration", }, }, jsonTextConfiguration: { serializedName: "JsonTextConfiguration", xmlName: "JsonTextConfiguration", type: { name: "Composite", className: "JsonTextConfiguration", }, }, arrowConfiguration: { serializedName: "ArrowConfiguration", xmlName: "ArrowConfiguration", type: { name: "Composite", className: "ArrowConfiguration", }, }, parquetTextConfiguration: { serializedName: "ParquetTextConfiguration", xmlName: "ParquetTextConfiguration", type: { name: "Dictionary", value: { type: { name: "any" } }, }, }, }, }, }; const DelimitedTextConfiguration = { serializedName: "DelimitedTextConfiguration", xmlName: "DelimitedTextConfiguration", type: { name: "Composite", className: "DelimitedTextConfiguration", modelProperties: { columnSeparator: { serializedName: 
"ColumnSeparator", xmlName: "ColumnSeparator", type: { name: "String", }, }, fieldQuote: { serializedName: "FieldQuote", xmlName: "FieldQuote", type: { name: "String", }, }, recordSeparator: { serializedName: "RecordSeparator", xmlName: "RecordSeparator", type: { name: "String", }, }, escapeChar: { serializedName: "EscapeChar", xmlName: "EscapeChar", type: { name: "String", }, }, headersPresent: { serializedName: "HeadersPresent", xmlName: "HasHeaders", type: { name: "Boolean", }, }, }, }, }; const JsonTextConfiguration = { serializedName: "JsonTextConfiguration", xmlName: "JsonTextConfiguration", type: { name: "Composite", className: "JsonTextConfiguration", modelProperties: { recordSeparator: { serializedName: "RecordSeparator", xmlName: "RecordSeparator", type: { name: "String", }, }, }, }, }; const ArrowConfiguration = { serializedName: "ArrowConfiguration", xmlName: "ArrowConfiguration", type: { name: "Composite", className: "ArrowConfiguration", modelProperties: { schema: { serializedName: "Schema", required: true, xmlName: "Schema", xmlIsWrapped: true, xmlElementName: "Field", type: { name: "Sequence", element: { type: { name: "Composite", className: "ArrowField", }, }, }, }, }, }, }; const ArrowField = { serializedName: "ArrowField", xmlName: "Field", type: { name: "Composite", className: "ArrowField", modelProperties: { type: { serializedName: "Type", required: true, xmlName: "Type", type: { name: "String", }, }, name: { serializedName: "Name", xmlName: "Name", type: { name: "String", }, }, precision: { serializedName: "Precision", xmlName: "Precision", type: { name: "Number", }, }, scale: { serializedName: "Scale", xmlName: "Scale", type: { name: "Number", }, }, }, }, }; const ServiceSetPropertiesHeaders = { serializedName: "Service_setPropertiesHeaders", type: { name: "Composite", className: "ServiceSetPropertiesHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceSetPropertiesExceptionHeaders = { serializedName: "Service_setPropertiesExceptionHeaders", type: { name: "Composite", className: "ServiceSetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceGetPropertiesHeaders = { serializedName: "Service_getPropertiesHeaders", type: { name: "Composite", className: "ServiceGetPropertiesHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceGetPropertiesExceptionHeaders = { serializedName: "Service_getPropertiesExceptionHeaders", type: { name: "Composite", className: "ServiceGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: 
"String", }, }, }, }, }; const ServiceGetStatisticsHeaders = { serializedName: "Service_getStatisticsHeaders", type: { name: "Composite", className: "ServiceGetStatisticsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceGetStatisticsExceptionHeaders = { serializedName: "Service_getStatisticsExceptionHeaders", type: { name: "Composite", className: "ServiceGetStatisticsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceListContainersSegmentHeaders = { serializedName: "Service_listContainersSegmentHeaders", type: { name: "Composite", className: "ServiceListContainersSegmentHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceListContainersSegmentExceptionHeaders = { serializedName: "Service_listContainersSegmentExceptionHeaders", type: { name: "Composite", className: "ServiceListContainersSegmentExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceGetUserDelegationKeyHeaders = { serializedName: "Service_getUserDelegationKeyHeaders", type: { name: "Composite", className: "ServiceGetUserDelegationKeyHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceGetUserDelegationKeyExceptionHeaders = { serializedName: "Service_getUserDelegationKeyExceptionHeaders", type: { name: "Composite", className: "ServiceGetUserDelegationKeyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceGetAccountInfoHeaders = { serializedName: "Service_getAccountInfoHeaders", type: { name: "Composite", className: "ServiceGetAccountInfoHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", 
xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, skuName: { serializedName: "x-ms-sku-name", xmlName: "x-ms-sku-name", type: { name: "Enum", allowedValues: [ "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", ], }, }, accountKind: { serializedName: "x-ms-account-kind", xmlName: "x-ms-account-kind", type: { name: "Enum", allowedValues: [ "Storage", "BlobStorage", "StorageV2", "FileStorage", "BlockBlobStorage", ], }, }, isHierarchicalNamespaceEnabled: { serializedName: "x-ms-is-hns-enabled", xmlName: "x-ms-is-hns-enabled", type: { name: "Boolean", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceGetAccountInfoExceptionHeaders = { serializedName: "Service_getAccountInfoExceptionHeaders", type: { name: "Composite", className: "ServiceGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceSubmitBatchHeaders = { serializedName: "Service_submitBatchHeaders", type: { name: "Composite", className: "ServiceSubmitBatchHeaders", modelProperties: { contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceSubmitBatchExceptionHeaders = { serializedName: "Service_submitBatchExceptionHeaders", type: { name: "Composite", className: "ServiceSubmitBatchExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceFilterBlobsHeaders = { serializedName: "Service_filterBlobsHeaders", type: { name: "Composite", className: "ServiceFilterBlobsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ServiceFilterBlobsExceptionHeaders = { serializedName: "Service_filterBlobsExceptionHeaders", type: { name: "Composite", className: "ServiceFilterBlobsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerCreateHeaders = { serializedName: "Container_createHeaders", type: { name: "Composite", className: "ContainerCreateHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: 
"x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerCreateExceptionHeaders = { serializedName: "Container_createExceptionHeaders", type: { name: "Composite", className: "ContainerCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerGetPropertiesHeaders = { serializedName: "Container_getPropertiesHeaders", type: { name: "Composite", className: "ContainerGetPropertiesHeaders", modelProperties: { metadata: { serializedName: "x-ms-meta", headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", value: { type: { name: "String" } }, }, }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", allowedValues: ["infinite", "fixed"], }, }, leaseState: { serializedName: "x-ms-lease-state", xmlName: "x-ms-lease-state", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken", ], }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", allowedValues: ["locked", "unlocked"], }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, blobPublicAccess: { serializedName: "x-ms-blob-public-access", xmlName: "x-ms-blob-public-access", type: { name: "Enum", allowedValues: ["container", "blob"], }, }, hasImmutabilityPolicy: { serializedName: "x-ms-has-immutability-policy", xmlName: "x-ms-has-immutability-policy", type: { name: "Boolean", }, }, hasLegalHold: { serializedName: "x-ms-has-legal-hold", xmlName: "x-ms-has-legal-hold", type: { name: "Boolean", }, }, defaultEncryptionScope: { serializedName: "x-ms-default-encryption-scope", xmlName: "x-ms-default-encryption-scope", type: { name: "String", }, }, denyEncryptionScopeOverride: { serializedName: "x-ms-deny-encryption-scope-override", xmlName: "x-ms-deny-encryption-scope-override", type: { name: "Boolean", }, }, isImmutableStorageWithVersioningEnabled: { serializedName: "x-ms-immutable-storage-with-versioning-enabled", xmlName: "x-ms-immutable-storage-with-versioning-enabled", type: { name: "Boolean", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerGetPropertiesExceptionHeaders = { serializedName: "Container_getPropertiesExceptionHeaders", type: { name: "Composite", className: "ContainerGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: 
"x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerDeleteHeaders = { serializedName: "Container_deleteHeaders", type: { name: "Composite", className: "ContainerDeleteHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerDeleteExceptionHeaders = { serializedName: "Container_deleteExceptionHeaders", type: { name: "Composite", className: "ContainerDeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerSetMetadataHeaders = { serializedName: "Container_setMetadataHeaders", type: { name: "Composite", className: "ContainerSetMetadataHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerSetMetadataExceptionHeaders = { serializedName: "Container_setMetadataExceptionHeaders", type: { name: "Composite", className: "ContainerSetMetadataExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerGetAccessPolicyHeaders = { serializedName: "Container_getAccessPolicyHeaders", type: { name: "Composite", className: "ContainerGetAccessPolicyHeaders", modelProperties: { blobPublicAccess: { serializedName: "x-ms-blob-public-access", xmlName: "x-ms-blob-public-access", type: { name: "Enum", allowedValues: ["container", "blob"], }, }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerGetAccessPolicyExceptionHeaders = { serializedName: "Container_getAccessPolicyExceptionHeaders", type: { name: "Composite", className: 
"ContainerGetAccessPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerSetAccessPolicyHeaders = { serializedName: "Container_setAccessPolicyHeaders", type: { name: "Composite", className: "ContainerSetAccessPolicyHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerSetAccessPolicyExceptionHeaders = { serializedName: "Container_setAccessPolicyExceptionHeaders", type: { name: "Composite", className: "ContainerSetAccessPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerRestoreHeaders = { serializedName: "Container_restoreHeaders", type: { name: "Composite", className: "ContainerRestoreHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerRestoreExceptionHeaders = { serializedName: "Container_restoreExceptionHeaders", type: { name: "Composite", className: "ContainerRestoreExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerRenameHeaders = { serializedName: "Container_renameHeaders", type: { name: "Composite", className: "ContainerRenameHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerRenameExceptionHeaders = { serializedName: "Container_renameExceptionHeaders", type: { name: "Composite", className: "ContainerRenameExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerSubmitBatchHeaders = { serializedName: "Container_submitBatchHeaders", type: { name: "Composite", className: 
"ContainerSubmitBatchHeaders", modelProperties: { contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, }, }, }; const ContainerSubmitBatchExceptionHeaders = { serializedName: "Container_submitBatchExceptionHeaders", type: { name: "Composite", className: "ContainerSubmitBatchExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerFilterBlobsHeaders = { serializedName: "Container_filterBlobsHeaders", type: { name: "Composite", className: "ContainerFilterBlobsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const ContainerFilterBlobsExceptionHeaders = { serializedName: "Container_filterBlobsExceptionHeaders", type: { name: "Composite", className: "ContainerFilterBlobsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerAcquireLeaseHeaders = { serializedName: "Container_acquireLeaseHeaders", type: { name: "Composite", className: "ContainerAcquireLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const ContainerAcquireLeaseExceptionHeaders = { serializedName: "Container_acquireLeaseExceptionHeaders", type: { name: "Composite", className: "ContainerAcquireLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerReleaseLeaseHeaders = { serializedName: "Container_releaseLeaseHeaders", type: { name: "Composite", className: "ContainerReleaseLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: 
"date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const ContainerReleaseLeaseExceptionHeaders = { serializedName: "Container_releaseLeaseExceptionHeaders", type: { name: "Composite", className: "ContainerReleaseLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerRenewLeaseHeaders = { serializedName: "Container_renewLeaseHeaders", type: { name: "Composite", className: "ContainerRenewLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const ContainerRenewLeaseExceptionHeaders = { serializedName: "Container_renewLeaseExceptionHeaders", type: { name: "Composite", className: "ContainerRenewLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerBreakLeaseHeaders = { serializedName: "Container_breakLeaseHeaders", type: { name: "Composite", className: "ContainerBreakLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, leaseTime: { serializedName: "x-ms-lease-time", xmlName: "x-ms-lease-time", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const ContainerBreakLeaseExceptionHeaders = { serializedName: "Container_breakLeaseExceptionHeaders", type: { name: "Composite", className: "ContainerBreakLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerChangeLeaseHeaders = { serializedName: "Container_changeLeaseHeaders", type: { name: "Composite", className: "ContainerChangeLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: 
"x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const ContainerChangeLeaseExceptionHeaders = { serializedName: "Container_changeLeaseExceptionHeaders", type: { name: "Composite", className: "ContainerChangeLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerListBlobFlatSegmentHeaders = { serializedName: "Container_listBlobFlatSegmentHeaders", type: { name: "Composite", className: "ContainerListBlobFlatSegmentHeaders", modelProperties: { contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerListBlobFlatSegmentExceptionHeaders = { serializedName: "Container_listBlobFlatSegmentExceptionHeaders", type: { name: "Composite", className: "ContainerListBlobFlatSegmentExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerListBlobHierarchySegmentHeaders = { serializedName: "Container_listBlobHierarchySegmentHeaders", type: { name: "Composite", className: "ContainerListBlobHierarchySegmentHeaders", modelProperties: { contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerListBlobHierarchySegmentExceptionHeaders = { serializedName: "Container_listBlobHierarchySegmentExceptionHeaders", type: { name: "Composite", className: "ContainerListBlobHierarchySegmentExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const ContainerGetAccountInfoHeaders = { serializedName: "Container_getAccountInfoHeaders", type: { name: "Composite", className: "ContainerGetAccountInfoHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, skuName: { serializedName: "x-ms-sku-name", xmlName: "x-ms-sku-name", type: { name: "Enum", allowedValues: [ 
"Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", ], }, }, accountKind: { serializedName: "x-ms-account-kind", xmlName: "x-ms-account-kind", type: { name: "Enum", allowedValues: [ "Storage", "BlobStorage", "StorageV2", "FileStorage", "BlockBlobStorage", ], }, }, isHierarchicalNamespaceEnabled: { serializedName: "x-ms-is-hns-enabled", xmlName: "x-ms-is-hns-enabled", type: { name: "Boolean", }, }, }, }, }; const ContainerGetAccountInfoExceptionHeaders = { serializedName: "Container_getAccountInfoExceptionHeaders", type: { name: "Composite", className: "ContainerGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobDownloadHeaders = { serializedName: "Blob_downloadHeaders", type: { name: "Composite", className: "BlobDownloadHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, createdOn: { serializedName: "x-ms-creation-time", xmlName: "x-ms-creation-time", type: { name: "DateTimeRfc1123", }, }, metadata: { serializedName: "x-ms-meta", headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", value: { type: { name: "String" } }, }, }, objectReplicationPolicyId: { serializedName: "x-ms-or-policy-id", xmlName: "x-ms-or-policy-id", type: { name: "String", }, }, objectReplicationRules: { serializedName: "x-ms-or", headerCollectionPrefix: "x-ms-or-", xmlName: "x-ms-or", type: { name: "Dictionary", value: { type: { name: "String" } }, }, }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { name: "Number", }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String", }, }, contentRange: { serializedName: "content-range", xmlName: "content-range", type: { name: "String", }, }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { name: "String", }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { name: "String", }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { name: "String", }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { name: "String", }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], }, }, copyCompletedOn: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { name: "DateTimeRfc1123", }, }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { name: "String", }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String", }, }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { name: "String", }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { name: "String", }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: 
"x-ms-copy-status", type: { name: "Enum", allowedValues: ["pending", "success", "aborted", "failed"], }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", allowedValues: ["infinite", "fixed"], }, }, leaseState: { serializedName: "x-ms-lease-state", xmlName: "x-ms-lease-state", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken", ], }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", allowedValues: ["locked", "unlocked"], }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, isCurrentVersion: { serializedName: "x-ms-is-current-version", xmlName: "x-ms-is-current-version", type: { name: "Boolean", }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { name: "Number", }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, blobContentMD5: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { name: "ByteArray", }, }, tagCount: { serializedName: "x-ms-tag-count", xmlName: "x-ms-tag-count", type: { name: "Number", }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { name: "Boolean", }, }, lastAccessed: { serializedName: "x-ms-last-access-time", xmlName: "x-ms-last-access-time", type: { name: "DateTimeRfc1123", }, }, immutabilityPolicyExpiresOn: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { name: "DateTimeRfc1123", }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", allowedValues: ["Mutable", "Unlocked", "Locked"], }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { name: "Boolean", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, contentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, }, }, }; const BlobDownloadExceptionHeaders = { serializedName: "Blob_downloadExceptionHeaders", type: { name: "Composite", className: "BlobDownloadExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobGetPropertiesHeaders = { serializedName: "Blob_getPropertiesHeaders", type: { name: "Composite", className: "BlobGetPropertiesHeaders", modelProperties: { lastModified: { 
serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, createdOn: { serializedName: "x-ms-creation-time", xmlName: "x-ms-creation-time", type: { name: "DateTimeRfc1123", }, }, metadata: { serializedName: "x-ms-meta", headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", value: { type: { name: "String" } }, }, }, objectReplicationPolicyId: { serializedName: "x-ms-or-policy-id", xmlName: "x-ms-or-policy-id", type: { name: "String", }, }, objectReplicationRules: { serializedName: "x-ms-or", headerCollectionPrefix: "x-ms-or-", xmlName: "x-ms-or", type: { name: "Dictionary", value: { type: { name: "String" } }, }, }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], }, }, copyCompletedOn: { serializedName: "x-ms-copy-completion-time", xmlName: "x-ms-copy-completion-time", type: { name: "DateTimeRfc1123", }, }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { name: "String", }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String", }, }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { name: "String", }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { name: "String", }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", allowedValues: ["pending", "success", "aborted", "failed"], }, }, isIncrementalCopy: { serializedName: "x-ms-incremental-copy", xmlName: "x-ms-incremental-copy", type: { name: "Boolean", }, }, destinationSnapshot: { serializedName: "x-ms-copy-destination-snapshot", xmlName: "x-ms-copy-destination-snapshot", type: { name: "String", }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", allowedValues: ["infinite", "fixed"], }, }, leaseState: { serializedName: "x-ms-lease-state", xmlName: "x-ms-lease-state", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken", ], }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", allowedValues: ["locked", "unlocked"], }, }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { name: "Number", }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String", }, }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { name: "String", }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { name: "String", }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { name: "String", }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { name: "String", }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: 
"x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { name: "String", }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { name: "Number", }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, accessTier: { serializedName: "x-ms-access-tier", xmlName: "x-ms-access-tier", type: { name: "String", }, }, accessTierInferred: { serializedName: "x-ms-access-tier-inferred", xmlName: "x-ms-access-tier-inferred", type: { name: "Boolean", }, }, archiveStatus: { serializedName: "x-ms-archive-status", xmlName: "x-ms-archive-status", type: { name: "String", }, }, accessTierChangedOn: { serializedName: "x-ms-access-tier-change-time", xmlName: "x-ms-access-tier-change-time", type: { name: "DateTimeRfc1123", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, isCurrentVersion: { serializedName: "x-ms-is-current-version", xmlName: "x-ms-is-current-version", type: { name: "Boolean", }, }, tagCount: { serializedName: "x-ms-tag-count", xmlName: "x-ms-tag-count", type: { name: "Number", }, }, expiresOn: { serializedName: "x-ms-expiry-time", xmlName: "x-ms-expiry-time", type: { name: "DateTimeRfc1123", }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { name: "Boolean", }, }, rehydratePriority: { serializedName: "x-ms-rehydrate-priority", xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", allowedValues: ["High", "Standard"], }, }, lastAccessed: { serializedName: "x-ms-last-access-time", xmlName: "x-ms-last-access-time", type: { name: "DateTimeRfc1123", }, }, immutabilityPolicyExpiresOn: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { name: "DateTimeRfc1123", }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", allowedValues: ["Mutable", "Unlocked", "Locked"], }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { name: "Boolean", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobGetPropertiesExceptionHeaders = { serializedName: "Blob_getPropertiesExceptionHeaders", type: { name: "Composite", className: "BlobGetPropertiesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobDeleteHeaders = { serializedName: "Blob_deleteHeaders", type: { name: "Composite", className: "BlobDeleteHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: 
"x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobDeleteExceptionHeaders = { serializedName: "Blob_deleteExceptionHeaders", type: { name: "Composite", className: "BlobDeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobUndeleteHeaders = { serializedName: "Blob_undeleteHeaders", type: { name: "Composite", className: "BlobUndeleteHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobUndeleteExceptionHeaders = { serializedName: "Blob_undeleteExceptionHeaders", type: { name: "Composite", className: "BlobUndeleteExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetExpiryHeaders = { serializedName: "Blob_setExpiryHeaders", type: { name: "Composite", className: "BlobSetExpiryHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const BlobSetExpiryExceptionHeaders = { serializedName: "Blob_setExpiryExceptionHeaders", type: { name: "Composite", className: "BlobSetExpiryExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetHttpHeadersHeaders = { serializedName: "Blob_setHttpHeadersHeaders", type: { name: "Composite", className: "BlobSetHttpHeadersHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: 
"x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetHttpHeadersExceptionHeaders = { serializedName: "Blob_setHttpHeadersExceptionHeaders", type: { name: "Composite", className: "BlobSetHttpHeadersExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetImmutabilityPolicyHeaders = { serializedName: "Blob_setImmutabilityPolicyHeaders", type: { name: "Composite", className: "BlobSetImmutabilityPolicyHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, immutabilityPolicyExpiry: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { name: "DateTimeRfc1123", }, }, immutabilityPolicyMode: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", allowedValues: ["Mutable", "Unlocked", "Locked"], }, }, }, }, }; const BlobSetImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_setImmutabilityPolicyExceptionHeaders", type: { name: "Composite", className: "BlobSetImmutabilityPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobDeleteImmutabilityPolicyHeaders = { serializedName: "Blob_deleteImmutabilityPolicyHeaders", type: { name: "Composite", className: "BlobDeleteImmutabilityPolicyHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const BlobDeleteImmutabilityPolicyExceptionHeaders = { serializedName: "Blob_deleteImmutabilityPolicyExceptionHeaders", type: { name: "Composite", className: "BlobDeleteImmutabilityPolicyExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetLegalHoldHeaders = { serializedName: "Blob_setLegalHoldHeaders", type: { name: "Composite", className: "BlobSetLegalHoldHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, legalHold: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { name: "Boolean", }, }, }, }, }; const BlobSetLegalHoldExceptionHeaders = { serializedName: "Blob_setLegalHoldExceptionHeaders", type: { name: "Composite", className: "BlobSetLegalHoldExceptionHeaders", 
modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetMetadataHeaders = { serializedName: "Blob_setMetadataHeaders", type: { name: "Composite", className: "BlobSetMetadataHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetMetadataExceptionHeaders = { serializedName: "Blob_setMetadataExceptionHeaders", type: { name: "Composite", className: "BlobSetMetadataExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobAcquireLeaseHeaders = { serializedName: "Blob_acquireLeaseHeaders", type: { name: "Composite", className: "BlobAcquireLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const BlobAcquireLeaseExceptionHeaders = { serializedName: "Blob_acquireLeaseExceptionHeaders", type: { name: "Composite", className: "BlobAcquireLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobReleaseLeaseHeaders = { serializedName: "Blob_releaseLeaseHeaders", type: { name: "Composite", className: "BlobReleaseLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: 
"x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const BlobReleaseLeaseExceptionHeaders = { serializedName: "Blob_releaseLeaseExceptionHeaders", type: { name: "Composite", className: "BlobReleaseLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobRenewLeaseHeaders = { serializedName: "Blob_renewLeaseHeaders", type: { name: "Composite", className: "BlobRenewLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const BlobRenewLeaseExceptionHeaders = { serializedName: "Blob_renewLeaseExceptionHeaders", type: { name: "Composite", className: "BlobRenewLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobChangeLeaseHeaders = { serializedName: "Blob_changeLeaseHeaders", type: { name: "Composite", className: "BlobChangeLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, leaseId: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const BlobChangeLeaseExceptionHeaders = { serializedName: "Blob_changeLeaseExceptionHeaders", type: { name: "Composite", className: "BlobChangeLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobBreakLeaseHeaders = { serializedName: "Blob_breakLeaseHeaders", type: { name: "Composite", className: "BlobBreakLeaseHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, leaseTime: { serializedName: "x-ms-lease-time", xmlName: "x-ms-lease-time", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: 
"x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, }, }, }; const BlobBreakLeaseExceptionHeaders = { serializedName: "Blob_breakLeaseExceptionHeaders", type: { name: "Composite", className: "BlobBreakLeaseExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobCreateSnapshotHeaders = { serializedName: "Blob_createSnapshotHeaders", type: { name: "Composite", className: "BlobCreateSnapshotHeaders", modelProperties: { snapshot: { serializedName: "x-ms-snapshot", xmlName: "x-ms-snapshot", type: { name: "String", }, }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobCreateSnapshotExceptionHeaders = { serializedName: "Blob_createSnapshotExceptionHeaders", type: { name: "Composite", className: "BlobCreateSnapshotExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobStartCopyFromURLHeaders = { serializedName: "Blob_startCopyFromURLHeaders", type: { name: "Composite", className: "BlobStartCopyFromURLHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String", }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", allowedValues: ["pending", "success", "aborted", "failed"], }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobStartCopyFromURLExceptionHeaders = { serializedName: "Blob_startCopyFromURLExceptionHeaders", type: { name: "Composite", className: "BlobStartCopyFromURLExceptionHeaders", 
modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobCopyFromURLHeaders = { serializedName: "Blob_copyFromURLHeaders", type: { name: "Composite", className: "BlobCopyFromURLHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String", }, }, copyStatus: { defaultValue: "success", isConstant: true, serializedName: "x-ms-copy-status", type: { name: "String", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobCopyFromURLExceptionHeaders = { serializedName: "Blob_copyFromURLExceptionHeaders", type: { name: "Composite", className: "BlobCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobAbortCopyFromURLHeaders = { serializedName: "Blob_abortCopyFromURLHeaders", type: { name: "Composite", className: "BlobAbortCopyFromURLHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobAbortCopyFromURLExceptionHeaders = { serializedName: "Blob_abortCopyFromURLExceptionHeaders", type: { name: "Composite", className: "BlobAbortCopyFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetTierHeaders = { serializedName: "Blob_setTierHeaders", type: { name: "Composite", className: "BlobSetTierHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: 
"x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetTierExceptionHeaders = { serializedName: "Blob_setTierExceptionHeaders", type: { name: "Composite", className: "BlobSetTierExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobGetAccountInfoHeaders = { serializedName: "Blob_getAccountInfoHeaders", type: { name: "Composite", className: "BlobGetAccountInfoHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, skuName: { serializedName: "x-ms-sku-name", xmlName: "x-ms-sku-name", type: { name: "Enum", allowedValues: [ "Standard_LRS", "Standard_GRS", "Standard_RAGRS", "Standard_ZRS", "Premium_LRS", ], }, }, accountKind: { serializedName: "x-ms-account-kind", xmlName: "x-ms-account-kind", type: { name: "Enum", allowedValues: [ "Storage", "BlobStorage", "StorageV2", "FileStorage", "BlockBlobStorage", ], }, }, isHierarchicalNamespaceEnabled: { serializedName: "x-ms-is-hns-enabled", xmlName: "x-ms-is-hns-enabled", type: { name: "Boolean", }, }, }, }, }; const BlobGetAccountInfoExceptionHeaders = { serializedName: "Blob_getAccountInfoExceptionHeaders", type: { name: "Composite", className: "BlobGetAccountInfoExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobQueryHeaders = { serializedName: "Blob_queryHeaders", type: { name: "Composite", className: "BlobQueryHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, metadata: { serializedName: "x-ms-meta", headerCollectionPrefix: "x-ms-meta-", xmlName: "x-ms-meta", type: { name: "Dictionary", value: { type: { name: "String" } }, }, }, contentLength: { serializedName: "content-length", xmlName: "content-length", type: { name: "Number", }, }, contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String", }, }, contentRange: { serializedName: "content-range", xmlName: "content-range", type: { name: "String", }, }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, contentEncoding: { serializedName: "content-encoding", xmlName: "content-encoding", type: { name: "String", }, }, cacheControl: { serializedName: "cache-control", xmlName: "cache-control", type: { name: "String", }, }, contentDisposition: { serializedName: "content-disposition", xmlName: "content-disposition", type: { name: "String", }, }, contentLanguage: { serializedName: "content-language", xmlName: "content-language", type: { name: "String", }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, blobType: { serializedName: "x-ms-blob-type", xmlName: "x-ms-blob-type", type: { name: "Enum", allowedValues: ["BlockBlob", "PageBlob", "AppendBlob"], }, }, copyCompletionTime: { serializedName: "x-ms-copy-completion-time", xmlName: 
"x-ms-copy-completion-time", type: { name: "DateTimeRfc1123", }, }, copyStatusDescription: { serializedName: "x-ms-copy-status-description", xmlName: "x-ms-copy-status-description", type: { name: "String", }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String", }, }, copyProgress: { serializedName: "x-ms-copy-progress", xmlName: "x-ms-copy-progress", type: { name: "String", }, }, copySource: { serializedName: "x-ms-copy-source", xmlName: "x-ms-copy-source", type: { name: "String", }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", allowedValues: ["pending", "success", "aborted", "failed"], }, }, leaseDuration: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Enum", allowedValues: ["infinite", "fixed"], }, }, leaseState: { serializedName: "x-ms-lease-state", xmlName: "x-ms-lease-state", type: { name: "Enum", allowedValues: [ "available", "leased", "expired", "breaking", "broken", ], }, }, leaseStatus: { serializedName: "x-ms-lease-status", xmlName: "x-ms-lease-status", type: { name: "Enum", allowedValues: ["locked", "unlocked"], }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, acceptRanges: { serializedName: "accept-ranges", xmlName: "accept-ranges", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { name: "Number", }, }, isServerEncrypted: { serializedName: "x-ms-server-encrypted", xmlName: "x-ms-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, blobContentMD5: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { name: "ByteArray", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, contentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, }, }, }; const BlobQueryExceptionHeaders = { serializedName: "Blob_queryExceptionHeaders", type: { name: "Composite", className: "BlobQueryExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobGetTagsHeaders = { serializedName: "Blob_getTagsHeaders", type: { name: "Composite", className: "BlobGetTagsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: 
"String", }, }, }, }, }; const BlobGetTagsExceptionHeaders = { serializedName: "Blob_getTagsExceptionHeaders", type: { name: "Composite", className: "BlobGetTagsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetTagsHeaders = { serializedName: "Blob_setTagsHeaders", type: { name: "Composite", className: "BlobSetTagsHeaders", modelProperties: { clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlobSetTagsExceptionHeaders = { serializedName: "Blob_setTagsExceptionHeaders", type: { name: "Composite", className: "BlobSetTagsExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobCreateHeaders = { serializedName: "PageBlob_createHeaders", type: { name: "Composite", className: "PageBlobCreateHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobCreateExceptionHeaders = { serializedName: "PageBlob_createExceptionHeaders", type: { name: "Composite", className: "PageBlobCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobUploadPagesHeaders = { serializedName: "PageBlob_uploadPagesHeaders", type: { name: "Composite", className: "PageBlobUploadPagesHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, 
xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobUploadPagesExceptionHeaders = { serializedName: "PageBlob_uploadPagesExceptionHeaders", type: { name: "Composite", className: "PageBlobUploadPagesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobClearPagesHeaders = { serializedName: "PageBlob_clearPagesHeaders", type: { name: "Composite", className: "PageBlobClearPagesHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobClearPagesExceptionHeaders = { serializedName: "PageBlob_clearPagesExceptionHeaders", type: { name: "Composite", className: "PageBlobClearPagesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobUploadPagesFromURLHeaders = { serializedName: "PageBlob_uploadPagesFromURLHeaders", type: { name: "Composite", className: "PageBlobUploadPagesFromURLHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, 
}, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobUploadPagesFromURLExceptionHeaders = { serializedName: "PageBlob_uploadPagesFromURLExceptionHeaders", type: { name: "Composite", className: "PageBlobUploadPagesFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobGetPageRangesHeaders = { serializedName: "PageBlob_getPageRangesHeaders", type: { name: "Composite", className: "PageBlobGetPageRangesHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobGetPageRangesExceptionHeaders = { serializedName: "PageBlob_getPageRangesExceptionHeaders", type: { name: "Composite", className: "PageBlobGetPageRangesExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobGetPageRangesDiffHeaders = { serializedName: "PageBlob_getPageRangesDiffHeaders", type: { name: "Composite", className: "PageBlobGetPageRangesDiffHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", 
xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobGetPageRangesDiffExceptionHeaders = { serializedName: "PageBlob_getPageRangesDiffExceptionHeaders", type: { name: "Composite", className: "PageBlobGetPageRangesDiffExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobResizeHeaders = { serializedName: "PageBlob_resizeHeaders", type: { name: "Composite", className: "PageBlobResizeHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobResizeExceptionHeaders = { serializedName: "PageBlob_resizeExceptionHeaders", type: { name: "Composite", className: "PageBlobResizeExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobUpdateSequenceNumberHeaders = { serializedName: "PageBlob_updateSequenceNumberHeaders", type: { name: "Composite", className: "PageBlobUpdateSequenceNumberHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, blobSequenceNumber: { serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobUpdateSequenceNumberExceptionHeaders = { serializedName: "PageBlob_updateSequenceNumberExceptionHeaders", type: { name: "Composite", className: "PageBlobUpdateSequenceNumberExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobCopyIncrementalHeaders = { serializedName: "PageBlob_copyIncrementalHeaders", type: { name: "Composite", className: "PageBlobCopyIncrementalHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: 
"String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, copyId: { serializedName: "x-ms-copy-id", xmlName: "x-ms-copy-id", type: { name: "String", }, }, copyStatus: { serializedName: "x-ms-copy-status", xmlName: "x-ms-copy-status", type: { name: "Enum", allowedValues: ["pending", "success", "aborted", "failed"], }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const PageBlobCopyIncrementalExceptionHeaders = { serializedName: "PageBlob_copyIncrementalExceptionHeaders", type: { name: "Composite", className: "PageBlobCopyIncrementalExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const AppendBlobCreateHeaders = { serializedName: "AppendBlob_createHeaders", type: { name: "Composite", className: "AppendBlobCreateHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const AppendBlobCreateExceptionHeaders = { serializedName: "AppendBlob_createExceptionHeaders", type: { name: "Composite", className: "AppendBlobCreateExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const AppendBlobAppendBlockHeaders = { serializedName: "AppendBlob_appendBlockHeaders", type: { name: "Composite", className: "AppendBlobAppendBlockHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, xMsContentCrc64: { 
serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, blobAppendOffset: { serializedName: "x-ms-blob-append-offset", xmlName: "x-ms-blob-append-offset", type: { name: "String", }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { name: "Number", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const AppendBlobAppendBlockExceptionHeaders = { serializedName: "AppendBlob_appendBlockExceptionHeaders", type: { name: "Composite", className: "AppendBlobAppendBlockExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const AppendBlobAppendBlockFromUrlHeaders = { serializedName: "AppendBlob_appendBlockFromUrlHeaders", type: { name: "Composite", className: "AppendBlobAppendBlockFromUrlHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, blobAppendOffset: { serializedName: "x-ms-blob-append-offset", xmlName: "x-ms-blob-append-offset", type: { name: "String", }, }, blobCommittedBlockCount: { serializedName: "x-ms-blob-committed-block-count", xmlName: "x-ms-blob-committed-block-count", type: { name: "Number", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const AppendBlobAppendBlockFromUrlExceptionHeaders = { serializedName: "AppendBlob_appendBlockFromUrlExceptionHeaders", type: { name: "Composite", className: "AppendBlobAppendBlockFromUrlExceptionHeaders", 
modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const AppendBlobSealHeaders = { serializedName: "AppendBlob_sealHeaders", type: { name: "Composite", className: "AppendBlobSealHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isSealed: { serializedName: "x-ms-blob-sealed", xmlName: "x-ms-blob-sealed", type: { name: "Boolean", }, }, }, }, }; const AppendBlobSealExceptionHeaders = { serializedName: "AppendBlob_sealExceptionHeaders", type: { name: "Composite", className: "AppendBlobSealExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobUploadHeaders = { serializedName: "BlockBlob_uploadHeaders", type: { name: "Composite", className: "BlockBlobUploadHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobUploadExceptionHeaders = { serializedName: "BlockBlob_uploadExceptionHeaders", type: { name: "Composite", className: "BlockBlobUploadExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobPutBlobFromUrlHeaders = { serializedName: "BlockBlob_putBlobFromUrlHeaders", type: { name: "Composite", className: "BlockBlobPutBlobFromUrlHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, contentMD5: { serializedName: "content-md5", xmlName: 
"content-md5", type: { name: "ByteArray", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobPutBlobFromUrlExceptionHeaders = { serializedName: "BlockBlob_putBlobFromUrlExceptionHeaders", type: { name: "Composite", className: "BlockBlobPutBlobFromUrlExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobStageBlockHeaders = { serializedName: "BlockBlob_stageBlockHeaders", type: { name: "Composite", className: "BlockBlobStageBlockHeaders", modelProperties: { contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobStageBlockExceptionHeaders = { serializedName: "BlockBlob_stageBlockExceptionHeaders", type: { name: "Composite", className: "BlockBlobStageBlockExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobStageBlockFromURLHeaders = { serializedName: "BlockBlob_stageBlockFromURLHeaders", type: { name: "Composite", className: "BlockBlobStageBlockFromURLHeaders", modelProperties: { contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: 
"x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobStageBlockFromURLExceptionHeaders = { serializedName: "BlockBlob_stageBlockFromURLExceptionHeaders", type: { name: "Composite", className: "BlockBlobStageBlockFromURLExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobCommitBlockListHeaders = { serializedName: "BlockBlob_commitBlockListHeaders", type: { name: "Composite", className: "BlockBlobCommitBlockListHeaders", modelProperties: { etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, contentMD5: { serializedName: "content-md5", xmlName: "content-md5", type: { name: "ByteArray", }, }, xMsContentCrc64: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, versionId: { serializedName: "x-ms-version-id", xmlName: "x-ms-version-id", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, isServerEncrypted: { serializedName: "x-ms-request-server-encrypted", xmlName: "x-ms-request-server-encrypted", type: { name: "Boolean", }, }, encryptionKeySha256: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, encryptionScope: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobCommitBlockListExceptionHeaders = { serializedName: "BlockBlob_commitBlockListExceptionHeaders", type: { name: "Composite", className: "BlockBlobCommitBlockListExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobGetBlockListHeaders = { serializedName: "BlockBlob_getBlockListHeaders", type: { name: "Composite", className: "BlockBlobGetBlockListHeaders", modelProperties: { lastModified: { serializedName: "last-modified", xmlName: "last-modified", type: { name: "DateTimeRfc1123", }, }, etag: { serializedName: "etag", xmlName: "etag", type: { name: "String", }, }, 
contentType: { serializedName: "content-type", xmlName: "content-type", type: { name: "String", }, }, blobContentLength: { serializedName: "x-ms-blob-content-length", xmlName: "x-ms-blob-content-length", type: { name: "Number", }, }, clientRequestId: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, requestId: { serializedName: "x-ms-request-id", xmlName: "x-ms-request-id", type: { name: "String", }, }, version: { serializedName: "x-ms-version", xmlName: "x-ms-version", type: { name: "String", }, }, date: { serializedName: "date", xmlName: "date", type: { name: "DateTimeRfc1123", }, }, errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; const BlockBlobGetBlockListExceptionHeaders = { serializedName: "BlockBlob_getBlockListExceptionHeaders", type: { name: "Composite", className: "BlockBlobGetBlockListExceptionHeaders", modelProperties: { errorCode: { serializedName: "x-ms-error-code", xmlName: "x-ms-error-code", type: { name: "String", }, }, }, }, }; var Mappers = /*#__PURE__*/Object.freeze({ __proto__: null, AccessPolicy: AccessPolicy, AppendBlobAppendBlockExceptionHeaders: AppendBlobAppendBlockExceptionHeaders, AppendBlobAppendBlockFromUrlExceptionHeaders: AppendBlobAppendBlockFromUrlExceptionHeaders, AppendBlobAppendBlockFromUrlHeaders: AppendBlobAppendBlockFromUrlHeaders, AppendBlobAppendBlockHeaders: AppendBlobAppendBlockHeaders, AppendBlobCreateExceptionHeaders: AppendBlobCreateExceptionHeaders, AppendBlobCreateHeaders: AppendBlobCreateHeaders, AppendBlobSealExceptionHeaders: AppendBlobSealExceptionHeaders, AppendBlobSealHeaders: AppendBlobSealHeaders, ArrowConfiguration: ArrowConfiguration, ArrowField: ArrowField, BlobAbortCopyFromURLExceptionHeaders: BlobAbortCopyFromURLExceptionHeaders, BlobAbortCopyFromURLHeaders: BlobAbortCopyFromURLHeaders, BlobAcquireLeaseExceptionHeaders: BlobAcquireLeaseExceptionHeaders, BlobAcquireLeaseHeaders: BlobAcquireLeaseHeaders, BlobBreakLeaseExceptionHeaders: BlobBreakLeaseExceptionHeaders, BlobBreakLeaseHeaders: BlobBreakLeaseHeaders, BlobChangeLeaseExceptionHeaders: BlobChangeLeaseExceptionHeaders, BlobChangeLeaseHeaders: BlobChangeLeaseHeaders, BlobCopyFromURLExceptionHeaders: BlobCopyFromURLExceptionHeaders, BlobCopyFromURLHeaders: BlobCopyFromURLHeaders, BlobCreateSnapshotExceptionHeaders: BlobCreateSnapshotExceptionHeaders, BlobCreateSnapshotHeaders: BlobCreateSnapshotHeaders, BlobDeleteExceptionHeaders: BlobDeleteExceptionHeaders, BlobDeleteHeaders: BlobDeleteHeaders, BlobDeleteImmutabilityPolicyExceptionHeaders: BlobDeleteImmutabilityPolicyExceptionHeaders, BlobDeleteImmutabilityPolicyHeaders: BlobDeleteImmutabilityPolicyHeaders, BlobDownloadExceptionHeaders: BlobDownloadExceptionHeaders, BlobDownloadHeaders: BlobDownloadHeaders, BlobFlatListSegment: BlobFlatListSegment, BlobGetAccountInfoExceptionHeaders: BlobGetAccountInfoExceptionHeaders, BlobGetAccountInfoHeaders: BlobGetAccountInfoHeaders, BlobGetPropertiesExceptionHeaders: BlobGetPropertiesExceptionHeaders, BlobGetPropertiesHeaders: BlobGetPropertiesHeaders, BlobGetTagsExceptionHeaders: BlobGetTagsExceptionHeaders, BlobGetTagsHeaders: BlobGetTagsHeaders, BlobHierarchyListSegment: BlobHierarchyListSegment, BlobItemInternal: BlobItemInternal, BlobName: BlobName, BlobPrefix: BlobPrefix, BlobPropertiesInternal: BlobPropertiesInternal, BlobQueryExceptionHeaders: BlobQueryExceptionHeaders, BlobQueryHeaders: BlobQueryHeaders, BlobReleaseLeaseExceptionHeaders: 
BlobReleaseLeaseExceptionHeaders, BlobReleaseLeaseHeaders: BlobReleaseLeaseHeaders, BlobRenewLeaseExceptionHeaders: BlobRenewLeaseExceptionHeaders, BlobRenewLeaseHeaders: BlobRenewLeaseHeaders, BlobServiceProperties: BlobServiceProperties, BlobServiceStatistics: BlobServiceStatistics, BlobSetExpiryExceptionHeaders: BlobSetExpiryExceptionHeaders, BlobSetExpiryHeaders: BlobSetExpiryHeaders, BlobSetHttpHeadersExceptionHeaders: BlobSetHttpHeadersExceptionHeaders, BlobSetHttpHeadersHeaders: BlobSetHttpHeadersHeaders, BlobSetImmutabilityPolicyExceptionHeaders: BlobSetImmutabilityPolicyExceptionHeaders, BlobSetImmutabilityPolicyHeaders: BlobSetImmutabilityPolicyHeaders, BlobSetLegalHoldExceptionHeaders: BlobSetLegalHoldExceptionHeaders, BlobSetLegalHoldHeaders: BlobSetLegalHoldHeaders, BlobSetMetadataExceptionHeaders: BlobSetMetadataExceptionHeaders, BlobSetMetadataHeaders: BlobSetMetadataHeaders, BlobSetTagsExceptionHeaders: BlobSetTagsExceptionHeaders, BlobSetTagsHeaders: BlobSetTagsHeaders, BlobSetTierExceptionHeaders: BlobSetTierExceptionHeaders, BlobSetTierHeaders: BlobSetTierHeaders, BlobStartCopyFromURLExceptionHeaders: BlobStartCopyFromURLExceptionHeaders, BlobStartCopyFromURLHeaders: BlobStartCopyFromURLHeaders, BlobTag: BlobTag, BlobTags: BlobTags, BlobUndeleteExceptionHeaders: BlobUndeleteExceptionHeaders, BlobUndeleteHeaders: BlobUndeleteHeaders, Block: Block, BlockBlobCommitBlockListExceptionHeaders: BlockBlobCommitBlockListExceptionHeaders, BlockBlobCommitBlockListHeaders: BlockBlobCommitBlockListHeaders, BlockBlobGetBlockListExceptionHeaders: BlockBlobGetBlockListExceptionHeaders, BlockBlobGetBlockListHeaders: BlockBlobGetBlockListHeaders, BlockBlobPutBlobFromUrlExceptionHeaders: BlockBlobPutBlobFromUrlExceptionHeaders, BlockBlobPutBlobFromUrlHeaders: BlockBlobPutBlobFromUrlHeaders, BlockBlobStageBlockExceptionHeaders: BlockBlobStageBlockExceptionHeaders, BlockBlobStageBlockFromURLExceptionHeaders: BlockBlobStageBlockFromURLExceptionHeaders, BlockBlobStageBlockFromURLHeaders: BlockBlobStageBlockFromURLHeaders, BlockBlobStageBlockHeaders: BlockBlobStageBlockHeaders, BlockBlobUploadExceptionHeaders: BlockBlobUploadExceptionHeaders, BlockBlobUploadHeaders: BlockBlobUploadHeaders, BlockList: BlockList, BlockLookupList: BlockLookupList, ClearRange: ClearRange, ContainerAcquireLeaseExceptionHeaders: ContainerAcquireLeaseExceptionHeaders, ContainerAcquireLeaseHeaders: ContainerAcquireLeaseHeaders, ContainerBreakLeaseExceptionHeaders: ContainerBreakLeaseExceptionHeaders, ContainerBreakLeaseHeaders: ContainerBreakLeaseHeaders, ContainerChangeLeaseExceptionHeaders: ContainerChangeLeaseExceptionHeaders, ContainerChangeLeaseHeaders: ContainerChangeLeaseHeaders, ContainerCreateExceptionHeaders: ContainerCreateExceptionHeaders, ContainerCreateHeaders: ContainerCreateHeaders, ContainerDeleteExceptionHeaders: ContainerDeleteExceptionHeaders, ContainerDeleteHeaders: ContainerDeleteHeaders, ContainerFilterBlobsExceptionHeaders: ContainerFilterBlobsExceptionHeaders, ContainerFilterBlobsHeaders: ContainerFilterBlobsHeaders, ContainerGetAccessPolicyExceptionHeaders: ContainerGetAccessPolicyExceptionHeaders, ContainerGetAccessPolicyHeaders: ContainerGetAccessPolicyHeaders, ContainerGetAccountInfoExceptionHeaders: ContainerGetAccountInfoExceptionHeaders, ContainerGetAccountInfoHeaders: ContainerGetAccountInfoHeaders, ContainerGetPropertiesExceptionHeaders: ContainerGetPropertiesExceptionHeaders, ContainerGetPropertiesHeaders: ContainerGetPropertiesHeaders, ContainerItem: ContainerItem, 
ContainerListBlobFlatSegmentExceptionHeaders: ContainerListBlobFlatSegmentExceptionHeaders, ContainerListBlobFlatSegmentHeaders: ContainerListBlobFlatSegmentHeaders, ContainerListBlobHierarchySegmentExceptionHeaders: ContainerListBlobHierarchySegmentExceptionHeaders, ContainerListBlobHierarchySegmentHeaders: ContainerListBlobHierarchySegmentHeaders, ContainerProperties: ContainerProperties, ContainerReleaseLeaseExceptionHeaders: ContainerReleaseLeaseExceptionHeaders, ContainerReleaseLeaseHeaders: ContainerReleaseLeaseHeaders, ContainerRenameExceptionHeaders: ContainerRenameExceptionHeaders, ContainerRenameHeaders: ContainerRenameHeaders, ContainerRenewLeaseExceptionHeaders: ContainerRenewLeaseExceptionHeaders, ContainerRenewLeaseHeaders: ContainerRenewLeaseHeaders, ContainerRestoreExceptionHeaders: ContainerRestoreExceptionHeaders, ContainerRestoreHeaders: ContainerRestoreHeaders, ContainerSetAccessPolicyExceptionHeaders: ContainerSetAccessPolicyExceptionHeaders, ContainerSetAccessPolicyHeaders: ContainerSetAccessPolicyHeaders, ContainerSetMetadataExceptionHeaders: ContainerSetMetadataExceptionHeaders, ContainerSetMetadataHeaders: ContainerSetMetadataHeaders, ContainerSubmitBatchExceptionHeaders: ContainerSubmitBatchExceptionHeaders, ContainerSubmitBatchHeaders: ContainerSubmitBatchHeaders, CorsRule: CorsRule, DelimitedTextConfiguration: DelimitedTextConfiguration, FilterBlobItem: FilterBlobItem, FilterBlobSegment: FilterBlobSegment, GeoReplication: GeoReplication, JsonTextConfiguration: JsonTextConfiguration, KeyInfo: KeyInfo, ListBlobsFlatSegmentResponse: ListBlobsFlatSegmentResponse, ListBlobsHierarchySegmentResponse: ListBlobsHierarchySegmentResponse, ListContainersSegmentResponse: ListContainersSegmentResponse, Logging: Logging, Metrics: Metrics, PageBlobClearPagesExceptionHeaders: PageBlobClearPagesExceptionHeaders, PageBlobClearPagesHeaders: PageBlobClearPagesHeaders, PageBlobCopyIncrementalExceptionHeaders: PageBlobCopyIncrementalExceptionHeaders, PageBlobCopyIncrementalHeaders: PageBlobCopyIncrementalHeaders, PageBlobCreateExceptionHeaders: PageBlobCreateExceptionHeaders, PageBlobCreateHeaders: PageBlobCreateHeaders, PageBlobGetPageRangesDiffExceptionHeaders: PageBlobGetPageRangesDiffExceptionHeaders, PageBlobGetPageRangesDiffHeaders: PageBlobGetPageRangesDiffHeaders, PageBlobGetPageRangesExceptionHeaders: PageBlobGetPageRangesExceptionHeaders, PageBlobGetPageRangesHeaders: PageBlobGetPageRangesHeaders, PageBlobResizeExceptionHeaders: PageBlobResizeExceptionHeaders, PageBlobResizeHeaders: PageBlobResizeHeaders, PageBlobUpdateSequenceNumberExceptionHeaders: PageBlobUpdateSequenceNumberExceptionHeaders, PageBlobUpdateSequenceNumberHeaders: PageBlobUpdateSequenceNumberHeaders, PageBlobUploadPagesExceptionHeaders: PageBlobUploadPagesExceptionHeaders, PageBlobUploadPagesFromURLExceptionHeaders: PageBlobUploadPagesFromURLExceptionHeaders, PageBlobUploadPagesFromURLHeaders: PageBlobUploadPagesFromURLHeaders, PageBlobUploadPagesHeaders: PageBlobUploadPagesHeaders, PageList: PageList, PageRange: PageRange, QueryFormat: QueryFormat, QueryRequest: QueryRequest, QuerySerialization: QuerySerialization, RetentionPolicy: RetentionPolicy, ServiceFilterBlobsExceptionHeaders: ServiceFilterBlobsExceptionHeaders, ServiceFilterBlobsHeaders: ServiceFilterBlobsHeaders, ServiceGetAccountInfoExceptionHeaders: ServiceGetAccountInfoExceptionHeaders, ServiceGetAccountInfoHeaders: ServiceGetAccountInfoHeaders, ServiceGetPropertiesExceptionHeaders: ServiceGetPropertiesExceptionHeaders, 
ServiceGetPropertiesHeaders: ServiceGetPropertiesHeaders, ServiceGetStatisticsExceptionHeaders: ServiceGetStatisticsExceptionHeaders, ServiceGetStatisticsHeaders: ServiceGetStatisticsHeaders, ServiceGetUserDelegationKeyExceptionHeaders: ServiceGetUserDelegationKeyExceptionHeaders, ServiceGetUserDelegationKeyHeaders: ServiceGetUserDelegationKeyHeaders, ServiceListContainersSegmentExceptionHeaders: ServiceListContainersSegmentExceptionHeaders, ServiceListContainersSegmentHeaders: ServiceListContainersSegmentHeaders, ServiceSetPropertiesExceptionHeaders: ServiceSetPropertiesExceptionHeaders, ServiceSetPropertiesHeaders: ServiceSetPropertiesHeaders, ServiceSubmitBatchExceptionHeaders: ServiceSubmitBatchExceptionHeaders, ServiceSubmitBatchHeaders: ServiceSubmitBatchHeaders, SignedIdentifier: SignedIdentifier, StaticWebsite: StaticWebsite, StorageError: StorageError, UserDelegationKey: UserDelegationKey }); /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ const contentType = { parameterPath: ["options", "contentType"], mapper: { defaultValue: "application/xml", isConstant: true, serializedName: "Content-Type", type: { name: "String", }, }, }; const blobServiceProperties = { parameterPath: "blobServiceProperties", mapper: BlobServiceProperties, }; const accept = { parameterPath: "accept", mapper: { defaultValue: "application/xml", isConstant: true, serializedName: "Accept", type: { name: "String", }, }, }; const url = { parameterPath: "url", mapper: { serializedName: "url", required: true, xmlName: "url", type: { name: "String", }, }, skipEncoding: true, }; const restype = { parameterPath: "restype", mapper: { defaultValue: "service", isConstant: true, serializedName: "restype", type: { name: "String", }, }, }; const comp = { parameterPath: "comp", mapper: { defaultValue: "properties", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const timeoutInSeconds = { parameterPath: ["options", "timeoutInSeconds"], mapper: { constraints: { InclusiveMinimum: 0, }, serializedName: "timeout", xmlName: "timeout", type: { name: "Number", }, }, }; const version = { parameterPath: "version", mapper: { defaultValue: "2025-01-05", isConstant: true, serializedName: "x-ms-version", type: { name: "String", }, }, }; const requestId = { parameterPath: ["options", "requestId"], mapper: { serializedName: "x-ms-client-request-id", xmlName: "x-ms-client-request-id", type: { name: "String", }, }, }; const accept1 = { parameterPath: "accept", mapper: { defaultValue: "application/xml", isConstant: true, serializedName: "Accept", type: { name: "String", }, }, }; const comp1 = { parameterPath: "comp", mapper: { defaultValue: "stats", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const comp2 = { parameterPath: "comp", mapper: { defaultValue: "list", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const prefix = { parameterPath: ["options", "prefix"], mapper: { serializedName: "prefix", xmlName: "prefix", type: { name: "String", }, }, }; const marker = { parameterPath: ["options", "marker"], mapper: { serializedName: "marker", xmlName: "marker", type: { name: "String", }, }, }; const maxPageSize = { parameterPath: ["options", "maxPageSize"], mapper: { constraints: { InclusiveMinimum: 1, }, serializedName: "maxresults", xmlName: "maxresults", type: { name: "Number", }, }, }; 
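// Note (illustrative, not part of the generated client): each operation parameter
// constant in this section pairs a `parameterPath` (the location of the value on the
// caller's argument/options bag) with a `mapper` describing its wire name, type, and
// whether it is a constant. The helper below is a hypothetical sketch of how such
// metadata can be resolved before serialization; `resolveParameterValueSketch` does
// not exist in this bundle and is shown for explanation only.
function resolveParameterValueSketch(operationArguments, parameter) {
    // Constant parameters (for example the various `comp` and `restype` constants)
    // always serialize their fixed default value.
    if (parameter.mapper.isConstant) {
        return parameter.mapper.defaultValue;
    }
    // Otherwise walk the parameterPath (a single key or a list of keys) into the
    // caller-supplied arguments bag.
    const path = Array.isArray(parameter.parameterPath)
        ? parameter.parameterPath
        : [parameter.parameterPath];
    let value = operationArguments;
    for (const key of path) {
        if (value === undefined || value === null) {
            return undefined;
        }
        value = value[key];
    }
    return value;
}
// For example, resolveParameterValueSketch({ options: { timeoutInSeconds: 30 } }, timeoutInSeconds)
// would return 30, which the serializer then emits as the `timeout` query parameter,
// subject to the mapper's `InclusiveMinimum: 0` constraint.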
const include = { parameterPath: ["options", "include"], mapper: { serializedName: "include", xmlName: "include", xmlElementName: "ListContainersIncludeType", type: { name: "Sequence", element: { type: { name: "Enum", allowedValues: ["metadata", "deleted", "system"], }, }, }, }, collectionFormat: "CSV", }; const keyInfo = { parameterPath: "keyInfo", mapper: KeyInfo, }; const comp3 = { parameterPath: "comp", mapper: { defaultValue: "userdelegationkey", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const restype1 = { parameterPath: "restype", mapper: { defaultValue: "account", isConstant: true, serializedName: "restype", type: { name: "String", }, }, }; const body = { parameterPath: "body", mapper: { serializedName: "body", required: true, xmlName: "body", type: { name: "Stream", }, }, }; const comp4 = { parameterPath: "comp", mapper: { defaultValue: "batch", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const contentLength = { parameterPath: "contentLength", mapper: { serializedName: "Content-Length", required: true, xmlName: "Content-Length", type: { name: "Number", }, }, }; const multipartContentType = { parameterPath: "multipartContentType", mapper: { serializedName: "Content-Type", required: true, xmlName: "Content-Type", type: { name: "String", }, }, }; const comp5 = { parameterPath: "comp", mapper: { defaultValue: "blobs", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const where = { parameterPath: ["options", "where"], mapper: { serializedName: "where", xmlName: "where", type: { name: "String", }, }, }; const restype2 = { parameterPath: "restype", mapper: { defaultValue: "container", isConstant: true, serializedName: "restype", type: { name: "String", }, }, }; const metadata = { parameterPath: ["options", "metadata"], mapper: { serializedName: "x-ms-meta", xmlName: "x-ms-meta", headerCollectionPrefix: "x-ms-meta-", type: { name: "Dictionary", value: { type: { name: "String" } }, }, }, }; const access = { parameterPath: ["options", "access"], mapper: { serializedName: "x-ms-blob-public-access", xmlName: "x-ms-blob-public-access", type: { name: "Enum", allowedValues: ["container", "blob"], }, }, }; const defaultEncryptionScope = { parameterPath: [ "options", "containerEncryptionScope", "defaultEncryptionScope", ], mapper: { serializedName: "x-ms-default-encryption-scope", xmlName: "x-ms-default-encryption-scope", type: { name: "String", }, }, }; const preventEncryptionScopeOverride = { parameterPath: [ "options", "containerEncryptionScope", "preventEncryptionScopeOverride", ], mapper: { serializedName: "x-ms-deny-encryption-scope-override", xmlName: "x-ms-deny-encryption-scope-override", type: { name: "Boolean", }, }, }; const leaseId = { parameterPath: ["options", "leaseAccessConditions", "leaseId"], mapper: { serializedName: "x-ms-lease-id", xmlName: "x-ms-lease-id", type: { name: "String", }, }, }; const ifModifiedSince = { parameterPath: ["options", "modifiedAccessConditions", "ifModifiedSince"], mapper: { serializedName: "If-Modified-Since", xmlName: "If-Modified-Since", type: { name: "DateTimeRfc1123", }, }, }; const ifUnmodifiedSince = { parameterPath: ["options", "modifiedAccessConditions", "ifUnmodifiedSince"], mapper: { serializedName: "If-Unmodified-Since", xmlName: "If-Unmodified-Since", type: { name: "DateTimeRfc1123", }, }, }; const comp6 = { parameterPath: "comp", mapper: { defaultValue: "metadata", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const comp7 
= { parameterPath: "comp", mapper: { defaultValue: "acl", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const containerAcl = { parameterPath: ["options", "containerAcl"], mapper: { serializedName: "containerAcl", xmlName: "SignedIdentifiers", xmlIsWrapped: true, xmlElementName: "SignedIdentifier", type: { name: "Sequence", element: { type: { name: "Composite", className: "SignedIdentifier", }, }, }, }, }; const comp8 = { parameterPath: "comp", mapper: { defaultValue: "undelete", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const deletedContainerName = { parameterPath: ["options", "deletedContainerName"], mapper: { serializedName: "x-ms-deleted-container-name", xmlName: "x-ms-deleted-container-name", type: { name: "String", }, }, }; const deletedContainerVersion = { parameterPath: ["options", "deletedContainerVersion"], mapper: { serializedName: "x-ms-deleted-container-version", xmlName: "x-ms-deleted-container-version", type: { name: "String", }, }, }; const comp9 = { parameterPath: "comp", mapper: { defaultValue: "rename", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const sourceContainerName = { parameterPath: "sourceContainerName", mapper: { serializedName: "x-ms-source-container-name", required: true, xmlName: "x-ms-source-container-name", type: { name: "String", }, }, }; const sourceLeaseId = { parameterPath: ["options", "sourceLeaseId"], mapper: { serializedName: "x-ms-source-lease-id", xmlName: "x-ms-source-lease-id", type: { name: "String", }, }, }; const comp10 = { parameterPath: "comp", mapper: { defaultValue: "lease", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const action = { parameterPath: "action", mapper: { defaultValue: "acquire", isConstant: true, serializedName: "x-ms-lease-action", type: { name: "String", }, }, }; const duration = { parameterPath: ["options", "duration"], mapper: { serializedName: "x-ms-lease-duration", xmlName: "x-ms-lease-duration", type: { name: "Number", }, }, }; const proposedLeaseId = { parameterPath: ["options", "proposedLeaseId"], mapper: { serializedName: "x-ms-proposed-lease-id", xmlName: "x-ms-proposed-lease-id", type: { name: "String", }, }, }; const action1 = { parameterPath: "action", mapper: { defaultValue: "release", isConstant: true, serializedName: "x-ms-lease-action", type: { name: "String", }, }, }; const leaseId1 = { parameterPath: "leaseId", mapper: { serializedName: "x-ms-lease-id", required: true, xmlName: "x-ms-lease-id", type: { name: "String", }, }, }; const action2 = { parameterPath: "action", mapper: { defaultValue: "renew", isConstant: true, serializedName: "x-ms-lease-action", type: { name: "String", }, }, }; const action3 = { parameterPath: "action", mapper: { defaultValue: "break", isConstant: true, serializedName: "x-ms-lease-action", type: { name: "String", }, }, }; const breakPeriod = { parameterPath: ["options", "breakPeriod"], mapper: { serializedName: "x-ms-lease-break-period", xmlName: "x-ms-lease-break-period", type: { name: "Number", }, }, }; const action4 = { parameterPath: "action", mapper: { defaultValue: "change", isConstant: true, serializedName: "x-ms-lease-action", type: { name: "String", }, }, }; const proposedLeaseId1 = { parameterPath: "proposedLeaseId", mapper: { serializedName: "x-ms-proposed-lease-id", required: true, xmlName: "x-ms-proposed-lease-id", type: { name: "String", }, }, }; const include1 = { parameterPath: ["options", "include"], mapper: { serializedName: "include", 
xmlName: "include", xmlElementName: "ListBlobsIncludeItem", type: { name: "Sequence", element: { type: { name: "Enum", allowedValues: [ "copy", "deleted", "metadata", "snapshots", "uncommittedblobs", "versions", "tags", "immutabilitypolicy", "legalhold", "deletedwithversions", ], }, }, }, }, collectionFormat: "CSV", }; const delimiter = { parameterPath: "delimiter", mapper: { serializedName: "delimiter", required: true, xmlName: "delimiter", type: { name: "String", }, }, }; const snapshot = { parameterPath: ["options", "snapshot"], mapper: { serializedName: "snapshot", xmlName: "snapshot", type: { name: "String", }, }, }; const versionId = { parameterPath: ["options", "versionId"], mapper: { serializedName: "versionid", xmlName: "versionid", type: { name: "String", }, }, }; const range = { parameterPath: ["options", "range"], mapper: { serializedName: "x-ms-range", xmlName: "x-ms-range", type: { name: "String", }, }, }; const rangeGetContentMD5 = { parameterPath: ["options", "rangeGetContentMD5"], mapper: { serializedName: "x-ms-range-get-content-md5", xmlName: "x-ms-range-get-content-md5", type: { name: "Boolean", }, }, }; const rangeGetContentCRC64 = { parameterPath: ["options", "rangeGetContentCRC64"], mapper: { serializedName: "x-ms-range-get-content-crc64", xmlName: "x-ms-range-get-content-crc64", type: { name: "Boolean", }, }, }; const encryptionKey = { parameterPath: ["options", "cpkInfo", "encryptionKey"], mapper: { serializedName: "x-ms-encryption-key", xmlName: "x-ms-encryption-key", type: { name: "String", }, }, }; const encryptionKeySha256 = { parameterPath: ["options", "cpkInfo", "encryptionKeySha256"], mapper: { serializedName: "x-ms-encryption-key-sha256", xmlName: "x-ms-encryption-key-sha256", type: { name: "String", }, }, }; const encryptionAlgorithm = { parameterPath: ["options", "cpkInfo", "encryptionAlgorithm"], mapper: { serializedName: "x-ms-encryption-algorithm", xmlName: "x-ms-encryption-algorithm", type: { name: "String", }, }, }; const ifMatch = { parameterPath: ["options", "modifiedAccessConditions", "ifMatch"], mapper: { serializedName: "If-Match", xmlName: "If-Match", type: { name: "String", }, }, }; const ifNoneMatch = { parameterPath: ["options", "modifiedAccessConditions", "ifNoneMatch"], mapper: { serializedName: "If-None-Match", xmlName: "If-None-Match", type: { name: "String", }, }, }; const ifTags = { parameterPath: ["options", "modifiedAccessConditions", "ifTags"], mapper: { serializedName: "x-ms-if-tags", xmlName: "x-ms-if-tags", type: { name: "String", }, }, }; const deleteSnapshots = { parameterPath: ["options", "deleteSnapshots"], mapper: { serializedName: "x-ms-delete-snapshots", xmlName: "x-ms-delete-snapshots", type: { name: "Enum", allowedValues: ["include", "only"], }, }, }; const blobDeleteType = { parameterPath: ["options", "blobDeleteType"], mapper: { serializedName: "deletetype", xmlName: "deletetype", type: { name: "String", }, }, }; const comp11 = { parameterPath: "comp", mapper: { defaultValue: "expiry", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const expiryOptions = { parameterPath: "expiryOptions", mapper: { serializedName: "x-ms-expiry-option", required: true, xmlName: "x-ms-expiry-option", type: { name: "String", }, }, }; const expiresOn = { parameterPath: ["options", "expiresOn"], mapper: { serializedName: "x-ms-expiry-time", xmlName: "x-ms-expiry-time", type: { name: "String", }, }, }; const blobCacheControl = { parameterPath: ["options", "blobHttpHeaders", "blobCacheControl"], mapper: { 
serializedName: "x-ms-blob-cache-control", xmlName: "x-ms-blob-cache-control", type: { name: "String", }, }, }; const blobContentType = { parameterPath: ["options", "blobHttpHeaders", "blobContentType"], mapper: { serializedName: "x-ms-blob-content-type", xmlName: "x-ms-blob-content-type", type: { name: "String", }, }, }; const blobContentMD5 = { parameterPath: ["options", "blobHttpHeaders", "blobContentMD5"], mapper: { serializedName: "x-ms-blob-content-md5", xmlName: "x-ms-blob-content-md5", type: { name: "ByteArray", }, }, }; const blobContentEncoding = { parameterPath: ["options", "blobHttpHeaders", "blobContentEncoding"], mapper: { serializedName: "x-ms-blob-content-encoding", xmlName: "x-ms-blob-content-encoding", type: { name: "String", }, }, }; const blobContentLanguage = { parameterPath: ["options", "blobHttpHeaders", "blobContentLanguage"], mapper: { serializedName: "x-ms-blob-content-language", xmlName: "x-ms-blob-content-language", type: { name: "String", }, }, }; const blobContentDisposition = { parameterPath: ["options", "blobHttpHeaders", "blobContentDisposition"], mapper: { serializedName: "x-ms-blob-content-disposition", xmlName: "x-ms-blob-content-disposition", type: { name: "String", }, }, }; const comp12 = { parameterPath: "comp", mapper: { defaultValue: "immutabilityPolicies", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const immutabilityPolicyExpiry = { parameterPath: ["options", "immutabilityPolicyExpiry"], mapper: { serializedName: "x-ms-immutability-policy-until-date", xmlName: "x-ms-immutability-policy-until-date", type: { name: "DateTimeRfc1123", }, }, }; const immutabilityPolicyMode = { parameterPath: ["options", "immutabilityPolicyMode"], mapper: { serializedName: "x-ms-immutability-policy-mode", xmlName: "x-ms-immutability-policy-mode", type: { name: "Enum", allowedValues: ["Mutable", "Unlocked", "Locked"], }, }, }; const comp13 = { parameterPath: "comp", mapper: { defaultValue: "legalhold", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const legalHold = { parameterPath: "legalHold", mapper: { serializedName: "x-ms-legal-hold", required: true, xmlName: "x-ms-legal-hold", type: { name: "Boolean", }, }, }; const encryptionScope = { parameterPath: ["options", "encryptionScope"], mapper: { serializedName: "x-ms-encryption-scope", xmlName: "x-ms-encryption-scope", type: { name: "String", }, }, }; const comp14 = { parameterPath: "comp", mapper: { defaultValue: "snapshot", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const tier = { parameterPath: ["options", "tier"], mapper: { serializedName: "x-ms-access-tier", xmlName: "x-ms-access-tier", type: { name: "Enum", allowedValues: [ "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", "Cold", ], }, }, }; const rehydratePriority = { parameterPath: ["options", "rehydratePriority"], mapper: { serializedName: "x-ms-rehydrate-priority", xmlName: "x-ms-rehydrate-priority", type: { name: "Enum", allowedValues: ["High", "Standard"], }, }, }; const sourceIfModifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", "sourceIfModifiedSince", ], mapper: { serializedName: "x-ms-source-if-modified-since", xmlName: "x-ms-source-if-modified-since", type: { name: "DateTimeRfc1123", }, }, }; const sourceIfUnmodifiedSince = { parameterPath: [ "options", "sourceModifiedAccessConditions", "sourceIfUnmodifiedSince", ], mapper: { serializedName: "x-ms-source-if-unmodified-since", xmlName: 
"x-ms-source-if-unmodified-since", type: { name: "DateTimeRfc1123", }, }, }; const sourceIfMatch = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfMatch"], mapper: { serializedName: "x-ms-source-if-match", xmlName: "x-ms-source-if-match", type: { name: "String", }, }, }; const sourceIfNoneMatch = { parameterPath: [ "options", "sourceModifiedAccessConditions", "sourceIfNoneMatch", ], mapper: { serializedName: "x-ms-source-if-none-match", xmlName: "x-ms-source-if-none-match", type: { name: "String", }, }, }; const sourceIfTags = { parameterPath: ["options", "sourceModifiedAccessConditions", "sourceIfTags"], mapper: { serializedName: "x-ms-source-if-tags", xmlName: "x-ms-source-if-tags", type: { name: "String", }, }, }; const copySource = { parameterPath: "copySource", mapper: { serializedName: "x-ms-copy-source", required: true, xmlName: "x-ms-copy-source", type: { name: "String", }, }, }; const blobTagsString = { parameterPath: ["options", "blobTagsString"], mapper: { serializedName: "x-ms-tags", xmlName: "x-ms-tags", type: { name: "String", }, }, }; const sealBlob = { parameterPath: ["options", "sealBlob"], mapper: { serializedName: "x-ms-seal-blob", xmlName: "x-ms-seal-blob", type: { name: "Boolean", }, }, }; const legalHold1 = { parameterPath: ["options", "legalHold"], mapper: { serializedName: "x-ms-legal-hold", xmlName: "x-ms-legal-hold", type: { name: "Boolean", }, }, }; const xMsRequiresSync = { parameterPath: "xMsRequiresSync", mapper: { defaultValue: "true", isConstant: true, serializedName: "x-ms-requires-sync", type: { name: "String", }, }, }; const sourceContentMD5 = { parameterPath: ["options", "sourceContentMD5"], mapper: { serializedName: "x-ms-source-content-md5", xmlName: "x-ms-source-content-md5", type: { name: "ByteArray", }, }, }; const copySourceAuthorization = { parameterPath: ["options", "copySourceAuthorization"], mapper: { serializedName: "x-ms-copy-source-authorization", xmlName: "x-ms-copy-source-authorization", type: { name: "String", }, }, }; const copySourceTags = { parameterPath: ["options", "copySourceTags"], mapper: { serializedName: "x-ms-copy-source-tag-option", xmlName: "x-ms-copy-source-tag-option", type: { name: "Enum", allowedValues: ["REPLACE", "COPY"], }, }, }; const comp15 = { parameterPath: "comp", mapper: { defaultValue: "copy", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const copyActionAbortConstant = { parameterPath: "copyActionAbortConstant", mapper: { defaultValue: "abort", isConstant: true, serializedName: "x-ms-copy-action", type: { name: "String", }, }, }; const copyId = { parameterPath: "copyId", mapper: { serializedName: "copyid", required: true, xmlName: "copyid", type: { name: "String", }, }, }; const comp16 = { parameterPath: "comp", mapper: { defaultValue: "tier", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const tier1 = { parameterPath: "tier", mapper: { serializedName: "x-ms-access-tier", required: true, xmlName: "x-ms-access-tier", type: { name: "Enum", allowedValues: [ "P4", "P6", "P10", "P15", "P20", "P30", "P40", "P50", "P60", "P70", "P80", "Hot", "Cool", "Archive", "Cold", ], }, }, }; const queryRequest = { parameterPath: ["options", "queryRequest"], mapper: QueryRequest, }; const comp17 = { parameterPath: "comp", mapper: { defaultValue: "query", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const comp18 = { parameterPath: "comp", mapper: { defaultValue: "tags", isConstant: true, serializedName: "comp", type: { 
name: "String", }, }, }; const tags = { parameterPath: ["options", "tags"], mapper: BlobTags, }; const transactionalContentMD5 = { parameterPath: ["options", "transactionalContentMD5"], mapper: { serializedName: "Content-MD5", xmlName: "Content-MD5", type: { name: "ByteArray", }, }, }; const transactionalContentCrc64 = { parameterPath: ["options", "transactionalContentCrc64"], mapper: { serializedName: "x-ms-content-crc64", xmlName: "x-ms-content-crc64", type: { name: "ByteArray", }, }, }; const blobType = { parameterPath: "blobType", mapper: { defaultValue: "PageBlob", isConstant: true, serializedName: "x-ms-blob-type", type: { name: "String", }, }, }; const blobContentLength = { parameterPath: "blobContentLength", mapper: { serializedName: "x-ms-blob-content-length", required: true, xmlName: "x-ms-blob-content-length", type: { name: "Number", }, }, }; const blobSequenceNumber = { parameterPath: ["options", "blobSequenceNumber"], mapper: { defaultValue: 0, serializedName: "x-ms-blob-sequence-number", xmlName: "x-ms-blob-sequence-number", type: { name: "Number", }, }, }; const contentType1 = { parameterPath: ["options", "contentType"], mapper: { defaultValue: "application/octet-stream", isConstant: true, serializedName: "Content-Type", type: { name: "String", }, }, }; const body1 = { parameterPath: "body", mapper: { serializedName: "body", required: true, xmlName: "body", type: { name: "Stream", }, }, }; const accept2 = { parameterPath: "accept", mapper: { defaultValue: "application/xml", isConstant: true, serializedName: "Accept", type: { name: "String", }, }, }; const comp19 = { parameterPath: "comp", mapper: { defaultValue: "page", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const pageWrite = { parameterPath: "pageWrite", mapper: { defaultValue: "update", isConstant: true, serializedName: "x-ms-page-write", type: { name: "String", }, }, }; const ifSequenceNumberLessThanOrEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", "ifSequenceNumberLessThanOrEqualTo", ], mapper: { serializedName: "x-ms-if-sequence-number-le", xmlName: "x-ms-if-sequence-number-le", type: { name: "Number", }, }, }; const ifSequenceNumberLessThan = { parameterPath: [ "options", "sequenceNumberAccessConditions", "ifSequenceNumberLessThan", ], mapper: { serializedName: "x-ms-if-sequence-number-lt", xmlName: "x-ms-if-sequence-number-lt", type: { name: "Number", }, }, }; const ifSequenceNumberEqualTo = { parameterPath: [ "options", "sequenceNumberAccessConditions", "ifSequenceNumberEqualTo", ], mapper: { serializedName: "x-ms-if-sequence-number-eq", xmlName: "x-ms-if-sequence-number-eq", type: { name: "Number", }, }, }; const pageWrite1 = { parameterPath: "pageWrite", mapper: { defaultValue: "clear", isConstant: true, serializedName: "x-ms-page-write", type: { name: "String", }, }, }; const sourceUrl = { parameterPath: "sourceUrl", mapper: { serializedName: "x-ms-copy-source", required: true, xmlName: "x-ms-copy-source", type: { name: "String", }, }, }; const sourceRange = { parameterPath: "sourceRange", mapper: { serializedName: "x-ms-source-range", required: true, xmlName: "x-ms-source-range", type: { name: "String", }, }, }; const sourceContentCrc64 = { parameterPath: ["options", "sourceContentCrc64"], mapper: { serializedName: "x-ms-source-content-crc64", xmlName: "x-ms-source-content-crc64", type: { name: "ByteArray", }, }, }; const range1 = { parameterPath: "range", mapper: { serializedName: "x-ms-range", required: true, xmlName: "x-ms-range", type: { name: 
"String", }, }, }; const comp20 = { parameterPath: "comp", mapper: { defaultValue: "pagelist", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const prevsnapshot = { parameterPath: ["options", "prevsnapshot"], mapper: { serializedName: "prevsnapshot", xmlName: "prevsnapshot", type: { name: "String", }, }, }; const prevSnapshotUrl = { parameterPath: ["options", "prevSnapshotUrl"], mapper: { serializedName: "x-ms-previous-snapshot-url", xmlName: "x-ms-previous-snapshot-url", type: { name: "String", }, }, }; const sequenceNumberAction = { parameterPath: "sequenceNumberAction", mapper: { serializedName: "x-ms-sequence-number-action", required: true, xmlName: "x-ms-sequence-number-action", type: { name: "Enum", allowedValues: ["max", "update", "increment"], }, }, }; const comp21 = { parameterPath: "comp", mapper: { defaultValue: "incrementalcopy", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const blobType1 = { parameterPath: "blobType", mapper: { defaultValue: "AppendBlob", isConstant: true, serializedName: "x-ms-blob-type", type: { name: "String", }, }, }; const comp22 = { parameterPath: "comp", mapper: { defaultValue: "appendblock", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const maxSize = { parameterPath: ["options", "appendPositionAccessConditions", "maxSize"], mapper: { serializedName: "x-ms-blob-condition-maxsize", xmlName: "x-ms-blob-condition-maxsize", type: { name: "Number", }, }, }; const appendPosition = { parameterPath: [ "options", "appendPositionAccessConditions", "appendPosition", ], mapper: { serializedName: "x-ms-blob-condition-appendpos", xmlName: "x-ms-blob-condition-appendpos", type: { name: "Number", }, }, }; const sourceRange1 = { parameterPath: ["options", "sourceRange"], mapper: { serializedName: "x-ms-source-range", xmlName: "x-ms-source-range", type: { name: "String", }, }, }; const comp23 = { parameterPath: "comp", mapper: { defaultValue: "seal", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const blobType2 = { parameterPath: "blobType", mapper: { defaultValue: "BlockBlob", isConstant: true, serializedName: "x-ms-blob-type", type: { name: "String", }, }, }; const copySourceBlobProperties = { parameterPath: ["options", "copySourceBlobProperties"], mapper: { serializedName: "x-ms-copy-source-blob-properties", xmlName: "x-ms-copy-source-blob-properties", type: { name: "Boolean", }, }, }; const comp24 = { parameterPath: "comp", mapper: { defaultValue: "block", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const blockId = { parameterPath: "blockId", mapper: { serializedName: "blockid", required: true, xmlName: "blockid", type: { name: "String", }, }, }; const blocks = { parameterPath: "blocks", mapper: BlockLookupList, }; const comp25 = { parameterPath: "comp", mapper: { defaultValue: "blocklist", isConstant: true, serializedName: "comp", type: { name: "String", }, }, }; const listType = { parameterPath: "listType", mapper: { defaultValue: "committed", serializedName: "blocklisttype", required: true, xmlName: "blocklisttype", type: { name: "Enum", allowedValues: ["committed", "uncommitted", "all"], }, }, }; /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class containing Service operations. 
*/ class ServiceImpl { /** * Initialize a new instance of the class Service class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** * Sets properties for a storage account's Blob service endpoint, including properties for Storage * Analytics and CORS (Cross-Origin Resource Sharing) rules * @param blobServiceProperties The StorageService properties. * @param options The options parameters. */ setProperties(blobServiceProperties, options) { return this.client.sendOperationRequest({ blobServiceProperties, options }, setPropertiesOperationSpec); } /** * gets the properties of a storage account's Blob service, including properties for Storage Analytics * and CORS (Cross-Origin Resource Sharing) rules. * @param options The options parameters. */ getProperties(options) { return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$2); } /** * Retrieves statistics related to replication for the Blob service. It is only available on the * secondary location endpoint when read-access geo-redundant replication is enabled for the storage * account. * @param options The options parameters. */ getStatistics(options) { return this.client.sendOperationRequest({ options }, getStatisticsOperationSpec); } /** * The List Containers Segment operation returns a list of the containers under the specified account * @param options The options parameters. */ listContainersSegment(options) { return this.client.sendOperationRequest({ options }, listContainersSegmentOperationSpec); } /** * Retrieves a user delegation key for the Blob service. This is only a valid operation when using * bearer token authentication. * @param keyInfo Key information * @param options The options parameters. */ getUserDelegationKey(keyInfo, options) { return this.client.sendOperationRequest({ keyInfo, options }, getUserDelegationKeyOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$2); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. * @param contentLength The length of the request. * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch * boundary. Example header value: multipart/mixed; boundary=batch_ * @param body Initial data * @param options The options parameters. */ submitBatch(contentLength, multipartContentType, body, options) { return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec$1); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags match a * given search expression. Filter blobs searches across all containers within a storage account but * can be scoped within the expression to a single container. * @param options The options parameters. 
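 *
 * Call sketch (assumes `service` is a ServiceImpl instance; the filter uses the
 * "key"='value' blob-tag syntax and the option names mirror the query
 * parameters declared in filterBlobsOperationSpec$1 below):
 * @example
 * const segment = await service.filterBlobs({
 *   where: "\"status\"='active'",
 *   maxPageSize: 100,
 * });
 * // segment.blobs lists the matching blobs across the whole account.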
*/ filterBlobs(options) { return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec$1); } } // Operation Specifications const xmlSerializer$5 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const setPropertiesOperationSpec = { path: "/", httpMethod: "PUT", responses: { 202: { headersMapper: ServiceSetPropertiesHeaders, }, default: { bodyMapper: StorageError, headersMapper: ServiceSetPropertiesExceptionHeaders, }, }, requestBody: blobServiceProperties, queryParameters: [ restype, comp, timeoutInSeconds, ], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$5, }; const getPropertiesOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { 200: { bodyMapper: BlobServiceProperties, headersMapper: ServiceGetPropertiesHeaders, }, default: { bodyMapper: StorageError, headersMapper: ServiceGetPropertiesExceptionHeaders, }, }, queryParameters: [ restype, comp, timeoutInSeconds, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$5, }; const getStatisticsOperationSpec = { path: "/", httpMethod: "GET", responses: { 200: { bodyMapper: BlobServiceStatistics, headersMapper: ServiceGetStatisticsHeaders, }, default: { bodyMapper: StorageError, headersMapper: ServiceGetStatisticsExceptionHeaders, }, }, queryParameters: [ restype, timeoutInSeconds, comp1, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$5, }; const listContainersSegmentOperationSpec = { path: "/", httpMethod: "GET", responses: { 200: { bodyMapper: ListContainersSegmentResponse, headersMapper: ServiceListContainersSegmentHeaders, }, default: { bodyMapper: StorageError, headersMapper: ServiceListContainersSegmentExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, comp2, prefix, marker, maxPageSize, include, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$5, }; const getUserDelegationKeyOperationSpec = { path: "/", httpMethod: "POST", responses: { 200: { bodyMapper: UserDelegationKey, headersMapper: ServiceGetUserDelegationKeyHeaders, }, default: { bodyMapper: StorageError, headersMapper: ServiceGetUserDelegationKeyExceptionHeaders, }, }, requestBody: keyInfo, queryParameters: [ restype, timeoutInSeconds, comp3, ], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$5, }; const getAccountInfoOperationSpec$2 = { path: "/", httpMethod: "GET", responses: { 200: { headersMapper: ServiceGetAccountInfoHeaders, }, default: { bodyMapper: StorageError, headersMapper: ServiceGetAccountInfoExceptionHeaders, }, }, queryParameters: [ comp, timeoutInSeconds, restype1, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$5, }; const submitBatchOperationSpec$1 = { path: "/", httpMethod: "POST", responses: { 202: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse", }, headersMapper: ServiceSubmitBatchHeaders, }, default: { bodyMapper: StorageError, headersMapper: ServiceSubmitBatchExceptionHeaders, }, }, requestBody: body, queryParameters: [timeoutInSeconds, comp4], urlParameters: [url], headerParameters: [ accept, version, requestId, contentLength, 
multipartContentType, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$5, }; const filterBlobsOperationSpec$1 = { path: "/", httpMethod: "GET", responses: { 200: { bodyMapper: FilterBlobSegment, headersMapper: ServiceFilterBlobsHeaders, }, default: { bodyMapper: StorageError, headersMapper: ServiceFilterBlobsExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, comp5, where, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$5, }; /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class containing Container operations. */ class ContainerImpl { /** * Initialize a new instance of the class Container class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** * creates a new container under the specified account. If the container with the same name already * exists, the operation fails * @param options The options parameters. */ create(options) { return this.client.sendOperationRequest({ options }, createOperationSpec$2); } /** * returns all user-defined metadata and system properties for the specified container. The data * returned does not include the container's list of blobs * @param options The options parameters. */ getProperties(options) { return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec$1); } /** * operation marks the specified container for deletion. The container and any blobs contained within * it are later deleted during garbage collection * @param options The options parameters. */ delete(options) { return this.client.sendOperationRequest({ options }, deleteOperationSpec$1); } /** * operation sets one or more user-defined name-value pairs for the specified container. * @param options The options parameters. */ setMetadata(options) { return this.client.sendOperationRequest({ options }, setMetadataOperationSpec$1); } /** * gets the permissions for the specified container. The permissions indicate whether container data * may be accessed publicly. * @param options The options parameters. */ getAccessPolicy(options) { return this.client.sendOperationRequest({ options }, getAccessPolicyOperationSpec); } /** * sets the permissions for the specified container. The permissions indicate whether blobs in a * container may be accessed publicly. * @param options The options parameters. */ setAccessPolicy(options) { return this.client.sendOperationRequest({ options }, setAccessPolicyOperationSpec); } /** * Restores a previously-deleted container. * @param options The options parameters. */ restore(options) { return this.client.sendOperationRequest({ options }, restoreOperationSpec); } /** * Renames an existing container. * @param sourceContainerName Required. Specifies the name of the container to rename. * @param options The options parameters. */ rename(sourceContainerName, options) { return this.client.sendOperationRequest({ sourceContainerName, options }, renameOperationSpec); } /** * The Batch operation allows multiple API calls to be embedded into a single HTTP request. * @param contentLength The length of the request. * @param multipartContentType Required. The value of this header must be multipart/mixed with a batch * boundary. 
Example header value: multipart/mixed; boundary=batch_ * @param body Initial data * @param options The options parameters. */ submitBatch(contentLength, multipartContentType, body, options) { return this.client.sendOperationRequest({ contentLength, multipartContentType, body, options }, submitBatchOperationSpec); } /** * The Filter Blobs operation enables callers to list blobs in a container whose tags match a given * search expression. Filter blobs searches within the given container. * @param options The options parameters. */ filterBlobs(options) { return this.client.sendOperationRequest({ options }, filterBlobsOperationSpec); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param options The options parameters. */ acquireLease(options) { return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ releaseLease(leaseId, options) { return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ renewLease(leaseId, options) { return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param options The options parameters. */ breakLease(options) { return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec$1); } /** * [Update] establishes and manages a lock on a container for delete operations. The lock duration can * be 15 to 60 seconds, or can be infinite * @param leaseId Specifies the current lease ID on the resource. * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor * (String) for a list of valid GUID string formats. * @param options The options parameters. */ changeLease(leaseId, proposedLeaseId, options) { return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec$1); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param options The options parameters. */ listBlobFlatSegment(options) { return this.client.sendOperationRequest({ options }, listBlobFlatSegmentOperationSpec); } /** * [Update] The List Blobs operation returns a list of the blobs under the specified container * @param delimiter When the request includes this parameter, the operation returns a BlobPrefix * element in the response body that acts as a placeholder for all blobs whose names begin with the * same substring up to the appearance of the delimiter character. The delimiter may be a single * character or a string. * @param options The options parameters. 
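 *
 * Call sketch (assumes `container` is a ContainerImpl whose client url already
 * targets the container; "/" is the usual delimiter for virtual directories):
 * @example
 * const page = await container.listBlobHierarchySegment("/", {
 *   prefix: "logs/",
 *   maxPageSize: 50,
 * });
 * // page.segment.blobPrefixes are the virtual directories under logs/,
 * // page.segment.blobItems are the blobs at that level.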
*/ listBlobHierarchySegment(delimiter, options) { return this.client.sendOperationRequest({ delimiter, options }, listBlobHierarchySegmentOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec$1); } } // Operation Specifications const xmlSerializer$4 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const createOperationSpec$2 = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { headersMapper: ContainerCreateHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerCreateExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, access, defaultEncryptionScope, preventEncryptionScopeOverride, ], isXML: true, serializer: xmlSerializer$4, }; const getPropertiesOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { headersMapper: ContainerGetPropertiesHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerGetPropertiesExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ], isXML: true, serializer: xmlSerializer$4, }; const deleteOperationSpec$1 = { path: "/{containerName}", httpMethod: "DELETE", responses: { 202: { headersMapper: ContainerDeleteHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerDeleteExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, restype2], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, ], isXML: true, serializer: xmlSerializer$4, }; const setMetadataOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerSetMetadataHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerSetMetadataExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, restype2, comp6, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, leaseId, ifModifiedSince, ], isXML: true, serializer: xmlSerializer$4, }; const getAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { bodyMapper: { type: { name: "Sequence", element: { type: { name: "Composite", className: "SignedIdentifier" }, }, }, serializedName: "SignedIdentifiers", xmlName: "SignedIdentifiers", xmlIsWrapped: true, xmlElementName: "SignedIdentifier", }, headersMapper: ContainerGetAccessPolicyHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerGetAccessPolicyExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, restype2, comp7, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ], isXML: true, serializer: xmlSerializer$4, }; const setAccessPolicyOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerSetAccessPolicyHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerSetAccessPolicyExceptionHeaders, }, }, requestBody: containerAcl, queryParameters: [ timeoutInSeconds, restype2, comp7, ], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId, access, leaseId, ifModifiedSince, ifUnmodifiedSince, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$4, }; const 
restoreOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { headersMapper: ContainerRestoreHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerRestoreExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, restype2, comp8, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, deletedContainerName, deletedContainerVersion, ], isXML: true, serializer: xmlSerializer$4, }; const renameOperationSpec = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerRenameHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerRenameExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, restype2, comp9, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, sourceContainerName, sourceLeaseId, ], isXML: true, serializer: xmlSerializer$4, }; const submitBatchOperationSpec = { path: "/{containerName}", httpMethod: "POST", responses: { 202: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse", }, headersMapper: ContainerSubmitBatchHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerSubmitBatchExceptionHeaders, }, }, requestBody: body, queryParameters: [ timeoutInSeconds, comp4, restype2, ], urlParameters: [url], headerParameters: [ accept, version, requestId, contentLength, multipartContentType, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$4, }; const filterBlobsOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { bodyMapper: FilterBlobSegment, headersMapper: ContainerFilterBlobsHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerFilterBlobsExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, comp5, where, restype2, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$4, }; const acquireLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 201: { headersMapper: ContainerAcquireLeaseHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerAcquireLeaseExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, restype2, comp10, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action, duration, proposedLeaseId, ], isXML: true, serializer: xmlSerializer$4, }; const releaseLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerReleaseLeaseHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerReleaseLeaseExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, restype2, comp10, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action1, leaseId1, ], isXML: true, serializer: xmlSerializer$4, }; const renewLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerRenewLeaseHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerRenewLeaseExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, restype2, comp10, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, leaseId1, action2, ], isXML: true, serializer: xmlSerializer$4, }; const breakLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 202: { headersMapper: 
ContainerBreakLeaseHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerBreakLeaseExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, restype2, comp10, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action3, breakPeriod, ], isXML: true, serializer: xmlSerializer$4, }; const changeLeaseOperationSpec$1 = { path: "/{containerName}", httpMethod: "PUT", responses: { 200: { headersMapper: ContainerChangeLeaseHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerChangeLeaseExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, restype2, comp10, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, leaseId1, action4, proposedLeaseId1, ], isXML: true, serializer: xmlSerializer$4, }; const listBlobFlatSegmentOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { bodyMapper: ListBlobsFlatSegmentResponse, headersMapper: ContainerListBlobFlatSegmentHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerListBlobFlatSegmentExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, comp2, prefix, marker, maxPageSize, restype2, include1, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$4, }; const listBlobHierarchySegmentOperationSpec = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { bodyMapper: ListBlobsHierarchySegmentResponse, headersMapper: ContainerListBlobHierarchySegmentHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerListBlobHierarchySegmentExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, comp2, prefix, marker, maxPageSize, restype2, include1, delimiter, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$4, }; const getAccountInfoOperationSpec$1 = { path: "/{containerName}", httpMethod: "GET", responses: { 200: { headersMapper: ContainerGetAccountInfoHeaders, }, default: { bodyMapper: StorageError, headersMapper: ContainerGetAccountInfoExceptionHeaders, }, }, queryParameters: [ comp, timeoutInSeconds, restype1, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$4, }; /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class containing Blob operations. */ class BlobImpl { /** * Initialize a new instance of the class Blob class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** * The Download operation reads or downloads a blob from the system, including its metadata and * properties. You can also call Download to read a snapshot. * @param options The options parameters. */ download(options) { return this.client.sendOperationRequest({ options }, downloadOperationSpec); } /** * The Get Properties operation returns all user-defined metadata, standard HTTP properties, and system * properties for the blob. It does not return the content of the blob. * @param options The options parameters. 
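 *
 * Call sketch (assumes `blob` is a BlobImpl whose client url resolves to one
 * blob; this issues a HEAD request, so everything comes back as headers):
 * @example
 * const props = await blob.getProperties();
 * // e.g. props.contentLength, props.etag, props.blobType, props.metadata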
*/ getProperties(options) { return this.client.sendOperationRequest({ options }, getPropertiesOperationSpec); } /** * If the storage account's soft delete feature is disabled then, when a blob is deleted, it is * permanently removed from the storage account. If the storage account's soft delete feature is * enabled, then, when a blob is deleted, it is marked for deletion and becomes inaccessible * immediately. However, the blob service retains the blob or snapshot for the number of days specified * by the DeleteRetentionPolicy section of [Storage service properties] * (Set-Blob-Service-Properties.md). After the specified number of days has passed, the blob's data is * permanently removed from the storage account. Note that you continue to be charged for the * soft-deleted blob's storage until it is permanently removed. Use the List Blobs API and specify the * "include=deleted" query parameter to discover which blobs and snapshots have been soft deleted. You * can then use the Undelete Blob API to restore a soft-deleted blob. All other operations on a * soft-deleted blob or snapshot causes the service to return an HTTP status code of 404 * (ResourceNotFound). * @param options The options parameters. */ delete(options) { return this.client.sendOperationRequest({ options }, deleteOperationSpec); } /** * Undelete a blob that was previously soft deleted * @param options The options parameters. */ undelete(options) { return this.client.sendOperationRequest({ options }, undeleteOperationSpec); } /** * Sets the time a blob will expire and be deleted. * @param expiryOptions Required. Indicates mode of the expiry time * @param options The options parameters. */ setExpiry(expiryOptions, options) { return this.client.sendOperationRequest({ expiryOptions, options }, setExpiryOperationSpec); } /** * The Set HTTP Headers operation sets system properties on the blob * @param options The options parameters. */ setHttpHeaders(options) { return this.client.sendOperationRequest({ options }, setHttpHeadersOperationSpec); } /** * The Set Immutability Policy operation sets the immutability policy on the blob * @param options The options parameters. */ setImmutabilityPolicy(options) { return this.client.sendOperationRequest({ options }, setImmutabilityPolicyOperationSpec); } /** * The Delete Immutability Policy operation deletes the immutability policy on the blob * @param options The options parameters. */ deleteImmutabilityPolicy(options) { return this.client.sendOperationRequest({ options }, deleteImmutabilityPolicyOperationSpec); } /** * The Set Legal Hold operation sets a legal hold on the blob. * @param legalHold Specified if a legal hold should be set on the blob. * @param options The options parameters. */ setLegalHold(legalHold, options) { return this.client.sendOperationRequest({ legalHold, options }, setLegalHoldOperationSpec); } /** * The Set Blob Metadata operation sets user-defined metadata for the specified blob as one or more * name-value pairs * @param options The options parameters. */ setMetadata(options) { return this.client.sendOperationRequest({ options }, setMetadataOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param options The options parameters. 
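 *
 * Lease lifecycle sketch (assumes `blob` is a BlobImpl; duration is 15-60
 * seconds, or -1 for an infinite lease):
 * @example
 * const lease = await blob.acquireLease({ duration: 15 });
 * try {
 *   // ... mutate the blob, passing lease.leaseId in leaseAccessConditions ...
 * } finally {
 *   await blob.releaseLease(lease.leaseId);
 * }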
*/ acquireLease(options) { return this.client.sendOperationRequest({ options }, acquireLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ releaseLease(leaseId, options) { return this.client.sendOperationRequest({ leaseId, options }, releaseLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param leaseId Specifies the current lease ID on the resource. * @param options The options parameters. */ renewLease(leaseId, options) { return this.client.sendOperationRequest({ leaseId, options }, renewLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param leaseId Specifies the current lease ID on the resource. * @param proposedLeaseId Proposed lease ID, in a GUID string format. The Blob service returns 400 * (Invalid request) if the proposed lease ID is not in the correct format. See Guid Constructor * (String) for a list of valid GUID string formats. * @param options The options parameters. */ changeLease(leaseId, proposedLeaseId, options) { return this.client.sendOperationRequest({ leaseId, proposedLeaseId, options }, changeLeaseOperationSpec); } /** * [Update] The Lease Blob operation establishes and manages a lock on a blob for write and delete * operations * @param options The options parameters. */ breakLease(options) { return this.client.sendOperationRequest({ options }, breakLeaseOperationSpec); } /** * The Create Snapshot operation creates a read-only snapshot of a blob * @param options The options parameters. */ createSnapshot(options) { return this.client.sendOperationRequest({ options }, createSnapshotOperationSpec); } /** * The Start Copy From URL operation copies a blob or an internet resource to a new blob. * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would * appear in a request URI. The source blob must either be public or must be authenticated via a shared * access signature. * @param options The options parameters. */ startCopyFromURL(copySource, options) { return this.client.sendOperationRequest({ copySource, options }, startCopyFromURLOperationSpec); } /** * The Copy From URL operation copies a blob or an internet resource to a new blob. It will not return * a response until the copy is complete. * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would * appear in a request URI. The source blob must either be public or must be authenticated via a shared * access signature. * @param options The options parameters. */ copyFromURL(copySource, options) { return this.client.sendOperationRequest({ copySource, options }, copyFromURLOperationSpec); } /** * The Abort Copy From URL operation aborts a pending Copy From URL operation, and leaves a destination * blob with zero length and full metadata. * @param copyId The copy identifier provided in the x-ms-copy-id header of the original Copy Blob * operation. * @param options The options parameters. 
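 *
 * Sketch pairing this with startCopyFromURL (assumes `blob` is a BlobImpl and
 * `sourceUri` is a readable source blob URL):
 * @example
 * const started = await blob.startCopyFromURL(sourceUri);
 * if (started.copyStatus === "pending") {
 *   // Give up on the copy; the destination is left zero-length with metadata.
 *   await blob.abortCopyFromURL(started.copyId);
 * }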
*/ abortCopyFromURL(copyId, options) { return this.client.sendOperationRequest({ copyId, options }, abortCopyFromURLOperationSpec); } /** * The Set Tier operation sets the tier on a blob. The operation is allowed on a page blob in a premium * storage account and on a block blob in a blob storage account (locally redundant storage only). A * premium page blob's tier determines the allowed size, IOPS, and bandwidth of the blob. A block * blob's tier determines Hot/Cool/Archive storage type. This operation does not update the blob's * ETag. * @param tier Indicates the tier to be set on the blob. * @param options The options parameters. */ setTier(tier, options) { return this.client.sendOperationRequest({ tier, options }, setTierOperationSpec); } /** * Returns the sku name and account kind * @param options The options parameters. */ getAccountInfo(options) { return this.client.sendOperationRequest({ options }, getAccountInfoOperationSpec); } /** * The Query operation enables users to select/project on blob data by providing simple query * expressions. * @param options The options parameters. */ query(options) { return this.client.sendOperationRequest({ options }, queryOperationSpec); } /** * The Get Tags operation enables users to get the tags associated with a blob. * @param options The options parameters. */ getTags(options) { return this.client.sendOperationRequest({ options }, getTagsOperationSpec); } /** * The Set Tags operation enables users to set tags on a blob. * @param options The options parameters. */ setTags(options) { return this.client.sendOperationRequest({ options }, setTagsOperationSpec); } } // Operation Specifications const xmlSerializer$3 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const downloadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse", }, headersMapper: BlobDownloadHeaders, }, 206: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse", }, headersMapper: BlobDownloadHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobDownloadExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, range, rangeGetContentMD5, rangeGetContentCRC64, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, ], isXML: true, serializer: xmlSerializer$3, }; const getPropertiesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "HEAD", responses: { 200: { headersMapper: BlobGetPropertiesHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobGetPropertiesExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, ], isXML: true, serializer: xmlSerializer$3, }; const deleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { 202: { headersMapper: BlobDeleteHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobDeleteExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, blobDeleteType, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, 
deleteSnapshots, ], isXML: true, serializer: xmlSerializer$3, }; const undeleteOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobUndeleteHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobUndeleteExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp8], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$3, }; const setExpiryOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetExpiryHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobSetExpiryExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp11], urlParameters: [url], headerParameters: [ version, requestId, accept1, expiryOptions, expiresOn, ], isXML: true, serializer: xmlSerializer$3, }; const setHttpHeadersOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetHttpHeadersHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobSetHttpHeadersExceptionHeaders, }, }, queryParameters: [comp, timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, ], isXML: true, serializer: xmlSerializer$3, }; const setImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetImmutabilityPolicyHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobSetImmutabilityPolicyExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, comp12, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifUnmodifiedSince, immutabilityPolicyExpiry, immutabilityPolicyMode, ], isXML: true, serializer: xmlSerializer$3, }; const deleteImmutabilityPolicyOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "DELETE", responses: { 200: { headersMapper: BlobDeleteImmutabilityPolicyHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobDeleteImmutabilityPolicyExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, comp12, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$3, }; const setLegalHoldOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetLegalHoldHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobSetLegalHoldExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, comp13, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, legalHold, ], isXML: true, serializer: xmlSerializer$3, }; const setMetadataOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetMetadataHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobSetMetadataExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp6], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, ], isXML: true, serializer: xmlSerializer$3, }; const acquireLeaseOperationSpec = { path: 
"/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlobAcquireLeaseHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobAcquireLeaseExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action, duration, proposedLeaseId, ifMatch, ifNoneMatch, ifTags, ], isXML: true, serializer: xmlSerializer$3, }; const releaseLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobReleaseLeaseHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobReleaseLeaseExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action1, leaseId1, ifMatch, ifNoneMatch, ifTags, ], isXML: true, serializer: xmlSerializer$3, }; const renewLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobRenewLeaseHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobRenewLeaseExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, leaseId1, action2, ifMatch, ifNoneMatch, ifTags, ], isXML: true, serializer: xmlSerializer$3, }; const changeLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobChangeLeaseHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobChangeLeaseExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, leaseId1, action4, proposedLeaseId1, ifMatch, ifNoneMatch, ifTags, ], isXML: true, serializer: xmlSerializer$3, }; const breakLeaseOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { headersMapper: BlobBreakLeaseHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobBreakLeaseExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp10], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, action3, breakPeriod, ifMatch, ifNoneMatch, ifTags, ], isXML: true, serializer: xmlSerializer$3, }; const createSnapshotOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlobCreateSnapshotHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobCreateSnapshotExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp14], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, ], isXML: true, serializer: xmlSerializer$3, }; const startCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { headersMapper: BlobStartCopyFromURLHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobStartCopyFromURLExceptionHeaders, }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, immutabilityPolicyExpiry, immutabilityPolicyMode, tier, rehydratePriority, 
sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceIfTags, copySource, blobTagsString, sealBlob, legalHold1, ], isXML: true, serializer: xmlSerializer$3, }; const copyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { headersMapper: BlobCopyFromURLHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobCopyFromURLExceptionHeaders, }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, immutabilityPolicyExpiry, immutabilityPolicyMode, encryptionScope, tier, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, copySource, blobTagsString, legalHold1, xMsRequiresSync, sourceContentMD5, copySourceAuthorization, copySourceTags, ], isXML: true, serializer: xmlSerializer$3, }; const abortCopyFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 204: { headersMapper: BlobAbortCopyFromURLHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobAbortCopyFromURLExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, comp15, copyId, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, copyActionAbortConstant, ], isXML: true, serializer: xmlSerializer$3, }; const setTierOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: BlobSetTierHeaders, }, 202: { headersMapper: BlobSetTierHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobSetTierExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, comp16, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifTags, rehydratePriority, tier1, ], isXML: true, serializer: xmlSerializer$3, }; const getAccountInfoOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { headersMapper: BlobGetAccountInfoHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobGetAccountInfoExceptionHeaders, }, }, queryParameters: [ comp, timeoutInSeconds, restype1, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, ], isXML: true, serializer: xmlSerializer$3, }; const queryOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "POST", responses: { 200: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse", }, headersMapper: BlobQueryHeaders, }, 206: { bodyMapper: { type: { name: "Stream" }, serializedName: "parsedResponse", }, headersMapper: BlobQueryHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobQueryExceptionHeaders, }, }, requestBody: queryRequest, queryParameters: [ timeoutInSeconds, snapshot, comp17, ], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$3, }; const getTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { bodyMapper: BlobTags, headersMapper: BlobGetTagsHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobGetTagsExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, snapshot, versionId, comp18, ], urlParameters: [url], headerParameters: [ version, requestId, 
accept1, leaseId, ifTags, ], isXML: true, serializer: xmlSerializer$3, }; const setTagsOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 204: { headersMapper: BlobSetTagsHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlobSetTagsExceptionHeaders, }, }, requestBody: tags, queryParameters: [ timeoutInSeconds, versionId, comp18, ], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId, leaseId, ifTags, transactionalContentMD5, transactionalContentCrc64, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer$3, }; /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class containing PageBlob operations. */ class PageBlobImpl { /** * Initialize a new instance of the class PageBlob class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** * The Create operation creates a new page blob. * @param contentLength The length of the request. * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. */ create(contentLength, blobContentLength, options) { return this.client.sendOperationRequest({ contentLength, blobContentLength, options }, createOperationSpec$1); } /** * The Upload Pages operation writes a range of pages to a page blob * @param contentLength The length of the request. * @param body Initial data * @param options The options parameters. */ uploadPages(contentLength, body, options) { return this.client.sendOperationRequest({ contentLength, body, options }, uploadPagesOperationSpec); } /** * The Clear Pages operation clears a set of pages from a page blob * @param contentLength The length of the request. * @param options The options parameters. */ clearPages(contentLength, options) { return this.client.sendOperationRequest({ contentLength, options }, clearPagesOperationSpec); } /** * The Upload Pages operation writes a range of pages to a page blob where the contents are read from a * URL * @param sourceUrl Specify a URL to the copy source. * @param sourceRange Bytes of source data in the specified range. The length of this range should * match the ContentLength header and x-ms-range/Range destination range header. * @param contentLength The length of the request. * @param range The range of bytes to which the source range would be written. The range should be 512 * aligned and range-end is required. * @param options The options parameters. */ uploadPagesFromURL(sourceUrl, sourceRange, contentLength, range, options) { return this.client.sendOperationRequest({ sourceUrl, sourceRange, contentLength, range, options }, uploadPagesFromURLOperationSpec); } /** * The Get Page Ranges operation returns the list of valid page ranges for a page blob or snapshot of a * page blob * @param options The options parameters. */ getPageRanges(options) { return this.client.sendOperationRequest({ options }, getPageRangesOperationSpec); } /** * The Get Page Ranges Diff operation returns the list of valid page ranges for a page blob that were * changed between target blob and previous snapshot. * @param options The options parameters. 
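 *
 * Call sketch (assumes `pageBlob` is a PageBlobImpl and `snapshotTime` is the
 * snapshot value returned by an earlier createSnapshot call):
 * @example
 * const diff = await pageBlob.getPageRangesDiff({ prevsnapshot: snapshotTime });
 * // diff.pageRange: 512-byte-aligned ranges written since the snapshot,
 * // diff.clearRange: ranges cleared since the snapshot.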
*/ getPageRangesDiff(options) { return this.client.sendOperationRequest({ options }, getPageRangesDiffOperationSpec); } /** * Resize the Blob * @param blobContentLength This header specifies the maximum size for the page blob, up to 1 TB. The * page blob size must be aligned to a 512-byte boundary. * @param options The options parameters. */ resize(blobContentLength, options) { return this.client.sendOperationRequest({ blobContentLength, options }, resizeOperationSpec); } /** * Update the sequence number of the blob * @param sequenceNumberAction Required if the x-ms-blob-sequence-number header is set for the request. * This property applies to page blobs only. This property indicates how the service should modify the * blob's sequence number * @param options The options parameters. */ updateSequenceNumber(sequenceNumberAction, options) { return this.client.sendOperationRequest({ sequenceNumberAction, options }, updateSequenceNumberOperationSpec); } /** * The Copy Incremental operation copies a snapshot of the source page blob to a destination page blob. * The snapshot is copied such that only the differential changes between the previously copied * snapshot are transferred to the destination. The copied snapshots are complete copies of the * original snapshot and can be read or copied from as usual. This API is supported since REST version * 2016-05-31. * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would * appear in a request URI. The source blob must either be public or must be authenticated via a shared * access signature. * @param options The options parameters. */ copyIncremental(copySource, options) { return this.client.sendOperationRequest({ copySource, options }, copyIncrementalOperationSpec); } } // Operation Specifications const xmlSerializer$2 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const createOperationSpec$1 = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: PageBlobCreateHeaders, }, default: { bodyMapper: StorageError, headersMapper: PageBlobCreateExceptionHeaders, }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, immutabilityPolicyExpiry, immutabilityPolicyMode, encryptionScope, tier, blobTagsString, legalHold1, blobType, blobContentLength, blobSequenceNumber, ], isXML: true, serializer: xmlSerializer$2, }; const uploadPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: PageBlobUploadPagesHeaders, }, default: { bodyMapper: StorageError, headersMapper: PageBlobUploadPagesExceptionHeaders, }, }, requestBody: body1, queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ version, requestId, contentLength, leaseId, ifModifiedSince, ifUnmodifiedSince, range, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, transactionalContentMD5, transactionalContentCrc64, contentType1, accept2, pageWrite, ifSequenceNumberLessThanOrEqualTo, ifSequenceNumberLessThan, ifSequenceNumberEqualTo, ], isXML: true, 
contentType: "application/xml; charset=utf-8", mediaType: "binary", serializer: xmlSerializer$2, }; const clearPagesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: PageBlobClearPagesHeaders, }, default: { bodyMapper: StorageError, headersMapper: PageBlobClearPagesExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, leaseId, ifModifiedSince, ifUnmodifiedSince, range, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, ifSequenceNumberLessThanOrEqualTo, ifSequenceNumberLessThan, ifSequenceNumberEqualTo, pageWrite1, ], isXML: true, serializer: xmlSerializer$2, }; const uploadPagesFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: PageBlobUploadPagesFromURLHeaders, }, default: { bodyMapper: StorageError, headersMapper: PageBlobUploadPagesFromURLExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp19], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceContentMD5, copySourceAuthorization, pageWrite, ifSequenceNumberLessThanOrEqualTo, ifSequenceNumberLessThan, ifSequenceNumberEqualTo, sourceUrl, sourceRange, sourceContentCrc64, range1, ], isXML: true, serializer: xmlSerializer$2, }; const getPageRangesOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { bodyMapper: PageList, headersMapper: PageBlobGetPageRangesHeaders, }, default: { bodyMapper: StorageError, headersMapper: PageBlobGetPageRangesExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, snapshot, comp20, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, range, ifMatch, ifNoneMatch, ifTags, ], isXML: true, serializer: xmlSerializer$2, }; const getPageRangesDiffOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { bodyMapper: PageList, headersMapper: PageBlobGetPageRangesDiffHeaders, }, default: { bodyMapper: StorageError, headersMapper: PageBlobGetPageRangesDiffExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, marker, maxPageSize, snapshot, comp20, prevsnapshot, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, range, ifMatch, ifNoneMatch, ifTags, prevSnapshotUrl, ], isXML: true, serializer: xmlSerializer$2, }; const resizeOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: PageBlobResizeHeaders, }, default: { bodyMapper: StorageError, headersMapper: PageBlobResizeExceptionHeaders, }, }, queryParameters: [comp, timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, blobContentLength, ], isXML: true, serializer: xmlSerializer$2, }; const updateSequenceNumberOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: PageBlobUpdateSequenceNumberHeaders, }, default: { 
bodyMapper: StorageError, headersMapper: PageBlobUpdateSequenceNumberExceptionHeaders, }, }, queryParameters: [comp, timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, blobSequenceNumber, sequenceNumberAction, ], isXML: true, serializer: xmlSerializer$2, }; const copyIncrementalOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 202: { headersMapper: PageBlobCopyIncrementalHeaders, }, default: { bodyMapper: StorageError, headersMapper: PageBlobCopyIncrementalExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp21], urlParameters: [url], headerParameters: [ version, requestId, accept1, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, ifTags, copySource, ], isXML: true, serializer: xmlSerializer$2, }; /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class containing AppendBlob operations. */ class AppendBlobImpl { /** * Initialize a new instance of the class AppendBlob class. * @param client Reference to the service client */ constructor(client) { this.client = client; } /** * The Create Append Blob operation creates a new append blob. * @param contentLength The length of the request. * @param options The options parameters. */ create(contentLength, options) { return this.client.sendOperationRequest({ contentLength, options }, createOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob. The * Append Block operation is permitted only if the blob was created with x-ms-blob-type set to * AppendBlob. Append Block is supported only on version 2015-02-21 version or later. * @param contentLength The length of the request. * @param body Initial data * @param options The options parameters. */ appendBlock(contentLength, body, options) { return this.client.sendOperationRequest({ contentLength, body, options }, appendBlockOperationSpec); } /** * The Append Block operation commits a new block of data to the end of an existing append blob where * the contents are read from a source url. The Append Block operation is permitted only if the blob * was created with x-ms-blob-type set to AppendBlob. Append Block is supported only on version * 2015-02-21 version or later. * @param sourceUrl Specify a URL to the copy source. * @param contentLength The length of the request. * @param options The options parameters. */ appendBlockFromUrl(sourceUrl, contentLength, options) { return this.client.sendOperationRequest({ sourceUrl, contentLength, options }, appendBlockFromUrlOperationSpec); } /** * The Seal operation seals the Append Blob to make it read-only. Seal is supported only on version * 2019-12-12 version or later. * @param options The options parameters. 
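 *
 * End-to-end sketch (assumes `appendBlob` is an AppendBlobImpl and `data` is a
 * Node.js Buffer):
 * @example
 * await appendBlob.create(0);                       // empty append blob
 * await appendBlob.appendBlock(data.length, data);  // append one block
 * await appendBlob.seal();                          // make it read-only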
*/ seal(options) { return this.client.sendOperationRequest({ options }, sealOperationSpec); } } // Operation Specifications const xmlSerializer$1 = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const createOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: AppendBlobCreateHeaders, }, default: { bodyMapper: StorageError, headersMapper: AppendBlobCreateExceptionHeaders, }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, immutabilityPolicyExpiry, immutabilityPolicyMode, encryptionScope, blobTagsString, legalHold1, blobType1, ], isXML: true, serializer: xmlSerializer$1, }; const appendBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: AppendBlobAppendBlockHeaders, }, default: { bodyMapper: StorageError, headersMapper: AppendBlobAppendBlockExceptionHeaders, }, }, requestBody: body1, queryParameters: [timeoutInSeconds, comp22], urlParameters: [url], headerParameters: [ version, requestId, contentLength, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, transactionalContentMD5, transactionalContentCrc64, contentType1, accept2, maxSize, appendPosition, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "binary", serializer: xmlSerializer$1, }; const appendBlockFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: AppendBlobAppendBlockFromUrlHeaders, }, default: { bodyMapper: StorageError, headersMapper: AppendBlobAppendBlockFromUrlExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp22], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, encryptionScope, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceContentMD5, copySourceAuthorization, transactionalContentMD5, sourceUrl, sourceContentCrc64, maxSize, appendPosition, sourceRange1, ], isXML: true, serializer: xmlSerializer$1, }; const sealOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 200: { headersMapper: AppendBlobSealHeaders, }, default: { bodyMapper: StorageError, headersMapper: AppendBlobSealExceptionHeaders, }, }, queryParameters: [timeoutInSeconds, comp23], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifModifiedSince, ifUnmodifiedSince, ifMatch, ifNoneMatch, appendPosition, ], isXML: true, serializer: xmlSerializer$1, }; /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ /** Class containing BlockBlob operations. */ class BlockBlobImpl { /** * Initialize a new instance of the class BlockBlob class. 
* @param client Reference to the service client */ constructor(client) { this.client = client; } /** * The Upload Block Blob operation updates the content of an existing block blob. Updating an existing * block blob overwrites any existing metadata on the blob. Partial updates are not supported with Put * Blob; the content of the existing blob is overwritten with the content of the new blob. To perform a * partial update of the content of a block blob, use the Put Block List operation. * @param contentLength The length of the request. * @param body Initial data * @param options The options parameters. */ upload(contentLength, body, options) { return this.client.sendOperationRequest({ contentLength, body, options }, uploadOperationSpec); } /** * The Put Blob from URL operation creates a new Block Blob where the contents of the blob are read * from a given URL. This API is supported beginning with the 2020-04-08 version. Partial updates are * not supported with Put Blob from URL; the content of an existing blob is overwritten with the * content of the new blob. To perform partial updates to a block blob’s contents using a source URL, * use the Put Block from URL API in conjunction with Put Block List. * @param contentLength The length of the request. * @param copySource Specifies the name of the source page blob snapshot. This value is a URL of up to * 2 KB in length that specifies a page blob snapshot. The value should be URL-encoded as it would * appear in a request URI. The source blob must either be public or must be authenticated via a shared * access signature. * @param options The options parameters. */ putBlobFromUrl(contentLength, copySource, options) { return this.client.sendOperationRequest({ contentLength, copySource, options }, putBlobFromUrlOperationSpec); } /** * The Stage Block operation creates a new block to be committed as part of a blob * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified * for the blockid parameter must be the same size for each block. * @param contentLength The length of the request. * @param body Initial data * @param options The options parameters. */ stageBlock(blockId, contentLength, body, options) { return this.client.sendOperationRequest({ blockId, contentLength, body, options }, stageBlockOperationSpec); } /** * The Stage Block operation creates a new block to be committed as part of a blob where the contents * are read from a URL. * @param blockId A valid Base64 string value that identifies the block. Prior to encoding, the string * must be less than or equal to 64 bytes in size. For a given blob, the length of the value specified * for the blockid parameter must be the same size for each block. * @param contentLength The length of the request. * @param sourceUrl Specify a URL to the copy source. * @param options The options parameters. */ stageBlockFromURL(blockId, contentLength, sourceUrl, options) { return this.client.sendOperationRequest({ blockId, contentLength, sourceUrl, options }, stageBlockFromURLOperationSpec); } /** * The Commit Block List operation writes a blob by specifying the list of block IDs that make up the * blob. In order to be written as part of a blob, a block must have been successfully written to the * server in a prior Put Block operation. 
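* (Editor's illustrative sketch with placeholder ids, not from the original generated docs:
* first stage each block with `stageBlock(blockId, contentLength, body)` using base64 block ids
* of equal encoded length, then assemble the blob with
* `commitBlockList({ latest: [blockId1, blockId2] })`; the `{ latest: [...] }` shape of the
* blocks parameter is an assumption about this protocol layer.)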
You can call Put Block List to update a blob by uploading * only those blocks that have changed, then committing the new and existing blocks together. You can * do this by specifying whether to commit a block from the committed block list or from the * uncommitted block list, or to commit the most recently uploaded version of the block, whichever list * it may belong to. * @param blocks Blob Blocks. * @param options The options parameters. */ commitBlockList(blocks, options) { return this.client.sendOperationRequest({ blocks, options }, commitBlockListOperationSpec); } /** * The Get Block List operation retrieves the list of blocks that have been uploaded as part of a block * blob * @param listType Specifies whether to return the list of committed blocks, the list of uncommitted * blocks, or both lists together. * @param options The options parameters. */ getBlockList(listType, options) { return this.client.sendOperationRequest({ listType, options }, getBlockListOperationSpec); } } // Operation Specifications const xmlSerializer = coreClient__namespace.createSerializer(Mappers, /* isXml */ true); const uploadOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlockBlobUploadHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlockBlobUploadExceptionHeaders, }, }, requestBody: body1, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, contentLength, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, immutabilityPolicyExpiry, immutabilityPolicyMode, encryptionScope, tier, blobTagsString, legalHold1, transactionalContentMD5, transactionalContentCrc64, contentType1, accept2, blobType2, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "binary", serializer: xmlSerializer, }; const putBlobFromUrlOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlockBlobPutBlobFromUrlHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlockBlobPutBlobFromUrlExceptionHeaders, }, }, queryParameters: [timeoutInSeconds], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, encryptionScope, tier, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceIfTags, copySource, blobTagsString, sourceContentMD5, copySourceAuthorization, copySourceTags, transactionalContentMD5, blobType2, copySourceBlobProperties, ], isXML: true, serializer: xmlSerializer, }; const stageBlockOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlockBlobStageBlockHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlockBlobStageBlockExceptionHeaders, }, }, requestBody: body1, queryParameters: [ timeoutInSeconds, comp24, blockId, ], urlParameters: [url], headerParameters: [ version, requestId, contentLength, leaseId, encryptionKey, encryptionKeySha256, encryptionAlgorithm, encryptionScope, transactionalContentMD5, transactionalContentCrc64, contentType1, 
accept2, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "binary", serializer: xmlSerializer, }; const stageBlockFromURLOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlockBlobStageBlockFromURLHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlockBlobStageBlockFromURLExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, comp24, blockId, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, contentLength, leaseId, encryptionKey, encryptionKeySha256, encryptionAlgorithm, encryptionScope, sourceIfModifiedSince, sourceIfUnmodifiedSince, sourceIfMatch, sourceIfNoneMatch, sourceContentMD5, copySourceAuthorization, sourceUrl, sourceContentCrc64, sourceRange1, ], isXML: true, serializer: xmlSerializer, }; const commitBlockListOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "PUT", responses: { 201: { headersMapper: BlockBlobCommitBlockListHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlockBlobCommitBlockListExceptionHeaders, }, }, requestBody: blocks, queryParameters: [timeoutInSeconds, comp25], urlParameters: [url], headerParameters: [ contentType, accept, version, requestId, metadata, leaseId, ifModifiedSince, ifUnmodifiedSince, encryptionKey, encryptionKeySha256, encryptionAlgorithm, ifMatch, ifNoneMatch, ifTags, blobCacheControl, blobContentType, blobContentMD5, blobContentEncoding, blobContentLanguage, blobContentDisposition, immutabilityPolicyExpiry, immutabilityPolicyMode, encryptionScope, tier, blobTagsString, legalHold1, transactionalContentMD5, transactionalContentCrc64, ], isXML: true, contentType: "application/xml; charset=utf-8", mediaType: "xml", serializer: xmlSerializer, }; const getBlockListOperationSpec = { path: "/{containerName}/{blob}", httpMethod: "GET", responses: { 200: { bodyMapper: BlockList, headersMapper: BlockBlobGetBlockListHeaders, }, default: { bodyMapper: StorageError, headersMapper: BlockBlobGetBlockListExceptionHeaders, }, }, queryParameters: [ timeoutInSeconds, snapshot, comp25, listType, ], urlParameters: [url], headerParameters: [ version, requestId, accept1, leaseId, ifTags, ], isXML: true, serializer: xmlSerializer, }; /* * Copyright (c) Microsoft Corporation. * Licensed under the MIT License. * * Code generated by Microsoft (R) AutoRest Code Generator. * Changes may cause incorrect behavior and will be lost if the code is regenerated. */ let StorageClient$1 = class StorageClient extends coreHttpCompat__namespace.ExtendedServiceClient { /** * Initializes a new instance of the StorageClient class. * @param url The URL of the service account, container, or blob that is the target of the desired * operation. * @param options The parameter options */ constructor(url, options) { var _a, _b; if (url === undefined) { throw new Error("'url' cannot be null"); } // Initializing default values for options if (!options) { options = {}; } const defaults = { requestContentType: "application/json; charset=utf-8", }; const packageDetails = `azsdk-js-azure-storage-blob/12.26.0`; const userAgentPrefix = options.userAgentOptions && options.userAgentOptions.userAgentPrefix ? `${options.userAgentOptions.userAgentPrefix} ${packageDetails}` : `${packageDetails}`; const optionsWithDefaults = Object.assign(Object.assign(Object.assign({}, defaults), options), { userAgentOptions: { userAgentPrefix, }, endpoint: (_b = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri) !== null && _b !== void 0 ? 
_b : "{url}" }); super(optionsWithDefaults); // Parameter assignments this.url = url; // Assigning values to Constant parameters this.version = options.version || "2025-01-05"; this.service = new ServiceImpl(this); this.container = new ContainerImpl(this); this.blob = new BlobImpl(this); this.pageBlob = new PageBlobImpl(this); this.appendBlob = new AppendBlobImpl(this); this.blockBlob = new BlockBlobImpl(this); } }; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * @internal */ class StorageContextClient extends StorageClient$1 { async sendOperationRequest(operationArguments, operationSpec) { const operationSpecToSend = Object.assign({}, operationSpec); if (operationSpecToSend.path === "/{containerName}" || operationSpecToSend.path === "/{containerName}/{blob}") { operationSpecToSend.path = ""; } return super.sendOperationRequest(operationArguments, operationSpecToSend); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * A StorageClient represents a based URL class for {@link BlobServiceClient}, {@link ContainerClient} * and etc. */ class StorageClient { /** * Creates an instance of StorageClient. * @param url - url to resource * @param pipeline - request policy pipeline. */ constructor(url, pipeline) { // URL should be encoded and only once, protocol layer shouldn't encode URL again this.url = escapeURLPath(url); this.accountName = getAccountNameFromUrl(url); this.pipeline = pipeline; this.storageClientContext = new StorageContextClient(this.url, getCoreClientOptions(pipeline)); this.isHttps = iEqual(getURLScheme(this.url) || "", "https"); this.credential = getCredentialFromPipeline(pipeline); // Override protocol layer's default content-type const storageClientContext = this.storageClientContext; storageClientContext.requestContentType = undefined; } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Creates a span using the global tracer. * @internal */ const tracingClient = coreTracing.createTracingClient({ packageName: "@azure/storage-blob", packageVersion: SDK_VERSION, namespace: "Microsoft.Storage", }); // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a blob. Setting * a value to true means that any SAS which uses these permissions will grant permissions for that operation. Once all * the values are set, this should be serialized with toString and set as the permissions field on a * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ class BlobSASPermissions { constructor() { /** * Specifies Read access granted. */ this.read = false; /** * Specifies Add access granted. */ this.add = false; /** * Specifies Create access granted. */ this.create = false; /** * Specifies Write access granted. */ this.write = false; /** * Specifies Delete access granted. */ this.delete = false; /** * Specifies Delete version access granted. */ this.deleteVersion = false; /** * Specfies Tag access granted. */ this.tag = false; /** * Specifies Move access granted. */ this.move = false; /** * Specifies Execute access granted. */ this.execute = false; /** * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; /** * Specifies that Permanent Delete is permitted. 
*/ this.permanentDelete = false; } /** * Creates a {@link BlobSASPermissions} from the specified permissions string. This method will throw an * Error if it encounters a character that does not correspond to a valid permission. * * @param permissions - */ static parse(permissions) { const blobSASPermissions = new BlobSASPermissions(); for (const char of permissions) { switch (char) { case "r": blobSASPermissions.read = true; break; case "a": blobSASPermissions.add = true; break; case "c": blobSASPermissions.create = true; break; case "w": blobSASPermissions.write = true; break; case "d": blobSASPermissions.delete = true; break; case "x": blobSASPermissions.deleteVersion = true; break; case "t": blobSASPermissions.tag = true; break; case "m": blobSASPermissions.move = true; break; case "e": blobSASPermissions.execute = true; break; case "i": blobSASPermissions.setImmutabilityPolicy = true; break; case "y": blobSASPermissions.permanentDelete = true; break; default: throw new RangeError(`Invalid permission: ${char}`); } } return blobSASPermissions; } /** * Creates a {@link BlobSASPermissions} from a raw object which contains same keys as it * and boolean values for them. * * @param permissionLike - */ static from(permissionLike) { const blobSASPermissions = new BlobSASPermissions(); if (permissionLike.read) { blobSASPermissions.read = true; } if (permissionLike.add) { blobSASPermissions.add = true; } if (permissionLike.create) { blobSASPermissions.create = true; } if (permissionLike.write) { blobSASPermissions.write = true; } if (permissionLike.delete) { blobSASPermissions.delete = true; } if (permissionLike.deleteVersion) { blobSASPermissions.deleteVersion = true; } if (permissionLike.tag) { blobSASPermissions.tag = true; } if (permissionLike.move) { blobSASPermissions.move = true; } if (permissionLike.execute) { blobSASPermissions.execute = true; } if (permissionLike.setImmutabilityPolicy) { blobSASPermissions.setImmutabilityPolicy = true; } if (permissionLike.permanentDelete) { blobSASPermissions.permanentDelete = true; } return blobSASPermissions; } /** * Converts the given permissions to a string. Using this method will guarantee the permissions are in an * order accepted by the service. * * @returns A string which represents the BlobSASPermissions */ toString() { const permissions = []; if (this.read) { permissions.push("r"); } if (this.add) { permissions.push("a"); } if (this.create) { permissions.push("c"); } if (this.write) { permissions.push("w"); } if (this.delete) { permissions.push("d"); } if (this.deleteVersion) { permissions.push("x"); } if (this.tag) { permissions.push("t"); } if (this.move) { permissions.push("m"); } if (this.execute) { permissions.push("e"); } if (this.setImmutabilityPolicy) { permissions.push("i"); } if (this.permanentDelete) { permissions.push("y"); } return permissions.join(""); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * This is a helper class to construct a string representing the permissions granted by a ServiceSAS to a container. * Setting a value to true means that any SAS which uses these permissions will grant permissions for that operation. * Once all the values are set, this should be serialized with toString and set as the permissions field on a * {@link BlobSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. 
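* @example
* // Editor's illustrative addition, not part of the original generated docs:
* const perms = ContainerSASPermissions.parse("lrcwda");
* perms.toString(); // "racwdl" — re-serialized in the order the service expects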
*/ class ContainerSASPermissions { constructor() { /** * Specifies Read access granted. */ this.read = false; /** * Specifies Add access granted. */ this.add = false; /** * Specifies Create access granted. */ this.create = false; /** * Specifies Write access granted. */ this.write = false; /** * Specifies Delete access granted. */ this.delete = false; /** * Specifies Delete version access granted. */ this.deleteVersion = false; /** * Specifies List access granted. */ this.list = false; /** * Specfies Tag access granted. */ this.tag = false; /** * Specifies Move access granted. */ this.move = false; /** * Specifies Execute access granted. */ this.execute = false; /** * Specifies SetImmutabilityPolicy access granted. */ this.setImmutabilityPolicy = false; /** * Specifies that Permanent Delete is permitted. */ this.permanentDelete = false; /** * Specifies that Filter Blobs by Tags is permitted. */ this.filterByTags = false; } /** * Creates an {@link ContainerSASPermissions} from the specified permissions string. This method will throw an * Error if it encounters a character that does not correspond to a valid permission. * * @param permissions - */ static parse(permissions) { const containerSASPermissions = new ContainerSASPermissions(); for (const char of permissions) { switch (char) { case "r": containerSASPermissions.read = true; break; case "a": containerSASPermissions.add = true; break; case "c": containerSASPermissions.create = true; break; case "w": containerSASPermissions.write = true; break; case "d": containerSASPermissions.delete = true; break; case "l": containerSASPermissions.list = true; break; case "t": containerSASPermissions.tag = true; break; case "x": containerSASPermissions.deleteVersion = true; break; case "m": containerSASPermissions.move = true; break; case "e": containerSASPermissions.execute = true; break; case "i": containerSASPermissions.setImmutabilityPolicy = true; break; case "y": containerSASPermissions.permanentDelete = true; break; case "f": containerSASPermissions.filterByTags = true; break; default: throw new RangeError(`Invalid permission ${char}`); } } return containerSASPermissions; } /** * Creates a {@link ContainerSASPermissions} from a raw object which contains same keys as it * and boolean values for them. * * @param permissionLike - */ static from(permissionLike) { const containerSASPermissions = new ContainerSASPermissions(); if (permissionLike.read) { containerSASPermissions.read = true; } if (permissionLike.add) { containerSASPermissions.add = true; } if (permissionLike.create) { containerSASPermissions.create = true; } if (permissionLike.write) { containerSASPermissions.write = true; } if (permissionLike.delete) { containerSASPermissions.delete = true; } if (permissionLike.list) { containerSASPermissions.list = true; } if (permissionLike.deleteVersion) { containerSASPermissions.deleteVersion = true; } if (permissionLike.tag) { containerSASPermissions.tag = true; } if (permissionLike.move) { containerSASPermissions.move = true; } if (permissionLike.execute) { containerSASPermissions.execute = true; } if (permissionLike.setImmutabilityPolicy) { containerSASPermissions.setImmutabilityPolicy = true; } if (permissionLike.permanentDelete) { containerSASPermissions.permanentDelete = true; } if (permissionLike.filterByTags) { containerSASPermissions.filterByTags = true; } return containerSASPermissions; } /** * Converts the given permissions to a string. Using this method will guarantee the permissions are in an * order accepted by the service. 
* * The order of the characters should be as specified here to ensure correctness. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * */ toString() { const permissions = []; if (this.read) { permissions.push("r"); } if (this.add) { permissions.push("a"); } if (this.create) { permissions.push("c"); } if (this.write) { permissions.push("w"); } if (this.delete) { permissions.push("d"); } if (this.deleteVersion) { permissions.push("x"); } if (this.list) { permissions.push("l"); } if (this.tag) { permissions.push("t"); } if (this.move) { permissions.push("m"); } if (this.execute) { permissions.push("e"); } if (this.setImmutabilityPolicy) { permissions.push("i"); } if (this.permanentDelete) { permissions.push("y"); } if (this.filterByTags) { permissions.push("f"); } return permissions.join(""); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * UserDelegationKeyCredential is only used for generation of user delegation SAS. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas */ class UserDelegationKeyCredential { /** * Creates an instance of UserDelegationKeyCredential. * @param accountName - * @param userDelegationKey - */ constructor(accountName, userDelegationKey) { this.accountName = accountName; this.userDelegationKey = userDelegationKey; this.key = Buffer.from(userDelegationKey.value, "base64"); } /** * Generates a hash signature for an HTTP request or for a SAS. * * @param stringToSign - */ computeHMACSHA256(stringToSign) { // console.log(`stringToSign: ${JSON.stringify(stringToSign)}`); return crypto.createHmac("sha256", this.key).update(stringToSign, "utf8").digest("base64"); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Generate SasIPRange format string. For example: * * "8.8.8.8" or "1.1.1.1-255.255.255.255" * * @param ipRange - */ function ipRangeToString(ipRange) { return ipRange.end ? `${ipRange.start}-${ipRange.end}` : ipRange.start; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Protocols for generated SAS. */ exports.SASProtocol = void 0; (function (SASProtocol) { /** * Protocol that allows HTTPS only */ SASProtocol["Https"] = "https"; /** * Protocol that allows both HTTPS and HTTP */ SASProtocol["HttpsAndHttp"] = "https,http"; })(exports.SASProtocol || (exports.SASProtocol = {})); /** * Represents the components that make up an Azure Storage SAS' query parameters. This type is not constructed directly * by the user; it is only generated by the {@link AccountSASSignatureValues} and {@link BlobSASSignatureValues} * types. Once generated, it can be encoded into a {@link String} and appended to a URL directly (though caution should * be taken here in case there are existing query parameters, which might affect the appropriate means of appending * these query parameters). * * NOTE: Instances of this class are immutable. */ class SASQueryParameters { /** * Optional. IP range allowed for this SAS. 
* * @readonly */ get ipRange() { if (this.ipRangeInner) { return { end: this.ipRangeInner.end, start: this.ipRangeInner.start, }; } return undefined; } constructor(version, signature, permissionsOrOptions, services, resourceTypes, protocol, startsOn, expiresOn, ipRange, identifier, resource, cacheControl, contentDisposition, contentEncoding, contentLanguage, contentType, userDelegationKey, preauthorizedAgentObjectId, correlationId, encryptionScope) { this.version = version; this.signature = signature; if (permissionsOrOptions !== undefined && typeof permissionsOrOptions !== "string") { // SASQueryParametersOptions this.permissions = permissionsOrOptions.permissions; this.services = permissionsOrOptions.services; this.resourceTypes = permissionsOrOptions.resourceTypes; this.protocol = permissionsOrOptions.protocol; this.startsOn = permissionsOrOptions.startsOn; this.expiresOn = permissionsOrOptions.expiresOn; this.ipRangeInner = permissionsOrOptions.ipRange; this.identifier = permissionsOrOptions.identifier; this.encryptionScope = permissionsOrOptions.encryptionScope; this.resource = permissionsOrOptions.resource; this.cacheControl = permissionsOrOptions.cacheControl; this.contentDisposition = permissionsOrOptions.contentDisposition; this.contentEncoding = permissionsOrOptions.contentEncoding; this.contentLanguage = permissionsOrOptions.contentLanguage; this.contentType = permissionsOrOptions.contentType; if (permissionsOrOptions.userDelegationKey) { this.signedOid = permissionsOrOptions.userDelegationKey.signedObjectId; this.signedTenantId = permissionsOrOptions.userDelegationKey.signedTenantId; this.signedStartsOn = permissionsOrOptions.userDelegationKey.signedStartsOn; this.signedExpiresOn = permissionsOrOptions.userDelegationKey.signedExpiresOn; this.signedService = permissionsOrOptions.userDelegationKey.signedService; this.signedVersion = permissionsOrOptions.userDelegationKey.signedVersion; this.preauthorizedAgentObjectId = permissionsOrOptions.preauthorizedAgentObjectId; this.correlationId = permissionsOrOptions.correlationId; } } else { this.services = services; this.resourceTypes = resourceTypes; this.expiresOn = expiresOn; this.permissions = permissionsOrOptions; this.protocol = protocol; this.startsOn = startsOn; this.ipRangeInner = ipRange; this.encryptionScope = encryptionScope; this.identifier = identifier; this.resource = resource; this.cacheControl = cacheControl; this.contentDisposition = contentDisposition; this.contentEncoding = contentEncoding; this.contentLanguage = contentLanguage; this.contentType = contentType; if (userDelegationKey) { this.signedOid = userDelegationKey.signedObjectId; this.signedTenantId = userDelegationKey.signedTenantId; this.signedStartsOn = userDelegationKey.signedStartsOn; this.signedExpiresOn = userDelegationKey.signedExpiresOn; this.signedService = userDelegationKey.signedService; this.signedVersion = userDelegationKey.signedVersion; this.preauthorizedAgentObjectId = preauthorizedAgentObjectId; this.correlationId = correlationId; } } } /** * Encodes all SAS query parameters into a string that can be appended to a URL. 
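* (Editor's illustrative note: the result is a URL-ready query string such as
* `sv=2025-01-05&se=2024-01-01T00%3A00%3A00Z&sr=b&sp=r&sig=...` — each value is URL-encoded and
* the pairs are joined with `&` in the fixed order of the `params` array below; the concrete
* values shown here are placeholders.)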
* */ toString() { const params = [ "sv", "ss", "srt", "spr", "st", "se", "sip", "si", "ses", "skoid", // Signed object ID "sktid", // Signed tenant ID "skt", // Signed key start time "ske", // Signed key expiry time "sks", // Signed key service "skv", // Signed key version "sr", "sp", "sig", "rscc", "rscd", "rsce", "rscl", "rsct", "saoid", "scid", ]; const queries = []; for (const param of params) { switch (param) { case "sv": this.tryAppendQueryParameter(queries, param, this.version); break; case "ss": this.tryAppendQueryParameter(queries, param, this.services); break; case "srt": this.tryAppendQueryParameter(queries, param, this.resourceTypes); break; case "spr": this.tryAppendQueryParameter(queries, param, this.protocol); break; case "st": this.tryAppendQueryParameter(queries, param, this.startsOn ? truncatedISO8061Date(this.startsOn, false) : undefined); break; case "se": this.tryAppendQueryParameter(queries, param, this.expiresOn ? truncatedISO8061Date(this.expiresOn, false) : undefined); break; case "sip": this.tryAppendQueryParameter(queries, param, this.ipRange ? ipRangeToString(this.ipRange) : undefined); break; case "si": this.tryAppendQueryParameter(queries, param, this.identifier); break; case "ses": this.tryAppendQueryParameter(queries, param, this.encryptionScope); break; case "skoid": // Signed object ID this.tryAppendQueryParameter(queries, param, this.signedOid); break; case "sktid": // Signed tenant ID this.tryAppendQueryParameter(queries, param, this.signedTenantId); break; case "skt": // Signed key start time this.tryAppendQueryParameter(queries, param, this.signedStartsOn ? truncatedISO8061Date(this.signedStartsOn, false) : undefined); break; case "ske": // Signed key expiry time this.tryAppendQueryParameter(queries, param, this.signedExpiresOn ? truncatedISO8061Date(this.signedExpiresOn, false) : undefined); break; case "sks": // Signed key service this.tryAppendQueryParameter(queries, param, this.signedService); break; case "skv": // Signed key version this.tryAppendQueryParameter(queries, param, this.signedVersion); break; case "sr": this.tryAppendQueryParameter(queries, param, this.resource); break; case "sp": this.tryAppendQueryParameter(queries, param, this.permissions); break; case "sig": this.tryAppendQueryParameter(queries, param, this.signature); break; case "rscc": this.tryAppendQueryParameter(queries, param, this.cacheControl); break; case "rscd": this.tryAppendQueryParameter(queries, param, this.contentDisposition); break; case "rsce": this.tryAppendQueryParameter(queries, param, this.contentEncoding); break; case "rscl": this.tryAppendQueryParameter(queries, param, this.contentLanguage); break; case "rsct": this.tryAppendQueryParameter(queries, param, this.contentType); break; case "saoid": this.tryAppendQueryParameter(queries, param, this.preauthorizedAgentObjectId); break; case "scid": this.tryAppendQueryParameter(queries, param, this.correlationId); break; } } return queries.join("&"); } /** * A private helper method used to filter and append query key/value pairs into an array. * * @param queries - * @param key - * @param value - */ tryAppendQueryParameter(queries, key, value) { if (!value) { return; } key = encodeURIComponent(key); value = encodeURIComponent(value); if (key.length > 0 && value.length > 0) { queries.push(`${key}=${value}`); } } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
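// --------------------------------------------------------------------------------------
// Editor's illustrative sketch (never invoked by this bundle; the account name, key,
// container and blob names are placeholders). It shows one plausible way the SAS helpers
// defined around here combine to produce a read-only blob SAS with a shared key credential.
function exampleGenerateBlobReadSasSketch() {
    // StorageSharedKeyCredential and BlobSASPermissions are defined elsewhere in this module.
    const sharedKeyCredential = new StorageSharedKeyCredential("myaccount", "bXkta2V5LWluLWJhc2U2NA==");
    const sas = generateBlobSASQueryParameters({
        containerName: "my-container",
        blobName: "my-blob.txt",
        permissions: BlobSASPermissions.parse("r"), // read-only
        expiresOn: new Date(Date.now() + 60 * 60 * 1000), // one hour from now
    }, sharedKeyCredential);
    // SASQueryParameters.toString() yields something like "sv=...&se=...&sr=b&sp=r&sig=..."
    return sas.toString();
}
// --------------------------------------------------------------------------------------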
function generateBlobSASQueryParameters(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { return generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName).sasQueryParameters; } function generateBlobSASQueryParametersInternal(blobSASSignatureValues, sharedKeyCredentialOrUserDelegationKey, accountName) { const version = blobSASSignatureValues.version ? blobSASSignatureValues.version : SERVICE_VERSION; const sharedKeyCredential = sharedKeyCredentialOrUserDelegationKey instanceof StorageSharedKeyCredential ? sharedKeyCredentialOrUserDelegationKey : undefined; let userDelegationKeyCredential; if (sharedKeyCredential === undefined && accountName !== undefined) { userDelegationKeyCredential = new UserDelegationKeyCredential(accountName, sharedKeyCredentialOrUserDelegationKey); } if (sharedKeyCredential === undefined && userDelegationKeyCredential === undefined) { throw TypeError("Invalid sharedKeyCredential, userDelegationKey or accountName."); } // Version 2020-12-06 adds support for encryptionscope in SAS. if (version >= "2020-12-06") { if (sharedKeyCredential !== undefined) { return generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential); } else { return generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential); } } // Version 2019-12-12 adds support for the blob tags permission. // Version 2018-11-09 adds support for the signed resource and signed blob snapshot time fields. // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas#constructing-the-signature-string if (version >= "2018-11-09") { if (sharedKeyCredential !== undefined) { return generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential); } else { // Version 2020-02-10 delegation SAS signature construction includes preauthorizedAgentObjectId, agentObjectId, correlationId. if (version >= "2020-02-10") { return generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential); } else { return generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential); } } } if (version >= "2015-04-05") { if (sharedKeyCredential !== undefined) { return generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential); } else { throw new RangeError("'version' must be >= '2018-11-09' when generating user delegation SAS using user delegation key."); } } throw new RangeError("'version' must be >= '2015-04-05'."); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2015-04-05 AND BEFORE 2018-11-09. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn and identifier. * * WARNING: When identifier is not provided, permissions and expiresOn are required. * You MUST assign value to identifier or expiresOn & permissions manually if you initial with * this constructor. 
* * @param blobSASSignatureValues - * @param sharedKeyCredential - */ function generateBlobSASQueryParameters20150405(blobSASSignatureValues, sharedKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } let resource = "c"; if (blobSASSignatureValues.blobName) { resource = "b"; } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. let verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } else { verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } // Signature is generated on the un-url-encoded values. const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), blobSASSignatureValues.identifier, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), stringToSign: stringToSign, }; } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn and identifier. * * WARNING: When identifier is not provided, permissions and expiresOn are required. * You MUST assign value to identifier or expiresOn & permissions manually if you initial with * this constructor. 
* * @param blobSASSignatureValues - * @param sharedKeyCredential - */ function generateBlobSASQueryParameters20181109(blobSASSignatureValues, sharedKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } let resource = "c"; let timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { resource = "bs"; } else if (blobSASSignatureValues.versionId) { resource = "bv"; timestamp = blobSASSignatureValues.versionId; } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. let verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } else { verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } // Signature is generated on the un-url-encoded values. const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), blobSASSignatureValues.identifier, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, resource, timestamp, blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType), stringToSign: stringToSign, }; } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn and identifier. * * WARNING: When identifier is not provided, permissions and expiresOn are required. * You MUST assign value to identifier or expiresOn & permissions manually if you initial with * this constructor. 
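* (Editor's note: relative to the 2018-11-09 string-to-sign built above, this version inserts
* `encryptionScope` between the snapshot/version timestamp and the Cache-Control field,
* matching the `ses` query parameter emitted by SASQueryParameters.)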
* * @param blobSASSignatureValues - * @param sharedKeyCredential - */ function generateBlobSASQueryParameters20201206(blobSASSignatureValues, sharedKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); if (!blobSASSignatureValues.identifier && !(blobSASSignatureValues.permissions && blobSASSignatureValues.expiresOn)) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when 'identifier' is not provided."); } let resource = "c"; let timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { resource = "bs"; } else if (blobSASSignatureValues.versionId) { resource = "bv"; timestamp = blobSASSignatureValues.versionId; } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. let verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } else { verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } // Signature is generated on the un-url-encoded values. const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(sharedKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), blobSASSignatureValues.identifier, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, resource, timestamp, blobSASSignatureValues.encryptionScope, blobSASSignatureValues.cacheControl ? blobSASSignatureValues.cacheControl : "", blobSASSignatureValues.contentDisposition ? blobSASSignatureValues.contentDisposition : "", blobSASSignatureValues.contentEncoding ? blobSASSignatureValues.contentEncoding : "", blobSASSignatureValues.contentLanguage ? blobSASSignatureValues.contentLanguage : "", blobSASSignatureValues.contentType ? blobSASSignatureValues.contentType : "", ].join("\n"); const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, undefined, undefined, undefined, blobSASSignatureValues.encryptionScope), stringToSign: stringToSign, }; } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2018-11-09. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn. * * WARNING: identifier will be ignored, permissions and expiresOn are required. 
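* (Editor's note: instead of a stored-access-policy identifier, the user-delegation
* string-to-sign built below includes the key's signedObjectId, signedTenantId, signedStartsOn,
* signedExpiresOn, signedService and signedVersion fields, which surface as the
* skoid/sktid/skt/ske/sks/skv query parameters.)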
* * @param blobSASSignatureValues - * @param userDelegationKeyCredential - */ function generateBlobSASQueryParametersUDK20181109(blobSASSignatureValues, userDelegationKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); // Stored access policies are not supported for a user delegation SAS. if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } let resource = "c"; let timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { resource = "bs"; } else if (blobSASSignatureValues.versionId) { resource = "bv"; timestamp = blobSASSignatureValues.versionId; } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. let verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } else { verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } // Signature is generated on the un-url-encoded values. const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), userDelegationKeyCredential.userDelegationKey.signedObjectId, userDelegationKeyCredential.userDelegationKey.signedTenantId, userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedService, userDelegationKeyCredential.userDelegationKey.signedVersion, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, resource, timestamp, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey), stringToSign: stringToSign, }; } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2020-02-10. * * Creates an instance of SASQueryParameters. 
* * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn. * * WARNING: identifier will be ignored, permissions and expiresOn are required. * * @param blobSASSignatureValues - * @param userDelegationKeyCredential - */ function generateBlobSASQueryParametersUDK20200210(blobSASSignatureValues, userDelegationKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); // Stored access policies are not supported for a user delegation SAS. if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } let resource = "c"; let timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { resource = "bs"; } else if (blobSASSignatureValues.versionId) { resource = "bv"; timestamp = blobSASSignatureValues.versionId; } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. let verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } else { verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } // Signature is generated on the un-url-encoded values. const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), userDelegationKeyCredential.userDelegationKey.signedObjectId, userDelegationKeyCredential.userDelegationKey.signedTenantId, userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedService, userDelegationKeyCredential.userDelegationKey.signedVersion, blobSASSignatureValues.preauthorizedAgentObjectId, undefined, // agentObjectId blobSASSignatureValues.correlationId, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? 
blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, resource, timestamp, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId), stringToSign: stringToSign, }; } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * IMPLEMENTATION FOR API VERSION FROM 2020-12-06. * * Creates an instance of SASQueryParameters. * * Only accepts required settings needed to create a SAS. For optional settings please * set corresponding properties directly, such as permissions, startsOn. * * WARNING: identifier will be ignored, permissions and expiresOn are required. * * @param blobSASSignatureValues - * @param userDelegationKeyCredential - */ function generateBlobSASQueryParametersUDK20201206(blobSASSignatureValues, userDelegationKeyCredential) { blobSASSignatureValues = SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues); // Stored access policies are not supported for a user delegation SAS. if (!blobSASSignatureValues.permissions || !blobSASSignatureValues.expiresOn) { throw new RangeError("Must provide 'permissions' and 'expiresOn' for Blob SAS generation when generating user delegation SAS."); } let resource = "c"; let timestamp = blobSASSignatureValues.snapshotTime; if (blobSASSignatureValues.blobName) { resource = "b"; if (blobSASSignatureValues.snapshotTime) { resource = "bs"; } else if (blobSASSignatureValues.versionId) { resource = "bv"; timestamp = blobSASSignatureValues.versionId; } } // Calling parse and toString guarantees the proper ordering and throws on invalid characters. let verifiedPermissions; if (blobSASSignatureValues.permissions) { if (blobSASSignatureValues.blobName) { verifiedPermissions = BlobSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } else { verifiedPermissions = ContainerSASPermissions.parse(blobSASSignatureValues.permissions.toString()).toString(); } } // Signature is generated on the un-url-encoded values. const stringToSign = [ verifiedPermissions ? verifiedPermissions : "", blobSASSignatureValues.startsOn ? truncatedISO8061Date(blobSASSignatureValues.startsOn, false) : "", blobSASSignatureValues.expiresOn ? truncatedISO8061Date(blobSASSignatureValues.expiresOn, false) : "", getCanonicalName(userDelegationKeyCredential.accountName, blobSASSignatureValues.containerName, blobSASSignatureValues.blobName), userDelegationKeyCredential.userDelegationKey.signedObjectId, userDelegationKeyCredential.userDelegationKey.signedTenantId, userDelegationKeyCredential.userDelegationKey.signedStartsOn ? truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedStartsOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedExpiresOn ? 
truncatedISO8061Date(userDelegationKeyCredential.userDelegationKey.signedExpiresOn, false) : "", userDelegationKeyCredential.userDelegationKey.signedService, userDelegationKeyCredential.userDelegationKey.signedVersion, blobSASSignatureValues.preauthorizedAgentObjectId, undefined, // agentObjectId blobSASSignatureValues.correlationId, blobSASSignatureValues.ipRange ? ipRangeToString(blobSASSignatureValues.ipRange) : "", blobSASSignatureValues.protocol ? blobSASSignatureValues.protocol : "", blobSASSignatureValues.version, resource, timestamp, blobSASSignatureValues.encryptionScope, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, ].join("\n"); const signature = userDelegationKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(blobSASSignatureValues.version, signature, verifiedPermissions, undefined, undefined, blobSASSignatureValues.protocol, blobSASSignatureValues.startsOn, blobSASSignatureValues.expiresOn, blobSASSignatureValues.ipRange, blobSASSignatureValues.identifier, resource, blobSASSignatureValues.cacheControl, blobSASSignatureValues.contentDisposition, blobSASSignatureValues.contentEncoding, blobSASSignatureValues.contentLanguage, blobSASSignatureValues.contentType, userDelegationKeyCredential.userDelegationKey, blobSASSignatureValues.preauthorizedAgentObjectId, blobSASSignatureValues.correlationId, blobSASSignatureValues.encryptionScope), stringToSign: stringToSign, }; } function getCanonicalName(accountName, containerName, blobName) { // Container: "/blob/account/containerName" // Blob: "/blob/account/containerName/blobName" const elements = [`/blob/${accountName}/${containerName}`]; if (blobName) { elements.push(`/${blobName}`); } return elements.join(""); } function SASSignatureValuesSanityCheckAndAutofill(blobSASSignatureValues) { const version = blobSASSignatureValues.version ? 
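// When the caller supplies no version, SERVICE_VERSION is used. The checks that follow gate
// newer SAS features behind minimum service versions (all enforced below):
//   snapshotTime                                            -> 2018-11-09
//   versionId, 'x' (deleteVersion), 'y' (permanentDelete)   -> 2019-10-10
//   't' (tag)                                               -> 2019-12-12
//   'm'/'e' (move/execute), preauthorizedAgentObjectId, correlationId -> 2020-02-10
//   'i' (setImmutabilityPolicy)                             -> 2020-08-04
//   encryptionScope                                         -> 2020-12-06
//   'f' (filterByTags)                                      -> 2021-04-10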
blobSASSignatureValues.version : SERVICE_VERSION; if (blobSASSignatureValues.snapshotTime && version < "2018-11-09") { throw RangeError("'version' must be >= '2018-11-09' when providing 'snapshotTime'."); } if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.snapshotTime) { throw RangeError("Must provide 'blobName' when providing 'snapshotTime'."); } if (blobSASSignatureValues.versionId && version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when providing 'versionId'."); } if (blobSASSignatureValues.blobName === undefined && blobSASSignatureValues.versionId) { throw RangeError("Must provide 'blobName' when providing 'versionId'."); } if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); } if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when providing 'x' permission."); } if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when providing 'y' permission."); } if (blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.tag && version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when providing 't' permission."); } if (version < "2020-02-10" && blobSASSignatureValues.permissions && (blobSASSignatureValues.permissions.move || blobSASSignatureValues.permissions.execute)) { throw RangeError("'version' must be >= '2020-02-10' when providing the 'm' or 'e' permission."); } if (version < "2021-04-10" && blobSASSignatureValues.permissions && blobSASSignatureValues.permissions.filterByTags) { throw RangeError("'version' must be >= '2021-04-10' when providing the 'f' permission."); } if (version < "2020-02-10" && (blobSASSignatureValues.preauthorizedAgentObjectId || blobSASSignatureValues.correlationId)) { throw RangeError("'version' must be >= '2020-02-10' when providing 'preauthorizedAgentObjectId' or 'correlationId'."); } if (blobSASSignatureValues.encryptionScope && version < "2020-12-06") { throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); } blobSASSignatureValues.version = version; return blobSASSignatureValues; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * A client that manages leases for a {@link ContainerClient} or a {@link BlobClient}. */ class BlobLeaseClient { /** * Gets the lease Id. * * @readonly */ get leaseId() { return this._leaseId; } /** * Gets the url. * * @readonly */ get url() { return this._url; } /** * Creates an instance of BlobLeaseClient. * @param client - The client to make the lease operation requests. * @param leaseId - Initial proposed lease id. */ constructor(client, leaseId) { const clientContext = client.storageClientContext; this._url = client.url; if (client.name === undefined) { this._isContainer = true; this._containerOrBlobOperation = clientContext.container; } else { this._isContainer = false; this._containerOrBlobOperation = clientContext.blob; } if (!leaseId) { leaseId = coreUtil.randomUUID(); } this._leaseId = leaseId; } /** * Establishes and manages a lock on a container for delete operations, or on a blob * for write and delete operations. 
* The lock duration can be 15 to 60 seconds, or can be infinite. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container * and * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * * @param duration - Must be between 15 to 60 seconds, or infinite (-1) * @param options - option to configure lease management operations. * @returns Response data for acquire lease operation. */ async acquireLease(duration, options = {}) { var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } return tracingClient.withSpan("BlobLeaseClient-acquireLease", options, async (updatedOptions) => { var _a; return assertResponse(await this._containerOrBlobOperation.acquireLease({ abortSignal: options.abortSignal, duration, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), proposedLeaseId: this._leaseId, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * To change the ID of the lease. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container * and * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * * @param proposedLeaseId - the proposed new lease Id. * @param options - option to configure lease management operations. * @returns Response data for change lease operation. */ async changeLease(proposedLeaseId, options = {}) { var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } return tracingClient.withSpan("BlobLeaseClient-changeLease", options, async (updatedOptions) => { var _a; const response = assertResponse(await this._containerOrBlobOperation.changeLease(this._leaseId, proposedLeaseId, { abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, })); this._leaseId = proposedLeaseId; return response; }); } /** * To free the lease if it is no longer needed so that another client may * immediately acquire a lease against the container or the blob. 
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container * and * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * * @param options - option to configure lease management operations. * @returns Response data for release lease operation. */ async releaseLease(options = {}) { var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } return tracingClient.withSpan("BlobLeaseClient-releaseLease", options, async (updatedOptions) => { var _a; return assertResponse(await this._containerOrBlobOperation.releaseLease(this._leaseId, { abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, })); }); } /** * To renew the lease. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container * and * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * * @param options - Optional option to configure lease management operations. * @returns Response data for renew lease operation. */ async renewLease(options = {}) { var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } return tracingClient.withSpan("BlobLeaseClient-renewLease", options, async (updatedOptions) => { var _a; return this._containerOrBlobOperation.renewLease(this._leaseId, { abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, }); }); } /** * To end the lease but ensure that another client cannot acquire a new lease * until the current lease period has expired. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-container * and * @see https://docs.microsoft.com/en-us/rest/api/storageservices/lease-blob * * @param breakPeriod - Break period * @param options - Optional options to configure lease management operations. * @returns Response data for break lease operation. */ async breakLease(breakPeriod, options = {}) { var _a, _b, _c, _d, _e; if (this._isContainer && ((((_a = options.conditions) === null || _a === void 0 ? 
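// The guard below, repeated for every lease operation, rejects ifMatch/ifNoneMatch/ifTags
// conditions on container leases because the service ignores them. Rough usage sketch of this
// client through the public API (client and variable names are placeholders):
//
//   const leaseClient = containerClient.getBlobLeaseClient(); // or blobClient.getBlobLeaseClient()
//   await leaseClient.acquireLease(30);  // 15-60 seconds, or -1 for an infinite lease
//   await leaseClient.renewLease();
//   await leaseClient.breakLease(10);    // break period in seconds
//   await leaseClient.releaseLease();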
void 0 : _a.ifMatch) && ((_b = options.conditions) === null || _b === void 0 ? void 0 : _b.ifMatch) !== ETagNone) || (((_c = options.conditions) === null || _c === void 0 ? void 0 : _c.ifNoneMatch) && ((_d = options.conditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch) !== ETagNone) || ((_e = options.conditions) === null || _e === void 0 ? void 0 : _e.tagConditions))) { throw new RangeError("The IfMatch, IfNoneMatch and tags access conditions are ignored by the service. Values other than undefined or their default values are not acceptable."); } return tracingClient.withSpan("BlobLeaseClient-breakLease", options, async (updatedOptions) => { var _a; const operationOptions = { abortSignal: options.abortSignal, breakPeriod, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, }; return assertResponse(await this._containerOrBlobOperation.breakLease(operationOptions)); }); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * A Node.js ReadableStream will internally retry when internal ReadableStream unexpected ends. */ class RetriableReadableStream extends stream.Readable { /** * Creates an instance of RetriableReadableStream. * * @param source - The current ReadableStream returned from getter * @param getter - A method calling downloading request returning * a new ReadableStream from specified offset * @param offset - Offset position in original data source to read * @param count - How much data in original data source to read * @param options - */ constructor(source, getter, offset, count, options = {}) { super({ highWaterMark: options.highWaterMark }); this.retries = 0; this.sourceDataHandler = (data) => { if (this.options.doInjectErrorOnce) { this.options.doInjectErrorOnce = undefined; this.source.pause(); this.sourceErrorOrEndHandler(); this.source.destroy(); return; } // console.log( // `Offset: ${this.offset}, Received ${data.length} from internal stream` // ); this.offset += data.length; if (this.onProgress) { this.onProgress({ loadedBytes: this.offset - this.start }); } if (!this.push(data)) { this.source.pause(); } }; this.sourceAbortedHandler = () => { const abortError = new abortController.AbortError("The operation was aborted."); this.destroy(abortError); }; this.sourceErrorOrEndHandler = (err) => { if (err && err.name === "AbortError") { this.destroy(err); return; } // console.log( // `Source stream emits end or error, offset: ${ // this.offset // }, dest end : ${this.end}` // ); this.removeSourceEventHandlers(); if (this.offset - 1 === this.end) { this.push(null); } else if (this.offset <= this.end) { // console.log( // `retries: ${this.retries}, max retries: ${this.maxRetries}` // ); if (this.retries < this.maxRetryRequests) { this.retries += 1; this.getter(this.offset) .then((newSource) => { this.source = newSource; this.setSourceEventHandlers(); return; }) .catch((error) => { this.destroy(error); }); } else { this.destroy(new Error(`Data corruption failure: received less data than required and reached maxRetires limitation. 
Received data offset: ${this.offset - 1}, data needed offset: ${this.end}, retries: ${this.retries}, max retries: ${this.maxRetryRequests}`)); } } else { this.destroy(new Error(`Data corruption failure: Received more data than original request, data needed offset is ${this.end}, received offset: ${this.offset - 1}`)); } }; this.getter = getter; this.source = source; this.start = offset; this.offset = offset; this.end = offset + count - 1; this.maxRetryRequests = options.maxRetryRequests && options.maxRetryRequests >= 0 ? options.maxRetryRequests : 0; this.onProgress = options.onProgress; this.options = options; this.setSourceEventHandlers(); } _read() { this.source.resume(); } setSourceEventHandlers() { this.source.on("data", this.sourceDataHandler); this.source.on("end", this.sourceErrorOrEndHandler); this.source.on("error", this.sourceErrorOrEndHandler); // needed for Node14 this.source.on("aborted", this.sourceAbortedHandler); } removeSourceEventHandlers() { this.source.removeListener("data", this.sourceDataHandler); this.source.removeListener("end", this.sourceErrorOrEndHandler); this.source.removeListener("error", this.sourceErrorOrEndHandler); this.source.removeListener("aborted", this.sourceAbortedHandler); } _destroy(error, callback) { // remove listener from source and release source this.removeSourceEventHandlers(); this.source.destroy(); callback(error === null ? undefined : error); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * BlobDownloadResponse implements BlobDownloadResponseParsed interface, and in Node.js runtime it will * automatically retry when internal read stream unexpected ends. (This kind of unexpected ends cannot * trigger retries defined in pipeline retry policy.) * * The {@link readableStreamBody} stream will retry underlayer, you can just use it as a normal Node.js * Readable stream. */ class BlobDownloadResponse { /** * Indicates that the service supports * requests for partial file content. * * @readonly */ get acceptRanges() { return this.originalResponse.acceptRanges; } /** * Returns if it was previously specified * for the file. * * @readonly */ get cacheControl() { return this.originalResponse.cacheControl; } /** * Returns the value that was specified * for the 'x-ms-content-disposition' header and specifies how to process the * response. * * @readonly */ get contentDisposition() { return this.originalResponse.contentDisposition; } /** * Returns the value that was specified * for the Content-Encoding request header. * * @readonly */ get contentEncoding() { return this.originalResponse.contentEncoding; } /** * Returns the value that was specified * for the Content-Language request header. * * @readonly */ get contentLanguage() { return this.originalResponse.contentLanguage; } /** * The current sequence number for a * page blob. This header is not returned for block blobs or append blobs. * * @readonly */ get blobSequenceNumber() { return this.originalResponse.blobSequenceNumber; } /** * The blob's type. Possible values include: * 'BlockBlob', 'PageBlob', 'AppendBlob'. * * @readonly */ get blobType() { return this.originalResponse.blobType; } /** * The number of bytes present in the * response body. * * @readonly */ get contentLength() { return this.originalResponse.contentLength; } /** * If the file has an MD5 hash and the * request is to read the full file, this response header is returned so that * the client can check for message content integrity. 
If the request is to * read a specified range and the 'x-ms-range-get-content-md5' is set to * true, then the request returns an MD5 hash for the range, as long as the * range size is less than or equal to 4 MB. If neither of these sets of * conditions is true, then no value is returned for the 'Content-MD5' * header. * * @readonly */ get contentMD5() { return this.originalResponse.contentMD5; } /** * Indicates the range of bytes returned if * the client requested a subset of the file by setting the Range request * header. * * @readonly */ get contentRange() { return this.originalResponse.contentRange; } /** * The content type specified for the file. * The default content type is 'application/octet-stream' * * @readonly */ get contentType() { return this.originalResponse.contentType; } /** * Conclusion time of the last attempted * Copy File operation where this file was the destination file. This value * can specify the time of a completed, aborted, or failed copy attempt. * * @readonly */ get copyCompletedOn() { return this.originalResponse.copyCompletedOn; } /** * String identifier for the last attempted Copy * File operation where this file was the destination file. * * @readonly */ get copyId() { return this.originalResponse.copyId; } /** * Contains the number of bytes copied and * the total bytes in the source in the last attempted Copy File operation * where this file was the destination file. Can show between 0 and * Content-Length bytes copied. * * @readonly */ get copyProgress() { return this.originalResponse.copyProgress; } /** * URL up to 2KB in length that specifies the * source file used in the last attempted Copy File operation where this file * was the destination file. * * @readonly */ get copySource() { return this.originalResponse.copySource; } /** * State of the copy operation * identified by 'x-ms-copy-id'. Possible values include: 'pending', * 'success', 'aborted', 'failed' * * @readonly */ get copyStatus() { return this.originalResponse.copyStatus; } /** * Only appears when * x-ms-copy-status is failed or pending. Describes cause of fatal or * non-fatal copy operation failure. * * @readonly */ get copyStatusDescription() { return this.originalResponse.copyStatusDescription; } /** * When a blob is leased, * specifies whether the lease is of infinite or fixed duration. Possible * values include: 'infinite', 'fixed'. * * @readonly */ get leaseDuration() { return this.originalResponse.leaseDuration; } /** * Lease state of the blob. Possible * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. * * @readonly */ get leaseState() { return this.originalResponse.leaseState; } /** * The current lease status of the * blob. Possible values include: 'locked', 'unlocked'. * * @readonly */ get leaseStatus() { return this.originalResponse.leaseStatus; } /** * A UTC date/time value generated by the service that * indicates the time at which the response was initiated. * * @readonly */ get date() { return this.originalResponse.date; } /** * The number of committed blocks * present in the blob. This header is returned only for append blobs. * * @readonly */ get blobCommittedBlockCount() { return this.originalResponse.blobCommittedBlockCount; } /** * The ETag contains a value that you can use to * perform operations conditionally, in quotes. * * @readonly */ get etag() { return this.originalResponse.etag; } /** * The number of tags associated with the blob * * @readonly */ get tagCount() { return this.originalResponse.tagCount; } /** * The error code. 
* * @readonly */ get errorCode() { return this.originalResponse.errorCode; } /** * The value of this header is set to * true if the file data and application metadata are completely encrypted * using the specified algorithm. Otherwise, the value is set to false (when * the file is unencrypted, or if only parts of the file/application metadata * are encrypted). * * @readonly */ get isServerEncrypted() { return this.originalResponse.isServerEncrypted; } /** * If the blob has a MD5 hash, and if * request contains range header (Range or x-ms-range), this response header * is returned with the value of the whole blob's MD5 value. This value may * or may not be equal to the value returned in Content-MD5 header, with the * latter calculated from the requested range. * * @readonly */ get blobContentMD5() { return this.originalResponse.blobContentMD5; } /** * Returns the date and time the file was last * modified. Any operation that modifies the file or its properties updates * the last modified time. * * @readonly */ get lastModified() { return this.originalResponse.lastModified; } /** * Returns the UTC date and time generated by the service that indicates the time at which the blob was * last read or written to. * * @readonly */ get lastAccessed() { return this.originalResponse.lastAccessed; } /** * Returns the date and time the blob was created. * * @readonly */ get createdOn() { return this.originalResponse.createdOn; } /** * A name-value pair * to associate with a file storage object. * * @readonly */ get metadata() { return this.originalResponse.metadata; } /** * This header uniquely identifies the request * that was made and can be used for troubleshooting the request. * * @readonly */ get requestId() { return this.originalResponse.requestId; } /** * If a client request id header is sent in the request, this header will be present in the * response with the same value. * * @readonly */ get clientRequestId() { return this.originalResponse.clientRequestId; } /** * Indicates the version of the Blob service used * to execute the request. * * @readonly */ get version() { return this.originalResponse.version; } /** * Indicates the versionId of the downloaded blob version. * * @readonly */ get versionId() { return this.originalResponse.versionId; } /** * Indicates whether version of this blob is a current version. * * @readonly */ get isCurrentVersion() { return this.originalResponse.isCurrentVersion; } /** * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned * when the blob was encrypted with a customer-provided key. * * @readonly */ get encryptionKeySha256() { return this.originalResponse.encryptionKeySha256; } /** * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to * true, then the request returns a crc64 for the range, as long as the range size is less than * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is * specified in the same request, it will fail with 400(Bad Request) */ get contentCrc64() { return this.originalResponse.contentCrc64; } /** * Object Replication Policy Id of the destination blob. * * @readonly */ get objectReplicationDestinationPolicyId() { return this.originalResponse.objectReplicationDestinationPolicyId; } /** * Parsed Object Replication Policy Id, Rule Id(s) and status of the source blob. * * @readonly */ get objectReplicationSourceProperties() { return this.originalResponse.objectReplicationSourceProperties; } /** * If this blob has been sealed. 
* * @readonly */ get isSealed() { return this.originalResponse.isSealed; } /** * UTC date/time value generated by the service that indicates the time at which the blob immutability policy will expire. * * @readonly */ get immutabilityPolicyExpiresOn() { return this.originalResponse.immutabilityPolicyExpiresOn; } /** * Indicates immutability policy mode. * * @readonly */ get immutabilityPolicyMode() { return this.originalResponse.immutabilityPolicyMode; } /** * Indicates if a legal hold is present on the blob. * * @readonly */ get legalHold() { return this.originalResponse.legalHold; } /** * The response body as a browser Blob. * Always undefined in node.js. * * @readonly */ get contentAsBlob() { return this.originalResponse.blobBody; } /** * The response body as a node.js Readable stream. * Always undefined in the browser. * * It will automatically retry when internal read stream unexpected ends. * * @readonly */ get readableStreamBody() { return coreUtil.isNode ? this.blobDownloadStream : undefined; } /** * The HTTP response. */ get _response() { return this.originalResponse._response; } /** * Creates an instance of BlobDownloadResponse. * * @param originalResponse - * @param getter - * @param offset - * @param count - * @param options - */ constructor(originalResponse, getter, offset, count, options = {}) { this.originalResponse = originalResponse; this.blobDownloadStream = new RetriableReadableStream(this.originalResponse.readableStreamBody, getter, offset, count, options); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. const AVRO_SYNC_MARKER_SIZE = 16; const AVRO_INIT_BYTES = new Uint8Array([79, 98, 106, 1]); const AVRO_CODEC_KEY = "avro.codec"; const AVRO_SCHEMA_KEY = "avro.schema"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. class AvroParser { /** * Reads a fixed number of bytes from the stream. * * @param stream - * @param length - * @param options - */ static async readFixedBytes(stream, length, options = {}) { const bytes = await stream.read(length, { abortSignal: options.abortSignal }); if (bytes.length !== length) { throw new Error("Hit stream end."); } return bytes; } /** * Reads a single byte from the stream. * * @param stream - * @param options - */ static async readByte(stream, options = {}) { const buf = await AvroParser.readFixedBytes(stream, 1, options); return buf[0]; } // int and long are stored in variable-length zig-zag coding. // variable-length: https://lucene.apache.org/core/3_5_0/fileformats.html#VInt // zig-zag: https://developers.google.com/protocol-buffers/docs/encoding?csw=1#types static async readZigZagLong(stream, options = {}) { let zigZagEncoded = 0; let significanceInBit = 0; let byte, haveMoreByte, significanceInFloat; do { byte = await AvroParser.readByte(stream, options); haveMoreByte = byte & 0x80; zigZagEncoded |= (byte & 0x7f) << significanceInBit; significanceInBit += 7; } while (haveMoreByte && significanceInBit < 28); // bitwise operation only works for 32-bit integers if (haveMoreByte) { // Switch to float arithmetic // eslint-disable-next-line no-self-assign zigZagEncoded = zigZagEncoded; significanceInFloat = 268435456; // 2 ** 28. do { byte = await AvroParser.readByte(stream, options); zigZagEncoded += (byte & 0x7f) * significanceInFloat; significanceInFloat *= 128; // 2 ** 7 } while (byte & 0x80); const res = (zigZagEncoded % 2 ? 
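// Zig-zag decoding maps the unsigned varint back to a signed value: 0 -> 0, 1 -> -1, 2 -> 1,
// 3 -> -2, 4 -> 2, ... Values whose encoding fits in 28 bits use the bitwise form
// `(n >> 1) ^ -(n & 1)` at the end of this function; this floating-point branch handles wider
// longs. For example, an encoded 4 decodes to (4 % 2 ? -(4 + 1) : 4) / 2 === 2, and an encoded 3
// decodes to -(3 + 1) / 2 === -2.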
-(zigZagEncoded + 1) : zigZagEncoded) / 2; if (res < Number.MIN_SAFE_INTEGER || res > Number.MAX_SAFE_INTEGER) { throw new Error("Integer overflow."); } return res; } return (zigZagEncoded >> 1) ^ -(zigZagEncoded & 1); } static async readLong(stream, options = {}) { return AvroParser.readZigZagLong(stream, options); } static async readInt(stream, options = {}) { return AvroParser.readZigZagLong(stream, options); } static async readNull() { return null; } static async readBoolean(stream, options = {}) { const b = await AvroParser.readByte(stream, options); if (b === 1) { return true; } else if (b === 0) { return false; } else { throw new Error("Byte was not a boolean."); } } static async readFloat(stream, options = {}) { const u8arr = await AvroParser.readFixedBytes(stream, 4, options); const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); return view.getFloat32(0, true); // littleEndian = true } static async readDouble(stream, options = {}) { const u8arr = await AvroParser.readFixedBytes(stream, 8, options); const view = new DataView(u8arr.buffer, u8arr.byteOffset, u8arr.byteLength); return view.getFloat64(0, true); // littleEndian = true } static async readBytes(stream, options = {}) { const size = await AvroParser.readLong(stream, options); if (size < 0) { throw new Error("Bytes size was negative."); } return stream.read(size, { abortSignal: options.abortSignal }); } static async readString(stream, options = {}) { const u8arr = await AvroParser.readBytes(stream, options); const utf8decoder = new TextDecoder(); return utf8decoder.decode(u8arr); } static async readMapPair(stream, readItemMethod, options = {}) { const key = await AvroParser.readString(stream, options); // FUTURE: this won't work with readFixed (currently not supported) which needs a length as the parameter. const value = await readItemMethod(stream, options); return { key, value }; } static async readMap(stream, readItemMethod, options = {}) { const readPairMethod = (s, opts = {}) => { return AvroParser.readMapPair(s, readItemMethod, opts); }; const pairs = await AvroParser.readArray(stream, readPairMethod, options); const dict = {}; for (const pair of pairs) { dict[pair.key] = pair.value; } return dict; } static async readArray(stream, readItemMethod, options = {}) { const items = []; for (let count = await AvroParser.readLong(stream, options); count !== 0; count = await AvroParser.readLong(stream, options)) { if (count < 0) { // Ignore block sizes await AvroParser.readLong(stream, options); count = -count; } while (count--) { const item = await readItemMethod(stream, options); items.push(item); } } return items; } } var AvroComplex; (function (AvroComplex) { AvroComplex["RECORD"] = "record"; AvroComplex["ENUM"] = "enum"; AvroComplex["ARRAY"] = "array"; AvroComplex["MAP"] = "map"; AvroComplex["UNION"] = "union"; AvroComplex["FIXED"] = "fixed"; })(AvroComplex || (AvroComplex = {})); var AvroPrimitive; (function (AvroPrimitive) { AvroPrimitive["NULL"] = "null"; AvroPrimitive["BOOLEAN"] = "boolean"; AvroPrimitive["INT"] = "int"; AvroPrimitive["LONG"] = "long"; AvroPrimitive["FLOAT"] = "float"; AvroPrimitive["DOUBLE"] = "double"; AvroPrimitive["BYTES"] = "bytes"; AvroPrimitive["STRING"] = "string"; })(AvroPrimitive || (AvroPrimitive = {})); class AvroType { /** * Determines the AvroType from the Avro Schema. 
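*
* Dispatch is based on the shape of the JSON schema: a plain string such as "long" produces an
* AvroPrimitiveType, an array such as ["null", "string"] produces an AvroUnionType, and an object
* such as { "type": "record", "name": "...", "fields": [...] } is routed by its "type" attribute
* to the matching complex type (record, enum, or map) handled below.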
*/ // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types static fromSchema(schema) { if (typeof schema === "string") { return AvroType.fromStringSchema(schema); } else if (Array.isArray(schema)) { return AvroType.fromArraySchema(schema); } else { return AvroType.fromObjectSchema(schema); } } static fromStringSchema(schema) { switch (schema) { case AvroPrimitive.NULL: case AvroPrimitive.BOOLEAN: case AvroPrimitive.INT: case AvroPrimitive.LONG: case AvroPrimitive.FLOAT: case AvroPrimitive.DOUBLE: case AvroPrimitive.BYTES: case AvroPrimitive.STRING: return new AvroPrimitiveType(schema); default: throw new Error(`Unexpected Avro type ${schema}`); } } static fromArraySchema(schema) { return new AvroUnionType(schema.map(AvroType.fromSchema)); } static fromObjectSchema(schema) { const type = schema.type; // Primitives can be defined as strings or objects try { return AvroType.fromStringSchema(type); } catch (_a) { // no-op } switch (type) { case AvroComplex.RECORD: if (schema.aliases) { throw new Error(`aliases currently is not supported, schema: ${schema}`); } if (!schema.name) { throw new Error(`Required attribute 'name' doesn't exist on schema: ${schema}`); } // eslint-disable-next-line no-case-declarations const fields = {}; if (!schema.fields) { throw new Error(`Required attribute 'fields' doesn't exist on schema: ${schema}`); } for (const field of schema.fields) { fields[field.name] = AvroType.fromSchema(field.type); } return new AvroRecordType(fields, schema.name); case AvroComplex.ENUM: if (schema.aliases) { throw new Error(`aliases currently is not supported, schema: ${schema}`); } if (!schema.symbols) { throw new Error(`Required attribute 'symbols' doesn't exist on schema: ${schema}`); } return new AvroEnumType(schema.symbols); case AvroComplex.MAP: if (!schema.values) { throw new Error(`Required attribute 'values' doesn't exist on schema: ${schema}`); } return new AvroMapType(AvroType.fromSchema(schema.values)); case AvroComplex.ARRAY: // Unused today case AvroComplex.FIXED: // Unused today default: throw new Error(`Unexpected Avro type ${type} in ${schema}`); } } } class AvroPrimitiveType extends AvroType { constructor(primitive) { super(); this._primitive = primitive; } // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types read(stream, options = {}) { switch (this._primitive) { case AvroPrimitive.NULL: return AvroParser.readNull(); case AvroPrimitive.BOOLEAN: return AvroParser.readBoolean(stream, options); case AvroPrimitive.INT: return AvroParser.readInt(stream, options); case AvroPrimitive.LONG: return AvroParser.readLong(stream, options); case AvroPrimitive.FLOAT: return AvroParser.readFloat(stream, options); case AvroPrimitive.DOUBLE: return AvroParser.readDouble(stream, options); case AvroPrimitive.BYTES: return AvroParser.readBytes(stream, options); case AvroPrimitive.STRING: return AvroParser.readString(stream, options); default: throw new Error("Unknown Avro Primitive"); } } } class AvroEnumType extends AvroType { constructor(symbols) { super(); this._symbols = symbols; } // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types async read(stream, options = {}) { const value = await AvroParser.readInt(stream, options); return this._symbols[value]; } } class AvroUnionType extends AvroType { constructor(types) { super(); this._types = types; } async read(stream, options = {}) { const typeIndex = await AvroParser.readInt(stream, options); return this._types[typeIndex].read(stream, options); } } class AvroMapType extends AvroType { 
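// Avro maps are encoded like arrays of key/value pairs: a sequence of blocks, each starting with
// an item count (a negative count is followed by the block's byte size), then that many string
// keys and item-typed values, terminated by a count of 0. readMap above reuses readArray with a
// pair reader, so a schema such as { "type": "map", "values": "string" } decodes into a plain
// object keyed by string.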
constructor(itemType) { super(); this._itemType = itemType; } // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types read(stream, options = {}) { const readItemMethod = (s, opts) => { return this._itemType.read(s, opts); }; return AvroParser.readMap(stream, readItemMethod, options); } } class AvroRecordType extends AvroType { constructor(fields, name) { super(); this._fields = fields; this._name = name; } // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types async read(stream, options = {}) { // eslint-disable-next-line @typescript-eslint/no-wrapper-object-types const record = {}; record["$schema"] = this._name; for (const key in this._fields) { if (Object.prototype.hasOwnProperty.call(this._fields, key)) { record[key] = await this._fields[key].read(stream, options); } } return record; } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. function arraysEqual(a, b) { if (a === b) return true; if (a == null || b == null) return false; if (a.length !== b.length) return false; for (let i = 0; i < a.length; ++i) { if (a[i] !== b[i]) return false; } return true; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. class AvroReader { get blockOffset() { return this._blockOffset; } get objectIndex() { return this._objectIndex; } constructor(dataStream, headerStream, currentBlockOffset, indexWithinCurrentBlock) { this._dataStream = dataStream; this._headerStream = headerStream || dataStream; this._initialized = false; this._blockOffset = currentBlockOffset || 0; this._objectIndex = indexWithinCurrentBlock || 0; this._initialBlockOffset = currentBlockOffset || 0; } async initialize(options = {}) { const header = await AvroParser.readFixedBytes(this._headerStream, AVRO_INIT_BYTES.length, { abortSignal: options.abortSignal, }); if (!arraysEqual(header, AVRO_INIT_BYTES)) { throw new Error("Stream is not an Avro file."); } // File metadata is written as if defined by the following map schema: // { "type": "map", "values": "bytes"} this._metadata = await AvroParser.readMap(this._headerStream, AvroParser.readString, { abortSignal: options.abortSignal, }); // Validate codec const codec = this._metadata[AVRO_CODEC_KEY]; if (!(codec === undefined || codec === null || codec === "null")) { throw new Error("Codecs are not supported"); } // The 16-byte, randomly-generated sync marker for this file. 
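// For reference, initialize() walks the Avro object container layout: the 4-byte magic
// "Obj\x01" (AVRO_INIT_BYTES), a file metadata map of bytes values (only the "null" codec is
// accepted here), this 16-byte sync marker, and then repeated data blocks, each carrying an
// object count, a byte length, the serialized objects, and a trailing copy of the sync marker.
// The writer schema comes from the "avro.schema" metadata entry parsed just below.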
this._syncMarker = await AvroParser.readFixedBytes(this._headerStream, AVRO_SYNC_MARKER_SIZE, { abortSignal: options.abortSignal, }); // Parse the schema const schema = JSON.parse(this._metadata[AVRO_SCHEMA_KEY]); this._itemType = AvroType.fromSchema(schema); if (this._blockOffset === 0) { this._blockOffset = this._initialBlockOffset + this._dataStream.position; } this._itemsRemainingInBlock = await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal, }); // skip block length await AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal }); this._initialized = true; if (this._objectIndex && this._objectIndex > 0) { for (let i = 0; i < this._objectIndex; i++) { await this._itemType.read(this._dataStream, { abortSignal: options.abortSignal }); this._itemsRemainingInBlock--; } } } hasNext() { return !this._initialized || this._itemsRemainingInBlock > 0; } parseObjects() { return tslib.__asyncGenerator(this, arguments, function* parseObjects_1(options = {}) { if (!this._initialized) { yield tslib.__await(this.initialize(options)); } while (this.hasNext()) { const result = yield tslib.__await(this._itemType.read(this._dataStream, { abortSignal: options.abortSignal, })); this._itemsRemainingInBlock--; this._objectIndex++; if (this._itemsRemainingInBlock === 0) { const marker = yield tslib.__await(AvroParser.readFixedBytes(this._dataStream, AVRO_SYNC_MARKER_SIZE, { abortSignal: options.abortSignal, })); this._blockOffset = this._initialBlockOffset + this._dataStream.position; this._objectIndex = 0; if (!arraysEqual(this._syncMarker, marker)) { throw new Error("Stream is not a valid Avro file."); } try { this._itemsRemainingInBlock = yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal, })); } catch (_a) { // We hit the end of the stream. this._itemsRemainingInBlock = 0; } if (this._itemsRemainingInBlock > 0) { // Ignore block size yield tslib.__await(AvroParser.readLong(this._dataStream, { abortSignal: options.abortSignal })); } } yield yield tslib.__await(result); } }); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. class AvroReadable { } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. const ABORT_ERROR = new abortController.AbortError("Reading from the avro stream was aborted."); class AvroReadableFromStream extends AvroReadable { toUint8Array(data) { if (typeof data === "string") { return Buffer.from(data); } return data; } constructor(readable) { super(); this._readable = readable; this._position = 0; } get position() { return this._position; } async read(size, options = {}) { var _a; if ((_a = options.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { throw ABORT_ERROR; } if (size < 0) { throw new Error(`size parameter should be positive: ${size}`); } if (size === 0) { return new Uint8Array(); } if (!this._readable.readable) { throw new Error("Stream no longer readable."); } // See if there is already enough data. const chunk = this._readable.read(size); if (chunk) { this._position += chunk.length; // chunk.length maybe less than desired size if the stream ends. 
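// Fast path: Readable.read(size) returned buffered data synchronously. When it returns null, the
// else branch below waits for "readable" events and resolves with the next chunk read(size)
// yields (possibly shorter than `size` once the stream ends), rejecting on "error", "end",
// "close", or when the optional abortSignal fires.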
return this.toUint8Array(chunk); } else { // register callback to wait for enough data to read return new Promise((resolve, reject) => { /* eslint-disable @typescript-eslint/no-use-before-define */ const cleanUp = () => { this._readable.removeListener("readable", readableCallback); this._readable.removeListener("error", rejectCallback); this._readable.removeListener("end", rejectCallback); this._readable.removeListener("close", rejectCallback); if (options.abortSignal) { options.abortSignal.removeEventListener("abort", abortHandler); } }; const readableCallback = () => { const callbackChunk = this._readable.read(size); if (callbackChunk) { this._position += callbackChunk.length; cleanUp(); // callbackChunk.length maybe less than desired size if the stream ends. resolve(this.toUint8Array(callbackChunk)); } }; const rejectCallback = () => { cleanUp(); reject(); }; const abortHandler = () => { cleanUp(); reject(ABORT_ERROR); }; this._readable.on("readable", readableCallback); this._readable.once("error", rejectCallback); this._readable.once("end", rejectCallback); this._readable.once("close", rejectCallback); if (options.abortSignal) { options.abortSignal.addEventListener("abort", abortHandler); } /* eslint-enable @typescript-eslint/no-use-before-define */ }); } } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * A Node.js BlobQuickQueryStream will internally parse avro data stream for blob query. */ class BlobQuickQueryStream extends stream.Readable { /** * Creates an instance of BlobQuickQueryStream. * * @param source - The current ReadableStream returned from getter * @param options - */ constructor(source, options = {}) { super(); this.avroPaused = true; this.source = source; this.onProgress = options.onProgress; this.onError = options.onError; this.avroReader = new AvroReader(new AvroReadableFromStream(this.source)); this.avroIter = this.avroReader.parseObjects({ abortSignal: options.abortSignal }); } _read() { if (this.avroPaused) { this.readInternal().catch((err) => { this.emit("error", err); }); } } async readInternal() { this.avroPaused = false; let avroNext; do { avroNext = await this.avroIter.next(); if (avroNext.done) { break; } const obj = avroNext.value; const schema = obj.$schema; if (typeof schema !== "string") { throw Error("Missing schema in avro record."); } switch (schema) { case "com.microsoft.azure.storage.queryBlobContents.resultData": { const data = obj.data; if (data instanceof Uint8Array === false) { throw Error("Invalid data in avro result record."); } if (!this.push(Buffer.from(data))) { this.avroPaused = true; } } break; case "com.microsoft.azure.storage.queryBlobContents.progress": { const bytesScanned = obj.bytesScanned; if (typeof bytesScanned !== "number") { throw Error("Invalid bytesScanned in avro progress record."); } if (this.onProgress) { this.onProgress({ loadedBytes: bytesScanned }); } } break; case "com.microsoft.azure.storage.queryBlobContents.end": if (this.onProgress) { const totalBytes = obj.totalBytes; if (typeof totalBytes !== "number") { throw Error("Invalid totalBytes in avro end record."); } this.onProgress({ loadedBytes: totalBytes }); } this.push(null); break; case "com.microsoft.azure.storage.queryBlobContents.error": if (this.onError) { const fatal = obj.fatal; if (typeof fatal !== "boolean") { throw Error("Invalid fatal in avro error record."); } const name = obj.name; if (typeof name !== "string") { throw Error("Invalid name in avro error record."); } const 
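// The quick-query Avro stream handles four record schemas from the
// com.microsoft.azure.storage.queryBlobContents namespace: resultData (raw bytes pushed
// downstream), progress (bytesScanned reported through onProgress), end (totalBytes reported,
// then the stream is closed), and error (fatal, name, description, position passed to onError),
// as the surrounding switch shows.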
description = obj.description; if (typeof description !== "string") { throw Error("Invalid description in avro error record."); } const position = obj.position; if (typeof position !== "number") { throw Error("Invalid position in avro error record."); } this.onError({ position, name, isFatal: fatal, description, }); } break; default: throw Error(`Unknown schema ${schema} in avro progress record.`); } } while (!avroNext.done && !this.avroPaused); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * BlobQueryResponse implements BlobDownloadResponseModel interface, and in Node.js runtime it will * parse avor data returned by blob query. */ class BlobQueryResponse { /** * Indicates that the service supports * requests for partial file content. * * @readonly */ get acceptRanges() { return this.originalResponse.acceptRanges; } /** * Returns if it was previously specified * for the file. * * @readonly */ get cacheControl() { return this.originalResponse.cacheControl; } /** * Returns the value that was specified * for the 'x-ms-content-disposition' header and specifies how to process the * response. * * @readonly */ get contentDisposition() { return this.originalResponse.contentDisposition; } /** * Returns the value that was specified * for the Content-Encoding request header. * * @readonly */ get contentEncoding() { return this.originalResponse.contentEncoding; } /** * Returns the value that was specified * for the Content-Language request header. * * @readonly */ get contentLanguage() { return this.originalResponse.contentLanguage; } /** * The current sequence number for a * page blob. This header is not returned for block blobs or append blobs. * * @readonly */ get blobSequenceNumber() { return this.originalResponse.blobSequenceNumber; } /** * The blob's type. Possible values include: * 'BlockBlob', 'PageBlob', 'AppendBlob'. * * @readonly */ get blobType() { return this.originalResponse.blobType; } /** * The number of bytes present in the * response body. * * @readonly */ get contentLength() { return this.originalResponse.contentLength; } /** * If the file has an MD5 hash and the * request is to read the full file, this response header is returned so that * the client can check for message content integrity. If the request is to * read a specified range and the 'x-ms-range-get-content-md5' is set to * true, then the request returns an MD5 hash for the range, as long as the * range size is less than or equal to 4 MB. If neither of these sets of * conditions is true, then no value is returned for the 'Content-MD5' * header. * * @readonly */ get contentMD5() { return this.originalResponse.contentMD5; } /** * Indicates the range of bytes returned if * the client requested a subset of the file by setting the Range request * header. * * @readonly */ get contentRange() { return this.originalResponse.contentRange; } /** * The content type specified for the file. * The default content type is 'application/octet-stream' * * @readonly */ get contentType() { return this.originalResponse.contentType; } /** * Conclusion time of the last attempted * Copy File operation where this file was the destination file. This value * can specify the time of a completed, aborted, or failed copy attempt. * * @readonly */ get copyCompletedOn() { return undefined; } /** * String identifier for the last attempted Copy * File operation where this file was the destination file. 
* * @readonly */ get copyId() { return this.originalResponse.copyId; } /** * Contains the number of bytes copied and * the total bytes in the source in the last attempted Copy File operation * where this file was the destination file. Can show between 0 and * Content-Length bytes copied. * * @readonly */ get copyProgress() { return this.originalResponse.copyProgress; } /** * URL up to 2KB in length that specifies the * source file used in the last attempted Copy File operation where this file * was the destination file. * * @readonly */ get copySource() { return this.originalResponse.copySource; } /** * State of the copy operation * identified by 'x-ms-copy-id'. Possible values include: 'pending', * 'success', 'aborted', 'failed' * * @readonly */ get copyStatus() { return this.originalResponse.copyStatus; } /** * Only appears when * x-ms-copy-status is failed or pending. Describes cause of fatal or * non-fatal copy operation failure. * * @readonly */ get copyStatusDescription() { return this.originalResponse.copyStatusDescription; } /** * When a blob is leased, * specifies whether the lease is of infinite or fixed duration. Possible * values include: 'infinite', 'fixed'. * * @readonly */ get leaseDuration() { return this.originalResponse.leaseDuration; } /** * Lease state of the blob. Possible * values include: 'available', 'leased', 'expired', 'breaking', 'broken'. * * @readonly */ get leaseState() { return this.originalResponse.leaseState; } /** * The current lease status of the * blob. Possible values include: 'locked', 'unlocked'. * * @readonly */ get leaseStatus() { return this.originalResponse.leaseStatus; } /** * A UTC date/time value generated by the service that * indicates the time at which the response was initiated. * * @readonly */ get date() { return this.originalResponse.date; } /** * The number of committed blocks * present in the blob. This header is returned only for append blobs. * * @readonly */ get blobCommittedBlockCount() { return this.originalResponse.blobCommittedBlockCount; } /** * The ETag contains a value that you can use to * perform operations conditionally, in quotes. * * @readonly */ get etag() { return this.originalResponse.etag; } /** * The error code. * * @readonly */ get errorCode() { return this.originalResponse.errorCode; } /** * The value of this header is set to * true if the file data and application metadata are completely encrypted * using the specified algorithm. Otherwise, the value is set to false (when * the file is unencrypted, or if only parts of the file/application metadata * are encrypted). * * @readonly */ get isServerEncrypted() { return this.originalResponse.isServerEncrypted; } /** * If the blob has a MD5 hash, and if * request contains range header (Range or x-ms-range), this response header * is returned with the value of the whole blob's MD5 value. This value may * or may not be equal to the value returned in Content-MD5 header, with the * latter calculated from the requested range. * * @readonly */ get blobContentMD5() { return this.originalResponse.blobContentMD5; } /** * Returns the date and time the file was last * modified. Any operation that modifies the file or its properties updates * the last modified time. * * @readonly */ get lastModified() { return this.originalResponse.lastModified; } /** * A name-value pair * to associate with a file storage object. 
* * @readonly */ get metadata() { return this.originalResponse.metadata; } /** * This header uniquely identifies the request * that was made and can be used for troubleshooting the request. * * @readonly */ get requestId() { return this.originalResponse.requestId; } /** * If a client request id header is sent in the request, this header will be present in the * response with the same value. * * @readonly */ get clientRequestId() { return this.originalResponse.clientRequestId; } /** * Indicates the version of the File service used * to execute the request. * * @readonly */ get version() { return this.originalResponse.version; } /** * The SHA-256 hash of the encryption key used to encrypt the blob. This value is only returned * when the blob was encrypted with a customer-provided key. * * @readonly */ get encryptionKeySha256() { return this.originalResponse.encryptionKeySha256; } /** * If the request is to read a specified range and the x-ms-range-get-content-crc64 is set to * true, then the request returns a crc64 for the range, as long as the range size is less than * or equal to 4 MB. If both x-ms-range-get-content-crc64 & x-ms-range-get-content-md5 is * specified in the same request, it will fail with 400(Bad Request) */ get contentCrc64() { return this.originalResponse.contentCrc64; } /** * The response body as a browser Blob. * Always undefined in node.js. * * @readonly */ get blobBody() { return undefined; } /** * The response body as a node.js Readable stream. * Always undefined in the browser. * * It will parse avor data returned by blob query. * * @readonly */ get readableStreamBody() { return coreUtil.isNode ? this.blobDownloadStream : undefined; } /** * The HTTP response. */ get _response() { return this.originalResponse._response; } /** * Creates an instance of BlobQueryResponse. * * @param originalResponse - * @param options - */ constructor(originalResponse, options = {}) { this.originalResponse = originalResponse; this.blobDownloadStream = new BlobQuickQueryStream(this.originalResponse.readableStreamBody, options); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Represents the access tier on a blob. * For detailed information about block blob level tiering see {@link https://docs.microsoft.com/azure/storage/blobs/storage-blob-storage-tiers|Hot, cool and archive storage tiers.} */ exports.BlockBlobTier = void 0; (function (BlockBlobTier) { /** * Optimized for storing data that is accessed frequently. */ BlockBlobTier["Hot"] = "Hot"; /** * Optimized for storing data that is infrequently accessed and stored for at least 30 days. */ BlockBlobTier["Cool"] = "Cool"; /** * Optimized for storing data that is rarely accessed. */ BlockBlobTier["Cold"] = "Cold"; /** * Optimized for storing data that is rarely accessed and stored for at least 180 days * with flexible latency requirements (on the order of hours). */ BlockBlobTier["Archive"] = "Archive"; })(exports.BlockBlobTier || (exports.BlockBlobTier = {})); /** * Specifies the page blob tier to set the blob to. This is only applicable to page blobs on premium storage accounts. * Please see {@link https://docs.microsoft.com/azure/storage/storage-premium-storage#scalability-and-performance-targets|here} * for detailed information on the corresponding IOPS and throughput per PageBlobTier. */ exports.PremiumPageBlobTier = void 0; (function (PremiumPageBlobTier) { /** * P4 Tier. */ PremiumPageBlobTier["P4"] = "P4"; /** * P6 Tier. */ PremiumPageBlobTier["P6"] = "P6"; /** * P10 Tier. 
*/ PremiumPageBlobTier["P10"] = "P10"; /** * P15 Tier. */ PremiumPageBlobTier["P15"] = "P15"; /** * P20 Tier. */ PremiumPageBlobTier["P20"] = "P20"; /** * P30 Tier. */ PremiumPageBlobTier["P30"] = "P30"; /** * P40 Tier. */ PremiumPageBlobTier["P40"] = "P40"; /** * P50 Tier. */ PremiumPageBlobTier["P50"] = "P50"; /** * P60 Tier. */ PremiumPageBlobTier["P60"] = "P60"; /** * P70 Tier. */ PremiumPageBlobTier["P70"] = "P70"; /** * P80 Tier. */ PremiumPageBlobTier["P80"] = "P80"; })(exports.PremiumPageBlobTier || (exports.PremiumPageBlobTier = {})); function toAccessTier(tier) { if (tier === undefined) { return undefined; } return tier; // No more check if string is a valid AccessTier, and left this to underlay logic to decide(service). } function ensureCpkIfSpecified(cpk, isHttps) { if (cpk && !isHttps) { throw new RangeError("Customer-provided encryption key must be used over HTTPS."); } if (cpk && !cpk.encryptionAlgorithm) { cpk.encryptionAlgorithm = EncryptionAlgorithmAES25; } } /** * Defines the known cloud audiences for Storage. */ exports.StorageBlobAudience = void 0; (function (StorageBlobAudience) { /** * The OAuth scope to use to retrieve an AAD token for Azure Storage. */ StorageBlobAudience["StorageOAuthScopes"] = "https://storage.azure.com/.default"; /** * The OAuth scope to use to retrieve an AAD token for Azure Disk. */ StorageBlobAudience["DiskComputeOAuthScopes"] = "https://disk.compute.azure.com/.default"; })(exports.StorageBlobAudience || (exports.StorageBlobAudience = {})); /** * * To get OAuth audience for a storage account for blob service. */ function getBlobServiceAccountAudience(storageAccountName) { return `https://${storageAccountName}.blob.core.windows.net/.default`; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Function that converts PageRange and ClearRange to a common Range object. * PageRange and ClearRange have start and end while Range offset and count * this function normalizes to Range. * @param response - Model PageBlob Range response */ function rangeResponseFromModel(response) { const pageRange = (response._response.parsedBody.pageRange || []).map((x) => ({ offset: x.start, count: x.end - x.start, })); const clearRange = (response._response.parsedBody.clearRange || []).map((x) => ({ offset: x.start, count: x.end - x.start, })); return Object.assign(Object.assign({}, response), { pageRange, clearRange, _response: Object.assign(Object.assign({}, response._response), { parsedBody: { pageRange, clearRange, } }) }); } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * This is the poller returned by {@link BlobClient.beginCopyFromURL}. * This can not be instantiated directly outside of this package. * * @hidden */ class BlobBeginCopyFromUrlPoller extends coreLro.Poller { constructor(options) { const { blobClient, copySource, intervalInMs = 15000, onProgress, resumeFrom, startCopyFromURLOptions, } = options; let state; if (resumeFrom) { state = JSON.parse(resumeFrom).state; } const operation = makeBlobBeginCopyFromURLPollOperation(Object.assign(Object.assign({}, state), { blobClient, copySource, startCopyFromURLOptions })); super(operation); if (typeof onProgress === "function") { this.onProgress(onProgress); } this.intervalInMs = intervalInMs; } delay() { return coreUtil.delay(this.intervalInMs); } } /** * Note: Intentionally using function expression over arrow function expression * so that the function can be invoked with a different context. * This affects what `this` refers to. 
* @hidden */ const cancel = async function cancel(options = {}) { const state = this.state; const { copyId } = state; if (state.isCompleted) { return makeBlobBeginCopyFromURLPollOperation(state); } if (!copyId) { state.isCancelled = true; return makeBlobBeginCopyFromURLPollOperation(state); } // if abortCopyFromURL throws, it will bubble up to user's poller.cancelOperation call await state.blobClient.abortCopyFromURL(copyId, { abortSignal: options.abortSignal, }); state.isCancelled = true; return makeBlobBeginCopyFromURLPollOperation(state); }; /** * Note: Intentionally using function expression over arrow function expression * so that the function can be invoked with a different context. * This affects what `this` refers to. * @hidden */ const update = async function update(options = {}) { const state = this.state; const { blobClient, copySource, startCopyFromURLOptions } = state; if (!state.isStarted) { state.isStarted = true; const result = await blobClient.startCopyFromURL(copySource, startCopyFromURLOptions); // copyId is needed to abort state.copyId = result.copyId; if (result.copyStatus === "success") { state.result = result; state.isCompleted = true; } } else if (!state.isCompleted) { try { const result = await state.blobClient.getProperties({ abortSignal: options.abortSignal }); const { copyStatus, copyProgress } = result; const prevCopyProgress = state.copyProgress; if (copyProgress) { state.copyProgress = copyProgress; } if (copyStatus === "pending" && copyProgress !== prevCopyProgress && typeof options.fireProgress === "function") { // trigger in setTimeout, or swallow error? options.fireProgress(state); } else if (copyStatus === "success") { state.result = result; state.isCompleted = true; } else if (copyStatus === "failed") { state.error = new Error(`Blob copy failed with reason: "${result.copyStatusDescription || "unknown"}"`); state.isCompleted = true; } } catch (err) { state.error = err; state.isCompleted = true; } } return makeBlobBeginCopyFromURLPollOperation(state); }; /** * Note: Intentionally using function expression over arrow function expression * so that the function can be invoked with a different context. * This affects what `this` refers to. * @hidden */ const toString = function toString() { return JSON.stringify({ state: this.state }, (key, value) => { // remove blobClient from serialized state since a client can't be hydrated from this info. if (key === "blobClient") { return undefined; } return value; }); }; /** * Creates a poll operation given the provided state. * @hidden */ function makeBlobBeginCopyFromURLPollOperation(state) { return { state: Object.assign({}, state), cancel, toString, update, }; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Generate a range string. For example: * * "bytes=255-" or "bytes=0-511" * * @param iRange - */ function rangeToString(iRange) { if (iRange.offset < 0) { throw new RangeError(`Range.offset cannot be smaller than 0.`); } if (iRange.count && iRange.count <= 0) { throw new RangeError(`Range.count must be larger than 0. Leave it undefined if you want a range from offset to the end.`); } return iRange.count ? `bytes=${iRange.offset}-${iRange.offset + iRange.count - 1}` : `bytes=${iRange.offset}-`; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. // In browser, during webpack or browserify bundling, this module will be replaced by 'events' // https://github.com/Gozala/events /** * States for Batch. 
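 * (BatchStates.Good means operations keep executing; BatchStates.Error means an
 * operation failed and the remaining queue is abandoned.)
 *
 * Illustrative sketch of how the Batch class defined below is used internally
 * (for example by BlobClient.downloadToBuffer); `work` is assumed to be an
 * array of async thunks:
 *
 * ```js
 * const batch = new Batch(5); // at most 5 operations in flight at a time
 * for (const job of work) {
 *   batch.addOperation(job);
 * }
 * await batch.do(); // resolves when all operations finish, rejects on the first error
 * ```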
*/ var BatchStates; (function (BatchStates) { BatchStates[BatchStates["Good"] = 0] = "Good"; BatchStates[BatchStates["Error"] = 1] = "Error"; })(BatchStates || (BatchStates = {})); /** * Batch provides basic parallel execution with concurrency limits. * Will stop execute left operations when one of the executed operation throws an error. * But Batch cannot cancel ongoing operations, you need to cancel them by yourself. */ class Batch { /** * Creates an instance of Batch. * @param concurrency - */ constructor(concurrency = 5) { /** * Number of active operations under execution. */ this.actives = 0; /** * Number of completed operations under execution. */ this.completed = 0; /** * Offset of next operation to be executed. */ this.offset = 0; /** * Operation array to be executed. */ this.operations = []; /** * States of Batch. When an error happens, state will turn into error. * Batch will stop execute left operations. */ this.state = BatchStates.Good; if (concurrency < 1) { throw new RangeError("concurrency must be larger than 0"); } this.concurrency = concurrency; this.emitter = new events.EventEmitter(); } /** * Add a operation into queue. * * @param operation - */ addOperation(operation) { this.operations.push(async () => { try { this.actives++; await operation(); this.actives--; this.completed++; this.parallelExecute(); } catch (error) { this.emitter.emit("error", error); } }); } /** * Start execute operations in the queue. * */ async do() { if (this.operations.length === 0) { return Promise.resolve(); } this.parallelExecute(); return new Promise((resolve, reject) => { this.emitter.on("finish", resolve); this.emitter.on("error", (error) => { this.state = BatchStates.Error; reject(error); }); }); } /** * Get next operation to be executed. Return null when reaching ends. * */ nextOperation() { if (this.offset < this.operations.length) { return this.operations[this.offset++]; } return null; } /** * Start execute operations. One one the most important difference between * this method with do() is that do() wraps as an sync method. * */ parallelExecute() { if (this.state === BatchStates.Error) { return; } if (this.completed >= this.operations.length) { this.emitter.emit("finish"); return; } while (this.actives < this.concurrency) { const operation = this.nextOperation(); if (operation) { operation(); } else { return; } } } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * This class generates a readable stream from the data in an array of buffers. */ class BuffersStream extends stream.Readable { /** * Creates an instance of BuffersStream that will emit the data * contained in the array of buffers. * * @param buffers - Array of buffers containing the data * @param byteLength - The total length of data contained in the buffers */ constructor(buffers, byteLength, options) { super(options); this.buffers = buffers; this.byteLength = byteLength; this.byteOffsetInCurrentBuffer = 0; this.bufferIndex = 0; this.pushedBytesLength = 0; // check byteLength is no larger than buffers[] total length let buffersLength = 0; for (const buf of this.buffers) { buffersLength += buf.byteLength; } if (buffersLength < this.byteLength) { throw new Error("Data size shouldn't be larger than the total length of buffers."); } } /** * Internal _read() that will be called when the stream wants to pull more data in. * * @param size - Optional. 
The size of data to be read */ _read(size) { if (this.pushedBytesLength >= this.byteLength) { this.push(null); } if (!size) { size = this.readableHighWaterMark; } const outBuffers = []; let i = 0; while (i < size && this.pushedBytesLength < this.byteLength) { // The last buffer may be longer than the data it contains. const remainingDataInAllBuffers = this.byteLength - this.pushedBytesLength; const remainingCapacityInThisBuffer = this.buffers[this.bufferIndex].byteLength - this.byteOffsetInCurrentBuffer; const remaining = Math.min(remainingCapacityInThisBuffer, remainingDataInAllBuffers); if (remaining > size - i) { // chunkSize = size - i const end = this.byteOffsetInCurrentBuffer + size - i; outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); this.pushedBytesLength += size - i; this.byteOffsetInCurrentBuffer = end; i = size; break; } else { // chunkSize = remaining const end = this.byteOffsetInCurrentBuffer + remaining; outBuffers.push(this.buffers[this.bufferIndex].slice(this.byteOffsetInCurrentBuffer, end)); if (remaining === remainingCapacityInThisBuffer) { // this.buffers[this.bufferIndex] used up, shift to next one this.byteOffsetInCurrentBuffer = 0; this.bufferIndex++; } else { this.byteOffsetInCurrentBuffer = end; } this.pushedBytesLength += remaining; i += remaining; } } if (outBuffers.length > 1) { this.push(Buffer.concat(outBuffers)); } else if (outBuffers.length === 1) { this.push(outBuffers[0]); } } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. const maxBufferLength = buffer.constants.MAX_LENGTH; /** * This class provides a buffer container which conceptually has no hard size limit. * It accepts a capacity, an array of input buffers and the total length of input data. * It will allocate an internal "buffer" of the capacity and fill the data in the input buffers * into the internal "buffer" serially with respect to the total length. * Then by calling PooledBuffer.getReadableStream(), you can get a readable stream * assembled from all the data in the internal "buffer". */ class PooledBuffer { /** * The size of the data contained in the pooled buffers. */ get size() { return this._size; } constructor(capacity, buffers, totalLength) { /** * Internal buffers used to keep the data. * Each buffer has a length of the maxBufferLength except last one. */ this.buffers = []; this.capacity = capacity; this._size = 0; // allocate const bufferNum = Math.ceil(capacity / maxBufferLength); for (let i = 0; i < bufferNum; i++) { let len = i === bufferNum - 1 ? capacity % maxBufferLength : maxBufferLength; if (len === 0) { len = maxBufferLength; } this.buffers.push(Buffer.allocUnsafe(len)); } if (buffers) { this.fill(buffers, totalLength); } } /** * Fill the internal buffers with data in the input buffers serially * with respect to the total length and the total capacity of the internal buffers. * Data copied will be shift out of the input buffers. * * @param buffers - Input buffers containing the data to be filled in the pooled buffer * @param totalLength - Total length of the data to be filled in. 
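 *
 * Illustrative sketch (internal use only; note that fill() splices the
 * consumed chunks out of the input array, as described above):
 *
 * ```js
 * const pooled = new PooledBuffer(8); // capacity of 8 bytes
 * const chunks = [Buffer.from("abcd"), Buffer.from("efgh")];
 * pooled.fill(chunks, 8); // chunks is now empty, pooled.size === 8
 * pooled.getReadableStream().pipe(process.stdout); // emits "abcdefgh"
 * ```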
* */ fill(buffers, totalLength) { this._size = Math.min(this.capacity, totalLength); let i = 0, j = 0, targetOffset = 0, sourceOffset = 0, totalCopiedNum = 0; while (totalCopiedNum < this._size) { const source = buffers[i]; const target = this.buffers[j]; const copiedNum = source.copy(target, targetOffset, sourceOffset); totalCopiedNum += copiedNum; sourceOffset += copiedNum; targetOffset += copiedNum; if (sourceOffset === source.length) { i++; sourceOffset = 0; } if (targetOffset === target.length) { j++; targetOffset = 0; } } // clear copied from source buffers buffers.splice(0, i); if (buffers.length > 0) { buffers[0] = buffers[0].slice(sourceOffset); } } /** * Get the readable stream assembled from all the data in the internal buffers. * */ getReadableStream() { return new BuffersStream(this.buffers, this.size); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * This class accepts a Node.js Readable stream as input, and keeps reading data * from the stream into the internal buffer structure, until it reaches maxBuffers. * Every available buffer will try to trigger outgoingHandler. * * The internal buffer structure includes an incoming buffer array, and a outgoing * buffer array. The incoming buffer array includes the "empty" buffers can be filled * with new incoming data. The outgoing array includes the filled buffers to be * handled by outgoingHandler. Every above buffer size is defined by parameter bufferSize. * * NUM_OF_ALL_BUFFERS = BUFFERS_IN_INCOMING + BUFFERS_IN_OUTGOING + BUFFERS_UNDER_HANDLING * * NUM_OF_ALL_BUFFERS lesser than or equal to maxBuffers * * PERFORMANCE IMPROVEMENT TIPS: * 1. Input stream highWaterMark is better to set a same value with bufferSize * parameter, which will avoid Buffer.concat() operations. * 2. concurrency should set a smaller value than maxBuffers, which is helpful to * reduce the possibility when a outgoing handler waits for the stream data. * in this situation, outgoing handlers are blocked. * Outgoing queue shouldn't be empty. */ class BufferScheduler { /** * Creates an instance of BufferScheduler. * * @param readable - A Node.js Readable stream * @param bufferSize - Buffer size of every maintained buffer * @param maxBuffers - How many buffers can be allocated * @param outgoingHandler - An async function scheduled to be * triggered when a buffer fully filled * with stream data * @param concurrency - Concurrency of executing outgoingHandlers (>0) * @param encoding - [Optional] Encoding of Readable stream when it's a string stream */ constructor(readable, bufferSize, maxBuffers, outgoingHandler, concurrency, encoding) { /** * An internal event emitter. */ this.emitter = new events.EventEmitter(); /** * An internal offset marker to track data offset in bytes of next outgoingHandler. */ this.offset = 0; /** * An internal marker to track whether stream is end. */ this.isStreamEnd = false; /** * An internal marker to track whether stream or outgoingHandler returns error. */ this.isError = false; /** * How many handlers are executing. */ this.executingOutgoingHandlers = 0; /** * How many buffers have been allocated. */ this.numBuffers = 0; /** * Because this class doesn't know how much data every time stream pops, which * is defined by highWaterMarker of the stream. So BufferScheduler will cache * data received from the stream, when data in unresolvedDataArray exceeds the * blockSize defined, it will try to concat a blockSize of buffer, fill into available * buffers from incoming and push to outgoing array. 
*/ this.unresolvedDataArray = []; /** * How much data consisted in unresolvedDataArray. */ this.unresolvedLength = 0; /** * The array includes all the available buffers can be used to fill data from stream. */ this.incoming = []; /** * The array (queue) includes all the buffers filled from stream data. */ this.outgoing = []; if (bufferSize <= 0) { throw new RangeError(`bufferSize must be larger than 0, current is ${bufferSize}`); } if (maxBuffers <= 0) { throw new RangeError(`maxBuffers must be larger than 0, current is ${maxBuffers}`); } if (concurrency <= 0) { throw new RangeError(`concurrency must be larger than 0, current is ${concurrency}`); } this.bufferSize = bufferSize; this.maxBuffers = maxBuffers; this.readable = readable; this.outgoingHandler = outgoingHandler; this.concurrency = concurrency; this.encoding = encoding; } /** * Start the scheduler, will return error when stream of any of the outgoingHandlers * returns error. * */ async do() { return new Promise((resolve, reject) => { this.readable.on("data", (data) => { data = typeof data === "string" ? Buffer.from(data, this.encoding) : data; this.appendUnresolvedData(data); if (!this.resolveData()) { this.readable.pause(); } }); this.readable.on("error", (err) => { this.emitter.emit("error", err); }); this.readable.on("end", () => { this.isStreamEnd = true; this.emitter.emit("checkEnd"); }); this.emitter.on("error", (err) => { this.isError = true; this.readable.pause(); reject(err); }); this.emitter.on("checkEnd", () => { if (this.outgoing.length > 0) { this.triggerOutgoingHandlers(); return; } if (this.isStreamEnd && this.executingOutgoingHandlers === 0) { if (this.unresolvedLength > 0 && this.unresolvedLength < this.bufferSize) { const buffer = this.shiftBufferFromUnresolvedDataArray(); this.outgoingHandler(() => buffer.getReadableStream(), buffer.size, this.offset) .then(resolve) .catch(reject); } else if (this.unresolvedLength >= this.bufferSize) { return; } else { resolve(); } } }); }); } /** * Insert a new data into unresolved array. * * @param data - */ appendUnresolvedData(data) { this.unresolvedDataArray.push(data); this.unresolvedLength += data.length; } /** * Try to shift a buffer with size in blockSize. The buffer returned may be less * than blockSize when data in unresolvedDataArray is less than bufferSize. * */ shiftBufferFromUnresolvedDataArray(buffer) { if (!buffer) { buffer = new PooledBuffer(this.bufferSize, this.unresolvedDataArray, this.unresolvedLength); } else { buffer.fill(this.unresolvedDataArray, this.unresolvedLength); } this.unresolvedLength -= buffer.size; return buffer; } /** * Resolve data in unresolvedDataArray. For every buffer with size in blockSize * shifted, it will try to get (or allocate a buffer) from incoming, and fill it, * then push it into outgoing to be handled by outgoing handler. * * Return false when available buffers in incoming are not enough, else true. * * @returns Return false when buffers in incoming are not enough, else true. */ resolveData() { while (this.unresolvedLength >= this.bufferSize) { let buffer; if (this.incoming.length > 0) { buffer = this.incoming.shift(); this.shiftBufferFromUnresolvedDataArray(buffer); } else { if (this.numBuffers < this.maxBuffers) { buffer = this.shiftBufferFromUnresolvedDataArray(); this.numBuffers++; } else { // No available buffer, wait for buffer returned return false; } } this.outgoing.push(buffer); this.triggerOutgoingHandlers(); } return true; } /** * Try to trigger a outgoing handler for every buffer in outgoing. 
Stop when * concurrency reaches. */ async triggerOutgoingHandlers() { let buffer; do { if (this.executingOutgoingHandlers >= this.concurrency) { return; } buffer = this.outgoing.shift(); if (buffer) { this.triggerOutgoingHandler(buffer); } } while (buffer); } /** * Trigger a outgoing handler for a buffer shifted from outgoing. * * @param buffer - */ async triggerOutgoingHandler(buffer) { const bufferLength = buffer.size; this.executingOutgoingHandlers++; this.offset += bufferLength; try { await this.outgoingHandler(() => buffer.getReadableStream(), bufferLength, this.offset - bufferLength); } catch (err) { this.emitter.emit("error", err); return; } this.executingOutgoingHandlers--; this.reuseBuffer(buffer); this.emitter.emit("checkEnd"); } /** * Return buffer used by outgoing handler into incoming. * * @param buffer - */ reuseBuffer(buffer) { this.incoming.push(buffer); if (!this.isError && this.resolveData() && !this.isStreamEnd) { this.readable.resume(); } } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * Reads a readable stream into buffer. Fill the buffer from offset to end. * * @param stream - A Node.js Readable stream * @param buffer - Buffer to be filled, length must greater than or equal to offset * @param offset - From which position in the buffer to be filled, inclusive * @param end - To which position in the buffer to be filled, exclusive * @param encoding - Encoding of the Readable stream */ async function streamToBuffer(stream, buffer, offset, end, encoding) { let pos = 0; // Position in stream const count = end - offset; // Total amount of data needed in stream return new Promise((resolve, reject) => { const timeout = setTimeout(() => reject(new Error(`The operation cannot be completed in timeout.`)), REQUEST_TIMEOUT); stream.on("readable", () => { if (pos >= count) { clearTimeout(timeout); resolve(); return; } let chunk = stream.read(); if (!chunk) { return; } if (typeof chunk === "string") { chunk = Buffer.from(chunk, encoding); } // How much data needed in this chunk const chunkLength = pos + chunk.length > count ? count - pos : chunk.length; buffer.fill(chunk.slice(0, chunkLength), offset + pos, offset + pos + chunkLength); pos += chunkLength; }); stream.on("end", () => { clearTimeout(timeout); if (pos < count) { reject(new Error(`Stream drains before getting enough data needed. Data read: ${pos}, data need: ${count}`)); } resolve(); }); stream.on("error", (msg) => { clearTimeout(timeout); reject(msg); }); }); } /** * Reads a readable stream into buffer entirely. * * @param stream - A Node.js Readable stream * @param buffer - Buffer to be filled, length must greater than or equal to offset * @param encoding - Encoding of the Readable stream * @returns with the count of bytes read. * @throws `RangeError` If buffer size is not big enough. */ async function streamToBuffer2(stream, buffer, encoding) { let pos = 0; // Position in stream const bufferSize = buffer.length; return new Promise((resolve, reject) => { stream.on("readable", () => { let chunk = stream.read(); if (!chunk) { return; } if (typeof chunk === "string") { chunk = Buffer.from(chunk, encoding); } if (pos + chunk.length > bufferSize) { reject(new Error(`Stream exceeds buffer size. Buffer size: ${bufferSize}`)); return; } buffer.fill(chunk, pos, pos + chunk.length); pos += chunk.length; }); stream.on("end", () => { resolve(pos); }); stream.on("error", reject); }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Writes the content of a readstream to a local file. 
Returns a Promise which is completed after the file handle is closed. * * @param rs - The read stream. * @param file - Destination file path. */ async function readStreamToLocalFile(rs, file) { return new Promise((resolve, reject) => { const ws = fs__namespace.createWriteStream(file); rs.on("error", (err) => { reject(err); }); ws.on("error", (err) => { reject(err); }); ws.on("close", resolve); rs.pipe(ws); }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Promisified version of fs.stat(). */ const fsStat = util__namespace.promisify(fs__namespace.stat); const fsCreateReadStream = fs__namespace.createReadStream; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * A BlobClient represents a URL to an Azure Storage blob; the blob may be a block blob, * append blob, or page blob. */ class BlobClient extends StorageClient { /** * The name of the blob. */ get name() { return this._name; } /** * The name of the storage container the blob is associated with. */ get containerName() { return this._containerName; } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { options = options || {}; let pipeline; let url; if (isPipelineLike(credentialOrPipelineOrContainerName)) { // (url: string, pipeline: Pipeline) url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) // The second parameter is undefined. Use anonymous credential. 
url = urlOrConnectionString; if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { options = blobNameOrOptions; } pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); ({ blobName: this._name, containerName: this._containerName } = this.getBlobAndContainerNamesFromUrl()); this.blobContext = this.storageClientContext.blob; this._snapshot = getURLParameter(this.url, URLConstants.Parameters.SNAPSHOT); this._versionId = getURLParameter(this.url, URLConstants.Parameters.VERSIONID); } /** * Creates a new BlobClient object identical to the source but with the specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. * * @param snapshot - The snapshot timestamp. * @returns A new BlobClient object identical to the source but with the specified snapshot timestamp */ withSnapshot(snapshot) { return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); } /** * Creates a new BlobClient object pointing to a version of this blob. * Provide "" will remove the versionId and return a Client to the base blob. * * @param versionId - The versionId. * @returns A new BlobClient object pointing to the version of this blob. */ withVersion(versionId) { return new BlobClient(setURLParameter(this.url, URLConstants.Parameters.VERSIONID, versionId.length === 0 ? undefined : versionId), this.pipeline); } /** * Creates a AppendBlobClient object. * */ getAppendBlobClient() { return new AppendBlobClient(this.url, this.pipeline); } /** * Creates a BlockBlobClient object. * */ getBlockBlobClient() { return new BlockBlobClient(this.url, this.pipeline); } /** * Creates a PageBlobClient object. * */ getPageBlobClient() { return new PageBlobClient(this.url, this.pipeline); } /** * Reads or downloads a blob from the system, including its metadata and properties. * You can also call Get Blob to read a snapshot. 
* * * In Node.js, data returns in a Readable stream readableStreamBody * * In browsers, data returns in a promise blobBody * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob * * @param offset - From which position of the blob to download, greater than or equal to 0 * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined * @param options - Optional options to Blob Download operation. * * * Example usage (Node.js): * * ```js * // Download and convert a blob to a string * const downloadBlockBlobResponse = await blobClient.download(); * const downloaded = await streamToBuffer(downloadBlockBlobResponse.readableStreamBody); * console.log("Downloaded blob content:", downloaded.toString()); * * async function streamToBuffer(readableStream) { * return new Promise((resolve, reject) => { * const chunks = []; * readableStream.on("data", (data) => { * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); * }); * readableStream.on("end", () => { * resolve(Buffer.concat(chunks)); * }); * readableStream.on("error", reject); * }); * } * ``` * * Example usage (browser): * * ```js * // Download and convert a blob to a string * const downloadBlockBlobResponse = await blobClient.download(); * const downloaded = await blobToString(await downloadBlockBlobResponse.blobBody); * console.log( * "Downloaded blob content", * downloaded * ); * * async function blobToString(blob: Blob): Promise { * const fileReader = new FileReader(); * return new Promise((resolve, reject) => { * fileReader.onloadend = (ev: any) => { * resolve(ev.target!.result); * }; * fileReader.onerror = reject; * fileReader.readAsText(blob); * }); * } * ``` */ async download(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlobClient-download", options, async (updatedOptions) => { var _a; const res = assertResponse(await this.blobContext.download({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { onDownloadProgress: coreUtil.isNode ? undefined : options.onProgress, // for Node.js, progress is reported by RetriableReadableStream }, range: offset === 0 && !count ? undefined : rangeToString({ offset, count }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions, })); const wrappedRes = Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); // Return browser response immediately if (!coreUtil.isNode) { return wrappedRes; } // We support retrying when download stream unexpected ends in Node.js runtime // Following code shouldn't be bundled into browser build, however some // bundlers may try to bundle following code and "FileReadResponse.ts". 
// In this case, "FileDownloadResponse.browser.ts" will be used as a shim of "FileDownloadResponse.ts" // The config is in package.json "browser" field if (options.maxRetryRequests === undefined || options.maxRetryRequests < 0) { // TODO: Default value or make it a required parameter? options.maxRetryRequests = DEFAULT_MAX_DOWNLOAD_RETRY_REQUESTS; } if (res.contentLength === undefined) { throw new RangeError(`File download response doesn't contain valid content length header`); } if (!res.etag) { throw new RangeError(`File download response doesn't contain valid etag header`); } return new BlobDownloadResponse(wrappedRes, async (start) => { var _a; const updatedDownloadOptions = { leaseAccessConditions: options.conditions, modifiedAccessConditions: { ifMatch: options.conditions.ifMatch || res.etag, ifModifiedSince: options.conditions.ifModifiedSince, ifNoneMatch: options.conditions.ifNoneMatch, ifUnmodifiedSince: options.conditions.ifUnmodifiedSince, ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions, }, range: rangeToString({ count: offset + res.contentLength - start, offset: start, }), rangeGetContentMD5: options.rangeGetContentMD5, rangeGetContentCRC64: options.rangeGetContentCrc64, snapshot: options.snapshot, cpkInfo: options.customerProvidedKey, }; // Debug purpose only // console.log( // `Read from internal stream, range: ${ // updatedOptions.range // }, options: ${JSON.stringify(updatedOptions)}` // ); return (await this.blobContext.download(Object.assign({ abortSignal: options.abortSignal }, updatedDownloadOptions))).readableStreamBody; }, offset, res.contentLength, { maxRetryRequests: options.maxRetryRequests, onProgress: options.onProgress, }); }); } /** * Returns true if the Azure blob resource represented by this client exists; false otherwise. * * NOTE: use this function with care since an existing blob might be deleted by other clients or * applications. Vice versa new blobs might be added by other clients or applications after this * function completes. * * @param options - options to Exists operation. */ async exists(options = {}) { return tracingClient.withSpan("BlobClient-exists", options, async (updatedOptions) => { try { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); await this.getProperties({ abortSignal: options.abortSignal, customerProvidedKey: options.customerProvidedKey, conditions: options.conditions, tracingOptions: updatedOptions.tracingOptions, }); return true; } catch (e) { if (e.statusCode === 404) { // Expected exception when checking blob existence return false; } else if (e.statusCode === 409 && (e.details.errorCode === BlobUsesCustomerSpecifiedEncryptionMsg || e.details.errorCode === BlobDoesNotUseCustomerSpecifiedEncryption)) { // Expected exception when checking blob existence return true; } throw e; } }); } /** * Returns all user-defined metadata, standard HTTP properties, and system properties * for the blob. It does not return the content of the blob. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-properties * * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if * they originally contained uppercase characters. This differs from the metadata keys returned by * the methods of {@link ContainerClient} that list blobs using the `includeMetadata` option, which * will retain their original casing. * * @param options - Optional options to Get Properties operation. 
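 *
 * Example usage (illustrative; assumes an existing `blobClient` instance):
 *
 * ```js
 * const properties = await blobClient.getProperties();
 * console.log(`Size: ${properties.contentLength} bytes`);
 * console.log("Metadata:", properties.metadata); // keys are lowercased, see warning above
 * ```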
*/ async getProperties(options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlobClient-getProperties", options, async (updatedOptions) => { var _a; const res = assertResponse(await this.blobContext.getProperties({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions, })); return Object.assign(Object.assign({}, res), { _response: res._response, objectReplicationDestinationPolicyId: res.objectReplicationPolicyId, objectReplicationSourceProperties: parseObjectReplicationRecord(res.objectReplicationRules) }); }); } /** * Marks the specified blob or snapshot for deletion. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * * @param options - Optional options to Blob Delete operation. */ async delete(options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("BlobClient-delete", options, async (updatedOptions) => { var _a; return assertResponse(await this.blobContext.delete({ abortSignal: options.abortSignal, deleteSnapshots: options.deleteSnapshots, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Marks the specified blob or snapshot for deletion if it exists. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * * @param options - Optional options to Blob Delete operation. */ async deleteIfExists(options = {}) { return tracingClient.withSpan("BlobClient-deleteIfExists", options, async (updatedOptions) => { var _a, _b; try { const res = assertResponse(await this.delete(updatedOptions)); return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobNotFound") { return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } throw e; } }); } /** * Restores the contents and metadata of soft deleted blob and any associated * soft deleted snapshots. Undelete Blob is supported only on version 2017-07-29 * or later. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/undelete-blob * * @param options - Optional options to Blob Undelete operation. */ async undelete(options = {}) { return tracingClient.withSpan("BlobClient-undelete", options, async (updatedOptions) => { return assertResponse(await this.blobContext.undelete({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Sets system properties on the blob. 
* * If no value provided, or no value provided for the specified blob HTTP headers, * these blob HTTP headers without a value will be cleared. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties * * @param blobHTTPHeaders - If no value provided, or no value provided for * the specified blob HTTP headers, these blob HTTP * headers without a value will be cleared. * A common header to set is `blobContentType` * enabling the browser to provide functionality * based on file type. * @param options - Optional options to Blob Set HTTP Headers operation. */ async setHTTPHeaders(blobHTTPHeaders, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlobClient-setHTTPHeaders", options, async (updatedOptions) => { var _a; return assertResponse(await this.blobContext.setHttpHeaders({ abortSignal: options.abortSignal, blobHttpHeaders: blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), // cpkInfo: options.customerProvidedKey, // CPK is not included in Swagger, should change this back when this issue is fixed in Swagger. tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Sets user-defined metadata for the specified blob as one or more name-value pairs. * * If no option provided, or no metadata defined in the parameter, the blob * metadata will be removed. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-metadata * * @param metadata - Replace existing metadata with this value. * If no value provided the existing metadata will be removed. * @param options - Optional options to Set Metadata operation. */ async setMetadata(metadata, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlobClient-setMetadata", options, async (updatedOptions) => { var _a; return assertResponse(await this.blobContext.setMetadata({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Sets tags on the underlying blob. * A blob can have up to 10 tags. Tag keys must be between 1 and 128 characters. Tag values must be between 0 and 256 characters. * Valid tag key and value characters include lower and upper case letters, digits (0-9), * space (' '), plus ('+'), minus ('-'), period ('.'), foward slash ('/'), colon (':'), equals ('='), and underscore ('_'). * * @param tags - * @param options - */ async setTags(tags, options = {}) { return tracingClient.withSpan("BlobClient-setTags", options, async (updatedOptions) => { var _a; return assertResponse(await this.blobContext.setTags({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, tags: toBlobTags(tags), })); }); } /** * Gets the tags associated with the underlying blob. * * @param options - */ async getTags(options = {}) { return tracingClient.withSpan("BlobClient-getTags", options, async (updatedOptions) => { var _a; const response = assertResponse(await this.blobContext.getTags({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, tags: toTags({ blobTagSet: response.blobTagSet }) || {} }); return wrappedResponse; }); } /** * Get a {@link BlobLeaseClient} that manages leases on the blob. * * @param proposeLeaseId - Initial proposed lease Id. * @returns A new BlobLeaseClient object for managing leases on the blob. */ getBlobLeaseClient(proposeLeaseId) { return new BlobLeaseClient(this, proposeLeaseId); } /** * Creates a read-only snapshot of a blob. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/snapshot-blob * * @param options - Optional options to the Blob Create Snapshot operation. */ async createSnapshot(options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlobClient-createSnapshot", options, async (updatedOptions) => { var _a; return assertResponse(await this.blobContext.createSnapshot({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Asynchronously copies a blob to a destination within the storage account. * This method returns a long running operation poller that allows you to wait * indefinitely until the copy is completed. * You can also cancel a copy before it is completed by calling `cancelOperation` on the poller. * Note that the onProgress callback will not be invoked if the operation completes in the first * request, and attempting to cancel a completed copy will result in an error being thrown. * * In version 2012-02-12 and later, the source for a Copy Blob operation can be * a committed blob in any Azure storage account. * Beginning with version 2015-02-21, the source for a Copy Blob operation can be * an Azure file in any Azure storage account. * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob * operation to copy from another storage account. 
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob * * Example using automatic polling: * * ```js * const copyPoller = await blobClient.beginCopyFromURL('url'); * const result = await copyPoller.pollUntilDone(); * ``` * * Example using manual polling: * * ```js * const copyPoller = await blobClient.beginCopyFromURL('url'); * while (!poller.isDone()) { * await poller.poll(); * } * const result = copyPoller.getResult(); * ``` * * Example using progress updates: * * ```js * const copyPoller = await blobClient.beginCopyFromURL('url', { * onProgress(state) { * console.log(`Progress: ${state.copyProgress}`); * } * }); * const result = await copyPoller.pollUntilDone(); * ``` * * Example using a changing polling interval (default 15 seconds): * * ```js * const copyPoller = await blobClient.beginCopyFromURL('url', { * intervalInMs: 1000 // poll blob every 1 second for copy progress * }); * const result = await copyPoller.pollUntilDone(); * ``` * * Example using copy cancellation: * * ```js * const copyPoller = await blobClient.beginCopyFromURL('url'); * // cancel operation after starting it. * try { * await copyPoller.cancelOperation(); * // calls to get the result now throw PollerCancelledError * await copyPoller.getResult(); * } catch (err) { * if (err.name === 'PollerCancelledError') { * console.log('The copy was cancelled.'); * } * } * ``` * * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. */ async beginCopyFromURL(copySource, options = {}) { const client = { abortCopyFromURL: (...args) => this.abortCopyFromURL(...args), getProperties: (...args) => this.getProperties(...args), startCopyFromURL: (...args) => this.startCopyFromURL(...args), }; const poller = new BlobBeginCopyFromUrlPoller({ blobClient: client, copySource, intervalInMs: options.intervalInMs, onProgress: options.onProgress, resumeFrom: options.resumeFrom, startCopyFromURLOptions: options, }); // Trigger the startCopyFromURL call by calling poll. // Any errors from this method should be surfaced to the user. await poller.poll(); return poller; } /** * Aborts a pending asynchronous Copy Blob operation, and leaves a destination blob with zero * length and full metadata. Version 2012-02-12 and newer. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/abort-copy-blob * * @param copyId - Id of the Copy From URL operation. * @param options - Optional options to the Blob Abort Copy From URL operation. */ async abortCopyFromURL(copyId, options = {}) { return tracingClient.withSpan("BlobClient-abortCopyFromURL", options, async (updatedOptions) => { return assertResponse(await this.blobContext.abortCopyFromURL(copyId, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * The synchronous Copy From URL operation copies a blob or an internet resource to a new blob. It will not * return a response until the copy is complete. 
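 *
 * Example usage (illustrative; the source URL is hypothetical and may need a
 * SAS token appended for authentication):
 *
 * ```js
 * const response = await blobClient.syncCopyFromURL(
 *   "https://myaccount.blob.core.windows.net/mycontainer/source.txt"
 * );
 * // the promise resolves only once the copy has completed
 * ```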
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob-from-url * * @param copySource - The source URL to copy from, Shared Access Signature(SAS) maybe needed for authentication * @param options - */ async syncCopyFromURL(copySource, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; return tracingClient.withSpan("BlobClient-syncCopyFromURL", options, async (updatedOptions) => { var _a, _b, _c, _d, _e, _f, _g; return assertResponse(await this.blobContext.copyFromURL(copySource, { abortSignal: options.abortSignal, metadata: options.metadata, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, }, sourceContentMD5: options.sourceContentMD5, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), immutabilityPolicyExpiry: (_f = options.immutabilityPolicy) === null || _f === void 0 ? void 0 : _f.expiriesOn, immutabilityPolicyMode: (_g = options.immutabilityPolicy) === null || _g === void 0 ? void 0 : _g.policyMode, legalHold: options.legalHold, encryptionScope: options.encryptionScope, copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Sets the tier on a blob. The operation is allowed on a page blob in a premium * storage account and on a block blob in a blob storage account (locally redundant * storage only). A premium page blob's tier determines the allowed size, IOPS, * and bandwidth of the blob. A block blob's tier determines Hot/Cool/Archive * storage type. This operation does not update the blob's ETag. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-tier * * @param tier - The tier to be set on the blob. Valid values are Hot, Cool, or Archive. * @param options - Optional options to the Blob Set Tier operation. */ async setAccessTier(tier, options = {}) { return tracingClient.withSpan("BlobClient-setAccessTier", options, async (updatedOptions) => { var _a; return assertResponse(await this.blobContext.setTier(toAccessTier(tier), { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), rehydratePriority: options.rehydratePriority, tracingOptions: updatedOptions.tracingOptions, })); }); } async downloadToBuffer(param1, param2, param3, param4 = {}) { var _a; let buffer; let offset = 0; let count = 0; let options = param4; if (param1 instanceof Buffer) { buffer = param1; offset = param2 || 0; count = typeof param3 === "number" ? param3 : 0; } else { offset = typeof param1 === "number" ? param1 : 0; count = typeof param2 === "number" ? 
param2 : 0; options = param3 || {}; } let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? _a : 0; if (blockSize < 0) { throw new RangeError("blockSize option must be >= 0"); } if (blockSize === 0) { blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } if (offset < 0) { throw new RangeError("offset option must be >= 0"); } if (count && count <= 0) { throw new RangeError("count option must be greater than 0"); } if (!options.conditions) { options.conditions = {}; } return tracingClient.withSpan("BlobClient-downloadToBuffer", options, async (updatedOptions) => { // Customer doesn't specify length, get it if (!count) { const response = await this.getProperties(Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); count = response.contentLength - offset; if (count < 0) { throw new RangeError(`offset ${offset} shouldn't be larger than blob size ${response.contentLength}`); } } // Allocate the buffer of size = count if the buffer is not provided if (!buffer) { try { buffer = Buffer.alloc(count); } catch (error) { throw new Error(`Unable to allocate the buffer of size: ${count}(in bytes). Please try passing your own buffer to the "downloadToBuffer" method or try using other methods like "download" or "downloadToFile".\t ${error.message}`); } } if (buffer.length < count) { throw new RangeError(`The buffer's size should be equal to or larger than the request count of bytes: ${count}`); } let transferProgress = 0; const batch = new Batch(options.concurrency); for (let off = offset; off < offset + count; off = off + blockSize) { batch.addOperation(async () => { // Exclusive chunk end position let chunkEnd = offset + count; if (off + blockSize < chunkEnd) { chunkEnd = off + blockSize; } const response = await this.download(off, chunkEnd - off, { abortSignal: options.abortSignal, conditions: options.conditions, maxRetryRequests: options.maxRetryRequestsPerBlock, customerProvidedKey: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions, }); const stream = response.readableStreamBody; await streamToBuffer(stream, buffer, off - offset, chunkEnd - offset); // Update progress after block is downloaded, in case of block trying // Could provide finer grained progress updating inside HTTP requests, // only if convenience layer download try is enabled transferProgress += chunkEnd - off; if (options.onProgress) { options.onProgress({ loadedBytes: transferProgress }); } }); } await batch.do(); return buffer; }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Downloads an Azure Blob to a local file. * Fails if the the given file path already exits. * Offset and count are optional, pass 0 and undefined respectively to download the entire blob. * * @param filePath - * @param offset - From which position of the block blob to download. * @param count - How much data to be downloaded. Will download to the end when passing undefined. * @param options - Options to Blob download options. * @returns The response data for blob download operation, * but with readableStreamBody set to undefined since its * content is already read and written into a local file * at the specified path. 
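 *
 * Example usage (illustrative, Node.js only):
 *
 * ```js
 * // download the entire blob to a local file
 * await blobClient.downloadToFile("downloaded-copy.txt");
 * // download only the first 1024 bytes
 * await blobClient.downloadToFile("first-kilobyte.bin", 0, 1024);
 * ```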
*/ async downloadToFile(filePath, offset = 0, count, options = {}) { return tracingClient.withSpan("BlobClient-downloadToFile", options, async (updatedOptions) => { const response = await this.download(offset, count, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); if (response.readableStreamBody) { await readStreamToLocalFile(response.readableStreamBody, filePath); } // The stream is no longer accessible so setting it to undefined. response.blobDownloadStream = undefined; return response; }); } getBlobAndContainerNamesFromUrl() { let containerName; let blobName; try { // URL may look like the following // "https://myaccount.blob.core.windows.net/mycontainer/blob?sasString"; // "https://myaccount.blob.core.windows.net/mycontainer/blob"; // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt?sasString"; // "https://myaccount.blob.core.windows.net/mycontainer/blob/a.txt"; // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername/blob` // http://localhost:10001/devstoreaccount1/containername/blob const parsedUrl = new URL(this.url); if (parsedUrl.host.split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername/blob". // .getPath() -> /containername/blob const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } else if (isIpEndpointStyle(parsedUrl)) { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername/blob // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername/blob // .getPath() -> /devstoreaccount1/containername/blob const pathComponents = parsedUrl.pathname.match("/([^/]*)/([^/]*)(/(.*))?"); containerName = pathComponents[2]; blobName = pathComponents[4]; } else { // "https://customdomain.com/containername/blob". // .getPath() -> /containername/blob const pathComponents = parsedUrl.pathname.match("/([^/]*)(/(.*))?"); containerName = pathComponents[1]; blobName = pathComponents[3]; } // decode the encoded blobName, containerName - to get all the special characters that might be present in them containerName = decodeURIComponent(containerName); blobName = decodeURIComponent(blobName); // Azure Storage Server will replace "\" with "/" in the blob names // doing the same in the SDK side so that the user doesn't have to replace "\" instances in the blobName blobName = blobName.replace(/\\/g, "/"); if (!containerName) { throw new Error("Provided containerName is invalid."); } return { blobName, containerName }; } catch (error) { throw new Error("Unable to extract blobName and containerName with provided information."); } } /** * Asynchronously copies a blob to a destination within the storage account. * In version 2012-02-12 and later, the source for a Copy Blob operation can be * a committed blob in any Azure storage account. * Beginning with version 2015-02-21, the source for a Copy Blob operation can be * an Azure file in any Azure storage account. * Only storage accounts created on or after June 7th, 2012 allow the Copy Blob * operation to copy from another storage account. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/copy-blob * * @param copySource - url to the source Azure Blob/File. * @param options - Optional options to the Blob Start Copy From URL operation. 
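 *
 * Example usage (illustrative; the source URL is hypothetical):
 *
 * ```js
 * const result = await blobClient.startCopyFromURL(
 *   "https://myaccount.blob.core.windows.net/mycontainer/source-blob"
 * );
 * console.log(result.copyId, result.copyStatus); // copyStatus may still be "pending"
 * ```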
*/ async startCopyFromURL(copySource, options = {}) { return tracingClient.withSpan("BlobClient-startCopyFromURL", options, async (updatedOptions) => { var _a, _b, _c; options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; return assertResponse(await this.blobContext.startCopyFromURL(copySource, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { sourceIfMatch: options.sourceConditions.ifMatch, sourceIfModifiedSince: options.sourceConditions.ifModifiedSince, sourceIfNoneMatch: options.sourceConditions.ifNoneMatch, sourceIfUnmodifiedSince: options.sourceConditions.ifUnmodifiedSince, sourceIfTags: options.sourceConditions.tagConditions, }, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, rehydratePriority: options.rehydratePriority, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), sealBlob: options.sealBlob, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Only available for BlobClient constructed with a shared key credential. * * Generates a Blob Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateSasUrl(options) { return new Promise((resolve) => { if (!(this.credential instanceof StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).toString(); resolve(appendToURLQuery(this.url, sas)); }); } /** * Only available for BlobClient constructed with a shared key credential. * * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
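 *
 * Illustrative sketch (assumes the SDK's `BlobSASPermissions` helper and the
 * usual `permissions`/`expiresOn` SAS options):
 *
 * ```js
 * const stringToSign = blobClient.generateSasStringToSign({
 *   permissions: BlobSASPermissions.parse("r"),
 *   expiresOn: new Date(Date.now() + 60 * 60 * 1000), // one hour from now
 * });
 * ```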
*/ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ generateSasStringToSign(options) { if (!(this.credential instanceof StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), this.credential).stringToSign; } /** * * Generates a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the input user delegation key. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasUrl(options, userDelegationKey) { return new Promise((resolve) => { const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).toString(); resolve(appendToURLQuery(this.url, sas)); }); } /** * Only available for BlobClient constructed with a shared key credential. * * Generates string to sign for a Blob Service Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the input user delegation key. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasStringToSign(options, userDelegationKey) { return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName, blobName: this._name, snapshotTime: this._snapshot, versionId: this._versionId }, options), userDelegationKey, this.accountName).stringToSign; } /** * Delete the immutablility policy on the blob. * * @param options - Optional options to delete immutability policy on the blob. */ async deleteImmutabilityPolicy(options = {}) { return tracingClient.withSpan("BlobClient-deleteImmutabilityPolicy", options, async (updatedOptions) => { return assertResponse(await this.blobContext.deleteImmutabilityPolicy({ tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Set immutability policy on the blob. * * @param options - Optional options to set immutability policy on the blob. */ async setImmutabilityPolicy(immutabilityPolicy, options = {}) { return tracingClient.withSpan("BlobClient-setImmutabilityPolicy", options, async (updatedOptions) => { return assertResponse(await this.blobContext.setImmutabilityPolicy({ immutabilityPolicyExpiry: immutabilityPolicy.expiriesOn, immutabilityPolicyMode: immutabilityPolicy.policyMode, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Set legal hold on the blob. * * @param options - Optional options to set legal hold on the blob. 
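*
* Example usage (an illustrative sketch; `blobClient` is an assumed, already-constructed BlobClient):
*
* ```js
* // Place the blob under a legal hold, then release it later.
* await blobClient.setLegalHold(true);
* // ... and when the hold is no longer needed:
* await blobClient.setLegalHold(false);
* ```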
*/ async setLegalHold(legalHoldEnabled, options = {}) { return tracingClient.withSpan("BlobClient-setLegalHold", options, async (updatedOptions) => { return assertResponse(await this.blobContext.setLegalHold(legalHoldEnabled, { tracingOptions: updatedOptions.tracingOptions, })); }); } /** * The Get Account Information operation returns the sku name and account kind * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { return tracingClient.withSpan("BlobClient-getAccountInfo", options, async (updatedOptions) => { return assertResponse(await this.blobContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions, })); }); } } /** * AppendBlobClient defines a set of operations applicable to append blobs. */ class AppendBlobClient extends BlobClient { constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. // super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); let pipeline; let url; options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { // (url: string, pipeline: Pipeline) url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; url = urlOrConnectionString; options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; // The second parameter is undefined. Use anonymous credential. 
pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); this.appendBlobContext = this.storageClientContext.appendBlob; } /** * Creates a new AppendBlobClient object identical to the source but with the * specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. * * @param snapshot - The snapshot timestamp. * @returns A new AppendBlobClient object identical to the source but with the specified snapshot timestamp. */ withSnapshot(snapshot) { return new AppendBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); } /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * * @param options - Options to the Append Block Create operation. * * * Example usage: * * ```js * const appendBlobClient = containerClient.getAppendBlobClient(""); * await appendBlobClient.create(); * ``` */ async create(options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("AppendBlobClient-create", options, async (updatedOptions) => { var _a, _b, _c; return assertResponse(await this.appendBlobContext.create(0, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? 
void 0 : _c.policyMode, legalHold: options.legalHold, blobTagsString: toBlobTagsString(options.tags), tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Creates a 0-length append blob. Call AppendBlock to append data to an append blob. * If the blob with the same name already exists, the content of the existing blob will remain unchanged. * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * * @param options - */ async createIfNotExists(options = {}) { const conditions = { ifNoneMatch: ETagAny }; return tracingClient.withSpan("AppendBlobClient-createIfNotExists", options, async (updatedOptions) => { var _a, _b; try { const res = assertResponse(await this.create(Object.assign(Object.assign({}, updatedOptions), { conditions }))); return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } throw e; } }); } /** * Seals the append blob, making it read only. * * @param options - */ async seal(options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("AppendBlobClient-seal", options, async (updatedOptions) => { var _a; return assertResponse(await this.appendBlobContext.seal({ abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Commits a new block of data to the end of the existing append blob. * @see https://docs.microsoft.com/rest/api/storageservices/append-block * * @param body - Data to be appended. * @param contentLength - Length of the body in bytes. * @param options - Options to the Append Block operation. * * * Example usage: * * ```js * const content = "Hello World!"; * * // Create a new append blob and append data to the blob. * const newAppendBlobClient = containerClient.getAppendBlobClient(""); * await newAppendBlobClient.create(); * await newAppendBlobClient.appendBlock(content, content.length); * * // Append data to an existing append blob. * const existingAppendBlobClient = containerClient.getAppendBlobClient(""); * await existingAppendBlobClient.appendBlock(content, content.length); * ``` */ async appendBlock(body, contentLength, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("AppendBlobClient-appendBlock", options, async (updatedOptions) => { var _a; return assertResponse(await this.appendBlobContext.appendBlock(contentLength, body, { abortSignal: options.abortSignal, appendPositionAccessConditions: options.conditions, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { onUploadProgress: options.onProgress, }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * The Append Block operation commits a new block of data to the end of an existing append blob * where the contents are read from a source url. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/append-block-from-url * * @param sourceURL - * The url to the blob that will be the source of the copy. A source blob in the same storage account can * be authenticated via Shared Key. However, if the source is a blob in another account, the source blob * must either be public or must be authenticated via a shared access signature. If the source blob is * public, no authentication is required to perform the operation. * @param sourceOffset - Offset in source to be appended * @param count - Number of bytes to be appended as a block * @param options - */ async appendBlockFromURL(sourceURL, sourceOffset, count, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("AppendBlobClient-appendBlockFromURL", options, async (updatedOptions) => { var _a, _b, _c, _d, _e; return assertResponse(await this.appendBlobContext.appendBlockFromUrl(sourceURL, 0, { abortSignal: options.abortSignal, sourceRange: rangeToString({ offset: sourceOffset, count }), sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, appendPositionAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, }, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions, })); }); } } /** * BlockBlobClient defines a set of operations applicable to block blobs. */ class BlockBlobClient extends BlobClient { constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. 
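// The branches below mirror the supported call shapes (illustrative summary):
//   new BlockBlobClient(url, pipeline)
//   new BlockBlobClient(url, credential?, options?)
//   new BlockBlobClient(connectionString, containerName, blobName, options?)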
// super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); let pipeline; let url; options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { // (url: string, pipeline: Pipeline) url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) // The second parameter is undefined. Use anonymous credential. url = urlOrConnectionString; if (blobNameOrOptions && typeof blobNameOrOptions !== "string") { options = blobNameOrOptions; } pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); this.blockBlobContext = this.storageClientContext.blockBlob; this._blobContext = this.storageClientContext.blob; } /** * Creates a new BlockBlobClient object identical to the source but with the * specified snapshot timestamp. * Provide "" will remove the snapshot and return a URL to the base blob. * * @param snapshot - The snapshot timestamp. * @returns A new BlockBlobClient object identical to the source but with the specified snapshot timestamp. */ withSnapshot(snapshot) { return new BlockBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. 
* * Quick query for a JSON or CSV formatted blob. * * Example usage (Node.js): * * ```js * // Query and convert a blob to a string * const queryBlockBlobResponse = await blockBlobClient.query("select * from BlobStorage"); * const downloaded = (await streamToBuffer(queryBlockBlobResponse.readableStreamBody)).toString(); * console.log("Query blob content:", downloaded); * * async function streamToBuffer(readableStream) { * return new Promise((resolve, reject) => { * const chunks = []; * readableStream.on("data", (data) => { * chunks.push(data instanceof Buffer ? data : Buffer.from(data)); * }); * readableStream.on("end", () => { * resolve(Buffer.concat(chunks)); * }); * readableStream.on("error", reject); * }); * } * ``` * * @param query - * @param options - */ async query(query, options = {}) { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); if (!coreUtil.isNode) { throw new Error("This operation currently is only supported in Node.js."); } return tracingClient.withSpan("BlockBlobClient-query", options, async (updatedOptions) => { var _a; const response = assertResponse(await this._blobContext.query({ abortSignal: options.abortSignal, queryRequest: { queryType: "SQL", expression: query, inputSerialization: toQuerySerialization(options.inputTextConfiguration), outputSerialization: toQuerySerialization(options.outputTextConfiguration), }, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, tracingOptions: updatedOptions.tracingOptions, })); return new BlobQueryResponse(response, { abortSignal: options.abortSignal, onProgress: options.onProgress, onError: options.onError, }); }); } /** * Creates a new block blob, or updates the content of an existing block blob. * Updating an existing block blob overwrites any existing metadata on the blob. * Partial updates are not supported; the content of the existing blob is * overwritten with the new content. To perform a partial update of a block blob's, * use {@link stageBlock} and {@link commitBlockList}. * * This is a non-parallel uploading method, please use {@link uploadFile}, * {@link uploadStream} or {@link uploadBrowserData} for better performance * with concurrency uploading. * * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function * which returns a new Readable stream whose offset is from data source beginning. * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a * string including non non-Base64/Hex-encoded characters. * @param options - Options to the Block Blob Upload operation. * @returns Response data for the Block Blob Upload operation. 
* * Example usage: * * ```js * const content = "Hello world!"; * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ async upload(body, contentLength, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlockBlobClient-upload", options, async (updatedOptions) => { var _a, _b, _c; return assertResponse(await this.blockBlobContext.upload(contentLength, body, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), requestOptions: { onUploadProgress: options.onProgress, }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Creates a new Block Blob where the contents of the blob are read from a given URL. * This API is supported beginning with the 2020-04-08 version. Partial updates * are not supported with Put Blob from URL; the content of an existing blob is overwritten with * the content of the new blob. To perform partial updates to a block blob’s contents using a * source URL, use {@link stageBlockFromURL} and {@link commitBlockList}. * * @param sourceURL - Specifies the URL of the blob. The value * may be a URL of up to 2 KB in length that specifies a blob. * The value should be URL-encoded as it would appear * in a request URI. The source blob must either be public * or must be authenticated via a shared access signature. * If the source blob is public, no authentication is required * to perform the operation. Here are some examples of source object URLs: * - https://myaccount.blob.core.windows.net/mycontainer/myblob * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param options - Optional parameters. */ async syncUploadFromURL(sourceURL, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlockBlobClient-syncUploadFromURL", options, async (updatedOptions) => { var _a, _b, _c, _d, _e, _f; return assertResponse(await this.blockBlobContext.putBlobFromUrl(0, sourceURL, Object.assign(Object.assign({}, options), { blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? 
void 0 : _e.ifUnmodifiedSince, sourceIfTags: (_f = options.sourceConditions) === null || _f === void 0 ? void 0 : _f.tagConditions, }, cpkInfo: options.customerProvidedKey, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), copySourceTags: options.copySourceTags, tracingOptions: updatedOptions.tracingOptions }))); }); } /** * Uploads the specified block to the block blob's "staging area" to be later * committed by a call to commitBlockList. * @see https://docs.microsoft.com/rest/api/storageservices/put-block * * @param blockId - A 64-byte value that is base64-encoded * @param body - Data to upload to the staging area. * @param contentLength - Number of bytes to upload. * @param options - Options to the Block Blob Stage Block operation. * @returns Response data for the Block Blob Stage Block operation. */ async stageBlock(blockId, body, contentLength, options = {}) { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlockBlobClient-stageBlock", options, async (updatedOptions) => { return assertResponse(await this.blockBlobContext.stageBlock(blockId, contentLength, body, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, requestOptions: { onUploadProgress: options.onProgress, }, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * The Stage Block From URL operation creates a new block to be committed as part * of a blob where the contents are read from a URL. * This API is available starting in version 2018-03-28. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-block-from-url * * @param blockId - A 64-byte value that is base64-encoded * @param sourceURL - Specifies the URL of the blob. The value * may be a URL of up to 2 KB in length that specifies a blob. * The value should be URL-encoded as it would appear * in a request URI. The source blob must either be public * or must be authenticated via a shared access signature. * If the source blob is public, no authentication is required * to perform the operation. Here are some examples of source object URLs: * - https://myaccount.blob.core.windows.net/mycontainer/myblob * - https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param offset - From which position of the blob to download, greater than or equal to 0 * @param count - How much data to be downloaded, greater than 0. Will download to the end when undefined * @param options - Options to the Block Blob Stage Block From URL operation. * @returns Response data for the Block Blob Stage Block From URL operation. */ async stageBlockFromURL(blockId, sourceURL, offset = 0, count, options = {}) { ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlockBlobClient-stageBlockFromURL", options, async (updatedOptions) => { return assertResponse(await this.blockBlobContext.stageBlockFromURL(blockId, 0, sourceURL, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, sourceRange: offset === 0 && !count ? 
undefined : rangeToString({ offset, count }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Writes a blob by specifying the list of block IDs that make up the blob. * In order to be written as part of a blob, a block must have been successfully written * to the server in a prior {@link stageBlock} operation. You can call {@link commitBlockList} to * update a blob by uploading only those blocks that have changed, then committing the new and existing * blocks together. Any blocks not specified in the block list and permanently deleted. * @see https://docs.microsoft.com/rest/api/storageservices/put-block-list * * @param blocks - Array of 64-byte value that is base64-encoded * @param options - Options to the Block Blob Commit Block List operation. * @returns Response data for the Block Blob Commit Block List operation. */ async commitBlockList(blocks, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("BlockBlobClient-commitBlockList", options, async (updatedOptions) => { var _a, _b, _c; return assertResponse(await this.blockBlobContext.commitBlockList({ latest: blocks }, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Returns the list of blocks that have been uploaded as part of a block blob * using the specified block list filter. * @see https://docs.microsoft.com/rest/api/storageservices/get-block-list * * @param listType - Specifies whether to return the list of committed blocks, * the list of uncommitted blocks, or both lists together. * @param options - Options to the Block Blob Get Block List operation. * @returns Response data for the Block Blob Get Block List operation. */ async getBlockList(listType, options = {}) { return tracingClient.withSpan("BlockBlobClient-getBlockList", options, async (updatedOptions) => { var _a; const res = assertResponse(await this.blockBlobContext.getBlockList(listType, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, })); if (!res.committedBlocks) { res.committedBlocks = []; } if (!res.uncommittedBlocks) { res.uncommittedBlocks = []; } return res; }); } // High level functions /** * Uploads a Buffer(Node.js)/Blob(browsers)/ArrayBuffer/ArrayBufferView object to a BlockBlob. 
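*
* Example usage (an illustrative sketch; `blockBlobClient` is an assumed, already-constructed BlockBlobClient):
*
* ```js
* // In Node.js the data can be a Buffer; in browsers a Blob, ArrayBuffer or ArrayBufferView works the same way.
* const data = Buffer.from("Hello, world!");
* await blockBlobClient.uploadData(data, {
*   blobHTTPHeaders: { blobContentType: "text/plain" },
* });
* ```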
* * When data length is no more than the specifiled {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} * to commit the block list. * * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is * `blobContentType`, enabling the browser to provide * functionality based on file type. * * @param data - Buffer(Node.js), Blob, ArrayBuffer or ArrayBufferView * @param options - */ async uploadData(data, options = {}) { return tracingClient.withSpan("BlockBlobClient-uploadData", options, async (updatedOptions) => { if (coreUtil.isNode) { let buffer; if (data instanceof Buffer) { buffer = data; } else if (data instanceof ArrayBuffer) { buffer = Buffer.from(data); } else { data = data; buffer = Buffer.from(data.buffer, data.byteOffset, data.byteLength); } return this.uploadSeekableInternal((offset, size) => buffer.slice(offset, offset + size), buffer.byteLength, updatedOptions); } else { const browserBlob = new Blob([data]); return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); } }); } /** * ONLY AVAILABLE IN BROWSERS. * * Uploads a browser Blob/File/ArrayBuffer/ArrayBufferView object to block blob. * * When buffer length lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call * {@link commitBlockList} to commit the block list. * * A common {@link BlockBlobParallelUploadOptions.blobHTTPHeaders} option to set is * `blobContentType`, enabling the browser to provide * functionality based on file type. * * @deprecated Use {@link uploadData} instead. * * @param browserData - Blob, File, ArrayBuffer or ArrayBufferView * @param options - Options to upload browser data. * @returns Response data for the Blob Upload operation. */ async uploadBrowserData(browserData, options = {}) { return tracingClient.withSpan("BlockBlobClient-uploadBrowserData", options, async (updatedOptions) => { const browserBlob = new Blob([browserData]); return this.uploadSeekableInternal((offset, size) => browserBlob.slice(offset, offset + size), browserBlob.size, updatedOptions); }); } /** * * Uploads data to block blob. Requires a bodyFactory as the data source, * which need to return a {@link HttpRequestBody} object with the offset and size provided. * * When data length is no more than the specified {@link BlockBlobParallelUploadOptions.maxSingleShotSize} (default is * {@link BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}), this method will use 1 {@link upload} call to finish the upload. * Otherwise, this method will call {@link stageBlock} to upload blocks, and finally call {@link commitBlockList} * to commit the block list. * * @param bodyFactory - * @param size - size of the data to upload. * @param options - Options to Upload to Block Blob operation. * @returns Response data for the Blob Upload operation. */ async uploadSeekableInternal(bodyFactory, size, options = {}) { var _a, _b; let blockSize = (_a = options.blockSize) !== null && _a !== void 0 ? 
_a : 0; if (blockSize < 0 || blockSize > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES) { throw new RangeError(`blockSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES}`); } const maxSingleShotSize = (_b = options.maxSingleShotSize) !== null && _b !== void 0 ? _b : BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES; if (maxSingleShotSize < 0 || maxSingleShotSize > BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES) { throw new RangeError(`maxSingleShotSize option must be >= 0 and <= ${BLOCK_BLOB_MAX_UPLOAD_BLOB_BYTES}`); } if (blockSize === 0) { if (size > BLOCK_BLOB_MAX_STAGE_BLOCK_BYTES * BLOCK_BLOB_MAX_BLOCKS) { throw new RangeError(`${size} is too larger to upload to a block blob.`); } if (size > maxSingleShotSize) { blockSize = Math.ceil(size / BLOCK_BLOB_MAX_BLOCKS); if (blockSize < DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES) { blockSize = DEFAULT_BLOB_DOWNLOAD_BLOCK_BYTES; } } } if (!options.blobHTTPHeaders) { options.blobHTTPHeaders = {}; } if (!options.conditions) { options.conditions = {}; } return tracingClient.withSpan("BlockBlobClient-uploadSeekableInternal", options, async (updatedOptions) => { if (size <= maxSingleShotSize) { return assertResponse(await this.upload(bodyFactory(0, size), size, updatedOptions)); } const numBlocks = Math.floor((size - 1) / blockSize) + 1; if (numBlocks > BLOCK_BLOB_MAX_BLOCKS) { throw new RangeError(`The buffer's size is too big or the BlockSize is too small;` + `the number of blocks must be <= ${BLOCK_BLOB_MAX_BLOCKS}`); } const blockList = []; const blockIDPrefix = coreUtil.randomUUID(); let transferProgress = 0; const batch = new Batch(options.concurrency); for (let i = 0; i < numBlocks; i++) { batch.addOperation(async () => { const blockID = generateBlockID(blockIDPrefix, i); const start = blockSize * i; const end = i === numBlocks - 1 ? size : start + blockSize; const contentLength = end - start; blockList.push(blockID); await this.stageBlock(blockID, bodyFactory(start, contentLength), contentLength, { abortSignal: options.abortSignal, conditions: options.conditions, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions, }); // Update progress after block is successfully uploaded to server, in case of block trying // TODO: Hook with convenience layer progress event in finer level transferProgress += contentLength; if (options.onProgress) { options.onProgress({ loadedBytes: transferProgress, }); } }); } await batch.do(); return this.commitBlockList(blockList, updatedOptions); }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Uploads a local file in blocks to a block blob. * * When file size lesser than or equal to 256MB, this method will use 1 upload call to finish the upload. * Otherwise, this method will call stageBlock to upload blocks, and finally call commitBlockList * to commit the block list. * * @param filePath - Full path of local file * @param options - Options to Upload to Block Blob operation. * @returns Response data for the Blob Upload operation. */ async uploadFile(filePath, options = {}) { return tracingClient.withSpan("BlockBlobClient-uploadFile", options, async (updatedOptions) => { const size = (await fsStat(filePath)).size; return this.uploadSeekableInternal((offset, count) => { return () => fsCreateReadStream(filePath, { autoClose: true, end: count ? offset + count - 1 : Infinity, start: offset, }); }, size, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions })); }); } /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Uploads a Node.js Readable stream into block blob. 
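*
* Example usage (an illustrative sketch; `blockBlobClient` is an assumed, already-constructed BlockBlobClient and "local-file.bin" is a placeholder path):
*
* ```js
* const fs = require("fs");
* await blockBlobClient.uploadStream(
*   fs.createReadStream("local-file.bin"),
*   4 * 1024 * 1024, // bufferSize: 4 MB per block
*   20,              // maxConcurrency
*   { onProgress: (ev) => console.log(`${ev.loadedBytes} bytes uploaded`) }
* );
* ```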
* * PERFORMANCE IMPROVEMENT TIPS: * * Input stream highWaterMark is better to set a same value with bufferSize * parameter, which will avoid Buffer.concat() operations. * * @param stream - Node.js Readable stream * @param bufferSize - Size of every buffer allocated, also the block size in the uploaded block blob. Default value is 8MB * @param maxConcurrency - Max concurrency indicates the max number of buffers that can be allocated, * positive correlation with max uploading concurrency. Default value is 5 * @param options - Options to Upload Stream to Block Blob operation. * @returns Response data for the Blob Upload operation. */ async uploadStream(stream, bufferSize = DEFAULT_BLOCK_BUFFER_SIZE_BYTES, maxConcurrency = 5, options = {}) { if (!options.blobHTTPHeaders) { options.blobHTTPHeaders = {}; } if (!options.conditions) { options.conditions = {}; } return tracingClient.withSpan("BlockBlobClient-uploadStream", options, async (updatedOptions) => { let blockNum = 0; const blockIDPrefix = coreUtil.randomUUID(); let transferProgress = 0; const blockList = []; const scheduler = new BufferScheduler(stream, bufferSize, maxConcurrency, async (body, length) => { const blockID = generateBlockID(blockIDPrefix, blockNum); blockList.push(blockID); blockNum++; await this.stageBlock(blockID, body, length, { customerProvidedKey: options.customerProvidedKey, conditions: options.conditions, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions, }); // Update progress after block is successfully uploaded to server, in case of block trying transferProgress += length; if (options.onProgress) { options.onProgress({ loadedBytes: transferProgress }); } }, // concurrency should set a smaller value than maxConcurrency, which is helpful to // reduce the possibility when a outgoing handler waits for stream data, in // this situation, outgoing handlers are blocked. // Outgoing queue shouldn't be empty. Math.ceil((maxConcurrency / 4) * 3)); await scheduler.do(); return assertResponse(await this.commitBlockList(blockList, Object.assign(Object.assign({}, options), { tracingOptions: updatedOptions.tracingOptions }))); }); } } /** * PageBlobClient defines a set of operations applicable to page blobs. */ class PageBlobClient extends BlobClient { constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, blobNameOrOptions, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { // In TypeScript we cannot simply pass all parameters to super() like below so have to duplicate the code instead. 
// super(s, credentialOrPipelineOrContainerNameOrOptions, blobNameOrOptions, options); let pipeline; let url; options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { // (url: string, pipeline: Pipeline) url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; options = blobNameOrOptions; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) // The second parameter is undefined. Use anonymous credential. url = urlOrConnectionString; pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string" && blobNameOrOptions && typeof blobNameOrOptions === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) const containerName = credentialOrPipelineOrContainerName; const blobName = blobNameOrOptions; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)); if (!options.proxyOptions) { options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { url = appendToURLPath(appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)), encodeURIComponent(blobName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName and blobName parameters"); } super(url, pipeline); this.pageBlobContext = this.storageClientContext.pageBlob; } /** * Creates a new PageBlobClient object identical to the source but with the * specified snapshot timestamp. * Provide "" will remove the snapshot and return a Client to the base blob. * * @param snapshot - The snapshot timestamp. * @returns A new PageBlobClient object identical to the source but with the specified snapshot timestamp. */ withSnapshot(snapshot) { return new PageBlobClient(setURLParameter(this.url, URLConstants.Parameters.SNAPSHOT, snapshot.length === 0 ? undefined : snapshot), this.pipeline); } /** * Creates a page blob of the specified length. Call uploadPages to upload data * data to a page blob. * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * * @param size - size of the page blob. 
* @param options - Options to the Page Blob Create operation. * @returns Response data for the Page Blob Create operation. */ async create(size, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("PageBlobClient-create", options, async (updatedOptions) => { var _a, _b, _c; return assertResponse(await this.pageBlobContext.create(0, size, { abortSignal: options.abortSignal, blobHttpHeaders: options.blobHTTPHeaders, blobSequenceNumber: options.blobSequenceNumber, leaseAccessConditions: options.conditions, metadata: options.metadata, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, immutabilityPolicyExpiry: (_b = options.immutabilityPolicy) === null || _b === void 0 ? void 0 : _b.expiriesOn, immutabilityPolicyMode: (_c = options.immutabilityPolicy) === null || _c === void 0 ? void 0 : _c.policyMode, legalHold: options.legalHold, tier: toAccessTier(options.tier), blobTagsString: toBlobTagsString(options.tags), tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Creates a page blob of the specified length. Call uploadPages to upload data * data to a page blob. If the blob with the same name already exists, the content * of the existing blob will remain unchanged. * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * * @param size - size of the page blob. * @param options - */ async createIfNotExists(size, options = {}) { return tracingClient.withSpan("PageBlobClient-createIfNotExists", options, async (updatedOptions) => { var _a, _b; try { const conditions = { ifNoneMatch: ETagAny }; const res = assertResponse(await this.create(size, Object.assign(Object.assign({}, options), { conditions, tracingOptions: updatedOptions.tracingOptions }))); return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "BlobAlreadyExists") { return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } throw e; } }); } /** * Writes 1 or more pages to the page blob. The start and end offsets must be a multiple of 512. * @see https://docs.microsoft.com/rest/api/storageservices/put-page * * @param body - Data to upload * @param offset - Offset of destination page blob * @param count - Content length of the body, also number of bytes to be uploaded * @param options - Options to the Page Blob Upload Pages operation. * @returns Response data for the Page Blob Upload Pages operation. */ async uploadPages(body, offset, count, options = {}) { options.conditions = options.conditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("PageBlobClient-uploadPages", options, async (updatedOptions) => { var _a; return assertResponse(await this.pageBlobContext.uploadPages(count, body, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), requestOptions: { onUploadProgress: options.onProgress, }, range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, transactionalContentMD5: options.transactionalContentMD5, transactionalContentCrc64: options.transactionalContentCrc64, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * The Upload Pages operation writes a range of pages to a page blob where the * contents are read from a URL. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/put-page-from-url * * @param sourceURL - Specify a URL to the copy source, Shared Access Signature(SAS) maybe needed for authentication * @param sourceOffset - The source offset to copy from. Pass 0 to copy from the beginning of source page blob * @param destOffset - Offset of destination page blob * @param count - Number of bytes to be uploaded from source page blob * @param options - */ async uploadPagesFromURL(sourceURL, sourceOffset, destOffset, count, options = {}) { options.conditions = options.conditions || {}; options.sourceConditions = options.sourceConditions || {}; ensureCpkIfSpecified(options.customerProvidedKey, this.isHttps); return tracingClient.withSpan("PageBlobClient-uploadPagesFromURL", options, async (updatedOptions) => { var _a, _b, _c, _d, _e; return assertResponse(await this.pageBlobContext.uploadPagesFromURL(sourceURL, rangeToString({ offset: sourceOffset, count }), 0, rangeToString({ offset: destOffset, count }), { abortSignal: options.abortSignal, sourceContentMD5: options.sourceContentMD5, sourceContentCrc64: options.sourceContentCrc64, leaseAccessConditions: options.conditions, sequenceNumberAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), sourceModifiedAccessConditions: { sourceIfMatch: (_b = options.sourceConditions) === null || _b === void 0 ? void 0 : _b.ifMatch, sourceIfModifiedSince: (_c = options.sourceConditions) === null || _c === void 0 ? void 0 : _c.ifModifiedSince, sourceIfNoneMatch: (_d = options.sourceConditions) === null || _d === void 0 ? void 0 : _d.ifNoneMatch, sourceIfUnmodifiedSince: (_e = options.sourceConditions) === null || _e === void 0 ? void 0 : _e.ifUnmodifiedSince, }, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, copySourceAuthorization: httpAuthorizationToString(options.sourceAuthorization), tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Frees the specified pages from the page blob. * @see https://docs.microsoft.com/rest/api/storageservices/put-page * * @param offset - Starting byte position of the pages to clear. * @param count - Number of bytes to clear. * @param options - Options to the Page Blob Clear Pages operation. * @returns Response data for the Page Blob Clear Pages operation. */ async clearPages(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-clearPages", options, async (updatedOptions) => { var _a; return assertResponse(await this.pageBlobContext.clearPages(0, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? 
void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), sequenceNumberAccessConditions: options.conditions, cpkInfo: options.customerProvidedKey, encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Returns the list of valid page ranges for a page blob or snapshot of a page blob. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param options - Options to the Page Blob Get Ranges operation. * @returns Response data for the Page Blob Get Ranges operation. */ async getPageRanges(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-getPageRanges", options, async (updatedOptions) => { var _a; const response = assertResponse(await this.pageBlobContext.getPageRanges({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), tracingOptions: updatedOptions.tracingOptions, })); return rangeResponseFromModel(response); }); } /** * getPageRangesSegment returns a single segment of page ranges starting from the * specified Marker. Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call getPageRangesSegment again * (passing the the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to PageBlob Get Page Ranges Segment operation. */ async listPageRangesSegment(offset = 0, count, marker, options = {}) { return tracingClient.withSpan("PageBlobClient-getPageRangesSegment", options, async (updatedOptions) => { var _a; return assertResponse(await this.pageBlobContext.getPageRanges({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), range: rangeToString({ offset, count }), marker: marker, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesResponseModel} * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param marker - A string value that identifies the portion of * the get of page ranges to be returned with the next getting operation. The * operation returns the ContinuationToken value within the response body if the * getting operation did not return all page ranges remaining within the current page. * The ContinuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of get * items. The marker value is opaque to the client. * @param options - Options to List Page Ranges operation. 
*/ listPageRangeItemSegments() { return tslib.__asyncGenerator(this, arguments, function* listPageRangeItemSegments_1(offset = 0, count, marker, options = {}) { let getPageRangeItemSegmentsResponse; if (!!marker || marker === undefined) { do { getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesSegment(offset, count, marker, options)); marker = getPageRangeItemSegmentsResponse.continuationToken; yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); } while (marker); } }); } /** * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param options - Options to List Page Ranges operation. */ listPageRangeItems() { return tslib.__asyncGenerator(this, arguments, function* listPageRangeItems_1(offset = 0, count, options = {}) { var _a, e_1, _b, _c; let marker; try { for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeItemSegments(offset, count, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { _c = _f.value; _d = false; const getPageRangesSegment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } }); } /** * Returns an async iterable iterator to list of page ranges for a page blob. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * .byPage() returns an async iterable iterator to list of page ranges for a page blob. * * Example using `for await` syntax: * * ```js * // Get the pageBlobClient before you run these snippets, * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` * let i = 1; * for await (const pageRange of pageBlobClient.listPageRanges()) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * let iter = pageBlobClient.listPageRanges(); * let pageRangeItem = await iter.next(); * while (!pageRangeItem.done) { * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); * pageRangeItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of pageBlobClient.listPageRanges().byPage({ maxPageSize: 20 })) { * for (const pageRange of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = pageBlobClient.listPageRanges().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 page ranges * for (const pageRange of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * * // Gets next marker * let marker = response.continuationToken; * * // Passing next marker as continuationToken * * iterator = pageBlobClient.listPageRanges().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints 10 page ranges * for (const blob of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * ``` * @param offset - Starting 
byte position of the page ranges. * @param count - Number of bytes to get. * @param options - Options to the Page Blob Get Ranges operation. * @returns An asyncIterableIterator that supports paging. */ listPageRanges(offset = 0, count, options = {}) { options.conditions = options.conditions || {}; // AsyncIterableIterator to iterate over blobs const iter = this.listPageRangeItems(offset, count, options); return { /** * The next method, part of the iteration protocol */ next() { return iter.next(); }, /** * The connection to the async iterator, part of the iteration protocol */ [Symbol.asyncIterator]() { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { return this.listPageRangeItemSegments(offset, count, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); }, }; } /** * Gets the collection of page ranges that differ between a specified snapshot and this page blob. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page blob * @param count - Number of bytes to get ranges diff. * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. * @param options - Options to the Page Blob Get Page Ranges Diff operation. * @returns Response data for the Page Blob Get Page Range Diff operation. */ async getPageRangesDiff(offset, count, prevSnapshot, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-getPageRangesDiff", options, async (updatedOptions) => { var _a; const result = assertResponse(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshot, range: rangeToString({ offset, count }), tracingOptions: updatedOptions.tracingOptions, })); return rangeResponseFromModel(result); }); } /** * getPageRangesDiffSegment returns a single segment of page ranges starting from the * specified Marker for difference between previous snapshot and the target page blob. * Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call getPageRangesDiffSegment again * (passing the the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. * @param marker - A string value that identifies the portion of the get to be returned with the next get operation. * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ async listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options = {}) { return tracingClient.withSpan("PageBlobClient-getPageRangesDiffSegment", options, async (updatedOptions) => { var _a; return assertResponse(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options === null || options === void 0 ? void 0 : options.abortSignal, leaseAccessConditions: options === null || options === void 0 ? 
void 0 : options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options === null || options === void 0 ? void 0 : options.conditions), { ifTags: (_a = options === null || options === void 0 ? void 0 : options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevsnapshot: prevSnapshotOrUrl, range: rangeToString({ offset: offset, count: count, }), marker: marker, maxPageSize: options === null || options === void 0 ? void 0 : options.maxPageSize, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Returns an AsyncIterableIterator for {@link PageBlobGetPageRangesDiffResponseModel} * * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. * @param marker - A string value that identifies the portion of * the get of page ranges to be returned with the next getting operation. The * operation returns the ContinuationToken value within the response body if the * getting operation did not return all page ranges remaining within the current page. * The ContinuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of get * items. The marker value is opaque to the client. * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options) { return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItemSegments_1() { let getPageRangeItemSegmentsResponse; if (!!marker || marker === undefined) { do { getPageRangeItemSegmentsResponse = yield tslib.__await(this.listPageRangesDiffSegment(offset, count, prevSnapshotOrUrl, marker, options)); marker = getPageRangeItemSegmentsResponse.continuationToken; yield yield tslib.__await(yield tslib.__await(getPageRangeItemSegmentsResponse)); } while (marker); } }); } /** * Returns an AsyncIterableIterator of {@link PageRangeInfo} objects * * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param prevSnapshotOrUrl - Timestamp of snapshot to retrieve the difference or URL of snapshot to retrieve the difference. * @param options - Options to the Page Blob Get Page Ranges Diff operation. */ listPageRangeDiffItems(offset, count, prevSnapshotOrUrl, options) { return tslib.__asyncGenerator(this, arguments, function* listPageRangeDiffItems_1() { var _a, e_2, _b, _c; let marker; try { for (var _d = true, _e = tslib.__asyncValues(this.listPageRangeDiffItemSegments(offset, count, prevSnapshotOrUrl, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { _c = _f.value; _d = false; const getPageRangesSegment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(ExtractPageRangeInfoItems(getPageRangesSegment)))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } }); } /** * Returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * .byPage() returns an async iterable iterator to list of page ranges that differ between a specified snapshot and this page blob. 
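 * A minimal sketch, assuming a snapshot is taken first and the blob is then
 * modified (variable names here are illustrative, not part of this API):
 *
 * ```js
 * // Take a snapshot, write more pages, then list only the ranges that changed.
 * const snapshotResponse = await pageBlobClient.createSnapshot();
 * // ... upload further pages to pageBlobClient here (the blob is assumed to be at least 10 KiB) ...
 * for await (const pageRange of pageBlobClient.listPageRangesDiff(0, 10 * 1024, snapshotResponse.snapshot)) {
 *   console.log(`Changed range: ${pageRange.start} - ${pageRange.end}, IsClear: ${pageRange.isClear}`);
 * }
 * ```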
* * Example using `for await` syntax: * * ```js * // Get the pageBlobClient before you run these snippets, * // Can be obtained from `blobServiceClient.getContainerClient("").getPageBlobClient("");` * let i = 1; * for await (const pageRange of pageBlobClient.listPageRangesDiff()) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * let iter = pageBlobClient.listPageRangesDiff(); * let pageRangeItem = await iter.next(); * while (!pageRangeItem.done) { * console.log(`Page range ${i++}: ${pageRangeItem.value.start} - ${pageRangeItem.value.end}, IsClear: ${pageRangeItem.value.isClear}`); * pageRangeItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 20 })) { * for (const pageRange of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = pageBlobClient.listPageRangesDiff().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 page ranges * for (const pageRange of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * * // Gets next marker * let marker = response.continuationToken; * * // Passing next marker as continuationToken * * iterator = pageBlobClient.listPageRangesDiff().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints 10 page ranges * for (const blob of response) { * console.log(`Page range ${i++}: ${pageRange.start} - ${pageRange.end}`); * } * ``` * @param offset - Starting byte position of the page ranges. * @param count - Number of bytes to get. * @param prevSnapshot - Timestamp of snapshot to retrieve the difference. * @param options - Options to the Page Blob Get Ranges operation. * @returns An asyncIterableIterator that supports paging. */ listPageRangesDiff(offset, count, prevSnapshot, options = {}) { options.conditions = options.conditions || {}; // AsyncIterableIterator to iterate over blobs const iter = this.listPageRangeDiffItems(offset, count, prevSnapshot, Object.assign({}, options)); return { /** * The next method, part of the iteration protocol */ next() { return iter.next(); }, /** * The connection to the async iterator, part of the iteration protocol */ [Symbol.asyncIterator]() { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { return this.listPageRangeDiffItemSegments(offset, count, prevSnapshot, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, options)); }, }; } /** * Gets the collection of page ranges that differ between a specified snapshot and this page blob for managed disks. * @see https://docs.microsoft.com/rest/api/storageservices/get-page-ranges * * @param offset - Starting byte position of the page blob * @param count - Number of bytes to get ranges diff. * @param prevSnapshotUrl - URL of snapshot to retrieve the difference. * @param options - Options to the Page Blob Get Page Ranges Diff operation. * @returns Response data for the Page Blob Get Page Range Diff operation. 
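 *
 * A minimal sketch, assuming `pageBlobClient` points at a managed-disk page blob
 * and `previousSnapshotUrl` is the URL of an earlier snapshot (both names are illustrative):
 *
 * ```js
 * const diff = await pageBlobClient.getPageRangesDiffForManagedDisks(0, 10 * 1024, previousSnapshotUrl);
 * // Inspect the changed and cleared ranges reported by the service.
 * console.log(diff.pageRange, diff.clearRange);
 * ```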
*/ async getPageRangesDiffForManagedDisks(offset, count, prevSnapshotUrl, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-GetPageRangesDiffForManagedDisks", options, async (updatedOptions) => { var _a; const response = assertResponse(await this.pageBlobContext.getPageRangesDiff({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), prevSnapshotUrl, range: rangeToString({ offset, count }), tracingOptions: updatedOptions.tracingOptions, })); return rangeResponseFromModel(response); }); } /** * Resizes the page blob to the specified size (which must be a multiple of 512). * @see https://docs.microsoft.com/rest/api/storageservices/set-blob-properties * * @param size - Target size * @param options - Options to the Page Blob Resize operation. * @returns Response data for the Page Blob Resize operation. */ async resize(size, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-resize", options, async (updatedOptions) => { var _a; return assertResponse(await this.pageBlobContext.resize(size, { abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), encryptionScope: options.encryptionScope, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Sets a page blob's sequence number. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-properties * * @param sequenceNumberAction - Indicates how the service should modify the blob's sequence number. * @param sequenceNumber - Required if sequenceNumberAction is max or update * @param options - Options to the Page Blob Update Sequence Number operation. * @returns Response data for the Page Blob Update Sequence Number operation. */ async updateSequenceNumber(sequenceNumberAction, sequenceNumber, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("PageBlobClient-updateSequenceNumber", options, async (updatedOptions) => { var _a; return assertResponse(await this.pageBlobContext.updateSequenceNumber(sequenceNumberAction, { abortSignal: options.abortSignal, blobSequenceNumber: sequenceNumber, leaseAccessConditions: options.conditions, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Begins an operation to start an incremental copy from one page blob's snapshot to this page blob. * The snapshot is copied such that only the differential changes between the previously * copied snapshot are transferred to the destination. * The copied snapshots are complete copies of the original snapshot and can be read or copied from as usual. * @see https://docs.microsoft.com/rest/api/storageservices/incremental-copy-blob * @see https://docs.microsoft.com/en-us/azure/virtual-machines/windows/incremental-snapshots * * @param copySource - Specifies the name of the source page blob snapshot. 
For example, * https://myaccount.blob.core.windows.net/mycontainer/myblob?snapshot= * @param options - Options to the Page Blob Copy Incremental operation. * @returns Response data for the Page Blob Copy Incremental operation. */ async startCopyIncremental(copySource, options = {}) { return tracingClient.withSpan("PageBlobClient-startCopyIncremental", options, async (updatedOptions) => { var _a; return assertResponse(await this.pageBlobContext.copyIncremental(copySource, { abortSignal: options.abortSignal, modifiedAccessConditions: Object.assign(Object.assign({}, options.conditions), { ifTags: (_a = options.conditions) === null || _a === void 0 ? void 0 : _a.tagConditions }), tracingOptions: updatedOptions.tracingOptions, })); }); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. async function getBodyAsText(batchResponse) { let buffer = Buffer.alloc(BATCH_MAX_PAYLOAD_IN_BYTES); const responseLength = await streamToBuffer2(batchResponse.readableStreamBody, buffer); // Slice the buffer to trim the empty ending. buffer = buffer.slice(0, responseLength); return buffer.toString(); } function utf8ByteLength(str) { return Buffer.byteLength(str); } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. const HTTP_HEADER_DELIMITER = ": "; const SPACE_DELIMITER = " "; const NOT_FOUND = -1; /** * Util class for parsing batch response. */ class BatchResponseParser { constructor(batchResponse, subRequests) { if (!batchResponse || !batchResponse.contentType) { // In special case(reported), server may return invalid content-type which could not be parsed. throw new RangeError("batchResponse is malformed or doesn't contain valid content-type."); } if (!subRequests || subRequests.size === 0) { // This should be prevent during coding. throw new RangeError("Invalid state: subRequests is not provided or size is 0."); } this.batchResponse = batchResponse; this.subRequests = subRequests; this.responseBatchBoundary = this.batchResponse.contentType.split("=")[1]; this.perResponsePrefix = `--${this.responseBatchBoundary}${HTTP_LINE_ENDING}`; this.batchResponseEnding = `--${this.responseBatchBoundary}--`; } // For example of response, please refer to https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#response async parseBatchResponse() { // When logic reach here, suppose batch request has already succeeded with 202, so we can further parse // sub request's response. if (this.batchResponse._response.status !== HTTPURLConnection.HTTP_ACCEPTED) { throw new Error(`Invalid state: batch request failed with status: '${this.batchResponse._response.status}'.`); } const responseBodyAsText = await getBodyAsText(this.batchResponse); const subResponses = responseBodyAsText .split(this.batchResponseEnding)[0] // string after ending is useless .split(this.perResponsePrefix) .slice(1); // string before first response boundary is useless const subResponseCount = subResponses.length; // Defensive coding in case of potential error parsing. // Note: subResponseCount == 1 is special case where sub request is invalid. // We try to prevent such cases through early validation, e.g. validate sub request count >= 1. // While in unexpected sub request invalid case, we allow sub response to be parsed and return to user. 
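        // For orientation, the raw multipart body being split below looks roughly like
        // the following (boundary value, headers and status line are illustrative only):
        //
        //   --batchresponse_<GUID>
        //   Content-Type: application/http
        //   Content-ID: 0
        //
        //   HTTP/1.1 202 Accepted
        //   x-ms-request-id: <id>
        //   x-ms-version: <service-version>
        //
        //   --batchresponse_<GUID>--
        //
        // Each sub response is delimited by `--<boundary>` (this.perResponsePrefix) and the
        // whole payload ends with `--<boundary>--` (this.batchResponseEnding).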
if (subResponseCount !== this.subRequests.size && subResponseCount !== 1) { throw new Error("Invalid state: sub responses' count is not equal to sub requests' count."); } const deserializedSubResponses = new Array(subResponseCount); let subResponsesSucceededCount = 0; let subResponsesFailedCount = 0; // Parse sub subResponses. for (let index = 0; index < subResponseCount; index++) { const subResponse = subResponses[index]; const deserializedSubResponse = {}; deserializedSubResponse.headers = coreHttpCompat.toHttpHeadersLike(coreRestPipeline.createHttpHeaders()); const responseLines = subResponse.split(`${HTTP_LINE_ENDING}`); let subRespHeaderStartFound = false; let subRespHeaderEndFound = false; let subRespFailed = false; let contentId = NOT_FOUND; for (const responseLine of responseLines) { if (!subRespHeaderStartFound) { // Convention line to indicate content ID if (responseLine.startsWith(HeaderConstants.CONTENT_ID)) { contentId = parseInt(responseLine.split(HTTP_HEADER_DELIMITER)[1]); } // Http version line with status code indicates the start of sub request's response. // Example: HTTP/1.1 202 Accepted if (responseLine.startsWith(HTTP_VERSION_1_1)) { subRespHeaderStartFound = true; const tokens = responseLine.split(SPACE_DELIMITER); deserializedSubResponse.status = parseInt(tokens[1]); deserializedSubResponse.statusMessage = tokens.slice(2).join(SPACE_DELIMITER); } continue; // Skip convention headers not specifically for sub request i.e. Content-Type: application/http and Content-ID: * } if (responseLine.trim() === "") { // Sub response's header start line already found, and the first empty line indicates header end line found. if (!subRespHeaderEndFound) { subRespHeaderEndFound = true; } continue; // Skip empty line } // Note: when code reach here, it indicates subRespHeaderStartFound == true if (!subRespHeaderEndFound) { if (responseLine.indexOf(HTTP_HEADER_DELIMITER) === -1) { // Defensive coding to prevent from missing valuable lines. throw new Error(`Invalid state: find non-empty line '${responseLine}' without HTTP header delimiter '${HTTP_HEADER_DELIMITER}'.`); } // Parse headers of sub response. const tokens = responseLine.split(HTTP_HEADER_DELIMITER); deserializedSubResponse.headers.set(tokens[0], tokens[1]); if (tokens[0] === HeaderConstants.X_MS_ERROR_CODE) { deserializedSubResponse.errorCode = tokens[1]; subRespFailed = true; } } else { // Assemble body of sub response. if (!deserializedSubResponse.bodyAsText) { deserializedSubResponse.bodyAsText = ""; } deserializedSubResponse.bodyAsText += responseLine; } } // Inner for end // The response will contain the Content-ID header for each corresponding subrequest response to use for tracking. // The Content-IDs are set to a valid index in the subrequests we sent. In the status code 202 path, we could expect it // to be 1-1 mapping from the [0, subRequests.size) to the Content-IDs returned. If not, we simply don't return that // unexpected subResponse in the parsed reponse and we can always look it up in the raw response for debugging purpose. 
if (contentId !== NOT_FOUND && Number.isInteger(contentId) && contentId >= 0 && contentId < this.subRequests.size && deserializedSubResponses[contentId] === undefined) { deserializedSubResponse._request = this.subRequests.get(contentId); deserializedSubResponses[contentId] = deserializedSubResponse; } else { logger.error(`subResponses[${index}] is dropped as the Content-ID is not found or invalid, Content-ID: ${contentId}`); } if (subRespFailed) { subResponsesFailedCount++; } else { subResponsesSucceededCount++; } } return { subResponses: deserializedSubResponses, subResponsesSucceededCount: subResponsesSucceededCount, subResponsesFailedCount: subResponsesFailedCount, }; } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. var MutexLockStatus; (function (MutexLockStatus) { MutexLockStatus[MutexLockStatus["LOCKED"] = 0] = "LOCKED"; MutexLockStatus[MutexLockStatus["UNLOCKED"] = 1] = "UNLOCKED"; })(MutexLockStatus || (MutexLockStatus = {})); /** * An async mutex lock. */ class Mutex { /** * Lock for a specific key. If the lock has been acquired by another customer, then * will wait until getting the lock. * * @param key - lock key */ static async lock(key) { return new Promise((resolve) => { if (this.keys[key] === undefined || this.keys[key] === MutexLockStatus.UNLOCKED) { this.keys[key] = MutexLockStatus.LOCKED; resolve(); } else { this.onUnlockEvent(key, () => { this.keys[key] = MutexLockStatus.LOCKED; resolve(); }); } }); } /** * Unlock a key. * * @param key - */ static async unlock(key) { return new Promise((resolve) => { if (this.keys[key] === MutexLockStatus.LOCKED) { this.emitUnlockEvent(key); } delete this.keys[key]; resolve(); }); } static onUnlockEvent(key, handler) { if (this.listeners[key] === undefined) { this.listeners[key] = [handler]; } else { this.listeners[key].push(handler); } } static emitUnlockEvent(key) { if (this.listeners[key] !== undefined && this.listeners[key].length > 0) { const handler = this.listeners[key].shift(); setImmediate(() => { handler.call(this); }); } } } Mutex.keys = {}; Mutex.listeners = {}; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * A BlobBatch represents an aggregated set of operations on blobs. * Currently, only `delete` and `setAccessTier` are supported. */ class BlobBatch { constructor() { this.batch = "batch"; this.batchRequest = new InnerBatchRequest(); } /** * Get the value of Content-Type for a batch request. * The value must be multipart/mixed with a batch boundary. * Example: multipart/mixed; boundary=batch_a81786c8-e301-4e42-a729-a32ca24ae252 */ getMultiPartContentType() { return this.batchRequest.getMultipartContentType(); } /** * Get assembled HTTP request body for sub requests. */ getHttpRequestBody() { return this.batchRequest.getHttpRequestBody(); } /** * Get sub requests that are added into the batch request. 
*/ getSubRequests() { return this.batchRequest.getSubRequests(); } async addSubRequestInternal(subRequest, assembleSubRequestFunc) { await Mutex.lock(this.batch); try { this.batchRequest.preAddSubRequest(subRequest); await assembleSubRequestFunc(); this.batchRequest.postAddSubRequest(subRequest); } finally { await Mutex.unlock(this.batch); } } setBatchType(batchType) { if (!this.batchType) { this.batchType = batchType; } if (this.batchType !== batchType) { throw new RangeError(`BlobBatch only supports one operation type per batch and it already is being used for ${this.batchType} operations.`); } } async deleteBlob(urlOrBlobClient, credentialOrOptions, options) { let url; let credential; if (typeof urlOrBlobClient === "string" && ((coreUtil.isNode && credentialOrOptions instanceof StorageSharedKeyCredential) || credentialOrOptions instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrOptions))) { // First overload url = urlOrBlobClient; credential = credentialOrOptions; } else if (urlOrBlobClient instanceof BlobClient) { // Second overload url = urlOrBlobClient.url; credential = urlOrBlobClient.credential; options = credentialOrOptions; } else { throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); } if (!options) { options = {}; } return tracingClient.withSpan("BatchDeleteRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("delete"); await this.addSubRequestInternal({ url: url, credential: credential, }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).delete(updatedOptions); }); }); } async setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options) { let url; let credential; let tier; if (typeof urlOrBlobClient === "string" && ((coreUtil.isNode && credentialOrTier instanceof StorageSharedKeyCredential) || credentialOrTier instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrTier))) { // First overload url = urlOrBlobClient; credential = credentialOrTier; tier = tierOrOptions; } else if (urlOrBlobClient instanceof BlobClient) { // Second overload url = urlOrBlobClient.url; credential = urlOrBlobClient.credential; tier = credentialOrTier; options = tierOrOptions; } else { throw new RangeError("Invalid arguments. Either url and credential, or BlobClient need be provided."); } if (!options) { options = {}; } return tracingClient.withSpan("BatchSetTierRequest-addSubRequest", options, async (updatedOptions) => { this.setBatchType("setAccessTier"); await this.addSubRequestInternal({ url: url, credential: credential, }, async () => { await new BlobClient(url, this.batchRequest.createPipeline(credential)).setAccessTier(tier, updatedOptions); }); }); } } /** * Inner batch request class which is responsible for assembling and serializing sub requests. * See https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch#request-body for how requests are assembled. 
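 *
 * For orientation, the body this class assembles looks roughly like the following
 * (boundary value, blob path and headers are illustrative only):
 *
 * ```
 * --batch_<GUID>
 * Content-Type: application/http
 * Content-Transfer-Encoding: binary
 * Content-ID: 0
 *
 * DELETE /mycontainer/myblob HTTP/1.1
 * x-ms-date: <date>
 * Authorization: SharedKey <account>:<signature>
 * Content-Length: 0
 *
 * --batch_<GUID>--
 * ```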
*/ class InnerBatchRequest { constructor() { this.operationCount = 0; this.body = ""; const tempGuid = coreUtil.randomUUID(); // batch_{batchid} this.boundary = `batch_${tempGuid}`; // --batch_{batchid} // Content-Type: application/http // Content-Transfer-Encoding: binary this.subRequestPrefix = `--${this.boundary}${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TYPE}: application/http${HTTP_LINE_ENDING}${HeaderConstants.CONTENT_TRANSFER_ENCODING}: binary`; // multipart/mixed; boundary=batch_{batchid} this.multipartContentType = `multipart/mixed; boundary=${this.boundary}`; // --batch_{batchid}-- this.batchRequestEnding = `--${this.boundary}--`; this.subRequests = new Map(); } /** * Create pipeline to assemble sub requests. The idea here is to use existing * credential and serialization/deserialization components, with additional policies to * filter unnecessary headers, assemble sub requests into request's body * and intercept request from going to wire. * @param credential - Such as AnonymousCredential, StorageSharedKeyCredential or any credential from the `@azure/identity` package to authenticate requests to the service. You can also provide an object that implements the TokenCredential interface. If not specified, AnonymousCredential is used. */ createPipeline(credential) { const corePipeline = coreRestPipeline.createEmptyPipeline(); corePipeline.addPolicy(coreClient.serializationPolicy({ stringifyXML: coreXml.stringifyXML, serializerOptions: { xml: { xmlCharKey: "#", }, }, }), { phase: "Serialize" }); // Use batch header filter policy to exclude unnecessary headers corePipeline.addPolicy(batchHeaderFilterPolicy()); // Use batch assemble policy to assemble request and intercept request from going to wire corePipeline.addPolicy(batchRequestAssemblePolicy(this), { afterPhase: "Sign" }); if (coreAuth.isTokenCredential(credential)) { corePipeline.addPolicy(coreRestPipeline.bearerTokenAuthenticationPolicy({ credential, scopes: StorageOAuthScopes, challengeCallbacks: { authorizeRequestOnChallenge: coreClient.authorizeRequestOnTenantChallenge }, }), { phase: "Sign" }); } else if (credential instanceof StorageSharedKeyCredential) { corePipeline.addPolicy(storageSharedKeyCredentialPolicy({ accountName: credential.accountName, accountKey: credential.accountKey, }), { phase: "Sign" }); } const pipeline = new Pipeline([]); // attach the v2 pipeline to this one pipeline._credential = credential; pipeline._corePipeline = corePipeline; return pipeline; } appendSubRequestToBody(request) { // Start to assemble sub request this.body += [ this.subRequestPrefix, // sub request constant prefix `${HeaderConstants.CONTENT_ID}: ${this.operationCount}`, // sub request's content ID "", // empty line after sub request's content ID `${request.method.toString()} ${getURLPathAndQuery(request.url)} ${HTTP_VERSION_1_1}${HTTP_LINE_ENDING}`, // sub request start line with method ].join(HTTP_LINE_ENDING); for (const [name, value] of request.headers) { this.body += `${name}: ${value}${HTTP_LINE_ENDING}`; } this.body += HTTP_LINE_ENDING; // sub request's headers need be ending with an empty line // No body to assemble for current batch request support // End to assemble sub request } preAddSubRequest(subRequest) { if (this.operationCount >= BATCH_MAX_REQUEST) { throw new RangeError(`Cannot exceed ${BATCH_MAX_REQUEST} sub requests in a single batch`); } // Fast fail if url for sub request is invalid const path = getURLPath(subRequest.url); if (!path || path === "") { throw new RangeError(`Invalid url for sub request: 
'${subRequest.url}'`); } } postAddSubRequest(subRequest) { this.subRequests.set(this.operationCount, subRequest); this.operationCount++; } // Return the http request body with assembling the ending line to the sub request body. getHttpRequestBody() { return `${this.body}${this.batchRequestEnding}${HTTP_LINE_ENDING}`; } getMultipartContentType() { return this.multipartContentType; } getSubRequests() { return this.subRequests; } } function batchRequestAssemblePolicy(batchRequest) { return { name: "batchRequestAssemblePolicy", async sendRequest(request) { batchRequest.appendSubRequestToBody(request); return { request, status: 200, headers: coreRestPipeline.createHttpHeaders(), }; }, }; } function batchHeaderFilterPolicy() { return { name: "batchHeaderFilterPolicy", async sendRequest(request, next) { let xMsHeaderName = ""; for (const [name] of request.headers) { if (iEqual(name, HeaderConstants.X_MS_VERSION)) { xMsHeaderName = name; } } if (xMsHeaderName !== "") { request.headers.delete(xMsHeaderName); // The subrequests should not have the x-ms-version header. } return next(request); }, }; } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * A BlobBatchClient allows you to make batched requests to the Azure Storage Blob service. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch */ class BlobBatchClient { constructor(url, credentialOrPipeline, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { let pipeline; if (isPipelineLike(credentialOrPipeline)) { pipeline = credentialOrPipeline; } else if (!credentialOrPipeline) { // no credential provided pipeline = newPipeline(new AnonymousCredential(), options); } else { pipeline = newPipeline(credentialOrPipeline, options); } const storageClientContext = new StorageContextClient(url, getCoreClientOptions(pipeline)); const path = getURLPath(url); if (path && path !== "/") { // Container scoped. this.serviceOrContainerContext = storageClientContext.container; } else { this.serviceOrContainerContext = storageClientContext.service; } } /** * Creates a {@link BlobBatch}. * A BlobBatch represents an aggregated set of operations on blobs. */ createBatch() { return new BlobBatch(); } async deleteBlobs(urlsOrBlobClients, credentialOrOptions, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { const batch = new BlobBatch(); for (const urlOrBlobClient of urlsOrBlobClients) { if (typeof urlOrBlobClient === "string") { await batch.deleteBlob(urlOrBlobClient, credentialOrOptions, options); } else { await batch.deleteBlob(urlOrBlobClient, credentialOrOptions); } } return this.submitBatch(batch); } async setBlobsAccessTier(urlsOrBlobClients, credentialOrTier, tierOrOptions, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { const batch = new BlobBatch(); for (const urlOrBlobClient of urlsOrBlobClients) { if (typeof urlOrBlobClient === "string") { await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions, options); } else { await batch.setBlobAccessTier(urlOrBlobClient, credentialOrTier, tierOrOptions); } } return this.submitBatch(batch); } /** * Submit batch request which consists of multiple subrequests. 
* * Get `blobBatchClient` and other details before running the snippets. * `blobServiceClient.getBlobBatchClient()` gives the `blobBatchClient` * * Example usage: * * ```js * let batchRequest = new BlobBatch(); * await batchRequest.deleteBlob(urlInString0, credential0); * await batchRequest.deleteBlob(urlInString1, credential1, { * deleteSnapshots: "include" * }); * const batchResp = await blobBatchClient.submitBatch(batchRequest); * console.log(batchResp.subResponsesSucceededCount); * ``` * * Example using a lease: * * ```js * let batchRequest = new BlobBatch(); * await batchRequest.setBlobAccessTier(blockBlobClient0, "Cool"); * await batchRequest.setBlobAccessTier(blockBlobClient1, "Cool", { * conditions: { leaseId: leaseId } * }); * const batchResp = await blobBatchClient.submitBatch(batchRequest); * console.log(batchResp.subResponsesSucceededCount); * ``` * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch * * @param batchRequest - A set of Delete or SetTier operations. * @param options - */ async submitBatch(batchRequest, options = {}) { if (!batchRequest || batchRequest.getSubRequests().size === 0) { throw new RangeError("Batch request should contain one or more sub requests."); } return tracingClient.withSpan("BlobBatchClient-submitBatch", options, async (updatedOptions) => { const batchRequestBody = batchRequest.getHttpRequestBody(); // ServiceSubmitBatchResponseModel and ContainerSubmitBatchResponse are compatible for now. const rawBatchResponse = assertResponse(await this.serviceOrContainerContext.submitBatch(utf8ByteLength(batchRequestBody), batchRequest.getMultiPartContentType(), batchRequestBody, Object.assign({}, updatedOptions))); // Parse the sub responses result, if logic reaches here(i.e. the batch request succeeded with status code 202). const batchResponseParser = new BatchResponseParser(rawBatchResponse, batchRequest.getSubRequests()); const responseSummary = await batchResponseParser.parseBatchResponse(); const res = { _response: rawBatchResponse._response, contentType: rawBatchResponse.contentType, errorCode: rawBatchResponse.errorCode, requestId: rawBatchResponse.requestId, clientRequestId: rawBatchResponse.clientRequestId, version: rawBatchResponse.version, subResponses: responseSummary.subResponses, subResponsesSucceededCount: responseSummary.subResponsesSucceededCount, subResponsesFailedCount: responseSummary.subResponsesFailedCount, }; return res; }); } } /** * A ContainerClient represents a URL to the Azure Storage container allowing you to manipulate its blobs. */ class ContainerClient extends StorageClient { /** * The name of the container. */ get containerName() { return this._containerName; } constructor(urlOrConnectionString, credentialOrPipelineOrContainerName, // Legacy, no fix for eslint error without breaking. Disable it for this interface. 
/* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { let pipeline; let url; options = options || {}; if (isPipelineLike(credentialOrPipelineOrContainerName)) { // (url: string, pipeline: Pipeline) url = urlOrConnectionString; pipeline = credentialOrPipelineOrContainerName; } else if ((coreUtil.isNode && credentialOrPipelineOrContainerName instanceof StorageSharedKeyCredential) || credentialOrPipelineOrContainerName instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipelineOrContainerName)) { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) url = urlOrConnectionString; pipeline = newPipeline(credentialOrPipelineOrContainerName, options); } else if (!credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName !== "string") { // (url: string, credential?: StorageSharedKeyCredential | AnonymousCredential | TokenCredential, options?: StoragePipelineOptions) // The second parameter is undefined. Use anonymous credential. url = urlOrConnectionString; pipeline = newPipeline(new AnonymousCredential(), options); } else if (credentialOrPipelineOrContainerName && typeof credentialOrPipelineOrContainerName === "string") { // (connectionString: string, containerName: string, blobName: string, options?: StoragePipelineOptions) const containerName = credentialOrPipelineOrContainerName; const extractedCreds = extractConnectionStringParts(urlOrConnectionString); if (extractedCreds.kind === "AccountConnString") { if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)); if (!options.proxyOptions) { options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } pipeline = newPipeline(sharedKeyCredential, options); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { url = appendToURLPath(extractedCreds.url, encodeURIComponent(containerName)) + "?" + extractedCreds.accountSas; pipeline = newPipeline(new AnonymousCredential(), options); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } else { throw new Error("Expecting non-empty strings for containerName parameter"); } super(url, pipeline); this._containerName = this.getContainerNameFromUrl(); this.containerContext = this.storageClientContext.container; } /** * Creates a new container under the specified account. If the container with * the same name already exists, the operation fails. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * * @param options - Options to Container Create operation. 
* * * Example usage: * * ```js * const containerClient = blobServiceClient.getContainerClient(""); * const createContainerResponse = await containerClient.create(); * console.log("Container was created successfully", createContainerResponse.requestId); * ``` */ async create(options = {}) { return tracingClient.withSpan("ContainerClient-create", options, async (updatedOptions) => { return assertResponse(await this.containerContext.create(updatedOptions)); }); } /** * Creates a new container under the specified account. If the container with * the same name already exists, it is not changed. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container * Naming rules: @see https://learn.microsoft.com/rest/api/storageservices/naming-and-referencing-containers--blobs--and-metadata * * @param options - */ async createIfNotExists(options = {}) { return tracingClient.withSpan("ContainerClient-createIfNotExists", options, async (updatedOptions) => { var _a, _b; try { const res = await this.create(updatedOptions); return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerAlreadyExists") { return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } else { throw e; } } }); } /** * Returns true if the Azure container resource represented by this client exists; false otherwise. * * NOTE: use this function with care since an existing container might be deleted by other clients or * applications. Vice versa new containers with the same name might be added by other clients or * applications after this function completes. * * @param options - */ async exists(options = {}) { return tracingClient.withSpan("ContainerClient-exists", options, async (updatedOptions) => { try { await this.getProperties({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions, }); return true; } catch (e) { if (e.statusCode === 404) { return false; } throw e; } }); } /** * Creates a {@link BlobClient} * * @param blobName - A blob name * @returns A new BlobClient object for the given blob name. */ getBlobClient(blobName) { return new BlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** * Creates an {@link AppendBlobClient} * * @param blobName - An append blob name */ getAppendBlobClient(blobName) { return new AppendBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** * Creates a {@link BlockBlobClient} * * @param blobName - A block blob name * * * Example usage: * * ```js * const content = "Hello world!"; * * const blockBlobClient = containerClient.getBlockBlobClient(""); * const uploadBlobResponse = await blockBlobClient.upload(content, content.length); * ``` */ getBlockBlobClient(blobName) { return new BlockBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** * Creates a {@link PageBlobClient} * * @param blobName - A page blob name */ getPageBlobClient(blobName) { return new PageBlobClient(appendToURLPath(this.url, EscapePath(blobName)), this.pipeline); } /** * Returns all user-defined metadata and system properties for the specified * container. The data returned does not include the container's list of blobs. 
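 *
 * A minimal usage sketch (assumes `containerClient` was obtained elsewhere):
 *
 * ```js
 * const properties = await containerClient.getProperties();
 * console.log(`Lease state: ${properties.leaseState}`);
 * console.log(`Metadata keys: ${Object.keys(properties.metadata || {}).join(", ")}`);
 * ```
 *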
* @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-properties * * WARNING: The `metadata` object returned in the response will have its keys in lowercase, even if * they originally contained uppercase characters. This differs from the metadata keys returned by * the `listContainers` method of {@link BlobServiceClient} using the `includeMetadata` option, which * will retain their original casing. * * @param options - Options to Container Get Properties operation. */ async getProperties(options = {}) { if (!options.conditions) { options.conditions = {}; } return tracingClient.withSpan("ContainerClient-getProperties", options, async (updatedOptions) => { return assertResponse(await this.containerContext.getProperties(Object.assign(Object.assign({ abortSignal: options.abortSignal }, options.conditions), { tracingOptions: updatedOptions.tracingOptions }))); }); } /** * Marks the specified container for deletion. The container and any blobs * contained within it are later deleted during garbage collection. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container * * @param options - Options to Container Delete operation. */ async delete(options = {}) { if (!options.conditions) { options.conditions = {}; } return tracingClient.withSpan("ContainerClient-delete", options, async (updatedOptions) => { return assertResponse(await this.containerContext.delete({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Marks the specified container for deletion if it exists. The container and any blobs * contained within it are later deleted during garbage collection. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-container * * @param options - Options to Container Delete operation. */ async deleteIfExists(options = {}) { return tracingClient.withSpan("ContainerClient-deleteIfExists", options, async (updatedOptions) => { var _a, _b; try { const res = await this.delete(updatedOptions); return Object.assign(Object.assign({ succeeded: true }, res), { _response: res._response }); } catch (e) { if (((_a = e.details) === null || _a === void 0 ? void 0 : _a.errorCode) === "ContainerNotFound") { return Object.assign(Object.assign({ succeeded: false }, (_b = e.response) === null || _b === void 0 ? void 0 : _b.parsedHeaders), { _response: e.response }); } throw e; } }); } /** * Sets one or more user-defined name-value pairs for the specified container. * * If no option provided, or no metadata defined in the parameter, the container * metadata will be removed. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-metadata * * @param metadata - Replace existing metadata with this value. * If no value provided the existing metadata will be removed. * @param options - Options to Container Set Metadata operation. 
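 *
 * A minimal usage sketch (the metadata keys shown are arbitrary examples):
 *
 * ```js
 * await containerClient.setMetadata({ project: "demo", owner: "team-a" });
 * // Calling setMetadata with no argument removes the container's existing metadata.
 * await containerClient.setMetadata();
 * ```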
*/ async setMetadata(metadata, options = {}) { if (!options.conditions) { options.conditions = {}; } if (options.conditions.ifUnmodifiedSince) { throw new RangeError("the IfUnmodifiedSince must have their default values because they are ignored by the blob service"); } return tracingClient.withSpan("ContainerClient-setMetadata", options, async (updatedOptions) => { return assertResponse(await this.containerContext.setMetadata({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, metadata, modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Gets the permissions for the specified container. The permissions indicate * whether container data may be accessed publicly. * * WARNING: JavaScript Date will potentially lose precision when parsing startsOn and expiresOn strings. * For example, new Date("2018-12-31T03:44:23.8827891Z").toISOString() will get "2018-12-31T03:44:23.882Z". * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-container-acl * * @param options - Options to Container Get Access Policy operation. */ async getAccessPolicy(options = {}) { if (!options.conditions) { options.conditions = {}; } return tracingClient.withSpan("ContainerClient-getAccessPolicy", options, async (updatedOptions) => { const response = assertResponse(await this.containerContext.getAccessPolicy({ abortSignal: options.abortSignal, leaseAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions, })); const res = { _response: response._response, blobPublicAccess: response.blobPublicAccess, date: response.date, etag: response.etag, errorCode: response.errorCode, lastModified: response.lastModified, requestId: response.requestId, clientRequestId: response.clientRequestId, signedIdentifiers: [], version: response.version, }; for (const identifier of response) { let accessPolicy = undefined; if (identifier.accessPolicy) { accessPolicy = { permissions: identifier.accessPolicy.permissions, }; if (identifier.accessPolicy.expiresOn) { accessPolicy.expiresOn = new Date(identifier.accessPolicy.expiresOn); } if (identifier.accessPolicy.startsOn) { accessPolicy.startsOn = new Date(identifier.accessPolicy.startsOn); } } res.signedIdentifiers.push({ accessPolicy, id: identifier.id, }); } return res; }); } /** * Sets the permissions for the specified container. The permissions indicate * whether blobs in a container may be accessed publicly. * * When you set permissions for a container, the existing permissions are replaced. * If no access or containerAcl provided, the existing container ACL will be * removed. * * When you establish a stored access policy on a container, it may take up to 30 seconds to take effect. * During this interval, a shared access signature that is associated with the stored access policy will * fail with status code 403 (Forbidden), until the access policy becomes active. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-container-acl * * @param access - The level of public access to data in the container. * @param containerAcl - Array of elements each having a unique Id and details of the access policy. * @param options - Options to Container Set Access Policy operation. 
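 *
 * A minimal usage sketch (the policy id, times and permissions are illustrative):
 *
 * ```js
 * await containerClient.setAccessPolicy("blob", [
 *   {
 *     id: "read-policy-1",
 *     accessPolicy: {
 *       startsOn: new Date(),
 *       expiresOn: new Date(Date.now() + 24 * 60 * 60 * 1000),
 *       permissions: "r",
 *     },
 *   },
 * ]);
 * ```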
*/ async setAccessPolicy(access, containerAcl, options = {}) { options.conditions = options.conditions || {}; return tracingClient.withSpan("ContainerClient-setAccessPolicy", options, async (updatedOptions) => { const acl = []; for (const identifier of containerAcl || []) { acl.push({ accessPolicy: { expiresOn: identifier.accessPolicy.expiresOn ? truncatedISO8061Date(identifier.accessPolicy.expiresOn) : "", permissions: identifier.accessPolicy.permissions, startsOn: identifier.accessPolicy.startsOn ? truncatedISO8061Date(identifier.accessPolicy.startsOn) : "", }, id: identifier.id, }); } return assertResponse(await this.containerContext.setAccessPolicy({ abortSignal: options.abortSignal, access, containerAcl: acl, leaseAccessConditions: options.conditions, modifiedAccessConditions: options.conditions, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Get a {@link BlobLeaseClient} that manages leases on the container. * * @param proposeLeaseId - Initial proposed lease Id. * @returns A new BlobLeaseClient object for managing leases on the container. */ getBlobLeaseClient(proposeLeaseId) { return new BlobLeaseClient(this, proposeLeaseId); } /** * Creates a new block blob, or updates the content of an existing block blob. * * Updating an existing block blob overwrites any existing metadata on the blob. * Partial updates are not supported; the content of the existing blob is * overwritten with the new content. To perform a partial update of a block blob's, * use {@link BlockBlobClient.stageBlock} and {@link BlockBlobClient.commitBlockList}. * * This is a non-parallel uploading method, please use {@link BlockBlobClient.uploadFile}, * {@link BlockBlobClient.uploadStream} or {@link BlockBlobClient.uploadBrowserData} for better * performance with concurrency uploading. * * @see https://docs.microsoft.com/rest/api/storageservices/put-blob * * @param blobName - Name of the block blob to create or update. * @param body - Blob, string, ArrayBuffer, ArrayBufferView or a function * which returns a new Readable stream whose offset is from data source beginning. * @param contentLength - Length of body in bytes. Use Buffer.byteLength() to calculate body length for a * string including non non-Base64/Hex-encoded characters. * @param options - Options to configure the Block Blob Upload operation. * @returns Block Blob upload response data and the corresponding BlockBlobClient instance. */ async uploadBlockBlob(blobName, body, contentLength, options = {}) { return tracingClient.withSpan("ContainerClient-uploadBlockBlob", options, async (updatedOptions) => { const blockBlobClient = this.getBlockBlobClient(blobName); const response = await blockBlobClient.upload(body, contentLength, updatedOptions); return { blockBlobClient, response, }; }); } /** * Marks the specified blob or snapshot for deletion. The blob is later deleted * during garbage collection. Note that in order to delete a blob, you must delete * all of its snapshots. You can delete both at the same time with the Delete * Blob operation. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/delete-blob * * @param blobName - * @param options - Options to Blob Delete operation. * @returns Block blob deletion response data. 
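 *
 * A minimal usage sketch (the blob name is illustrative):
 *
 * ```js
 * // Delete a blob together with its snapshots; pass `versionId` in the options
 * // instead to delete a specific version.
 * await containerClient.deleteBlob("logs/2024-01-01.txt", { deleteSnapshots: "include" });
 * ```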
*/ async deleteBlob(blobName, options = {}) { return tracingClient.withSpan("ContainerClient-deleteBlob", options, async (updatedOptions) => { let blobClient = this.getBlobClient(blobName); if (options.versionId) { blobClient = blobClient.withVersion(options.versionId); } return blobClient.delete(updatedOptions); }); } /** * listBlobFlatSegment returns a single segment of blobs starting from the * specified Marker. Use an empty Marker to start enumeration from the beginning. * After getting a segment, process it, and then call listBlobsFlatSegment again * (passing the the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Flat Segment operation. */ async listBlobFlatSegment(marker, options = {}) { return tracingClient.withSpan("ContainerClient-listBlobFlatSegment", options, async (updatedOptions) => { const response = assertResponse(await this.containerContext.listBlobFlatSegment(Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobFlat(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); return blobItem; }) }) }); return wrappedResponse; }); } /** * listBlobHierarchySegment returns a single segment of blobs starting from * the specified Marker. Use an empty Marker to start enumeration from the * beginning. After getting a segment, process it, and then call listBlobsHierarchicalSegment * again (passing the the previously-returned Marker) to get the next segment. * @see https://docs.microsoft.com/rest/api/storageservices/list-blobs * * @param delimiter - The character or string used to define the virtual hierarchy * @param marker - A string value that identifies the portion of the list to be returned with the next list operation. * @param options - Options to Container List Blob Hierarchy Segment operation. 
*/ async listBlobHierarchySegment(delimiter, marker, options = {}) { return tracingClient.withSpan("ContainerClient-listBlobHierarchySegment", options, async (updatedOptions) => { var _a; const response = assertResponse(await this.containerContext.listBlobHierarchySegment(delimiter, Object.assign(Object.assign({ marker }, options), { tracingOptions: updatedOptions.tracingOptions }))); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: Object.assign(Object.assign({}, response._response), { parsedBody: ConvertInternalResponseOfListBlobHierarchy(response._response.parsedBody) }), segment: Object.assign(Object.assign({}, response.segment), { blobItems: response.segment.blobItems.map((blobItemInternal) => { const blobItem = Object.assign(Object.assign({}, blobItemInternal), { name: BlobNameToString(blobItemInternal.name), tags: toTags(blobItemInternal.blobTags), objectReplicationSourceProperties: parseObjectReplicationRecord(blobItemInternal.objectReplicationMetadata) }); return blobItem; }), blobPrefixes: (_a = response.segment.blobPrefixes) === null || _a === void 0 ? void 0 : _a.map((blobPrefixInternal) => { const blobPrefix = Object.assign(Object.assign({}, blobPrefixInternal), { name: BlobNameToString(blobPrefixInternal.name) }); return blobPrefix; }) }) }); return wrappedResponse; }); } /** * Returns an AsyncIterableIterator for ContainerListBlobFlatSegmentResponse * * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the ContinuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The ContinuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ listSegments(marker_1) { return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { let listBlobsFlatSegmentResponse; if (!!marker || marker === undefined) { do { listBlobsFlatSegmentResponse = yield tslib.__await(this.listBlobFlatSegment(marker, options)); marker = listBlobsFlatSegmentResponse.continuationToken; yield yield tslib.__await(yield tslib.__await(listBlobsFlatSegmentResponse)); } while (marker); } }); } /** * Returns an AsyncIterableIterator of {@link BlobItem} objects * * @param options - Options to list blobs operation. */ listItems() { return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { var _a, e_1, _b, _c; let marker; try { for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { _c = _f.value; _d = false; const listBlobsFlatSegmentResponse = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(listBlobsFlatSegmentResponse.segment.blobItems))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } }); } /** * Returns an async iterable iterator to list all the blobs * under the specified account. * * .byPage() returns an async iterable iterator to list the blobs in pages. 
* * Example using `for await` syntax: * * ```js * // Get the containerClient before you run these snippets, * // Can be obtained from `blobServiceClient.getContainerClient("");` * let i = 1; * for await (const blob of containerClient.listBlobsFlat()) { * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * let iter = containerClient.listBlobsFlat(); * let blobItem = await iter.next(); * while (!blobItem.done) { * console.log(`Blob ${i++}: ${blobItem.value.name}`); * blobItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of containerClient.listBlobsFlat().byPage({ maxPageSize: 20 })) { * for (const blob of response.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = containerClient.listBlobsFlat().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 blob names * for (const blob of response.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * * // Gets next marker * let marker = response.continuationToken; * * // Passing next marker as continuationToken * * iterator = containerClient.listBlobsFlat().byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints 10 blob names * for (const blob of response.segment.blobItems) { * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * * @param options - Options to list blobs. * @returns An asyncIterableIterator that supports paging. */ listBlobsFlat(options = {}) { const include = []; if (options.includeCopy) { include.push("copy"); } if (options.includeDeleted) { include.push("deleted"); } if (options.includeMetadata) { include.push("metadata"); } if (options.includeSnapshots) { include.push("snapshots"); } if (options.includeVersions) { include.push("versions"); } if (options.includeUncommitedBlobs) { include.push("uncommittedblobs"); } if (options.includeTags) { include.push("tags"); } if (options.includeDeletedWithVersions) { include.push("deletedwithversions"); } if (options.includeImmutabilityPolicy) { include.push("immutabilitypolicy"); } if (options.includeLegalHold) { include.push("legalhold"); } if (options.prefix === "") { options.prefix = undefined; } const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include: include } : {})); // AsyncIterableIterator to iterate over blobs const iter = this.listItems(updatedOptions); return { /** * The next method, part of the iteration protocol */ next() { return iter.next(); }, /** * The connection to the async iterator, part of the iteration protocol */ [Symbol.asyncIterator]() { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); }, }; } /** * Returns an AsyncIterableIterator for ContainerListBlobHierarchySegmentResponse * * @param delimiter - The character or string used to define the virtual hierarchy * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. 
The * operation returns the ContinuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The ContinuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to list blobs operation. */ listHierarchySegments(delimiter_1, marker_1) { return tslib.__asyncGenerator(this, arguments, function* listHierarchySegments_1(delimiter, marker, options = {}) { let listBlobsHierarchySegmentResponse; if (!!marker || marker === undefined) { do { listBlobsHierarchySegmentResponse = yield tslib.__await(this.listBlobHierarchySegment(delimiter, marker, options)); marker = listBlobsHierarchySegmentResponse.continuationToken; yield yield tslib.__await(yield tslib.__await(listBlobsHierarchySegmentResponse)); } while (marker); } }); } /** * Returns an AsyncIterableIterator for {@link BlobPrefix} and {@link BlobItem} objects. * * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. */ listItemsByHierarchy(delimiter_1) { return tslib.__asyncGenerator(this, arguments, function* listItemsByHierarchy_1(delimiter, options = {}) { var _a, e_2, _b, _c; let marker; try { for (var _d = true, _e = tslib.__asyncValues(this.listHierarchySegments(delimiter, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { _c = _f.value; _d = false; const listBlobsHierarchySegmentResponse = _c; const segment = listBlobsHierarchySegmentResponse.segment; if (segment.blobPrefixes) { for (const prefix of segment.blobPrefixes) { yield yield tslib.__await(Object.assign({ kind: "prefix" }, prefix)); } } for (const blob of segment.blobItems) { yield yield tslib.__await(Object.assign({ kind: "blob" }, blob)); } } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } }); } /** * Returns an async iterable iterator to list all the blobs by hierarchy. * under the specified account. * * .byPage() returns an async iterable iterator to list the blobs by hierarchy in pages. 
* * Example using `for await` syntax: * * ```js * for await (const item of containerClient.listBlobsByHierarchy("/")) { * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { * console.log(`\tBlobItem: name - ${item.name}`); * } * } * ``` * * Example using `iter.next()`: * * ```js * let iter = containerClient.listBlobsByHierarchy("/", { prefix: "prefix1/" }); * let entity = await iter.next(); * while (!entity.done) { * let item = entity.value; * if (item.kind === "prefix") { * console.log(`\tBlobPrefix: ${item.name}`); * } else { * console.log(`\tBlobItem: name - ${item.name}`); * } * entity = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * console.log("Listing blobs by hierarchy by page"); * for await (const response of containerClient.listBlobsByHierarchy("/").byPage()) { * const segment = response.segment; * if (segment.blobPrefixes) { * for (const prefix of segment.blobPrefixes) { * console.log(`\tBlobPrefix: ${prefix.name}`); * } * } * for (const blob of response.segment.blobItems) { * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` * * Example using paging with a max page size: * * ```js * console.log("Listing blobs by hierarchy by page, specifying a prefix and a max page size"); * * let i = 1; * for await (const response of containerClient * .listBlobsByHierarchy("/", { prefix: "prefix2/sub1/" }) * .byPage({ maxPageSize: 2 })) { * console.log(`Page ${i++}`); * const segment = response.segment; * * if (segment.blobPrefixes) { * for (const prefix of segment.blobPrefixes) { * console.log(`\tBlobPrefix: ${prefix.name}`); * } * } * * for (const blob of response.segment.blobItems) { * console.log(`\tBlobItem: name - ${blob.name}`); * } * } * ``` * * @param delimiter - The character or string used to define the virtual hierarchy * @param options - Options to list blobs operation. */ listBlobsByHierarchy(delimiter, options = {}) { if (delimiter === "") { throw new RangeError("delimiter should contain one or more characters"); } const include = []; if (options.includeCopy) { include.push("copy"); } if (options.includeDeleted) { include.push("deleted"); } if (options.includeMetadata) { include.push("metadata"); } if (options.includeSnapshots) { include.push("snapshots"); } if (options.includeVersions) { include.push("versions"); } if (options.includeUncommitedBlobs) { include.push("uncommittedblobs"); } if (options.includeTags) { include.push("tags"); } if (options.includeDeletedWithVersions) { include.push("deletedwithversions"); } if (options.includeImmutabilityPolicy) { include.push("immutabilitypolicy"); } if (options.includeLegalHold) { include.push("legalhold"); } if (options.prefix === "") { options.prefix = undefined; } const updatedOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? 
{ include: include } : {})); // AsyncIterableIterator to iterate over blob prefixes and blobs const iter = this.listItemsByHierarchy(delimiter, updatedOptions); return { /** * The next method, part of the iteration protocol */ async next() { return iter.next(); }, /** * The connection to the async iterator, part of the iteration protocol */ [Symbol.asyncIterator]() { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { return this.listHierarchySegments(delimiter, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, updatedOptions)); }, }; } /** * The Filter Blobs operation enables callers to list blobs in the container whose tags * match a given search expression. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { return tracingClient.withSpan("ContainerClient-findBlobsByTagsSegment", options, async (updatedOptions) => { const response = assertResponse(await this.containerContext.filterBlobs({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions, })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { var _a; let tagValue = ""; if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { tagValue = blob.tags.blobTagSet[0].value; } return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); }) }); return wrappedResponse; }); } /** * Returns an AsyncIterableIterator for ContainerFindBlobsByTagsSegmentResponse. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. 
* @param options - Options to find blobs by tags. */ findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { let response; if (!!marker || marker === undefined) { do { response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); response.blobs = response.blobs || []; marker = response.continuationToken; yield yield tslib.__await(response); } while (marker); } }); } /** * Returns an AsyncIterableIterator for blobs. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ findBlobsByTagsItems(tagFilterSqlExpression_1) { return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { var _a, e_3, _b, _c; let marker; try { for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { _c = _f.value; _d = false; const segment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_3_1) { e_3 = { error: e_3_1 }; } finally { try { if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_3) throw e_3.error; } } }); } /** * Returns an async iterable iterator to find all blobs with specified tag * under the specified container. * * .byPage() returns an async iterable iterator to list the blobs in pages. 
* * Example using `for await` syntax: * * ```js * let i = 1; * for await (const blob of containerClient.findBlobsByTags("tagkey='tagvalue'")) { * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * const iter = containerClient.findBlobsByTags("tagkey='tagvalue'"); * let blobItem = await iter.next(); * while (!blobItem.done) { * console.log(`Blob ${i++}: ${blobItem.value.name}`); * blobItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = containerClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 blob names * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken * iterator = containerClient * .findBlobsByTags("tagkey='tagvalue'") * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints blob names * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to find blobs by tags. */ findBlobsByTags(tagFilterSqlExpression, options = {}) { // AsyncIterableIterator to iterate over blobs const listSegmentOptions = Object.assign({}, options); const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { /** * The next method, part of the iteration protocol */ next() { return iter.next(); }, /** * The connection to the async iterator, part of the iteration protocol */ [Symbol.asyncIterator]() { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); }, }; } /** * The Get Account Information operation returns the sku name and account kind * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. 
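 *
 * A minimal sketch, assuming a `containerClient` instance is already in scope
 * (the `skuName` and `accountKind` property names are assumed here):
 *
 * ```js
 * const accountInfo = await containerClient.getAccountInfo();
 * console.log(`SKU: ${accountInfo.skuName}, account kind: ${accountInfo.accountKind}`);
 * ```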
*/ async getAccountInfo(options = {}) { return tracingClient.withSpan("ContainerClient-getAccountInfo", options, async (updatedOptions) => { return assertResponse(await this.containerContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions, })); }); } getContainerNameFromUrl() { let containerName; try { // URL may look like the following // "https://myaccount.blob.core.windows.net/mycontainer?sasString"; // "https://myaccount.blob.core.windows.net/mycontainer"; // IPv4/IPv6 address hosts, Endpoints - `http://127.0.0.1:10000/devstoreaccount1/containername` // http://localhost:10001/devstoreaccount1/containername const parsedUrl = new URL(this.url); if (parsedUrl.hostname.split(".")[1] === "blob") { // "https://myaccount.blob.core.windows.net/containername". // "https://customdomain.com/containername". // .getPath() -> /containername containerName = parsedUrl.pathname.split("/")[1]; } else if (isIpEndpointStyle(parsedUrl)) { // IPv4/IPv6 address hosts... Example - http://192.0.0.10:10001/devstoreaccount1/containername // Single word domain without a [dot] in the endpoint... Example - http://localhost:10001/devstoreaccount1/containername // .getPath() -> /devstoreaccount1/containername containerName = parsedUrl.pathname.split("/")[2]; } else { // "https://customdomain.com/containername". // .getPath() -> /containername containerName = parsedUrl.pathname.split("/")[1]; } // decode the encoded containerName - to get all the special characters that might be present in it containerName = decodeURIComponent(containerName); if (!containerName) { throw new Error("Provided containerName is invalid."); } return containerName; } catch (error) { throw new Error("Unable to extract containerName with provided information."); } } /** * Only available for ContainerClient constructed with a shared key credential. * * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateSasUrl(options) { return new Promise((resolve) => { if (!(this.credential instanceof StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), this.credential).toString(); resolve(appendToURLQuery(this.url, sas)); }); } /** * Only available for ContainerClient constructed with a shared key credential. * * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI * based on the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. 
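 *
 * The options here mirror those accepted by {@link ContainerClient.generateSasUrl}. A minimal
 * sketch of generating a read-only container SAS URL, assuming the client was constructed with a
 * StorageSharedKeyCredential and that `ContainerSASPermissions` (defined elsewhere in this module)
 * is in scope:
 *
 * ```js
 * const sasUrl = await containerClient.generateSasUrl({
 *   permissions: ContainerSASPermissions.parse("r"),          // read-only
 *   expiresOn: new Date(Date.now() + 3600 * 1000),            // valid for one hour
 * });
 * console.log(sasUrl);
 * ```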
*/ /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ generateSasStringToSign(options) { if (!(this.credential instanceof StorageSharedKeyCredential)) { throw new RangeError("Can only generate the SAS when the client is initialized with a shared key credential"); } return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), this.credential).stringToSign; } /** * Generates a Blob Container Service Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the input user delegation key. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasUrl(options, userDelegationKey) { return new Promise((resolve) => { const sas = generateBlobSASQueryParameters(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).toString(); resolve(appendToURLQuery(this.url, sas)); }); } /** * Generates string to sign for a Blob Container Service Shared Access Signature (SAS) URI * based on the client properties and parameters passed in. The SAS is signed by the input user delegation key. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-a-service-sas * * @param options - Optional parameters. * @param userDelegationKey - Return value of `blobServiceClient.getUserDelegationKey()` * @returns The SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateUserDelegationSasStringToSign(options, userDelegationKey) { return generateBlobSASQueryParametersInternal(Object.assign({ containerName: this._containerName }, options), userDelegationKey, this.accountName).stringToSign; } /** * Creates a BlobBatchClient object to conduct batch operations. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch * * @returns A new BlobBatchClient object for this container. */ getBlobBatchClient() { return new BlobBatchClient(this.url, this.pipeline); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * This is a helper class to construct a string representing the permissions granted by an AccountSAS. Setting a value * to true means that any SAS which uses these permissions will grant permissions for that operation. Once all the * values are set, this should be serialized with toString and set as the permissions field on an * {@link AccountSASSignatureValues} object. It is possible to construct the permissions string without this class, but * the order of the permissions is particular and this class guarantees correctness. */ class AccountSASPermissions { constructor() { /** * Permission to read resources and list queues and tables granted. */ this.read = false; /** * Permission to write resources granted. */ this.write = false; /** * Permission to delete blobs and files granted. */ this.delete = false; /** * Permission to delete versions granted. */ this.deleteVersion = false; /** * Permission to list blob containers, blobs, shares, directories, and files granted. */ this.list = false; /** * Permission to add messages, table entities, and append to blobs granted. 
*/ this.add = false; /** * Permission to create blobs and files granted. */ this.create = false; /** * Permission to update messages and table entities granted. */ this.update = false; /** * Permission to get and delete messages granted. */ this.process = false; /** * Specifies Tag access granted. */ this.tag = false; /** * Permission to filter blobs. */ this.filter = false; /** * Permission to set immutability policy. */ this.setImmutabilityPolicy = false; /** * Specifies that Permanent Delete is permitted. */ this.permanentDelete = false; } /** * Parse initializes the AccountSASPermissions fields from a string. * * @param permissions - */ static parse(permissions) { const accountSASPermissions = new AccountSASPermissions(); for (const c of permissions) { switch (c) { case "r": accountSASPermissions.read = true; break; case "w": accountSASPermissions.write = true; break; case "d": accountSASPermissions.delete = true; break; case "x": accountSASPermissions.deleteVersion = true; break; case "l": accountSASPermissions.list = true; break; case "a": accountSASPermissions.add = true; break; case "c": accountSASPermissions.create = true; break; case "u": accountSASPermissions.update = true; break; case "p": accountSASPermissions.process = true; break; case "t": accountSASPermissions.tag = true; break; case "f": accountSASPermissions.filter = true; break; case "i": accountSASPermissions.setImmutabilityPolicy = true; break; case "y": accountSASPermissions.permanentDelete = true; break; default: throw new RangeError(`Invalid permission character: ${c}`); } } return accountSASPermissions; } /** * Creates a {@link AccountSASPermissions} from a raw object which contains the same keys as it * and boolean values for them. * * @param permissionLike - */ static from(permissionLike) { const accountSASPermissions = new AccountSASPermissions(); if (permissionLike.read) { accountSASPermissions.read = true; } if (permissionLike.write) { accountSASPermissions.write = true; } if (permissionLike.delete) { accountSASPermissions.delete = true; } if (permissionLike.deleteVersion) { accountSASPermissions.deleteVersion = true; } if (permissionLike.filter) { accountSASPermissions.filter = true; } if (permissionLike.tag) { accountSASPermissions.tag = true; } if (permissionLike.list) { accountSASPermissions.list = true; } if (permissionLike.add) { accountSASPermissions.add = true; } if (permissionLike.create) { accountSASPermissions.create = true; } if (permissionLike.update) { accountSASPermissions.update = true; } if (permissionLike.process) { accountSASPermissions.process = true; } if (permissionLike.setImmutabilityPolicy) { accountSASPermissions.setImmutabilityPolicy = true; } if (permissionLike.permanentDelete) { accountSASPermissions.permanentDelete = true; } return accountSASPermissions; } /** * Produces the SAS permissions string for an Azure Storage account. * Call this method to set the AccountSASSignatureValues permissions field. * * Using this method will guarantee the permissions are in * an order accepted by the service.
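 *
 * For instance, under the ordering implemented below, parsing and re-serializing is stable,
 * and building from a plain object yields the same canonical order (a small sketch):
 *
 * ```js
 * console.log(AccountSASPermissions.parse("rwdl").toString());                    // "rwdl"
 * console.log(AccountSASPermissions.from({ list: true, read: true }).toString()); // "rl"
 * ```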
* * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * */ toString() { // The order of the characters should be as specified here to ensure correctness: // https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas // Use a string array instead of string concatenating += operator for performance const permissions = []; if (this.read) { permissions.push("r"); } if (this.write) { permissions.push("w"); } if (this.delete) { permissions.push("d"); } if (this.deleteVersion) { permissions.push("x"); } if (this.filter) { permissions.push("f"); } if (this.tag) { permissions.push("t"); } if (this.list) { permissions.push("l"); } if (this.add) { permissions.push("a"); } if (this.create) { permissions.push("c"); } if (this.update) { permissions.push("u"); } if (this.process) { permissions.push("p"); } if (this.setImmutabilityPolicy) { permissions.push("i"); } if (this.permanentDelete) { permissions.push("y"); } return permissions.join(""); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * This is a helper class to construct a string representing the resources accessible by an AccountSAS. Setting a value * to true means that any SAS which uses these permissions will grant access to that resource type. Once all the * values are set, this should be serialized with toString and set as the resources field on an * {@link AccountSASSignatureValues} object. It is possible to construct the resources string without this class, but * the order of the resources is particular and this class guarantees correctness. */ class AccountSASResourceTypes { constructor() { /** * Permission to access service level APIs granted. */ this.service = false; /** * Permission to access container level APIs (Blob Containers, Tables, Queues, File Shares) granted. */ this.container = false; /** * Permission to access object level APIs (Blobs, Table Entities, Queue Messages, Files) granted. */ this.object = false; } /** * Creates an {@link AccountSASResourceTypes} from the specified resource types string. This method will throw an * Error if it encounters a character that does not correspond to a valid resource type. * * @param resourceTypes - */ static parse(resourceTypes) { const accountSASResourceTypes = new AccountSASResourceTypes(); for (const c of resourceTypes) { switch (c) { case "s": accountSASResourceTypes.service = true; break; case "c": accountSASResourceTypes.container = true; break; case "o": accountSASResourceTypes.object = true; break; default: throw new RangeError(`Invalid resource type: ${c}`); } } return accountSASResourceTypes; } /** * Converts the given resource types to a string. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * */ toString() { const resourceTypes = []; if (this.service) { resourceTypes.push("s"); } if (this.container) { resourceTypes.push("c"); } if (this.object) { resourceTypes.push("o"); } return resourceTypes.join(""); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * This is a helper class to construct a string representing the services accessible by an AccountSAS. Setting a value * to true means that any SAS which uses these permissions will grant access to that service. Once all the * values are set, this should be serialized with toString and set as the services field on an * {@link AccountSASSignatureValues} object. 
It is possible to construct the services string without this class, but * the order of the services is particular and this class guarantees correctness. */ class AccountSASServices { constructor() { /** * Permission to access blob resources granted. */ this.blob = false; /** * Permission to access file resources granted. */ this.file = false; /** * Permission to access queue resources granted. */ this.queue = false; /** * Permission to access table resources granted. */ this.table = false; } /** * Creates an {@link AccountSASServices} from the specified services string. This method will throw an * Error if it encounters a character that does not correspond to a valid service. * * @param services - */ static parse(services) { const accountSASServices = new AccountSASServices(); for (const c of services) { switch (c) { case "b": accountSASServices.blob = true; break; case "f": accountSASServices.file = true; break; case "q": accountSASServices.queue = true; break; case "t": accountSASServices.table = true; break; default: throw new RangeError(`Invalid service character: ${c}`); } } return accountSASServices; } /** * Converts the given services to a string. * */ toString() { const services = []; if (this.blob) { services.push("b"); } if (this.table) { services.push("t"); } if (this.queue) { services.push("q"); } if (this.file) { services.push("f"); } return services.join(""); } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** * ONLY AVAILABLE IN NODE.JS RUNTIME. * * Generates a {@link SASQueryParameters} object which contains all SAS query parameters needed to make an actual * REST request. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/constructing-an-account-sas * * @param accountSASSignatureValues - * @param sharedKeyCredential - */ function generateAccountSASQueryParameters(accountSASSignatureValues, sharedKeyCredential) { return generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) .sasQueryParameters; } function generateAccountSASQueryParametersInternal(accountSASSignatureValues, sharedKeyCredential) { const version = accountSASSignatureValues.version ? 
accountSASSignatureValues.version : SERVICE_VERSION; if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.setImmutabilityPolicy && version < "2020-08-04") { throw RangeError("'version' must be >= '2020-08-04' when provided 'i' permission."); } if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.deleteVersion && version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when provided 'x' permission."); } if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.permanentDelete && version < "2019-10-10") { throw RangeError("'version' must be >= '2019-10-10' when provided 'y' permission."); } if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.tag && version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 't' permission."); } if (accountSASSignatureValues.permissions && accountSASSignatureValues.permissions.filter && version < "2019-12-12") { throw RangeError("'version' must be >= '2019-12-12' when provided 'f' permission."); } if (accountSASSignatureValues.encryptionScope && version < "2020-12-06") { throw RangeError("'version' must be >= '2020-12-06' when provided 'encryptionScope' in SAS."); } const parsedPermissions = AccountSASPermissions.parse(accountSASSignatureValues.permissions.toString()); const parsedServices = AccountSASServices.parse(accountSASSignatureValues.services).toString(); const parsedResourceTypes = AccountSASResourceTypes.parse(accountSASSignatureValues.resourceTypes).toString(); let stringToSign; if (version >= "2020-12-06") { stringToSign = [ sharedKeyCredential.accountName, parsedPermissions, parsedServices, parsedResourceTypes, accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", version, accountSASSignatureValues.encryptionScope ? accountSASSignatureValues.encryptionScope : "", "", // Account SAS requires an additional newline character ].join("\n"); } else { stringToSign = [ sharedKeyCredential.accountName, parsedPermissions, parsedServices, parsedResourceTypes, accountSASSignatureValues.startsOn ? truncatedISO8061Date(accountSASSignatureValues.startsOn, false) : "", truncatedISO8061Date(accountSASSignatureValues.expiresOn, false), accountSASSignatureValues.ipRange ? ipRangeToString(accountSASSignatureValues.ipRange) : "", accountSASSignatureValues.protocol ? accountSASSignatureValues.protocol : "", version, "", // Account SAS requires an additional newline character ].join("\n"); } const signature = sharedKeyCredential.computeHMACSHA256(stringToSign); return { sasQueryParameters: new SASQueryParameters(version, signature, parsedPermissions.toString(), parsedServices, parsedResourceTypes, accountSASSignatureValues.protocol, accountSASSignatureValues.startsOn, accountSASSignatureValues.expiresOn, accountSASSignatureValues.ipRange, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, undefined, accountSASSignatureValues.encryptionScope), stringToSign: stringToSign, }; } /** * A BlobServiceClient represents a Client to the Azure Storage Blob service allowing you * to manipulate blob containers. 
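 *
 * A minimal construction sketch, assuming a Node.js runtime and placeholder
 * `account` / `accountKey` values:
 *
 * ```js
 * const account = "<storage account name>";
 * const accountKey = "<storage account key>";
 * const sharedKeyCredential = new StorageSharedKeyCredential(account, accountKey);
 * const blobServiceClient = new BlobServiceClient(
 *   `https://${account}.blob.core.windows.net`,
 *   sharedKeyCredential
 * );
 * ```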
*/ class BlobServiceClient extends StorageClient { /** * * Creates an instance of BlobServiceClient from connection string. * * @param connectionString - Account connection string or a SAS connection string of an Azure storage account. * [ Note - Account connection string can only be used in NODE.JS runtime. ] * Account connection string example - * `DefaultEndpointsProtocol=https;AccountName=myaccount;AccountKey=accountKey;EndpointSuffix=core.windows.net` * SAS connection string example - * `BlobEndpoint=https://myaccount.blob.core.windows.net/;QueueEndpoint=https://myaccount.queue.core.windows.net/;FileEndpoint=https://myaccount.file.core.windows.net/;TableEndpoint=https://myaccount.table.core.windows.net/;SharedAccessSignature=sasString` * @param options - Optional. Options to configure the HTTP pipeline. */ static fromConnectionString(connectionString, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { options = options || {}; const extractedCreds = extractConnectionStringParts(connectionString); if (extractedCreds.kind === "AccountConnString") { if (coreUtil.isNode) { const sharedKeyCredential = new StorageSharedKeyCredential(extractedCreds.accountName, extractedCreds.accountKey); if (!options.proxyOptions) { options.proxyOptions = coreRestPipeline.getDefaultProxySettings(extractedCreds.proxyUri); } const pipeline = newPipeline(sharedKeyCredential, options); return new BlobServiceClient(extractedCreds.url, pipeline); } else { throw new Error("Account connection string is only supported in Node.js environment"); } } else if (extractedCreds.kind === "SASConnString") { const pipeline = newPipeline(new AnonymousCredential(), options); return new BlobServiceClient(extractedCreds.url + "?" + extractedCreds.accountSas, pipeline); } else { throw new Error("Connection string must be either an Account connection string or a SAS connection string"); } } constructor(url, credentialOrPipeline, // Legacy, no fix for eslint error without breaking. Disable it for this interface. /* eslint-disable-next-line @azure/azure-sdk/ts-naming-options*/ options) { let pipeline; if (isPipelineLike(credentialOrPipeline)) { pipeline = credentialOrPipeline; } else if ((coreUtil.isNode && credentialOrPipeline instanceof StorageSharedKeyCredential) || credentialOrPipeline instanceof AnonymousCredential || coreAuth.isTokenCredential(credentialOrPipeline)) { pipeline = newPipeline(credentialOrPipeline, options); } else { // The second parameter is undefined. Use anonymous credential pipeline = newPipeline(new AnonymousCredential(), options); } super(url, pipeline); this.serviceContext = this.storageClientContext.service; } /** * Creates a {@link ContainerClient} object * * @param containerName - A container name * @returns A new ContainerClient object for the given container name. * * Example usage: * * ```js * const containerClient = blobServiceClient.getContainerClient(""); * ``` */ getContainerClient(containerName) { return new ContainerClient(appendToURLPath(this.url, encodeURIComponent(containerName)), this.pipeline); } /** * Create a Blob container. @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-container * * @param containerName - Name of the container to create. * @param options - Options to configure Container Create operation. * @returns Container creation response and the corresponding container client. 
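 *
 * A minimal sketch, assuming a `blobServiceClient` instance is already in scope:
 *
 * ```js
 * const { containerClient, containerCreateResponse } =
 *   await blobServiceClient.createContainer("my-container");
 * console.log(`Created container, requestId: ${containerCreateResponse.requestId}`);
 * ```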
*/ async createContainer(containerName, options = {}) { return tracingClient.withSpan("BlobServiceClient-createContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); const containerCreateResponse = await containerClient.create(updatedOptions); return { containerClient, containerCreateResponse, }; }); } /** * Deletes a Blob container. * * @param containerName - Name of the container to delete. * @param options - Options to configure Container Delete operation. * @returns Container deletion response. */ async deleteContainer(containerName, options = {}) { return tracingClient.withSpan("BlobServiceClient-deleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(containerName); return containerClient.delete(updatedOptions); }); } /** * Restore a previously deleted Blob container. * This API is only functional if Container Soft Delete is enabled for the storage account associated with the container. * * @param deletedContainerName - Name of the previously deleted container. * @param deletedContainerVersion - Version of the previously deleted container, used to uniquely identify the deleted container. * @param options - Options to configure Container Restore operation. * @returns Container deletion response. */ async undeleteContainer(deletedContainerName, deletedContainerVersion, options = {}) { return tracingClient.withSpan("BlobServiceClient-undeleteContainer", options, async (updatedOptions) => { const containerClient = this.getContainerClient(options.destinationContainerName || deletedContainerName); // Hack to access a protected member. const containerContext = containerClient["storageClientContext"].container; const containerUndeleteResponse = assertResponse(await containerContext.restore({ deletedContainerName, deletedContainerVersion, tracingOptions: updatedOptions.tracingOptions, })); return { containerClient, containerUndeleteResponse }; }); } /** * Rename an existing Blob Container. * * @param sourceContainerName - The name of the source container. * @param destinationContainerName - The new name of the container. * @param options - Options to configure Container Rename operation. */ /* eslint-disable-next-line @typescript-eslint/ban-ts-comment */ // @ts-ignore Need to hide this interface for now. Make it public and turn on the live tests for it when the service is ready. async renameContainer(sourceContainerName, destinationContainerName, options = {}) { return tracingClient.withSpan("BlobServiceClient-renameContainer", options, async (updatedOptions) => { var _a; const containerClient = this.getContainerClient(destinationContainerName); // Hack to access a protected member. const containerContext = containerClient["storageClientContext"].container; const containerRenameResponse = assertResponse(await containerContext.rename(sourceContainerName, Object.assign(Object.assign({}, updatedOptions), { sourceLeaseId: (_a = options.sourceCondition) === null || _a === void 0 ? void 0 : _a.leaseId }))); return { containerClient, containerRenameResponse }; }); } /** * Gets the properties of a storage account’s Blob service, including properties * for Storage Analytics and CORS (Cross-Origin Resource Sharing) rules. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties * * @param options - Options to the Service Get Properties operation. * @returns Response data for the Service Get Properties operation. 
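 *
 * A minimal sketch, assuming a `blobServiceClient` instance is already in scope; the exact
 * shape of the returned properties (for example `deleteRetentionPolicy`) depends on how the
 * account is configured:
 *
 * ```js
 * const properties = await blobServiceClient.getProperties();
 * if (properties.deleteRetentionPolicy) {
 *   console.log(`Blob soft delete enabled: ${properties.deleteRetentionPolicy.enabled}`);
 * }
 * ```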
*/ async getProperties(options = {}) { return tracingClient.withSpan("BlobServiceClient-getProperties", options, async (updatedOptions) => { return assertResponse(await this.serviceContext.getProperties({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Sets properties for a storage account’s Blob service endpoint, including properties * for Storage Analytics, CORS (Cross-Origin Resource Sharing) rules and soft delete settings. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/set-blob-service-properties * * @param properties - * @param options - Options to the Service Set Properties operation. * @returns Response data for the Service Set Properties operation. */ async setProperties(properties, options = {}) { return tracingClient.withSpan("BlobServiceClient-setProperties", options, async (updatedOptions) => { return assertResponse(await this.serviceContext.setProperties(properties, { abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Retrieves statistics related to replication for the Blob service. It is only * available on the secondary location endpoint when read-access geo-redundant * replication is enabled for the storage account. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-stats * * @param options - Options to the Service Get Statistics operation. * @returns Response data for the Service Get Statistics operation. */ async getStatistics(options = {}) { return tracingClient.withSpan("BlobServiceClient-getStatistics", options, async (updatedOptions) => { return assertResponse(await this.serviceContext.getStatistics({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * The Get Account Information operation returns the sku name and account kind * for the specified account. * The Get Account Information operation is available on service versions beginning * with version 2018-03-28. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-account-information * * @param options - Options to the Service Get Account Info operation. * @returns Response data for the Service Get Account Info operation. */ async getAccountInfo(options = {}) { return tracingClient.withSpan("BlobServiceClient-getAccountInfo", options, async (updatedOptions) => { return assertResponse(await this.serviceContext.getAccountInfo({ abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions, })); }); } /** * Returns a list of the containers under the specified account. * @see https://docs.microsoft.com/en-us/rest/api/storageservices/list-containers2 * * @param marker - A string value that identifies the portion of * the list of containers to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all containers remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to the Service List Container Segment operation. * @returns Response data for the Service List Container Segment operation. 
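 *
 * A minimal sketch of the marker loop this client uses internally, assuming a
 * `blobServiceClient` instance is already in scope:
 *
 * ```js
 * let marker;
 * do {
 *   const response = await blobServiceClient.listContainersSegment(marker);
 *   for (const container of response.containerItems || []) {
 *     console.log(`Container: ${container.name}`);
 *   }
 *   marker = response.continuationToken;
 * } while (marker);
 * ```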
*/ async listContainersSegment(marker, options = {}) { return tracingClient.withSpan("BlobServiceClient-listContainersSegment", options, async (updatedOptions) => { return assertResponse(await this.serviceContext.listContainersSegment(Object.assign(Object.assign({ abortSignal: options.abortSignal, marker }, options), { include: typeof options.include === "string" ? [options.include] : options.include, tracingOptions: updatedOptions.tracingOptions }))); }); } /** * The Filter Blobs operation enables callers to list blobs across all containers whose tags * match a given search expression. Filter blobs searches across all containers within a * storage account but can be scoped within the expression to a single container. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. * @param options - Options to find blobs by tags. */ async findBlobsByTagsSegment(tagFilterSqlExpression, marker, options = {}) { return tracingClient.withSpan("BlobServiceClient-findBlobsByTagsSegment", options, async (updatedOptions) => { const response = assertResponse(await this.serviceContext.filterBlobs({ abortSignal: options.abortSignal, where: tagFilterSqlExpression, marker, maxPageSize: options.maxPageSize, tracingOptions: updatedOptions.tracingOptions, })); const wrappedResponse = Object.assign(Object.assign({}, response), { _response: response._response, blobs: response.blobs.map((blob) => { var _a; let tagValue = ""; if (((_a = blob.tags) === null || _a === void 0 ? void 0 : _a.blobTagSet.length) === 1) { tagValue = blob.tags.blobTagSet[0].value; } return Object.assign(Object.assign({}, blob), { tags: toTags(blob.tags), tagValue }); }) }); return wrappedResponse; }); } /** * Returns an AsyncIterableIterator for ServiceFindBlobsByTagsSegmentResponse. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param marker - A string value that identifies the portion of * the list of blobs to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all blobs remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client. 
* @param options - Options to find blobs by tags. */ findBlobsByTagsSegments(tagFilterSqlExpression_1, marker_1) { return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsSegments_1(tagFilterSqlExpression, marker, options = {}) { let response; if (!!marker || marker === undefined) { do { response = yield tslib.__await(this.findBlobsByTagsSegment(tagFilterSqlExpression, marker, options)); response.blobs = response.blobs || []; marker = response.continuationToken; yield yield tslib.__await(response); } while (marker); } }); } /** * Returns an AsyncIterableIterator for blobs. * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The[OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to findBlobsByTagsItems. */ findBlobsByTagsItems(tagFilterSqlExpression_1) { return tslib.__asyncGenerator(this, arguments, function* findBlobsByTagsItems_1(tagFilterSqlExpression, options = {}) { var _a, e_1, _b, _c; let marker; try { for (var _d = true, _e = tslib.__asyncValues(this.findBlobsByTagsSegments(tagFilterSqlExpression, marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { _c = _f.value; _d = false; const segment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.blobs))); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_1) throw e_1.error; } } }); } /** * Returns an async iterable iterator to find all blobs with specified tag * under the specified account. * * .byPage() returns an async iterable iterator to list the blobs in pages. 
* * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-blob-service-properties * * Example using `for await` syntax: * * ```js * let i = 1; * for await (const blob of blobServiceClient.findBlobsByTags("tagkey='tagvalue'")) { * console.log(`Blob ${i++}: ${blob.name}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * const iter = blobServiceClient.findBlobsByTags("tagkey='tagvalue'"); * let blobItem = await iter.next(); * while (!blobItem.done) { * console.log(`Blob ${i++}: ${blobItem.value.name}`); * blobItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 20 })) { * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = blobServiceClient.findBlobsByTags("tagkey='tagvalue'").byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 blob names * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken * iterator = blobServiceClient * .findBlobsByTags("tagkey='tagvalue'") * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints blob names * if (response.blobs) { * for (const blob of response.blobs) { * console.log(`Blob ${i++}: ${blob.name}`); * } * } * ``` * * @param tagFilterSqlExpression - The where parameter enables the caller to query blobs whose tags match a given expression. * The given expression must evaluate to true for a blob to be returned in the results. * The [OData - ABNF] filter syntax rule defines the formal grammar for the value of the where query parameter; * however, only a subset of the OData filter syntax is supported in the Blob service. * @param options - Options to find blobs by tags. */ findBlobsByTags(tagFilterSqlExpression, options = {}) { // AsyncIterableIterator to iterate over blobs const listSegmentOptions = Object.assign({}, options); const iter = this.findBlobsByTagsItems(tagFilterSqlExpression, listSegmentOptions); return { /** * The next method, part of the iteration protocol */ next() { return iter.next(); }, /** * The connection to the async iterator, part of the iteration protocol */ [Symbol.asyncIterator]() { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { return this.findBlobsByTagsSegments(tagFilterSqlExpression, settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); }, }; } /** * Returns an AsyncIterableIterator for ServiceListContainersSegmentResponses * * @param marker - A string value that identifies the portion of * the list of containers to be returned with the next listing operation. The * operation returns the continuationToken value within the response body if the * listing operation did not return all containers remaining to be listed * with the current page. The continuationToken value can be used as the value for * the marker parameter in a subsequent call to request the next page of list * items. The marker value is opaque to the client.
* @param options - Options to list containers operation. */ listSegments(marker_1) { return tslib.__asyncGenerator(this, arguments, function* listSegments_1(marker, options = {}) { let listContainersSegmentResponse; if (!!marker || marker === undefined) { do { listContainersSegmentResponse = yield tslib.__await(this.listContainersSegment(marker, options)); listContainersSegmentResponse.containerItems = listContainersSegmentResponse.containerItems || []; marker = listContainersSegmentResponse.continuationToken; yield yield tslib.__await(yield tslib.__await(listContainersSegmentResponse)); } while (marker); } }); } /** * Returns an AsyncIterableIterator for Container Items * * @param options - Options to list containers operation. */ listItems() { return tslib.__asyncGenerator(this, arguments, function* listItems_1(options = {}) { var _a, e_2, _b, _c; let marker; try { for (var _d = true, _e = tslib.__asyncValues(this.listSegments(marker, options)), _f; _f = yield tslib.__await(_e.next()), _a = _f.done, !_a; _d = true) { _c = _f.value; _d = false; const segment = _c; yield tslib.__await(yield* tslib.__asyncDelegator(tslib.__asyncValues(segment.containerItems))); } } catch (e_2_1) { e_2 = { error: e_2_1 }; } finally { try { if (!_d && !_a && (_b = _e.return)) yield tslib.__await(_b.call(_e)); } finally { if (e_2) throw e_2.error; } } }); } /** * Returns an async iterable iterator to list all the containers * under the specified account. * * .byPage() returns an async iterable iterator to list the containers in pages. * * Example using `for await` syntax: * * ```js * let i = 1; * for await (const container of blobServiceClient.listContainers()) { * console.log(`Container ${i++}: ${container.name}`); * } * ``` * * Example using `iter.next()`: * * ```js * let i = 1; * const iter = blobServiceClient.listContainers(); * let containerItem = await iter.next(); * while (!containerItem.done) { * console.log(`Container ${i++}: ${containerItem.value.name}`); * containerItem = await iter.next(); * } * ``` * * Example using `byPage()`: * * ```js * // passing optional maxPageSize in the page settings * let i = 1; * for await (const response of blobServiceClient.listContainers().byPage({ maxPageSize: 20 })) { * if (response.containerItems) { * for (const container of response.containerItems) { * console.log(`Container ${i++}: ${container.name}`); * } * } * } * ``` * * Example using paging with a marker: * * ```js * let i = 1; * let iterator = blobServiceClient.listContainers().byPage({ maxPageSize: 2 }); * let response = (await iterator.next()).value; * * // Prints 2 container names * if (response.containerItems) { * for (const container of response.containerItems) { * console.log(`Container ${i++}: ${container.name}`); * } * } * * // Gets next marker * let marker = response.continuationToken; * // Passing next marker as continuationToken * iterator = blobServiceClient * .listContainers() * .byPage({ continuationToken: marker, maxPageSize: 10 }); * response = (await iterator.next()).value; * * // Prints 10 container names * if (response.containerItems) { * for (const container of response.containerItems) { * console.log(`Container ${i++}: ${container.name}`); * } * } * ``` * * @param options - Options to list containers. * @returns An asyncIterableIterator that supports paging. 
*/ listContainers(options = {}) { if (options.prefix === "") { options.prefix = undefined; } const include = []; if (options.includeDeleted) { include.push("deleted"); } if (options.includeMetadata) { include.push("metadata"); } if (options.includeSystem) { include.push("system"); } // AsyncIterableIterator to iterate over containers const listSegmentOptions = Object.assign(Object.assign({}, options), (include.length > 0 ? { include } : {})); const iter = this.listItems(listSegmentOptions); return { /** * The next method, part of the iteration protocol */ next() { return iter.next(); }, /** * The connection to the async iterator, part of the iteration protocol */ [Symbol.asyncIterator]() { return this; }, /** * Return an AsyncIterableIterator that works a page at a time */ byPage: (settings = {}) => { return this.listSegments(settings.continuationToken, Object.assign({ maxPageSize: settings.maxPageSize }, listSegmentOptions)); }, }; } /** * ONLY AVAILABLE WHEN USING BEARER TOKEN AUTHENTICATION (TokenCredential). * * Retrieves a user delegation key for the Blob service. This is only a valid operation when using * bearer token authentication. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/get-user-delegation-key * * @param startsOn - The start time for the user delegation SAS. Must be within 7 days of the current time * @param expiresOn - The end time for the user delegation SAS. Must be within 7 days of the current time */ async getUserDelegationKey(startsOn, expiresOn, options = {}) { return tracingClient.withSpan("BlobServiceClient-getUserDelegationKey", options, async (updatedOptions) => { const response = assertResponse(await this.serviceContext.getUserDelegationKey({ startsOn: truncatedISO8061Date(startsOn, false), expiresOn: truncatedISO8061Date(expiresOn, false), }, { abortSignal: options.abortSignal, tracingOptions: updatedOptions.tracingOptions, })); const userDelegationKey = { signedObjectId: response.signedObjectId, signedTenantId: response.signedTenantId, signedStartsOn: new Date(response.signedStartsOn), signedExpiresOn: new Date(response.signedExpiresOn), signedService: response.signedService, signedVersion: response.signedVersion, value: response.value, }; const res = Object.assign({ _response: response._response, requestId: response.requestId, clientRequestId: response.clientRequestId, version: response.version, date: response.date, errorCode: response.errorCode }, userDelegationKey); return res; }); } /** * Creates a BlobBatchClient object to conduct batch operations. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/blob-batch * * @returns A new BlobBatchClient object for this service. */ getBlobBatchClient() { return new BlobBatchClient(this.url, this.pipeline); } /** * Only available for BlobServiceClient constructed with a shared key credential. * * Generates a Blob account Shared Access Signature (SAS) URI based on the client properties * and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas * * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. * @param permissions - Specifies the list of permissions to be associated with the SAS. * @param resourceTypes - Specifies the resource types associated with the shared access signature. * @param options - Optional parameters. 
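     *
     * Example (illustrative sketch, not part of the original documentation; assumes this
     * `blobServiceClient` was constructed with a `StorageSharedKeyCredential`):
     *
     * ```js
     * // Build an account SAS URL valid for one hour with read and list access
     * // to service, container and object resources.
     * const expiresOn = new Date(Date.now() + 3600 * 1000);
     * const sasUrl = blobServiceClient.generateAccountSasUrl(
     *   expiresOn,
     *   AccountSASPermissions.parse("rl"),
     *   "sco"
     * );
     * console.log(sasUrl);
     * ```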
* @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateAccountSasUrl(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { if (!(this.credential instanceof StorageSharedKeyCredential)) { throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } if (expiresOn === undefined) { const now = new Date(); expiresOn = new Date(now.getTime() + 3600 * 1000); } const sas = generateAccountSASQueryParameters(Object.assign({ permissions, expiresOn, resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).toString(); return appendToURLQuery(this.url, sas); } /** * Only available for BlobServiceClient constructed with a shared key credential. * * Generates string to sign for a Blob account Shared Access Signature (SAS) URI based on * the client properties and parameters passed in. The SAS is signed by the shared key credential of the client. * * @see https://docs.microsoft.com/en-us/rest/api/storageservices/create-account-sas * * @param expiresOn - Optional. The time at which the shared access signature becomes invalid. Default to an hour later if not provided. * @param permissions - Specifies the list of permissions to be associated with the SAS. * @param resourceTypes - Specifies the resource types associated with the shared access signature. * @param options - Optional parameters. * @returns An account SAS URI consisting of the URI to the resource represented by this client, followed by the generated SAS token. */ generateSasStringToSign(expiresOn, permissions = AccountSASPermissions.parse("r"), resourceTypes = "sco", options = {}) { if (!(this.credential instanceof StorageSharedKeyCredential)) { throw RangeError("Can only generate the account SAS when the client is initialized with a shared key credential"); } if (expiresOn === undefined) { const now = new Date(); expiresOn = new Date(now.getTime() + 3600 * 1000); } return generateAccountSASQueryParametersInternal(Object.assign({ permissions, expiresOn, resourceTypes, services: AccountSASServices.parse("b").toString() }, options), this.credential).stringToSign; } } // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. /** Known values of {@link EncryptionAlgorithmType} that the service accepts. 
*/ exports.KnownEncryptionAlgorithmType = void 0; (function (KnownEncryptionAlgorithmType) { KnownEncryptionAlgorithmType["AES256"] = "AES256"; })(exports.KnownEncryptionAlgorithmType || (exports.KnownEncryptionAlgorithmType = {})); Object.defineProperty(exports, "RestError", ({ enumerable: true, get: function () { return coreRestPipeline.RestError; } })); exports.AccountSASPermissions = AccountSASPermissions; exports.AccountSASResourceTypes = AccountSASResourceTypes; exports.AccountSASServices = AccountSASServices; exports.AnonymousCredential = AnonymousCredential; exports.AnonymousCredentialPolicy = AnonymousCredentialPolicy; exports.AppendBlobClient = AppendBlobClient; exports.BaseRequestPolicy = BaseRequestPolicy; exports.BlobBatch = BlobBatch; exports.BlobBatchClient = BlobBatchClient; exports.BlobClient = BlobClient; exports.BlobLeaseClient = BlobLeaseClient; exports.BlobSASPermissions = BlobSASPermissions; exports.BlobServiceClient = BlobServiceClient; exports.BlockBlobClient = BlockBlobClient; exports.ContainerClient = ContainerClient; exports.ContainerSASPermissions = ContainerSASPermissions; exports.Credential = Credential; exports.CredentialPolicy = CredentialPolicy; exports.PageBlobClient = PageBlobClient; exports.Pipeline = Pipeline; exports.SASQueryParameters = SASQueryParameters; exports.StorageBrowserPolicy = StorageBrowserPolicy; exports.StorageBrowserPolicyFactory = StorageBrowserPolicyFactory; exports.StorageOAuthScopes = StorageOAuthScopes; exports.StorageRetryPolicy = StorageRetryPolicy; exports.StorageRetryPolicyFactory = StorageRetryPolicyFactory; exports.StorageSharedKeyCredential = StorageSharedKeyCredential; exports.StorageSharedKeyCredentialPolicy = StorageSharedKeyCredentialPolicy; exports.generateAccountSASQueryParameters = generateAccountSASQueryParameters; exports.generateBlobSASQueryParameters = generateBlobSASQueryParameters; exports.getBlobServiceAccountAudience = getBlobServiceAccountAudience; exports.isPipelineLike = isPipelineLike; exports.logger = logger; exports.newPipeline = newPipeline; //# sourceMappingURL=index.js.map /***/ }), /***/ 99469: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; /*! * Copyright 2015 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ResourceStream = exports.paginator = exports.Paginator = void 0; /*! * @module common/paginator */ const arrify = __nccwpck_require__(26251); const extend = __nccwpck_require__(23860); const resource_stream_1 = __nccwpck_require__(97618); Object.defineProperty(exports, "ResourceStream", ({ enumerable: true, get: function () { return resource_stream_1.ResourceStream; } })); /*! Developer Documentation * * paginator is used to auto-paginate `nextQuery` methods as well as * streamifying them. 
* * Before: * * search.query('done=true', function(err, results, nextQuery) { * search.query(nextQuery, function(err, results, nextQuery) {}); * }); * * After: * * search.query('done=true', function(err, results) {}); * * Methods to extend should be written to accept callbacks and return a * `nextQuery`. */ class Paginator { /** * Cache the original method, then overwrite it on the Class's prototype. * * @param {function} Class - The parent class of the methods to extend. * @param {string|string[]} methodNames - Name(s) of the methods to extend. */ // tslint:disable-next-line:variable-name extend(Class, methodNames) { methodNames = arrify(methodNames); methodNames.forEach(methodName => { const originalMethod = Class.prototype[methodName]; // map the original method to a private member Class.prototype[methodName + '_'] = originalMethod; // overwrite the original to auto-paginate /* eslint-disable @typescript-eslint/no-explicit-any */ Class.prototype[methodName] = function (...args) { const parsedArguments = paginator.parseArguments_(args); return paginator.run_(parsedArguments, originalMethod.bind(this)); }; }); } /** * Wraps paginated API calls in a readable object stream. * * This method simply calls the nextQuery recursively, emitting results to a * stream. The stream ends when `nextQuery` is null. * * `maxResults` will act as a cap for how many results are fetched and emitted * to the stream. * * @param {string} methodName - Name of the method to streamify. * @return {function} - Wrapped function. */ /* eslint-disable @typescript-eslint/no-explicit-any */ streamify(methodName) { return function ( /* eslint-disable @typescript-eslint/no-explicit-any */ ...args) { const parsedArguments = paginator.parseArguments_(args); const originalMethod = this[methodName + '_'] || this[methodName]; return paginator.runAsStream_(parsedArguments, originalMethod.bind(this)); }; } /** * Parse a pseudo-array `arguments` for a query and callback. * * @param {array} args - The original `arguments` pseduo-array that the original * method received. */ /* eslint-disable @typescript-eslint/no-explicit-any */ parseArguments_(args) { let query; let autoPaginate = true; let maxApiCalls = -1; let maxResults = -1; let callback; const firstArgument = args[0]; const lastArgument = args[args.length - 1]; if (typeof firstArgument === 'function') { callback = firstArgument; } else { query = firstArgument; } if (typeof lastArgument === 'function') { callback = lastArgument; } if (typeof query === 'object') { query = extend(true, {}, query); // Check if the user only asked for a certain amount of results. if (query.maxResults && typeof query.maxResults === 'number') { // `maxResults` is used API-wide. maxResults = query.maxResults; } else if (typeof query.pageSize === 'number') { // `pageSize` is Pub/Sub's `maxResults`. maxResults = query.pageSize; } if (query.maxApiCalls && typeof query.maxApiCalls === 'number') { maxApiCalls = query.maxApiCalls; delete query.maxApiCalls; } // maxResults is the user specified limit. 
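// When the caller sets a limit (or passes autoPaginate: false), auto-pagination is disabled and run_ hands the single request back to the original method.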
if (maxResults !== -1 || query.autoPaginate === false) { autoPaginate = false; } } const parsedArguments = { query: query || {}, autoPaginate, maxApiCalls, maxResults, callback, }; parsedArguments.streamOptions = extend(true, {}, parsedArguments.query); delete parsedArguments.streamOptions.autoPaginate; delete parsedArguments.streamOptions.maxResults; delete parsedArguments.streamOptions.pageSize; return parsedArguments; } /** * This simply checks to see if `autoPaginate` is set or not, if it's true * then we buffer all results, otherwise simply call the original method. * * @param {array} parsedArguments - Parsed arguments from the original method * call. * @param {object=|string=} parsedArguments.query - Query object. This is most * commonly an object, but to make the API more simple, it can also be a * string in some places. * @param {function=} parsedArguments.callback - Callback function. * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. * @param {number} parsedArguments.maxResults - Maximum results to return. * @param {function} originalMethod - The cached method that accepts a callback * and returns `nextQuery` to receive more results. */ run_(parsedArguments, originalMethod) { const query = parsedArguments.query; const callback = parsedArguments.callback; if (!parsedArguments.autoPaginate) { return originalMethod(query, callback); } const results = new Array(); let otherArgs = []; const promise = new Promise((resolve, reject) => { const stream = paginator.runAsStream_(parsedArguments, originalMethod); stream .on('error', reject) .on('data', (data) => results.push(data)) .on('end', () => { otherArgs = stream._otherArgs || []; resolve(results); }); }); if (!callback) { return promise.then(results => [results, query, ...otherArgs]); } promise.then(results => callback(null, results, query, ...otherArgs), (err) => callback(err)); } /** * This method simply calls the nextQuery recursively, emitting results to a * stream. The stream ends when `nextQuery` is null. * * `maxResults` will act as a cap for how many results are fetched and emitted * to the stream. * * @param {object=|string=} parsedArguments.query - Query object. This is most * commonly an object, but to make the API more simple, it can also be a * string in some places. * @param {function=} parsedArguments.callback - Callback function. * @param {boolean} parsedArguments.autoPaginate - Auto-pagination enabled. * @param {boolean} parsedArguments.maxApiCalls - Maximum API calls to make. * @param {number} parsedArguments.maxResults - Maximum results to return. * @param {function} originalMethod - The cached method that accepts a callback * and returns `nextQuery` to receive more results. * @return {stream} - Readable object stream. */ /* eslint-disable @typescript-eslint/no-explicit-any */ runAsStream_(parsedArguments, originalMethod) { return new resource_stream_1.ResourceStream(parsedArguments, originalMethod); } } exports.Paginator = Paginator; const paginator = new Paginator(); exports.paginator = paginator; //# sourceMappingURL=index.js.map /***/ }), /***/ 97618: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; /*! * Copyright 2019 Google Inc. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ResourceStream = void 0; const stream_1 = __nccwpck_require__(2203); class ResourceStream extends stream_1.Transform { constructor(args, requestFn) { const options = Object.assign({ objectMode: true }, args.streamOptions); super(options); this._ended = false; this._maxApiCalls = args.maxApiCalls === -1 ? Infinity : args.maxApiCalls; this._nextQuery = args.query; this._reading = false; this._requestFn = requestFn; this._requestsMade = 0; this._resultsToSend = args.maxResults === -1 ? Infinity : args.maxResults; this._otherArgs = []; } /* eslint-disable @typescript-eslint/no-explicit-any */ end(...args) { this._ended = true; return super.end(...args); } _read() { if (this._reading) { return; } this._reading = true; // Wrap in a try/catch to catch input linting errors, e.g. // an invalid BigQuery query. These errors are thrown in an // async fashion, which makes them un-catchable by the user. try { this._requestFn(this._nextQuery, (err, results, nextQuery, ...otherArgs) => { if (err) { this.destroy(err); return; } this._otherArgs = otherArgs; this._nextQuery = nextQuery; if (this._resultsToSend !== Infinity) { results = results.splice(0, this._resultsToSend); this._resultsToSend -= results.length; } let more = true; for (const result of results) { if (this._ended) { break; } more = this.push(result); } const isFinished = !this._nextQuery || this._resultsToSend < 1; const madeMaxCalls = ++this._requestsMade >= this._maxApiCalls; if (isFinished || madeMaxCalls) { this.end(); } if (more && !this._ended) { setImmediate(() => this._read()); } this._reading = false; }); } catch (e) { this.destroy(e); } } } exports.ResourceStream = ResourceStream; //# sourceMappingURL=resource-stream.js.map /***/ }), /***/ 87635: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MissingProjectIdError = exports.replaceProjectIdToken = void 0; const stream_1 = __nccwpck_require__(2203); // Copyright 2014 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /** * Populate the `{{projectId}}` placeholder. * * @throws {Error} If a projectId is required, but one is not provided. * * @param {*} - Any input value that may contain a placeholder. Arrays and objects will be looped. * @param {string} projectId - A projectId. If not provided * @return {*} - The original argument with all placeholders populated. 
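 *
 * Example (illustrative sketch, not from the original documentation):
 *
 *   replaceProjectIdToken({ topic: 'projects/{{projectId}}/topics/my-topic' }, 'my-project');
 *   // => { topic: 'projects/my-project/topics/my-topic' }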
*/ // eslint-disable-next-line @typescript-eslint/no-explicit-any function replaceProjectIdToken(value, projectId) { if (Array.isArray(value)) { value = value.map(v => replaceProjectIdToken(v, projectId)); } if (value !== null && typeof value === 'object' && !(value instanceof Buffer) && !(value instanceof stream_1.Stream) && typeof value.hasOwnProperty === 'function') { for (const opt in value) { // eslint-disable-next-line no-prototype-builtins if (value.hasOwnProperty(opt)) { value[opt] = replaceProjectIdToken(value[opt], projectId); } } } if (typeof value === 'string' && value.indexOf('{{projectId}}') > -1) { if (!projectId || projectId === '{{projectId}}') { throw new MissingProjectIdError(); } value = value.replace(/{{projectId}}/g, projectId); } return value; } exports.replaceProjectIdToken = replaceProjectIdToken; /** * Custom error type for missing project ID errors. */ class MissingProjectIdError extends Error { constructor() { super(...arguments); this.message = `Sorry, we cannot connect to Cloud Services without a project ID. You may specify one with an environment variable named "GOOGLE_CLOUD_PROJECT".`.replace(/ +/g, ' '); } } exports.MissingProjectIdError = MissingProjectIdError; //# sourceMappingURL=index.js.map /***/ }), /***/ 60206: /***/ ((__unused_webpack_module, exports) => { "use strict"; /* eslint-disable prefer-rest-params */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.callbackifyAll = exports.callbackify = exports.promisifyAll = exports.promisify = void 0; /** * Wraps a callback style function to conditionally return a promise. * * @param {function} originalMethod - The method to promisify. * @param {object=} options - Promise options. * @param {boolean} options.singular - Resolve the promise with single arg instead of an array. * @return {function} wrapped */ function promisify(originalMethod, options) { if (originalMethod.promisified_) { return originalMethod; } options = options || {}; const slice = Array.prototype.slice; // tslint:disable-next-line:no-any const wrapper = function () { let last; for (last = arguments.length - 1; last >= 0; last--) { const arg = arguments[last]; if (typeof arg === 'undefined') { continue; // skip trailing undefined. } if (typeof arg !== 'function') { break; // non-callback last argument found. } return originalMethod.apply(this, arguments); } // peel trailing undefined. const args = slice.call(arguments, 0, last + 1); // tslint:disable-next-line:variable-name let PromiseCtor = Promise; // Because dedupe will likely create a single install of // @google-cloud/common to be shared amongst all modules, we need to // localize it at the Service level. if (this && this.Promise) { PromiseCtor = this.Promise; } return new PromiseCtor((resolve, reject) => { // tslint:disable-next-line:no-any args.push((...args) => { const callbackArgs = slice.call(args); const err = callbackArgs.shift(); if (err) { return reject(err); } if (options.singular && callbackArgs.length === 1) { resolve(callbackArgs[0]); } else { resolve(callbackArgs); } }); originalMethod.apply(this, args); }); }; wrapper.promisified_ = true; return wrapper; } exports.promisify = promisify; /** * Promisifies certain Class methods. This will not promisify private or * streaming methods. * * @param {module:common/service} Class - Service class. * @param {object=} options - Configuration object. 
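 *
 * Example (illustrative sketch, not from the original documentation):
 *
 *   class Zone {
 *     getRecords(query, callback) { callback(null, []); }
 *   }
 *   promisifyAll(Zone);
 *   // new Zone().getRecords({}) now returns a Promise when no callback is passed.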
*/ // tslint:disable-next-line:variable-name function promisifyAll(Class, options) { const exclude = (options && options.exclude) || []; const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); const methods = ownPropertyNames.filter(methodName => { // clang-format off return (!exclude.includes(methodName) && typeof Class.prototype[methodName] === 'function' && // is it a function? !/(^_|(Stream|_)|promise$)|^constructor$/.test(methodName) // is it promisable? ); // clang-format on }); methods.forEach(methodName => { const originalMethod = Class.prototype[methodName]; if (!originalMethod.promisified_) { Class.prototype[methodName] = exports.promisify(originalMethod, options); } }); } exports.promisifyAll = promisifyAll; /** * Wraps a promisy type function to conditionally call a callback function. * * @param {function} originalMethod - The method to callbackify. * @param {object=} options - Callback options. * @param {boolean} options.singular - Pass to the callback a single arg instead of an array. * @return {function} wrapped */ function callbackify(originalMethod) { if (originalMethod.callbackified_) { return originalMethod; } // tslint:disable-next-line:no-any const wrapper = function () { if (typeof arguments[arguments.length - 1] !== 'function') { return originalMethod.apply(this, arguments); } const cb = Array.prototype.pop.call(arguments); originalMethod.apply(this, arguments).then( // tslint:disable-next-line:no-any (res) => { res = Array.isArray(res) ? res : [res]; cb(null, ...res); }, (err) => cb(err)); }; wrapper.callbackified_ = true; return wrapper; } exports.callbackify = callbackify; /** * Callbackifies certain Class methods. This will not callbackify private or * streaming methods. * * @param {module:common/service} Class - Service class. * @param {object=} options - Configuration object. */ function callbackifyAll( // tslint:disable-next-line:variable-name Class, options) { const exclude = (options && options.exclude) || []; const ownPropertyNames = Object.getOwnPropertyNames(Class.prototype); const methods = ownPropertyNames.filter(methodName => { // clang-format off return (!exclude.includes(methodName) && typeof Class.prototype[methodName] === 'function' && // is it a function? !/^_|(Stream|_)|^constructor$/.test(methodName) // is it callbackifyable? 
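// same exclusion rules as promisifyAll above, minus the trailing "promise$" check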
); // clang-format on }); methods.forEach(methodName => { const originalMethod = Class.prototype[methodName]; if (!originalMethod.callbackified_) { Class.prototype[methodName] = exports.callbackify(originalMethod); } }); } exports.callbackifyAll = callbackifyAll; //# sourceMappingURL=index.js.map /***/ }), /***/ 20607: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "v1", ({ enumerable: true, get: function () { return _v.default; } })); Object.defineProperty(exports, "v3", ({ enumerable: true, get: function () { return _v2.default; } })); Object.defineProperty(exports, "v4", ({ enumerable: true, get: function () { return _v3.default; } })); Object.defineProperty(exports, "v5", ({ enumerable: true, get: function () { return _v4.default; } })); Object.defineProperty(exports, "NIL", ({ enumerable: true, get: function () { return _nil.default; } })); Object.defineProperty(exports, "version", ({ enumerable: true, get: function () { return _version.default; } })); Object.defineProperty(exports, "validate", ({ enumerable: true, get: function () { return _validate.default; } })); Object.defineProperty(exports, "stringify", ({ enumerable: true, get: function () { return _stringify.default; } })); Object.defineProperty(exports, "parse", ({ enumerable: true, get: function () { return _parse.default; } })); var _v = _interopRequireDefault(__nccwpck_require__(21150)); var _v2 = _interopRequireDefault(__nccwpck_require__(19220)); var _v3 = _interopRequireDefault(__nccwpck_require__(4289)); var _v4 = _interopRequireDefault(__nccwpck_require__(36658)); var _nil = _interopRequireDefault(__nccwpck_require__(97608)); var _version = _interopRequireDefault(__nccwpck_require__(25807)); var _validate = _interopRequireDefault(__nccwpck_require__(75869)); var _stringify = _interopRequireDefault(__nccwpck_require__(77318)); var _parse = _interopRequireDefault(__nccwpck_require__(36712)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /***/ }), /***/ 90039: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function md5(bytes) { if (Array.isArray(bytes)) { bytes = Buffer.from(bytes); } else if (typeof bytes === 'string') { bytes = Buffer.from(bytes, 'utf8'); } return _crypto.default.createHash('md5').update(bytes).digest(); } var _default = md5; exports["default"] = _default; /***/ }), /***/ 97608: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _default = '00000000-0000-0000-0000-000000000000'; exports["default"] = _default; /***/ }), /***/ 36712: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _validate = _interopRequireDefault(__nccwpck_require__(75869)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } function parse(uuid) { if (!(0, _validate.default)(uuid)) { throw TypeError('Invalid UUID'); } let v; const arr = new Uint8Array(16); // Parse ########-....-....-....-............ arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; arr[1] = v >>> 16 & 0xff; arr[2] = v >>> 8 & 0xff; arr[3] = v & 0xff; // Parse ........-####-....-....-............ arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; arr[5] = v & 0xff; // Parse ........-....-####-....-............ arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; arr[7] = v & 0xff; // Parse ........-....-....-####-............ arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; arr[9] = v & 0xff; // Parse ........-....-....-....-############ // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; arr[11] = v / 0x100000000 & 0xff; arr[12] = v >>> 24 & 0xff; arr[13] = v >>> 16 & 0xff; arr[14] = v >>> 8 & 0xff; arr[15] = v & 0xff; return arr; } var _default = parse; exports["default"] = _default; /***/ }), /***/ 42456: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; exports["default"] = _default; /***/ }), /***/ 18874: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = rng; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate let poolPtr = rnds8Pool.length; function rng() { if (poolPtr > rnds8Pool.length - 16) { _crypto.default.randomFillSync(rnds8Pool); poolPtr = 0; } return rnds8Pool.slice(poolPtr, poolPtr += 16); } /***/ }), /***/ 39490: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function sha1(bytes) { if (Array.isArray(bytes)) { bytes = Buffer.from(bytes); } else if (typeof bytes === 'string') { bytes = Buffer.from(bytes, 'utf8'); } return _crypto.default.createHash('sha1').update(bytes).digest(); } var _default = sha1; exports["default"] = _default; /***/ }), /***/ 77318: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _validate = _interopRequireDefault(__nccwpck_require__(75869)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /** * Convert array of 16 byte values to UUID string format of the form: * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ const byteToHex = []; for (let i = 0; i < 256; ++i) { byteToHex.push((i + 0x100).toString(16).substr(1)); } function stringify(arr, offset = 0) { // Note: Be careful editing this code! It's been tuned for performance // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one // of the following: // - One or more input array values don't map to a hex octet (leading to // "undefined" in the uuid) // - Invalid input values for the RFC `version` or `variant` fields if (!(0, _validate.default)(uuid)) { throw TypeError('Stringified UUID is invalid'); } return uuid; } var _default = stringify; exports["default"] = _default; /***/ }), /***/ 21150: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _rng = _interopRequireDefault(__nccwpck_require__(18874)); var _stringify = _interopRequireDefault(__nccwpck_require__(77318)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } // **`v1()` - Generate time-based UUID** // // Inspired by https://github.com/LiosK/UUID.js // and http://docs.python.org/library/uuid.html let _nodeId; let _clockseq; // Previous uuid creation time let _lastMSecs = 0; let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details function v1(options, buf, offset) { let i = buf && offset || 0; const b = buf || new Array(16); options = options || {}; let node = options.node || _nodeId; let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not // specified. We do this lazily to minimize issues related to insufficient // system entropy. See #189 if (node == null || clockseq == null) { const seedBytes = options.random || (options.rng || _rng.default)(); if (node == null) { // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; } if (clockseq == null) { // Per 4.2.2, randomize (14 bit) clockseq clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; } } // UUID timestamps are 100 nano-second units since the Gregorian epoch, // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock // cycle to simulate higher resolution clock let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression if (dt < 0 && options.clockseq === undefined) { clockseq = clockseq + 1 & 0x3fff; } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new // time interval if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { nsecs = 0; } // Per 4.2.1.2 Throw error if too many uuids are requested if (nsecs >= 10000) { throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); } _lastMSecs = msecs; _lastNSecs = nsecs; _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch msecs += 12219292800000; // `time_low` const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; b[i++] = tl >>> 24 & 0xff; b[i++] = tl >>> 16 & 0xff; b[i++] = tl >>> 8 & 0xff; b[i++] = tl & 0xff; // `time_mid` const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; b[i++] = tmh >>> 8 & 0xff; b[i++] = tmh & 0xff; // `time_high_and_version` b[i++] = tmh >>> 24 & 0xf | 0x10; // include version b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` b[i++] = clockseq & 0xff; // `node` for (let n = 0; n < 6; ++n) { b[i + n] = node[n]; } return buf || (0, _stringify.default)(b); } var _default = v1; exports["default"] = _default; /***/ }), /***/ 19220: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _v = _interopRequireDefault(__nccwpck_require__(38465)); var _md = _interopRequireDefault(__nccwpck_require__(90039)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } const v3 = (0, _v.default)('v3', 0x30, _md.default); var _default = v3; exports["default"] = _default; /***/ }), /***/ 38465: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = _default; exports.URL = exports.DNS = void 0; var _stringify = _interopRequireDefault(__nccwpck_require__(77318)); var _parse = _interopRequireDefault(__nccwpck_require__(36712)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function stringToBytes(str) { str = unescape(encodeURIComponent(str)); // UTF8 escape const bytes = []; for (let i = 0; i < str.length; ++i) { bytes.push(str.charCodeAt(i)); } return bytes; } const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; exports.DNS = DNS; const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; exports.URL = URL; function _default(name, version, hashfunc) { function generateUUID(value, namespace, buf, offset) { if (typeof value === 'string') { value = stringToBytes(value); } if (typeof namespace === 'string') { namespace = (0, _parse.default)(namespace); } if (namespace.length !== 16) { throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); } // Compute hash of namespace and value, Per 4.3 // Future: Use spread syntax when supported on all platforms, e.g. `bytes = // hashfunc([...namespace, ... 
value])` let bytes = new Uint8Array(16 + value.length); bytes.set(namespace); bytes.set(value, namespace.length); bytes = hashfunc(bytes); bytes[6] = bytes[6] & 0x0f | version; bytes[8] = bytes[8] & 0x3f | 0x80; if (buf) { offset = offset || 0; for (let i = 0; i < 16; ++i) { buf[offset + i] = bytes[i]; } return buf; } return (0, _stringify.default)(bytes); } // Function#name is not settable on some platforms (#270) try { generateUUID.name = name; // eslint-disable-next-line no-empty } catch (err) {} // For CommonJS default export support generateUUID.DNS = DNS; generateUUID.URL = URL; return generateUUID; } /***/ }), /***/ 4289: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _rng = _interopRequireDefault(__nccwpck_require__(18874)); var _stringify = _interopRequireDefault(__nccwpck_require__(77318)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function v4(options, buf, offset) { options = options || {}; const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` rnds[6] = rnds[6] & 0x0f | 0x40; rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided if (buf) { offset = offset || 0; for (let i = 0; i < 16; ++i) { buf[offset + i] = rnds[i]; } return buf; } return (0, _stringify.default)(rnds); } var _default = v4; exports["default"] = _default; /***/ }), /***/ 36658: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _v = _interopRequireDefault(__nccwpck_require__(38465)); var _sha = _interopRequireDefault(__nccwpck_require__(39490)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } const v5 = (0, _v.default)('v5', 0x50, _sha.default); var _default = v5; exports["default"] = _default; /***/ }), /***/ 75869: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _regex = _interopRequireDefault(__nccwpck_require__(42456)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function validate(uuid) { return typeof uuid === 'string' && _regex.default.test(uuid); } var _default = validate; exports["default"] = _default; /***/ }), /***/ 25807: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _validate = _interopRequireDefault(__nccwpck_require__(75869)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } function version(uuid) { if (!(0, _validate.default)(uuid)) { throw TypeError('Invalid UUID'); } return parseInt(uuid.substr(14, 1), 16); } var _default = version; exports["default"] = _default; /***/ }), /***/ 77864: /***/ ((module) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { createTokenAuth: () => createTokenAuth }); module.exports = __toCommonJS(dist_src_exports); // pkg/dist-src/auth.js var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; var REGEX_IS_INSTALLATION = /^ghs_/; var REGEX_IS_USER_TO_SERVER = /^ghu_/; async function auth(token) { const isApp = token.split(/\./).length === 3; const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; return { type: "token", token, tokenType }; } // pkg/dist-src/with-authorization-prefix.js function withAuthorizationPrefix(token) { if (token.split(/\./).length === 3) { return `bearer ${token}`; } return `token ${token}`; } // pkg/dist-src/hook.js async function hook(token, request, route, parameters) { const endpoint = request.endpoint.merge( route, parameters ); endpoint.headers.authorization = withAuthorizationPrefix(token); return request(endpoint); } // pkg/dist-src/index.js var createTokenAuth = function createTokenAuth2(token) { if (!token) { throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); } if (typeof token !== "string") { throw new Error( "[@octokit/auth-token] Token passed to createTokenAuth is not a string" ); } token = token.replace(/^(token|bearer) +/i, ""); return Object.assign(auth.bind(null, token), { hook: hook.bind(null, token) }); }; // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 61897: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js 
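/* Usage sketch for the Octokit class defined below (illustrative, not part of the bundle):
 *   const octokit = new Octokit({ auth: "<personal access token>" });
 *   const { data } = await octokit.request("GET /repos/{owner}/{repo}", { owner: "octocat", repo: "hello-world" });
 * When no `auth` option is given, requests fall back to unauthenticated mode, and behavior
 * can be layered via Octokit.plugin(...) and Octokit.defaults(...).
 */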
var index_exports = {}; __export(index_exports, { Octokit: () => Octokit }); module.exports = __toCommonJS(index_exports); var import_universal_user_agent = __nccwpck_require__(33843); var import_before_after_hook = __nccwpck_require__(52732); var import_request = __nccwpck_require__(66255); var import_graphql = __nccwpck_require__(70007); var import_auth_token = __nccwpck_require__(77864); // pkg/dist-src/version.js var VERSION = "5.2.1"; // pkg/dist-src/index.js var noop = () => { }; var consoleWarn = console.warn.bind(console); var consoleError = console.error.bind(console); var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; var Octokit = class { static { this.VERSION = VERSION; } static defaults(defaults) { const OctokitWithDefaults = class extends this { constructor(...args) { const options = args[0] || {}; if (typeof defaults === "function") { super(defaults(options)); return; } super( Object.assign( {}, defaults, options, options.userAgent && defaults.userAgent ? { userAgent: `${options.userAgent} ${defaults.userAgent}` } : null ) ); } }; return OctokitWithDefaults; } static { this.plugins = []; } /** * Attach a plugin (or many) to your Octokit instance. * * @example * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) */ static plugin(...newPlugins) { const currentPlugins = this.plugins; const NewOctokit = class extends this { static { this.plugins = currentPlugins.concat( newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) ); } }; return NewOctokit; } constructor(options = {}) { const hook = new import_before_after_hook.Collection(); const requestDefaults = { baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, headers: {}, request: Object.assign({}, options.request, { // @ts-ignore internal usage only, no need to type hook: hook.bind(null, "request") }), mediaType: { previews: [], format: "" } }; requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; if (options.baseUrl) { requestDefaults.baseUrl = options.baseUrl; } if (options.previews) { requestDefaults.mediaType.previews = options.previews; } if (options.timeZone) { requestDefaults.headers["time-zone"] = options.timeZone; } this.request = import_request.request.defaults(requestDefaults); this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); this.log = Object.assign( { debug: noop, info: noop, warn: consoleWarn, error: consoleError }, options.log ); this.hook = hook; if (!options.authStrategy) { if (!options.auth) { this.auth = async () => ({ type: "unauthenticated" }); } else { const auth = (0, import_auth_token.createTokenAuth)(options.auth); hook.wrap("request", auth.hook); this.auth = auth; } } else { const { authStrategy, ...otherOptions } = options; const auth = authStrategy( Object.assign( { request: this.request, log: this.log, // we pass the current octokit instance as well as its constructor options // to allow for authentication strategies that return a new octokit instance // that shares the same internal state as the current one. The original // requirement for this was the "event-octokit" authentication strategy // of https://github.com/probot/octokit-auth-probot. 
octokit: this, octokitOptions: otherOptions }, options.auth ) ); hook.wrap("request", auth.hook); this.auth = auth; } const classConstructor = this.constructor; for (let i = 0; i < classConstructor.plugins.length; ++i) { Object.assign(this, classConstructor.plugins[i](this, options)); } } }; // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 54471: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { endpoint: () => endpoint }); module.exports = __toCommonJS(dist_src_exports); // pkg/dist-src/defaults.js var import_universal_user_agent = __nccwpck_require__(33843); // pkg/dist-src/version.js var VERSION = "9.0.6"; // pkg/dist-src/defaults.js var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; var DEFAULTS = { method: "GET", baseUrl: "https://api.github.com", headers: { accept: "application/vnd.github.v3+json", "user-agent": userAgent }, mediaType: { format: "" } }; // pkg/dist-src/util/lowercase-keys.js function lowercaseKeys(object) { if (!object) { return {}; } return Object.keys(object).reduce((newObj, key) => { newObj[key.toLowerCase()] = object[key]; return newObj; }, {}); } // pkg/dist-src/util/is-plain-object.js function isPlainObject(value) { if (typeof value !== "object" || value === null) return false; if (Object.prototype.toString.call(value) !== "[object Object]") return false; const proto = Object.getPrototypeOf(value); if (proto === null) return true; const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); } // pkg/dist-src/util/merge-deep.js function mergeDeep(defaults, options) { const result = Object.assign({}, defaults); Object.keys(options).forEach((key) => { if (isPlainObject(options[key])) { if (!(key in defaults)) Object.assign(result, { [key]: options[key] }); else result[key] = mergeDeep(defaults[key], options[key]); } else { Object.assign(result, { [key]: options[key] }); } }); return result; } // pkg/dist-src/util/remove-undefined-properties.js function removeUndefinedProperties(obj) { for (const key in obj) { if (obj[key] === void 0) { delete obj[key]; } } return obj; } // pkg/dist-src/merge.js function merge(defaults, route, options) { if (typeof route === "string") { let [method, url] = route.split(" "); options = Object.assign(url ? 
{ method, url } : { url: method }, options); } else { options = Object.assign({}, route); } options.headers = lowercaseKeys(options.headers); removeUndefinedProperties(options); removeUndefinedProperties(options.headers); const mergedOptions = mergeDeep(defaults || {}, options); if (options.url === "/graphql") { if (defaults && defaults.mediaType.previews?.length) { mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( (preview) => !mergedOptions.mediaType.previews.includes(preview) ).concat(mergedOptions.mediaType.previews); } mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); } return mergedOptions; } // pkg/dist-src/util/add-query-parameters.js function addQueryParameters(url, parameters) { const separator = /\?/.test(url) ? "&" : "?"; const names = Object.keys(parameters); if (names.length === 0) { return url; } return url + separator + names.map((name) => { if (name === "q") { return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); } return `${name}=${encodeURIComponent(parameters[name])}`; }).join("&"); } // pkg/dist-src/util/extract-url-variable-names.js var urlVariableRegex = /\{[^{}}]+\}/g; function removeNonChars(variableName) { return variableName.replace(/(?:^\W+)|(?:(?<!\W)\W+$)/g, "").split(/,/); } function extractUrlVariableNames(url) { const matches = url.match(urlVariableRegex); if (!matches) { return []; } return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); } // pkg/dist-src/util/omit.js function omit(object, keysToOmit) { const result = { __proto__: null }; for (const key of Object.keys(object)) { if (keysToOmit.indexOf(key) === -1) { result[key] = object[key]; } } return result; } // pkg/dist-src/util/url-template.js function encodeReserved(str) { return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { if (!/%[0-9A-Fa-f]/.test(part)) { part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); } return part; }).join(""); } function encodeUnreserved(str) { return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { return "%" + c.charCodeAt(0).toString(16).toUpperCase(); }); } function encodeValue(operator, value, key) { value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); if (key) { return encodeUnreserved(key) + "=" + value; } else { return value; } } function isDefined(value) { return value !== void 0 && value !== null; } function isKeyOperator(operator) { return operator === ";" || operator === "&" || operator === "?"; } function getValues(context, operator, key, modifier) { var value = context[key], result = []; if (isDefined(value) && value !== "") { if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { value = value.toString(); if (modifier && modifier !== "*") { value = value.substring(0, parseInt(modifier, 10)); } result.push( encodeValue(operator, value, isKeyOperator(operator) ? key : "") ); } else { if (modifier === "*") { if (Array.isArray(value)) { value.filter(isDefined).forEach(function(value2) { result.push( encodeValue(operator, value2, isKeyOperator(operator) ?
key : "") ); }); } else { Object.keys(value).forEach(function(k) { if (isDefined(value[k])) { result.push(encodeValue(operator, value[k], k)); } }); } } else { const tmp = []; if (Array.isArray(value)) { value.filter(isDefined).forEach(function(value2) { tmp.push(encodeValue(operator, value2)); }); } else { Object.keys(value).forEach(function(k) { if (isDefined(value[k])) { tmp.push(encodeUnreserved(k)); tmp.push(encodeValue(operator, value[k].toString())); } }); } if (isKeyOperator(operator)) { result.push(encodeUnreserved(key) + "=" + tmp.join(",")); } else if (tmp.length !== 0) { result.push(tmp.join(",")); } } } } else { if (operator === ";") { if (isDefined(value)) { result.push(encodeUnreserved(key)); } } else if (value === "" && (operator === "&" || operator === "?")) { result.push(encodeUnreserved(key) + "="); } else if (value === "") { result.push(""); } } return result; } function parseUrl(template) { return { expand: expand.bind(null, template) }; } function expand(template, context) { var operators = ["+", "#", ".", "/", ";", "?", "&"]; template = template.replace( /\{([^\{\}]+)\}|([^\{\}]+)/g, function(_, expression, literal) { if (expression) { let operator = ""; const values = []; if (operators.indexOf(expression.charAt(0)) !== -1) { operator = expression.charAt(0); expression = expression.substr(1); } expression.split(/,/g).forEach(function(variable) { var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); }); if (operator && operator !== "+") { var separator = ","; if (operator === "?") { separator = "&"; } else if (operator !== "#") { separator = operator; } return (values.length !== 0 ? operator : "") + values.join(separator); } else { return values.join(","); } } else { return encodeReserved(literal); } } ); if (template === "/") { return template; } else { return template.replace(/\/$/, ""); } } // pkg/dist-src/parse.js function parse(options) { let method = options.method.toUpperCase(); let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); let headers = Object.assign({}, options.headers); let body; let parameters = omit(options, [ "method", "baseUrl", "url", "headers", "request", "mediaType" ]); const urlVariableNames = extractUrlVariableNames(url); url = parseUrl(url).expand(parameters); if (!/^http/.test(url)) { url = options.baseUrl + url; } const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); const remainingParameters = omit(parameters, omittedParameters); const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); if (!isBinaryRequest) { if (options.mediaType.format) { headers.accept = headers.accept.split(/,/).map( (format) => format.replace( /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}` ) ).join(","); } if (url.endsWith("/graphql")) { if (options.mediaType.previews?.length) { const previewsFromAcceptHeader = headers.accept.match(/(?<![\w-])[\w-]+(?=-preview)/g) || []; headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { const format = options.mediaType.format ?
`.${options.mediaType.format}` : "+json"; return `application/vnd.github.${preview}-preview${format}`; }).join(","); } } } if (["GET", "HEAD"].includes(method)) { url = addQueryParameters(url, remainingParameters); } else { if ("data" in remainingParameters) { body = remainingParameters.data; } else { if (Object.keys(remainingParameters).length) { body = remainingParameters; } } } if (!headers["content-type"] && typeof body !== "undefined") { headers["content-type"] = "application/json; charset=utf-8"; } if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { body = ""; } return Object.assign( { method, url, headers }, typeof body !== "undefined" ? { body } : null, options.request ? { request: options.request } : null ); } // pkg/dist-src/endpoint-with-defaults.js function endpointWithDefaults(defaults, route, options) { return parse(merge(defaults, route, options)); } // pkg/dist-src/with-defaults.js function withDefaults(oldDefaults, newDefaults) { const DEFAULTS2 = merge(oldDefaults, newDefaults); const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); return Object.assign(endpoint2, { DEFAULTS: DEFAULTS2, defaults: withDefaults.bind(null, DEFAULTS2), merge: merge.bind(null, DEFAULTS2), parse }); } // pkg/dist-src/index.js var endpoint = withDefaults(null, DEFAULTS); // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 70007: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var index_exports = {}; __export(index_exports, { GraphqlResponseError: () => GraphqlResponseError, graphql: () => graphql2, withCustomRequest: () => withCustomRequest }); module.exports = __toCommonJS(index_exports); var import_request3 = __nccwpck_require__(66255); var import_universal_user_agent = __nccwpck_require__(33843); // pkg/dist-src/version.js var VERSION = "7.1.1"; // pkg/dist-src/with-defaults.js var import_request2 = __nccwpck_require__(66255); // pkg/dist-src/graphql.js var import_request = __nccwpck_require__(66255); // pkg/dist-src/error.js function _buildMessageForResponseErrors(data) { return `Request failed due to following response errors: ` + data.errors.map((e) => ` - ${e.message}`).join("\n"); } var GraphqlResponseError = class extends Error { constructor(request2, headers, response) { super(_buildMessageForResponseErrors(response)); this.request = request2; this.headers = headers; this.response = response; this.name = "GraphqlResponseError"; this.errors = response.errors; this.data = response.data; if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } } }; // pkg/dist-src/graphql.js var NON_VARIABLE_OPTIONS = [ "method", "baseUrl", "url", "headers", "request", "query", "mediaType" ]; var 
FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; function graphql(request2, query, options) { if (options) { if (typeof query === "string" && "query" in options) { return Promise.reject( new Error(`[@octokit/graphql] "query" cannot be used as variable name`) ); } for (const key in options) { if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue; return Promise.reject( new Error( `[@octokit/graphql] "${key}" cannot be used as variable name` ) ); } } const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; const requestOptions = Object.keys( parsedOptions ).reduce((result, key) => { if (NON_VARIABLE_OPTIONS.includes(key)) { result[key] = parsedOptions[key]; return result; } if (!result.variables) { result.variables = {}; } result.variables[key] = parsedOptions[key]; return result; }, {}); const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); } return request2(requestOptions).then((response) => { if (response.data.errors) { const headers = {}; for (const key of Object.keys(response.headers)) { headers[key] = response.headers[key]; } throw new GraphqlResponseError( requestOptions, headers, response.data ); } return response.data.data; }); } // pkg/dist-src/with-defaults.js function withDefaults(request2, newDefaults) { const newRequest = request2.defaults(newDefaults); const newApi = (query, options) => { return graphql(newRequest, query, options); }; return Object.assign(newApi, { defaults: withDefaults.bind(null, newRequest), endpoint: newRequest.endpoint }); } // pkg/dist-src/index.js var graphql2 = withDefaults(import_request3.request, { headers: { "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` }, method: "POST", url: "/graphql" }); function withCustomRequest(customRequest) { return withDefaults(customRequest, { method: "POST", url: "/graphql" }); } // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 38082: /***/ ((module) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { composePaginateRest: () => composePaginateRest, isPaginatingEndpoint: () => isPaginatingEndpoint, paginateRest: () => paginateRest, paginatingEndpoints: () => paginatingEndpoints }); module.exports = __toCommonJS(dist_src_exports); // pkg/dist-src/version.js var VERSION = "9.2.2"; // pkg/dist-src/normalize-paginated-list-response.js function normalizePaginatedListResponse(response) { if (!response.data) { return { ...response, data: [] }; } const responseNeedsNormalization = "total_count" in response.data && !("url" in 
response.data); if (!responseNeedsNormalization) return response; const incompleteResults = response.data.incomplete_results; const repositorySelection = response.data.repository_selection; const totalCount = response.data.total_count; delete response.data.incomplete_results; delete response.data.repository_selection; delete response.data.total_count; const namespaceKey = Object.keys(response.data)[0]; const data = response.data[namespaceKey]; response.data = data; if (typeof incompleteResults !== "undefined") { response.data.incomplete_results = incompleteResults; } if (typeof repositorySelection !== "undefined") { response.data.repository_selection = repositorySelection; } response.data.total_count = totalCount; return response; } // pkg/dist-src/iterator.js function iterator(octokit, route, parameters) { const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); const requestMethod = typeof route === "function" ? route : octokit.request; const method = options.method; const headers = options.headers; let url = options.url; return { [Symbol.asyncIterator]: () => ({ async next() { if (!url) return { done: true }; try { const response = await requestMethod({ method, url, headers }); const normalizedResponse = normalizePaginatedListResponse(response); url = ((normalizedResponse.headers.link || "").match( /<([^<>]+)>;\s*rel="next"/ ) || [])[1]; return { value: normalizedResponse }; } catch (error) { if (error.status !== 409) throw error; url = ""; return { value: { status: 200, headers: {}, data: [] } }; } } }) }; } // pkg/dist-src/paginate.js function paginate(octokit, route, parameters, mapFn) { if (typeof parameters === "function") { mapFn = parameters; parameters = void 0; } return gather( octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn ); } function gather(octokit, results, iterator2, mapFn) { return iterator2.next().then((result) => { if (result.done) { return results; } let earlyExit = false; function done() { earlyExit = true; } results = results.concat( mapFn ? 
mapFn(result.value, done) : result.value.data ); if (earlyExit) { return results; } return gather(octokit, results, iterator2, mapFn); }); } // pkg/dist-src/compose-paginate.js var composePaginateRest = Object.assign(paginate, { iterator }); // pkg/dist-src/generated/paginating-endpoints.js var paginatingEndpoints = [ "GET /advisories", "GET /app/hook/deliveries", "GET /app/installation-requests", "GET /app/installations", "GET /assignments/{assignment_id}/accepted_assignments", "GET /classrooms", "GET /classrooms/{classroom_id}/assignments", "GET /enterprises/{enterprise}/dependabot/alerts", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/actions/variables", "GET /orgs/{org}/actions/variables/{name}/repositories", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/codespaces/secrets", "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", "GET /orgs/{org}/copilot/billing/seats", "GET /orgs/{org}/dependabot/alerts", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/members/{username}/codespaces", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/organization-roles/{role_id}/teams", "GET /orgs/{org}/organization-roles/{role_id}/users", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/personal-access-token-requests", "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", "GET /orgs/{org}/personal-access-tokens", "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", "GET /orgs/{org}/projects", "GET /orgs/{org}/properties/values", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/rulesets", "GET /orgs/{org}/rulesets/rule-suites", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/security-advisories", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET 
/orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/organization-secrets", "GET /repos/{owner}/{repo}/actions/organization-variables", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/variables", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/activity", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/alerts", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET 
/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/rules/branches/{branch}", "GET /repos/{owner}/{repo}/rulesets", "GET /repos/{owner}/{repo}/rulesets/rule-suites", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/security-advisories", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /repositories/{repository_id}/environments/{environment_name}/variables", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/social_accounts", "GET /user/ssh_signing_keys", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/social_accounts", "GET /users/{username}/ssh_signing_keys", "GET /users/{username}/starred", "GET /users/{username}/subscriptions" ]; // pkg/dist-src/paginating-endpoints.js function isPaginatingEndpoint(arg) { if (typeof arg === "string") { return paginatingEndpoints.includes(arg); } else { return false; } } // pkg/dist-src/index.js function paginateRest(octokit) { return { paginate: Object.assign(paginate.bind(null, octokit), { iterator: 
iterator.bind(null, octokit) }) }; } paginateRest.VERSION = VERSION; // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 84935: /***/ ((module) => { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { legacyRestEndpointMethods: () => legacyRestEndpointMethods, restEndpointMethods: () => restEndpointMethods }); module.exports = __toCommonJS(dist_src_exports); // pkg/dist-src/version.js var VERSION = "10.4.1"; // pkg/dist-src/generated/endpoints.js var Endpoints = { actions: { addCustomLabelsToSelfHostedRunnerForOrg: [ "POST /orgs/{org}/actions/runners/{runner_id}/labels" ], addCustomLabelsToSelfHostedRunnerForRepo: [ "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" ], addSelectedRepoToOrgSecret: [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ], addSelectedRepoToOrgVariable: [ "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" ], approveWorkflowRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" ], cancelWorkflowRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" ], createEnvironmentVariable: [ "POST /repositories/{repository_id}/environments/{environment_name}/variables" ], createOrUpdateEnvironmentSecret: [ "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" ], createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], createOrUpdateRepoSecret: [ "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" ], createOrgVariable: ["POST /orgs/{org}/actions/variables"], createRegistrationTokenForOrg: [ "POST /orgs/{org}/actions/runners/registration-token" ], createRegistrationTokenForRepo: [ "POST /repos/{owner}/{repo}/actions/runners/registration-token" ], createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], createRemoveTokenForRepo: [ "POST /repos/{owner}/{repo}/actions/runners/remove-token" ], createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], createWorkflowDispatch: [ "POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" ], deleteActionsCacheById: [ "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" ], deleteActionsCacheByKey: [ "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" ], deleteArtifact: [ "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" ], deleteEnvironmentSecret: [ "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" ], deleteEnvironmentVariable: [ "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" ], deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], deleteRepoSecret: 
[ "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" ], deleteRepoVariable: [ "DELETE /repos/{owner}/{repo}/actions/variables/{name}" ], deleteSelfHostedRunnerFromOrg: [ "DELETE /orgs/{org}/actions/runners/{runner_id}" ], deleteSelfHostedRunnerFromRepo: [ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" ], deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], deleteWorkflowRunLogs: [ "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ], disableSelectedRepositoryGithubActionsOrganization: [ "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" ], disableWorkflow: [ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" ], downloadArtifact: [ "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" ], downloadJobLogsForWorkflowRun: [ "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" ], downloadWorkflowRunAttemptLogs: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" ], downloadWorkflowRunLogs: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" ], enableSelectedRepositoryGithubActionsOrganization: [ "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" ], enableWorkflow: [ "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" ], forceCancelWorkflowRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" ], generateRunnerJitconfigForOrg: [ "POST /orgs/{org}/actions/runners/generate-jitconfig" ], generateRunnerJitconfigForRepo: [ "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" ], getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], getActionsCacheUsageByRepoForOrg: [ "GET /orgs/{org}/actions/cache/usage-by-repository" ], getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], getAllowedActionsOrganization: [ "GET /orgs/{org}/actions/permissions/selected-actions" ], getAllowedActionsRepository: [ "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" ], getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], getCustomOidcSubClaimForRepo: [ "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" ], getEnvironmentPublicKey: [ "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" ], getEnvironmentSecret: [ "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" ], getEnvironmentVariable: [ "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" ], getGithubActionsDefaultWorkflowPermissionsOrganization: [ "GET /orgs/{org}/actions/permissions/workflow" ], getGithubActionsDefaultWorkflowPermissionsRepository: [ "GET /repos/{owner}/{repo}/actions/permissions/workflow" ], getGithubActionsPermissionsOrganization: [ "GET /orgs/{org}/actions/permissions" ], getGithubActionsPermissionsRepository: [ "GET /repos/{owner}/{repo}/actions/permissions" ], getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], getPendingDeploymentsForRun: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" ], getRepoPermissions: [ "GET /repos/{owner}/{repo}/actions/permissions", {}, { renamed: ["actions", "getGithubActionsPermissionsRepository"] } ], getRepoPublicKey: ["GET 
/repos/{owner}/{repo}/actions/secrets/public-key"], getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], getReviewsForRun: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" ], getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], getSelfHostedRunnerForRepo: [ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" ], getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], getWorkflowAccessToRepository: [ "GET /repos/{owner}/{repo}/actions/permissions/access" ], getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], getWorkflowRunAttempt: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" ], getWorkflowRunUsage: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" ], getWorkflowUsage: [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" ], listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], listEnvironmentSecrets: [ "GET /repositories/{repository_id}/environments/{environment_name}/secrets" ], listEnvironmentVariables: [ "GET /repositories/{repository_id}/environments/{environment_name}/variables" ], listJobsForWorkflowRun: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" ], listJobsForWorkflowRunAttempt: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" ], listLabelsForSelfHostedRunnerForOrg: [ "GET /orgs/{org}/actions/runners/{runner_id}/labels" ], listLabelsForSelfHostedRunnerForRepo: [ "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" ], listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], listOrgVariables: ["GET /orgs/{org}/actions/variables"], listRepoOrganizationSecrets: [ "GET /repos/{owner}/{repo}/actions/organization-secrets" ], listRepoOrganizationVariables: [ "GET /repos/{owner}/{repo}/actions/organization-variables" ], listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], listRunnerApplicationsForRepo: [ "GET /repos/{owner}/{repo}/actions/runners/downloads" ], listSelectedReposForOrgSecret: [ "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" ], listSelectedReposForOrgVariable: [ "GET /orgs/{org}/actions/variables/{name}/repositories" ], listSelectedRepositoriesEnabledGithubActionsOrganization: [ "GET /orgs/{org}/actions/permissions/repositories" ], listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], listWorkflowRunArtifacts: [ "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" ], listWorkflowRuns: [ "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" ], listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], reRunJobForWorkflowRun: [ "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" ], reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], reRunWorkflowFailedJobs: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" ], removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" ], removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" ], removeCustomLabelFromSelfHostedRunnerForOrg: [ "DELETE 
/orgs/{org}/actions/runners/{runner_id}/labels/{name}" ], removeCustomLabelFromSelfHostedRunnerForRepo: [ "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" ], removeSelectedRepoFromOrgSecret: [ "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" ], removeSelectedRepoFromOrgVariable: [ "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" ], reviewCustomGatesForRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" ], reviewPendingDeploymentsForRun: [ "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" ], setAllowedActionsOrganization: [ "PUT /orgs/{org}/actions/permissions/selected-actions" ], setAllowedActionsRepository: [ "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" ], setCustomLabelsForSelfHostedRunnerForOrg: [ "PUT /orgs/{org}/actions/runners/{runner_id}/labels" ], setCustomLabelsForSelfHostedRunnerForRepo: [ "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" ], setCustomOidcSubClaimForRepo: [ "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" ], setGithubActionsDefaultWorkflowPermissionsOrganization: [ "PUT /orgs/{org}/actions/permissions/workflow" ], setGithubActionsDefaultWorkflowPermissionsRepository: [ "PUT /repos/{owner}/{repo}/actions/permissions/workflow" ], setGithubActionsPermissionsOrganization: [ "PUT /orgs/{org}/actions/permissions" ], setGithubActionsPermissionsRepository: [ "PUT /repos/{owner}/{repo}/actions/permissions" ], setSelectedReposForOrgSecret: [ "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" ], setSelectedReposForOrgVariable: [ "PUT /orgs/{org}/actions/variables/{name}/repositories" ], setSelectedRepositoriesEnabledGithubActionsOrganization: [ "PUT /orgs/{org}/actions/permissions/repositories" ], setWorkflowAccessToRepository: [ "PUT /repos/{owner}/{repo}/actions/permissions/access" ], updateEnvironmentVariable: [ "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" ], updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], updateRepoVariable: [ "PATCH /repos/{owner}/{repo}/actions/variables/{name}" ] }, activity: { checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], deleteThreadSubscription: [ "DELETE /notifications/threads/{thread_id}/subscription" ], getFeeds: ["GET /feeds"], getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], getThread: ["GET /notifications/threads/{thread_id}"], getThreadSubscriptionForAuthenticatedUser: [ "GET /notifications/threads/{thread_id}/subscription" ], listEventsForAuthenticatedUser: ["GET /users/{username}/events"], listNotificationsForAuthenticatedUser: ["GET /notifications"], listOrgEventsForAuthenticatedUser: [ "GET /users/{username}/events/orgs/{org}" ], listPublicEvents: ["GET /events"], listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], listPublicEventsForUser: ["GET /users/{username}/events/public"], listPublicOrgEvents: ["GET /orgs/{org}/events"], listReceivedEventsForUser: ["GET /users/{username}/received_events"], listReceivedPublicEventsForUser: [ "GET /users/{username}/received_events/public" ], listRepoEvents: ["GET /repos/{owner}/{repo}/events"], listRepoNotificationsForAuthenticatedUser: [ "GET /repos/{owner}/{repo}/notifications" ], listReposStarredByAuthenticatedUser: ["GET /user/starred"], listReposStarredByUser: ["GET /users/{username}/starred"], 
listReposWatchedByUser: ["GET /users/{username}/subscriptions"], listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], markNotificationsAsRead: ["PUT /notifications"], markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"], markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], setThreadSubscription: [ "PUT /notifications/threads/{thread_id}/subscription" ], starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] }, apps: { addRepoToInstallation: [ "PUT /user/installations/{installation_id}/repositories/{repository_id}", {}, { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } ], addRepoToInstallationForAuthenticatedUser: [ "PUT /user/installations/{installation_id}/repositories/{repository_id}" ], checkToken: ["POST /applications/{client_id}/token"], createFromManifest: ["POST /app-manifests/{code}/conversions"], createInstallationAccessToken: [ "POST /app/installations/{installation_id}/access_tokens" ], deleteAuthorization: ["DELETE /applications/{client_id}/grant"], deleteInstallation: ["DELETE /app/installations/{installation_id}"], deleteToken: ["DELETE /applications/{client_id}/token"], getAuthenticated: ["GET /app"], getBySlug: ["GET /apps/{app_slug}"], getInstallation: ["GET /app/installations/{installation_id}"], getOrgInstallation: ["GET /orgs/{org}/installation"], getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], getSubscriptionPlanForAccount: [ "GET /marketplace_listing/accounts/{account_id}" ], getSubscriptionPlanForAccountStubbed: [ "GET /marketplace_listing/stubbed/accounts/{account_id}" ], getUserInstallation: ["GET /users/{username}/installation"], getWebhookConfigForApp: ["GET /app/hook/config"], getWebhookDelivery: ["GET /app/hook/deliveries/{delivery_id}"], listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], listAccountsForPlanStubbed: [ "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" ], listInstallationReposForAuthenticatedUser: [ "GET /user/installations/{installation_id}/repositories" ], listInstallationRequestsForAuthenticatedApp: [ "GET /app/installation-requests" ], listInstallations: ["GET /app/installations"], listInstallationsForAuthenticatedUser: ["GET /user/installations"], listPlans: ["GET /marketplace_listing/plans"], listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], listReposAccessibleToInstallation: ["GET /installation/repositories"], listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], listSubscriptionsForAuthenticatedUserStubbed: [ "GET /user/marketplace_purchases/stubbed" ], listWebhookDeliveries: ["GET /app/hook/deliveries"], redeliverWebhookDelivery: [ "POST /app/hook/deliveries/{delivery_id}/attempts" ], removeRepoFromInstallation: [ "DELETE /user/installations/{installation_id}/repositories/{repository_id}", {}, { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } ], removeRepoFromInstallationForAuthenticatedUser: [ "DELETE /user/installations/{installation_id}/repositories/{repository_id}" ], resetToken: ["PATCH /applications/{client_id}/token"], revokeInstallationAccessToken: ["DELETE /installation/token"], scopeToken: ["POST 
/applications/{client_id}/token/scoped"], suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], unsuspendInstallation: [ "DELETE /app/installations/{installation_id}/suspended" ], updateWebhookConfigForApp: ["PATCH /app/hook/config"] }, billing: { getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], getGithubActionsBillingUser: [ "GET /users/{username}/settings/billing/actions" ], getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], getGithubPackagesBillingUser: [ "GET /users/{username}/settings/billing/packages" ], getSharedStorageBillingOrg: [ "GET /orgs/{org}/settings/billing/shared-storage" ], getSharedStorageBillingUser: [ "GET /users/{username}/settings/billing/shared-storage" ] }, checks: { create: ["POST /repos/{owner}/{repo}/check-runs"], createSuite: ["POST /repos/{owner}/{repo}/check-suites"], get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], listAnnotations: [ "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" ], listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], listForSuite: [ "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" ], listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], rerequestRun: [ "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" ], rerequestSuite: [ "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" ], setSuitesPreferences: [ "PATCH /repos/{owner}/{repo}/check-suites/preferences" ], update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] }, codeScanning: { deleteAnalysis: [ "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" ], getAlert: [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", {}, { renamedParameters: { alert_id: "alert_number" } } ], getAnalysis: [ "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" ], getCodeqlDatabase: [ "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" ], getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], listAlertInstances: [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" ], listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], listAlertsInstances: [ "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { renamed: ["codeScanning", "listAlertInstances"] } ], listCodeqlDatabases: [ "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" ], listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], updateAlert: [ "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" ], updateDefaultSetup: [ "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" ], uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] }, codesOfConduct: { getAllCodesOfConduct: ["GET /codes_of_conduct"], getConductCode: ["GET /codes_of_conduct/{key}"] }, codespaces: { addRepositoryForSecretForAuthenticatedUser: [ "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" ], addSelectedRepoToOrgSecret: [ "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" ], checkPermissionsForDevcontainer: [ "GET /repos/{owner}/{repo}/codespaces/permissions_check" ], codespaceMachinesForAuthenticatedUser: [ "GET 
/user/codespaces/{codespace_name}/machines" ], createForAuthenticatedUser: ["POST /user/codespaces"], createOrUpdateOrgSecret: [ "PUT /orgs/{org}/codespaces/secrets/{secret_name}" ], createOrUpdateRepoSecret: [ "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" ], createOrUpdateSecretForAuthenticatedUser: [ "PUT /user/codespaces/secrets/{secret_name}" ], createWithPrForAuthenticatedUser: [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" ], createWithRepoForAuthenticatedUser: [ "POST /repos/{owner}/{repo}/codespaces" ], deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], deleteFromOrganization: [ "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" ], deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], deleteRepoSecret: [ "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" ], deleteSecretForAuthenticatedUser: [ "DELETE /user/codespaces/secrets/{secret_name}" ], exportForAuthenticatedUser: [ "POST /user/codespaces/{codespace_name}/exports" ], getCodespacesForUserInOrg: [ "GET /orgs/{org}/members/{username}/codespaces" ], getExportDetailsForAuthenticatedUser: [ "GET /user/codespaces/{codespace_name}/exports/{export_id}" ], getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], getPublicKeyForAuthenticatedUser: [ "GET /user/codespaces/secrets/public-key" ], getRepoPublicKey: [ "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" ], getRepoSecret: [ "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" ], getSecretForAuthenticatedUser: [ "GET /user/codespaces/secrets/{secret_name}" ], listDevcontainersInRepositoryForAuthenticatedUser: [ "GET /repos/{owner}/{repo}/codespaces/devcontainers" ], listForAuthenticatedUser: ["GET /user/codespaces"], listInOrganization: [ "GET /orgs/{org}/codespaces", {}, { renamedParameters: { org_id: "org" } } ], listInRepositoryForAuthenticatedUser: [ "GET /repos/{owner}/{repo}/codespaces" ], listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], listRepositoriesForSecretForAuthenticatedUser: [ "GET /user/codespaces/secrets/{secret_name}/repositories" ], listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], listSelectedReposForOrgSecret: [ "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" ], preFlightWithRepoForAuthenticatedUser: [ "GET /repos/{owner}/{repo}/codespaces/new" ], publishForAuthenticatedUser: [ "POST /user/codespaces/{codespace_name}/publish" ], removeRepositoryForSecretForAuthenticatedUser: [ "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" ], removeSelectedRepoFromOrgSecret: [ "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" ], repoMachinesForAuthenticatedUser: [ "GET /repos/{owner}/{repo}/codespaces/machines" ], setRepositoriesForSecretForAuthenticatedUser: [ "PUT /user/codespaces/secrets/{secret_name}/repositories" ], setSelectedReposForOrgSecret: [ "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" ], startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], stopInOrganization: [ "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" ], updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] }, copilot: { 
addCopilotSeatsForTeams: [ "POST /orgs/{org}/copilot/billing/selected_teams" ], addCopilotSeatsForUsers: [ "POST /orgs/{org}/copilot/billing/selected_users" ], cancelCopilotSeatAssignmentForTeams: [ "DELETE /orgs/{org}/copilot/billing/selected_teams" ], cancelCopilotSeatAssignmentForUsers: [ "DELETE /orgs/{org}/copilot/billing/selected_users" ], getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], getCopilotSeatDetailsForUser: [ "GET /orgs/{org}/members/{username}/copilot" ], listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] }, dependabot: { addSelectedRepoToOrgSecret: [ "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" ], createOrUpdateOrgSecret: [ "PUT /orgs/{org}/dependabot/secrets/{secret_name}" ], createOrUpdateRepoSecret: [ "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" ], deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], deleteRepoSecret: [ "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" ], getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], getRepoPublicKey: [ "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" ], getRepoSecret: [ "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" ], listAlertsForEnterprise: [ "GET /enterprises/{enterprise}/dependabot/alerts" ], listAlertsForOrg: ["GET /orgs/{org}/dependabot/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], listSelectedReposForOrgSecret: [ "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" ], removeSelectedRepoFromOrgSecret: [ "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" ], setSelectedReposForOrgSecret: [ "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" ], updateAlert: [ "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" ] }, dependencyGraph: { createRepositorySnapshot: [ "POST /repos/{owner}/{repo}/dependency-graph/snapshots" ], diffRange: [ "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" ], exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] }, emojis: { get: ["GET /emojis"] }, gists: { checkIsStarred: ["GET /gists/{gist_id}/star"], create: ["POST /gists"], createComment: ["POST /gists/{gist_id}/comments"], delete: ["DELETE /gists/{gist_id}"], deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], fork: ["POST /gists/{gist_id}/forks"], get: ["GET /gists/{gist_id}"], getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], getRevision: ["GET /gists/{gist_id}/{sha}"], list: ["GET /gists"], listComments: ["GET /gists/{gist_id}/comments"], listCommits: ["GET /gists/{gist_id}/commits"], listForUser: ["GET /users/{username}/gists"], listForks: ["GET /gists/{gist_id}/forks"], listPublic: ["GET /gists/public"], listStarred: ["GET /gists/starred"], star: ["PUT /gists/{gist_id}/star"], unstar: ["DELETE /gists/{gist_id}/star"], update: ["PATCH /gists/{gist_id}"], updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] }, git: { createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], createCommit: ["POST /repos/{owner}/{repo}/git/commits"], createRef: ["POST /repos/{owner}/{repo}/git/refs"], createTag: ["POST /repos/{owner}/{repo}/git/tags"], createTree: ["POST 
/repos/{owner}/{repo}/git/trees"], deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] }, gitignore: { getAllTemplates: ["GET /gitignore/templates"], getTemplate: ["GET /gitignore/templates/{name}"] }, interactions: { getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], getRestrictionsForYourPublicRepos: [ "GET /user/interaction-limits", {}, { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } ], removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], removeRestrictionsForRepo: [ "DELETE /repos/{owner}/{repo}/interaction-limits" ], removeRestrictionsForYourPublicRepos: [ "DELETE /user/interaction-limits", {}, { renamed: ["interactions", "removeRestrictionsForAuthenticatedUser"] } ], setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], setRestrictionsForYourPublicRepos: [ "PUT /user/interaction-limits", {}, { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } ] }, issues: { addAssignees: [ "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" ], addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], checkUserCanBeAssignedToIssue: [ "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" ], create: ["POST /repos/{owner}/{repo}/issues"], createComment: [ "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" ], createLabel: ["POST /repos/{owner}/{repo}/labels"], createMilestone: ["POST /repos/{owner}/{repo}/milestones"], deleteComment: [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" ], deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], deleteMilestone: [ "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" ], get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], list: ["GET /issues"], listAssignees: ["GET /repos/{owner}/{repo}/assignees"], listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], listEventsForTimeline: [ "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" ], listForAuthenticatedUser: ["GET /user/issues"], listForOrg: ["GET /orgs/{org}/issues"], listForRepo: ["GET /repos/{owner}/{repo}/issues"], listLabelsForMilestone: [ "GET 
/repos/{owner}/{repo}/milestones/{milestone_number}/labels" ], listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], listLabelsOnIssue: [ "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" ], listMilestones: ["GET /repos/{owner}/{repo}/milestones"], lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], removeAllLabels: [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" ], removeAssignees: [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" ], removeLabel: [ "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" ], setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], updateMilestone: [ "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" ] }, licenses: { get: ["GET /licenses/{license}"], getAllCommonlyUsed: ["GET /licenses"], getForRepo: ["GET /repos/{owner}/{repo}/license"] }, markdown: { render: ["POST /markdown"], renderRaw: [ "POST /markdown/raw", { headers: { "content-type": "text/plain; charset=utf-8" } } ] }, meta: { get: ["GET /meta"], getAllVersions: ["GET /versions"], getOctocat: ["GET /octocat"], getZen: ["GET /zen"], root: ["GET /"] }, migrations: { cancelImport: [ "DELETE /repos/{owner}/{repo}/import", {}, { deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" } ], deleteArchiveForAuthenticatedUser: [ "DELETE /user/migrations/{migration_id}/archive" ], deleteArchiveForOrg: [ "DELETE /orgs/{org}/migrations/{migration_id}/archive" ], downloadArchiveForOrg: [ "GET /orgs/{org}/migrations/{migration_id}/archive" ], getArchiveForAuthenticatedUser: [ "GET /user/migrations/{migration_id}/archive" ], getCommitAuthors: [ "GET /repos/{owner}/{repo}/import/authors", {}, { deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" } ], getImportStatus: [ "GET /repos/{owner}/{repo}/import", {}, { deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" } ], getLargeFiles: [ "GET /repos/{owner}/{repo}/import/large_files", {}, { deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" } ], getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], listForAuthenticatedUser: ["GET /user/migrations"], listForOrg: ["GET /orgs/{org}/migrations"], listReposForAuthenticatedUser: [ "GET /user/migrations/{migration_id}/repositories" ], listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], listReposForUser: [ "GET /user/migrations/{migration_id}/repositories", {}, { renamed: ["migrations", "listReposForAuthenticatedUser"] } ], mapCommitAuthor: [ "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", {}, { deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" } ], setLfsPreference: [ "PATCH /repos/{owner}/{repo}/import/lfs", {}, { deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see 
https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" } ], startForAuthenticatedUser: ["POST /user/migrations"], startForOrg: ["POST /orgs/{org}/migrations"], startImport: [ "PUT /repos/{owner}/{repo}/import", {}, { deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" } ], unlockRepoForAuthenticatedUser: [ "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" ], unlockRepoForOrg: [ "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" ], updateImport: [ "PATCH /repos/{owner}/{repo}/import", {}, { deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" } ] }, oidc: { getOidcCustomSubTemplateForOrg: [ "GET /orgs/{org}/actions/oidc/customization/sub" ], updateOidcCustomSubTemplateForOrg: [ "PUT /orgs/{org}/actions/oidc/customization/sub" ] }, orgs: { addSecurityManagerTeam: [ "PUT /orgs/{org}/security-managers/teams/{team_slug}" ], assignTeamToOrgRole: [ "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" ], assignUserToOrgRole: [ "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}" ], blockUser: ["PUT /orgs/{org}/blocks/{username}"], cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], convertMemberToOutsideCollaborator: [ "PUT /orgs/{org}/outside_collaborators/{username}" ], createCustomOrganizationRole: ["POST /orgs/{org}/organization-roles"], createInvitation: ["POST /orgs/{org}/invitations"], createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], createOrUpdateCustomPropertiesValuesForRepos: [ "PATCH /orgs/{org}/properties/values" ], createOrUpdateCustomProperty: [ "PUT /orgs/{org}/properties/schema/{custom_property_name}" ], createWebhook: ["POST /orgs/{org}/hooks"], delete: ["DELETE /orgs/{org}"], deleteCustomOrganizationRole: [ "DELETE /orgs/{org}/organization-roles/{role_id}" ], deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], enableOrDisableSecurityProductOnAllOrgRepos: [ "POST /orgs/{org}/{security_product}/{enablement}" ], get: ["GET /orgs/{org}"], getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], getCustomProperty: [ "GET /orgs/{org}/properties/schema/{custom_property_name}" ], getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"], getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], getWebhookDelivery: [ "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" ], list: ["GET /organizations"], listAppInstallations: ["GET /orgs/{org}/installations"], listBlockedUsers: ["GET /orgs/{org}/blocks"], listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], listForAuthenticatedUser: ["GET /user/orgs"], listForUser: ["GET /users/{username}/orgs"], listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], listMembers: ["GET /orgs/{org}/members"], listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"], 
listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"], listOrgRoles: ["GET /orgs/{org}/organization-roles"], listOrganizationFineGrainedPermissions: [ "GET /orgs/{org}/organization-fine-grained-permissions" ], listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], listPatGrantRepositories: [ "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" ], listPatGrantRequestRepositories: [ "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" ], listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], listPendingInvitations: ["GET /orgs/{org}/invitations"], listPublicMembers: ["GET /orgs/{org}/public_members"], listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], listWebhooks: ["GET /orgs/{org}/hooks"], patchCustomOrganizationRole: [ "PATCH /orgs/{org}/organization-roles/{role_id}" ], pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], redeliverWebhookDelivery: [ "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" ], removeCustomProperty: [ "DELETE /orgs/{org}/properties/schema/{custom_property_name}" ], removeMember: ["DELETE /orgs/{org}/members/{username}"], removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], removeOutsideCollaborator: [ "DELETE /orgs/{org}/outside_collaborators/{username}" ], removePublicMembershipForAuthenticatedUser: [ "DELETE /orgs/{org}/public_members/{username}" ], removeSecurityManagerTeam: [ "DELETE /orgs/{org}/security-managers/teams/{team_slug}" ], reviewPatGrantRequest: [ "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" ], reviewPatGrantRequestsInBulk: [ "POST /orgs/{org}/personal-access-token-requests" ], revokeAllOrgRolesTeam: [ "DELETE /orgs/{org}/organization-roles/teams/{team_slug}" ], revokeAllOrgRolesUser: [ "DELETE /orgs/{org}/organization-roles/users/{username}" ], revokeOrgRoleTeam: [ "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" ], revokeOrgRoleUser: [ "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}" ], setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], setPublicMembershipForAuthenticatedUser: [ "PUT /orgs/{org}/public_members/{username}" ], unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], update: ["PATCH /orgs/{org}"], updateMembershipForAuthenticatedUser: [ "PATCH /user/memberships/orgs/{org}" ], updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] }, packages: { deletePackageForAuthenticatedUser: [ "DELETE /user/packages/{package_type}/{package_name}" ], deletePackageForOrg: [ "DELETE /orgs/{org}/packages/{package_type}/{package_name}" ], deletePackageForUser: [ "DELETE /users/{username}/packages/{package_type}/{package_name}" ], deletePackageVersionForAuthenticatedUser: [ "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" ], deletePackageVersionForOrg: [ "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], deletePackageVersionForUser: [ "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], getAllPackageVersionsForAPackageOwnedByAnOrg: [ "GET 
/orgs/{org}/packages/{package_type}/{package_name}/versions", {}, { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } ], getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ "GET /user/packages/{package_type}/{package_name}/versions", {}, { renamed: [ "packages", "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" ] } ], getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ "GET /user/packages/{package_type}/{package_name}/versions" ], getAllPackageVersionsForPackageOwnedByOrg: [ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" ], getAllPackageVersionsForPackageOwnedByUser: [ "GET /users/{username}/packages/{package_type}/{package_name}/versions" ], getPackageForAuthenticatedUser: [ "GET /user/packages/{package_type}/{package_name}" ], getPackageForOrganization: [ "GET /orgs/{org}/packages/{package_type}/{package_name}" ], getPackageForUser: [ "GET /users/{username}/packages/{package_type}/{package_name}" ], getPackageVersionForAuthenticatedUser: [ "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" ], getPackageVersionForOrganization: [ "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], getPackageVersionForUser: [ "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" ], listDockerMigrationConflictingPackagesForAuthenticatedUser: [ "GET /user/docker/conflicts" ], listDockerMigrationConflictingPackagesForOrganization: [ "GET /orgs/{org}/docker/conflicts" ], listDockerMigrationConflictingPackagesForUser: [ "GET /users/{username}/docker/conflicts" ], listPackagesForAuthenticatedUser: ["GET /user/packages"], listPackagesForOrganization: ["GET /orgs/{org}/packages"], listPackagesForUser: ["GET /users/{username}/packages"], restorePackageForAuthenticatedUser: [ "POST /user/packages/{package_type}/{package_name}/restore{?token}" ], restorePackageForOrg: [ "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" ], restorePackageForUser: [ "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" ], restorePackageVersionForAuthenticatedUser: [ "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" ], restorePackageVersionForOrg: [ "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" ], restorePackageVersionForUser: [ "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" ] }, projects: { addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], createCard: ["POST /projects/columns/{column_id}/cards"], createColumn: ["POST /projects/{project_id}/columns"], createForAuthenticatedUser: ["POST /user/projects"], createForOrg: ["POST /orgs/{org}/projects"], createForRepo: ["POST /repos/{owner}/{repo}/projects"], delete: ["DELETE /projects/{project_id}"], deleteCard: ["DELETE /projects/columns/cards/{card_id}"], deleteColumn: ["DELETE /projects/columns/{column_id}"], get: ["GET /projects/{project_id}"], getCard: ["GET /projects/columns/cards/{card_id}"], getColumn: ["GET /projects/columns/{column_id}"], getPermissionForUser: [ "GET /projects/{project_id}/collaborators/{username}/permission" ], listCards: ["GET /projects/columns/{column_id}/cards"], listCollaborators: ["GET /projects/{project_id}/collaborators"], listColumns: ["GET /projects/{project_id}/columns"], listForOrg: ["GET /orgs/{org}/projects"], listForRepo: ["GET /repos/{owner}/{repo}/projects"], 
listForUser: ["GET /users/{username}/projects"], moveCard: ["POST /projects/columns/cards/{card_id}/moves"], moveColumn: ["POST /projects/columns/{column_id}/moves"], removeCollaborator: [ "DELETE /projects/{project_id}/collaborators/{username}" ], update: ["PATCH /projects/{project_id}"], updateCard: ["PATCH /projects/columns/cards/{card_id}"], updateColumn: ["PATCH /projects/columns/{column_id}"] }, pulls: { checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], create: ["POST /repos/{owner}/{repo}/pulls"], createReplyForReviewComment: [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" ], createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], createReviewComment: [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" ], deletePendingReview: [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ], deleteReviewComment: [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" ], dismissReview: [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" ], get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], getReview: [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ], getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], list: ["GET /repos/{owner}/{repo}/pulls"], listCommentsForReview: [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" ], listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], listRequestedReviewers: [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ], listReviewComments: [ "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" ], listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], removeRequestedReviewers: [ "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ], requestReviewers: [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" ], submitReview: [ "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" ], update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], updateBranch: [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" ], updateReview: [ "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" ], updateReviewComment: [ "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" ] }, rateLimit: { get: ["GET /rate_limit"] }, reactions: { createForCommitComment: [ "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" ], createForIssue: [ "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" ], createForIssueComment: [ "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" ], createForPullRequestReviewComment: [ "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" ], createForRelease: [ "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" ], createForTeamDiscussionCommentInOrg: [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" ], createForTeamDiscussionInOrg: [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" ], deleteForCommitComment: [ "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" ], deleteForIssue: [ "DELETE 
/repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" ], deleteForIssueComment: [ "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" ], deleteForPullRequestComment: [ "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" ], deleteForRelease: [ "DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" ], deleteForTeamDiscussion: [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" ], deleteForTeamDiscussionComment: [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" ], listForCommitComment: [ "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" ], listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], listForIssueComment: [ "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" ], listForPullRequestReviewComment: [ "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" ], listForRelease: [ "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" ], listForTeamDiscussionCommentInOrg: [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" ], listForTeamDiscussionInOrg: [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" ] }, repos: { acceptInvitation: [ "PATCH /user/repository_invitations/{invitation_id}", {}, { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } ], acceptInvitationForAuthenticatedUser: [ "PATCH /user/repository_invitations/{invitation_id}" ], addAppAccessRestrictions: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { mapToData: "apps" } ], addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], addStatusCheckContexts: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { mapToData: "contexts" } ], addTeamAccessRestrictions: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { mapToData: "teams" } ], addUserAccessRestrictions: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { mapToData: "users" } ], cancelPagesDeployment: [ "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel" ], checkAutomatedSecurityFixes: [ "GET /repos/{owner}/{repo}/automated-security-fixes" ], checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], checkVulnerabilityAlerts: [ "GET /repos/{owner}/{repo}/vulnerability-alerts" ], codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], compareCommitsWithBasehead: [ "GET /repos/{owner}/{repo}/compare/{basehead}" ], createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], createCommitComment: [ "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" ], createCommitSignatureProtection: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" ], createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], createDeployKey: ["POST /repos/{owner}/{repo}/keys"], createDeployment: ["POST /repos/{owner}/{repo}/deployments"], createDeploymentBranchPolicy: [ "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" ], createDeploymentProtectionRule: [ "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" ], createDeploymentStatus: [ "POST 
/repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ], createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], createForAuthenticatedUser: ["POST /user/repos"], createFork: ["POST /repos/{owner}/{repo}/forks"], createInOrg: ["POST /orgs/{org}/repos"], createOrUpdateCustomPropertiesValues: [ "PATCH /repos/{owner}/{repo}/properties/values" ], createOrUpdateEnvironment: [ "PUT /repos/{owner}/{repo}/environments/{environment_name}" ], createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], createOrgRuleset: ["POST /orgs/{org}/rulesets"], createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"], createPagesSite: ["POST /repos/{owner}/{repo}/pages"], createRelease: ["POST /repos/{owner}/{repo}/releases"], createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], createUsingTemplate: [ "POST /repos/{template_owner}/{template_repo}/generate" ], createWebhook: ["POST /repos/{owner}/{repo}/hooks"], declineInvitation: [ "DELETE /user/repository_invitations/{invitation_id}", {}, { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } ], declineInvitationForAuthenticatedUser: [ "DELETE /user/repository_invitations/{invitation_id}" ], delete: ["DELETE /repos/{owner}/{repo}"], deleteAccessRestrictions: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ], deleteAdminBranchProtection: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ], deleteAnEnvironment: [ "DELETE /repos/{owner}/{repo}/environments/{environment_name}" ], deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], deleteBranchProtection: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" ], deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], deleteCommitSignatureProtection: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" ], deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], deleteDeployment: [ "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" ], deleteDeploymentBranchPolicy: [ "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" ], deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], deleteInvitation: [ "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" ], deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], deletePullRequestReviewProtection: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ], deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], deleteReleaseAsset: [ "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" ], deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], deleteTagProtection: [ "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" ], deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], disableAutomatedSecurityFixes: [ "DELETE /repos/{owner}/{repo}/automated-security-fixes" ], disableDeploymentProtectionRule: [ "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" ], disablePrivateVulnerabilityReporting: [ "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" ], disableVulnerabilityAlerts: [ "DELETE /repos/{owner}/{repo}/vulnerability-alerts" ], downloadArchive: [ "GET /repos/{owner}/{repo}/zipball/{ref}", {}, { renamed: 
["repos", "downloadZipballArchive"] } ], downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], enableAutomatedSecurityFixes: [ "PUT /repos/{owner}/{repo}/automated-security-fixes" ], enablePrivateVulnerabilityReporting: [ "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" ], enableVulnerabilityAlerts: [ "PUT /repos/{owner}/{repo}/vulnerability-alerts" ], generateReleaseNotes: [ "POST /repos/{owner}/{repo}/releases/generate-notes" ], get: ["GET /repos/{owner}/{repo}"], getAccessRestrictions: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" ], getAdminBranchProtection: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ], getAllDeploymentProtectionRules: [ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" ], getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], getAllStatusCheckContexts: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" ], getAllTopics: ["GET /repos/{owner}/{repo}/topics"], getAppsWithAccessToProtectedBranch: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" ], getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], getBranchProtection: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection" ], getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], getCollaboratorPermissionLevel: [ "GET /repos/{owner}/{repo}/collaborators/{username}/permission" ], getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], getCommitSignatureProtection: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" ], getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], getCustomDeploymentProtectionRule: [ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" ], getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], getDeploymentBranchPolicy: [ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" ], getDeploymentStatus: [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" ], getEnvironment: [ "GET /repos/{owner}/{repo}/environments/{environment_name}" ], getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], getOrgRuleset: ["GET /orgs/{org}/rulesets/{ruleset_id}"], getOrgRulesets: ["GET /orgs/{org}/rulesets"], getPages: ["GET /repos/{owner}/{repo}/pages"], getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], getPagesDeployment: 
[ "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}" ], getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], getPullRequestReviewProtection: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ], getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], getReadme: ["GET /repos/{owner}/{repo}/readme"], getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], getRepoRuleSuite: [ "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" ], getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], getStatusChecksProtection: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ], getTeamsWithAccessToProtectedBranch: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" ], getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], getUsersWithAccessToProtectedBranch: [ "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" ], getViews: ["GET /repos/{owner}/{repo}/traffic/views"], getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], getWebhookConfigForRepo: [ "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" ], getWebhookDelivery: [ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" ], listActivities: ["GET /repos/{owner}/{repo}/activity"], listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], listBranches: ["GET /repos/{owner}/{repo}/branches"], listBranchesForHeadCommit: [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" ], listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], listCommentsForCommit: [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" ], listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], listCommitStatusesForRef: [ "GET /repos/{owner}/{repo}/commits/{ref}/statuses" ], listCommits: ["GET /repos/{owner}/{repo}/commits"], listContributors: ["GET /repos/{owner}/{repo}/contributors"], listCustomDeploymentRuleIntegrations: [ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" ], listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], listDeploymentBranchPolicies: [ "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" ], listDeploymentStatuses: [ "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" ], listDeployments: ["GET /repos/{owner}/{repo}/deployments"], listForAuthenticatedUser: ["GET /user/repos"], listForOrg: ["GET /orgs/{org}/repos"], listForUser: ["GET /users/{username}/repos"], listForks: ["GET /repos/{owner}/{repo}/forks"], listInvitations: ["GET /repos/{owner}/{repo}/invitations"], listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], listLanguages: ["GET /repos/{owner}/{repo}/languages"], listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], listPublic: ["GET /repositories"], listPullRequestsAssociatedWithCommit: [ "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" ], listReleaseAssets: [ "GET 
/repos/{owner}/{repo}/releases/{release_id}/assets" ], listReleases: ["GET /repos/{owner}/{repo}/releases"], listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], listTags: ["GET /repos/{owner}/{repo}/tags"], listTeams: ["GET /repos/{owner}/{repo}/teams"], listWebhookDeliveries: [ "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" ], listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], merge: ["POST /repos/{owner}/{repo}/merges"], mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], redeliverWebhookDelivery: [ "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" ], removeAppAccessRestrictions: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { mapToData: "apps" } ], removeCollaborator: [ "DELETE /repos/{owner}/{repo}/collaborators/{username}" ], removeStatusCheckContexts: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { mapToData: "contexts" } ], removeStatusCheckProtection: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ], removeTeamAccessRestrictions: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { mapToData: "teams" } ], removeUserAccessRestrictions: [ "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { mapToData: "users" } ], renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], setAdminBranchProtection: [ "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" ], setAppAccessRestrictions: [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { mapToData: "apps" } ], setStatusCheckContexts: [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { mapToData: "contexts" } ], setTeamAccessRestrictions: [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { mapToData: "teams" } ], setUserAccessRestrictions: [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { mapToData: "users" } ], testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], transfer: ["POST /repos/{owner}/{repo}/transfer"], update: ["PATCH /repos/{owner}/{repo}"], updateBranchProtection: [ "PUT /repos/{owner}/{repo}/branches/{branch}/protection" ], updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], updateDeploymentBranchPolicy: [ "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" ], updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], updateInvitation: [ "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" ], updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], updatePullRequestReviewProtection: [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" ], updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], updateReleaseAsset: [ "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" ], updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], updateStatusCheckPotection: [ "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { renamed: ["repos", "updateStatusCheckProtection"] } ], updateStatusCheckProtection: [ 
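/* Note (added comment): the misspelled `updateStatusCheckPotection` entry just
   above is kept deliberately for backwards compatibility. Its `renamed`
   decoration makes decorate() log a warning pointing at the correctly spelled
   updateStatusCheckProtection while still performing the same PATCH request. */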
"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" ], updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], updateWebhookConfigForRepo: [ "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" ], uploadReleaseAsset: [ "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { baseUrl: "https://uploads.github.com" } ] }, search: { code: ["GET /search/code"], commits: ["GET /search/commits"], issuesAndPullRequests: ["GET /search/issues"], labels: ["GET /search/labels"], repos: ["GET /search/repositories"], topics: ["GET /search/topics"], users: ["GET /search/users"] }, secretScanning: { getAlert: [ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" ], listAlertsForEnterprise: [ "GET /enterprises/{enterprise}/secret-scanning/alerts" ], listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], listLocationsForAlert: [ "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" ], updateAlert: [ "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" ] }, securityAdvisories: { createFork: [ "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks" ], createPrivateVulnerabilityReport: [ "POST /repos/{owner}/{repo}/security-advisories/reports" ], createRepositoryAdvisory: [ "POST /repos/{owner}/{repo}/security-advisories" ], createRepositoryAdvisoryCveRequest: [ "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" ], getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], getRepositoryAdvisory: [ "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" ], listGlobalAdvisories: ["GET /advisories"], listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], updateRepositoryAdvisory: [ "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" ] }, teams: { addOrUpdateMembershipForUserInOrg: [ "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" ], addOrUpdateProjectPermissionsInOrg: [ "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" ], addOrUpdateRepoPermissionsInOrg: [ "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ], checkPermissionsForProjectInOrg: [ "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" ], checkPermissionsForRepoInOrg: [ "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ], create: ["POST /orgs/{org}/teams"], createDiscussionCommentInOrg: [ "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ], createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], deleteDiscussionCommentInOrg: [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ], deleteDiscussionInOrg: [ "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ], deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], getByName: ["GET /orgs/{org}/teams/{team_slug}"], getDiscussionCommentInOrg: [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ], getDiscussionInOrg: [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ], getMembershipForUserInOrg: [ "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" ], list: ["GET /orgs/{org}/teams"], listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], listDiscussionCommentsInOrg: [ "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" ], listDiscussionsInOrg: ["GET 
/orgs/{org}/teams/{team_slug}/discussions"], listForAuthenticatedUser: ["GET /user/teams"], listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], listPendingInvitationsInOrg: [ "GET /orgs/{org}/teams/{team_slug}/invitations" ], listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], removeMembershipForUserInOrg: [ "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" ], removeProjectInOrg: [ "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" ], removeRepoInOrg: [ "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" ], updateDiscussionCommentInOrg: [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" ], updateDiscussionInOrg: [ "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" ], updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] }, users: { addEmailForAuthenticated: [ "POST /user/emails", {}, { renamed: ["users", "addEmailForAuthenticatedUser"] } ], addEmailForAuthenticatedUser: ["POST /user/emails"], addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], block: ["PUT /user/blocks/{username}"], checkBlocked: ["GET /user/blocks/{username}"], checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], createGpgKeyForAuthenticated: [ "POST /user/gpg_keys", {}, { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } ], createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], createPublicSshKeyForAuthenticated: [ "POST /user/keys", {}, { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } ], createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], deleteEmailForAuthenticated: [ "DELETE /user/emails", {}, { renamed: ["users", "deleteEmailForAuthenticatedUser"] } ], deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], deleteGpgKeyForAuthenticated: [ "DELETE /user/gpg_keys/{gpg_key_id}", {}, { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } ], deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], deletePublicSshKeyForAuthenticated: [ "DELETE /user/keys/{key_id}", {}, { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } ], deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], deleteSshSigningKeyForAuthenticatedUser: [ "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" ], follow: ["PUT /user/following/{username}"], getAuthenticated: ["GET /user"], getByUsername: ["GET /users/{username}"], getContextForUser: ["GET /users/{username}/hovercard"], getGpgKeyForAuthenticated: [ "GET /user/gpg_keys/{gpg_key_id}", {}, { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } ], getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], getPublicSshKeyForAuthenticated: [ "GET /user/keys/{key_id}", {}, { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } ], getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], getSshSigningKeyForAuthenticatedUser: [ "GET /user/ssh_signing_keys/{ssh_signing_key_id}" ], list: ["GET /users"], listBlockedByAuthenticated: [ "GET /user/blocks", {}, { renamed: ["users", "listBlockedByAuthenticatedUser"] } ], listBlockedByAuthenticatedUser: ["GET /user/blocks"], listEmailsForAuthenticated: [ "GET /user/emails", {}, { renamed: ["users", 
"listEmailsForAuthenticatedUser"] } ], listEmailsForAuthenticatedUser: ["GET /user/emails"], listFollowedByAuthenticated: [ "GET /user/following", {}, { renamed: ["users", "listFollowedByAuthenticatedUser"] } ], listFollowedByAuthenticatedUser: ["GET /user/following"], listFollowersForAuthenticatedUser: ["GET /user/followers"], listFollowersForUser: ["GET /users/{username}/followers"], listFollowingForUser: ["GET /users/{username}/following"], listGpgKeysForAuthenticated: [ "GET /user/gpg_keys", {}, { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } ], listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], listPublicEmailsForAuthenticated: [ "GET /user/public_emails", {}, { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } ], listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], listPublicKeysForUser: ["GET /users/{username}/keys"], listPublicSshKeysForAuthenticated: [ "GET /user/keys", {}, { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } ], listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], setPrimaryEmailVisibilityForAuthenticated: [ "PATCH /user/email/visibility", {}, { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } ], setPrimaryEmailVisibilityForAuthenticatedUser: [ "PATCH /user/email/visibility" ], unblock: ["DELETE /user/blocks/{username}"], unfollow: ["DELETE /user/following/{username}"], updateAuthenticated: ["PATCH /user"] } }; var endpoints_default = Endpoints; // pkg/dist-src/endpoints-to-methods.js var endpointMethodsMap = /* @__PURE__ */ new Map(); for (const [scope, endpoints] of Object.entries(endpoints_default)) { for (const [methodName, endpoint] of Object.entries(endpoints)) { const [route, defaults, decorations] = endpoint; const [method, url] = route.split(/ /); const endpointDefaults = Object.assign( { method, url }, defaults ); if (!endpointMethodsMap.has(scope)) { endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); } endpointMethodsMap.get(scope).set(methodName, { scope, methodName, endpointDefaults, decorations }); } } var handler = { has({ scope }, methodName) { return endpointMethodsMap.get(scope).has(methodName); }, getOwnPropertyDescriptor(target, methodName) { return { value: this.get(target, methodName), // ensures method is in the cache configurable: true, writable: true, enumerable: true }; }, defineProperty(target, methodName, descriptor) { Object.defineProperty(target.cache, methodName, descriptor); return true; }, deleteProperty(target, methodName) { delete target.cache[methodName]; return true; }, ownKeys({ scope }) { return [...endpointMethodsMap.get(scope).keys()]; }, set(target, methodName, value) { return target.cache[methodName] = value; }, get({ octokit, scope, cache }, methodName) { if (cache[methodName]) { return cache[methodName]; } const method = endpointMethodsMap.get(scope).get(methodName); if (!method) { return void 0; } const { endpointDefaults, decorations } = method; if (decorations) { cache[methodName] = decorate( octokit, scope, methodName, endpointDefaults, decorations ); } else { cache[methodName] = octokit.request.defaults(endpointDefaults); } return cache[methodName]; } }; function endpointsToMethods(octokit) { 
const newMethods = {}; for (const scope of endpointMethodsMap.keys()) { newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); } return newMethods; } function decorate(octokit, scope, methodName, defaults, decorations) { const requestWithDefaults = octokit.request.defaults(defaults); function withDecorations(...args) { let options = requestWithDefaults.endpoint.merge(...args); if (decorations.mapToData) { options = Object.assign({}, options, { data: options[decorations.mapToData], [decorations.mapToData]: void 0 }); return requestWithDefaults(options); } if (decorations.renamed) { const [newScope, newMethodName] = decorations.renamed; octokit.log.warn( `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` ); } if (decorations.deprecated) { octokit.log.warn(decorations.deprecated); } if (decorations.renamedParameters) { const options2 = requestWithDefaults.endpoint.merge(...args); for (const [name, alias] of Object.entries( decorations.renamedParameters )) { if (name in options2) { octokit.log.warn( `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead` ); if (!(alias in options2)) { options2[alias] = options2[name]; } delete options2[name]; } } return requestWithDefaults(options2); } return requestWithDefaults(...args); } return Object.assign(withDecorations, requestWithDefaults); } // pkg/dist-src/index.js function restEndpointMethods(octokit) { const api = endpointsToMethods(octokit); return { rest: api }; } restEndpointMethods.VERSION = VERSION; function legacyRestEndpointMethods(octokit) { const api = endpointsToMethods(octokit); return { ...api, rest: api }; } legacyRestEndpointMethods.VERSION = VERSION; // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 93708: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var __create = Object.create; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __getProtoOf = Object.getPrototypeOf; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( // If the importer is in node compatibility mode or this is not an ESM // file that has been converted to a CommonJS file using a Babel- // compatible transform (i.e. "__esModule" has not been set), then set // "default" to the CommonJS "module.exports" for node compatibility. isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, mod )); var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { RequestError: () => RequestError }); module.exports = __toCommonJS(dist_src_exports); var import_deprecation = __nccwpck_require__(14150); var import_once = __toESM(__nccwpck_require__(55560)); var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); var RequestError = class extends Error { constructor(message, statusCode, options) { super(message); if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } this.name = "HttpError"; this.status = statusCode; let headers; if ("headers" in options && typeof options.headers !== "undefined") { headers = options.headers; } if ("response" in options) { this.response = options.response; headers = options.response.headers; } const requestCopy = Object.assign({}, options.request); if (options.request.headers.authorization) { requestCopy.headers = Object.assign({}, options.request.headers, { authorization: options.request.headers.authorization.replace( /(? { "use strict"; var __defProp = Object.defineProperty; var __getOwnPropDesc = Object.getOwnPropertyDescriptor; var __getOwnPropNames = Object.getOwnPropertyNames; var __hasOwnProp = Object.prototype.hasOwnProperty; var __export = (target, all) => { for (var name in all) __defProp(target, name, { get: all[name], enumerable: true }); }; var __copyProps = (to, from, except, desc) => { if (from && typeof from === "object" || typeof from === "function") { for (let key of __getOwnPropNames(from)) if (!__hasOwnProp.call(to, key) && key !== except) __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); } return to; }; var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); // pkg/dist-src/index.js var dist_src_exports = {}; __export(dist_src_exports, { request: () => request }); module.exports = __toCommonJS(dist_src_exports); var import_endpoint = __nccwpck_require__(54471); var import_universal_user_agent = __nccwpck_require__(33843); // pkg/dist-src/version.js var VERSION = "8.4.1"; // pkg/dist-src/is-plain-object.js function isPlainObject(value) { if (typeof value !== "object" || value === null) return false; if (Object.prototype.toString.call(value) !== "[object Object]") return false; const proto = Object.getPrototypeOf(value); if (proto === null) return true; const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); } // pkg/dist-src/fetch-wrapper.js var import_request_error = __nccwpck_require__(93708); // pkg/dist-src/get-buffer-response.js function getBufferResponse(response) { return response.arrayBuffer(); } // pkg/dist-src/fetch-wrapper.js function fetchWrapper(requestOptions) { var _a, _b, _c, _d; const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? 
void 0 : _a.parseSuccessResponseBody) !== false; if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { requestOptions.body = JSON.stringify(requestOptions.body); } let headers = {}; let status; let url; let { fetch } = globalThis; if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { fetch = requestOptions.request.fetch; } if (!fetch) { throw new Error( "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" ); } return fetch(requestOptions.url, { method: requestOptions.method, body: requestOptions.body, redirect: (_c = requestOptions.request) == null ? void 0 : _c.redirect, headers: requestOptions.headers, signal: (_d = requestOptions.request) == null ? void 0 : _d.signal, // duplex must be set if request.body is ReadableStream or Async Iterables. // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. ...requestOptions.body && { duplex: "half" } }).then(async (response) => { url = response.url; status = response.status; for (const keyAndValue of response.headers) { headers[keyAndValue[0]] = keyAndValue[1]; } if ("deprecation" in headers) { const matches = headers.link && headers.link.match(/<([^<>]+)>; rel="deprecation"/); const deprecationLink = matches && matches.pop(); log.warn( `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` ); } if (status === 204 || status === 205) { return; } if (requestOptions.method === "HEAD") { if (status < 400) { return; } throw new import_request_error.RequestError(response.statusText, status, { response: { url, status, headers, data: void 0 }, request: requestOptions }); } if (status === 304) { throw new import_request_error.RequestError("Not modified", status, { response: { url, status, headers, data: await getResponseData(response) }, request: requestOptions }); } if (status >= 400) { const data = await getResponseData(response); const error = new import_request_error.RequestError(toErrorMessage(data), status, { response: { url, status, headers, data }, request: requestOptions }); throw error; } return parseSuccessResponseBody ? 
await getResponseData(response) : response.body; }).then((data) => { return { status, url, headers, data }; }).catch((error) => { if (error instanceof import_request_error.RequestError) throw error; else if (error.name === "AbortError") throw error; let message = error.message; if (error.name === "TypeError" && "cause" in error) { if (error.cause instanceof Error) { message = error.cause.message; } else if (typeof error.cause === "string") { message = error.cause; } } throw new import_request_error.RequestError(message, 500, { request: requestOptions }); }); } async function getResponseData(response) { const contentType = response.headers.get("content-type"); if (/application\/json/.test(contentType)) { return response.json().catch(() => response.text()).catch(() => ""); } if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { return response.text(); } return getBufferResponse(response); } function toErrorMessage(data) { if (typeof data === "string") return data; let suffix; if ("documentation_url" in data) { suffix = ` - ${data.documentation_url}`; } else { suffix = ""; } if ("message" in data) { if (Array.isArray(data.errors)) { return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; } return `${data.message}${suffix}`; } return `Unknown error: ${JSON.stringify(data)}`; } // pkg/dist-src/with-defaults.js function withDefaults(oldEndpoint, newDefaults) { const endpoint2 = oldEndpoint.defaults(newDefaults); const newApi = function(route, parameters) { const endpointOptions = endpoint2.merge(route, parameters); if (!endpointOptions.request || !endpointOptions.request.hook) { return fetchWrapper(endpoint2.parse(endpointOptions)); } const request2 = (route2, parameters2) => { return fetchWrapper( endpoint2.parse(endpoint2.merge(route2, parameters2)) ); }; Object.assign(request2, { endpoint: endpoint2, defaults: withDefaults.bind(null, endpoint2) }); return endpointOptions.request.hook(request2, endpointOptions); }; return Object.assign(newApi, { endpoint: endpoint2, defaults: withDefaults.bind(null, endpoint2) }); } // pkg/dist-src/index.js var request = withDefaults(import_endpoint.endpoint, { headers: { "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` } }); // Annotate the CommonJS export names for ESM import in node: 0 && (0); /***/ }), /***/ 37889: /***/ (function(__unused_webpack_module, exports) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ClientStreamingCall = void 0; /** * A client streaming RPC call. This means that the clients sends 0, 1, or * more messages to the server, and the server replies with exactly one * message. 
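 *
 * Illustrative shape of such a call (a hedged sketch; `client` and its
 * `upload()` method are assumptions, not defined in this bundle):
 *
 *   const call = client.upload(options);     // returns a ClientStreamingCall
 *   await call.requests.send(firstMessage);  // send 0..n request messages
 *   await call.requests.complete();          // close the request stream
 *   const { response, status } = await call; // awaiting the call awaits promiseFinished()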
*/ class ClientStreamingCall { constructor(method, requestHeaders, request, headers, response, status, trailers) { this.method = method; this.requestHeaders = requestHeaders; this.requests = request; this.headers = headers; this.response = response; this.status = status; this.trailers = trailers; } /** * Instead of awaiting the response status and trailers, you can * just as well await this call itself to receive the server outcome. * Note that it may still be valid to send more request messages. */ then(onfulfilled, onrejected) { return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } promiseFinished() { return __awaiter(this, void 0, void 0, function* () { let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); return { method: this.method, requestHeaders: this.requestHeaders, headers, response, status, trailers }; }); } } exports.ClientStreamingCall = ClientStreamingCall; /***/ }), /***/ 71409: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Deferred = exports.DeferredState = void 0; var DeferredState; (function (DeferredState) { DeferredState[DeferredState["PENDING"] = 0] = "PENDING"; DeferredState[DeferredState["REJECTED"] = 1] = "REJECTED"; DeferredState[DeferredState["RESOLVED"] = 2] = "RESOLVED"; })(DeferredState = exports.DeferredState || (exports.DeferredState = {})); /** * A deferred promise. This is a "controller" for a promise, which lets you * pass a promise around and reject or resolve it from the outside. * * Warning: This class is to be used with care. Using it can make code very * difficult to read. It is intended for use in library code that exposes * promises, not for regular business logic. */ class Deferred { /** * @param preventUnhandledRejectionWarning - prevents the warning * "Unhandled Promise rejection" by adding a noop rejection handler. * Working with calls returned from the runtime-rpc package in an * async function usually means awaiting one call property after * the other. This means that the "status" is not being awaited when * an earlier await for the "headers" is rejected. This causes the * "unhandled promise reject" warning. A more correct behaviour for * calls might be to become aware whether at least one of the * promises is handled and swallow the rejection warning for the * others. */ constructor(preventUnhandledRejectionWarning = true) { this._state = DeferredState.PENDING; this._promise = new Promise((resolve, reject) => { this._resolve = resolve; this._reject = reject; }); if (preventUnhandledRejectionWarning) { this._promise.catch(_ => { }); } } /** * Get the current state of the promise. */ get state() { return this._state; } /** * Get the deferred promise. */ get promise() { return this._promise; } /** * Resolve the promise. Throws if the promise is already resolved or rejected. */ resolve(value) { if (this.state !== DeferredState.PENDING) throw new Error(`cannot resolve ${DeferredState[this.state].toLowerCase()}`); this._resolve(value); this._state = DeferredState.RESOLVED; } /** * Reject the promise. Throws if the promise is already resolved or rejected. 
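 *
 * Lifecycle sketch (illustrative only):
 *
 *   const d = new Deferred();        // state: PENDING
 *   d.reject(new Error("boom"));     // state: REJECTED, d.promise rejects
 *   d.reject(new Error("again"));    // throws "cannot reject rejected"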
*/ reject(reason) { if (this.state !== DeferredState.PENDING) throw new Error(`cannot reject ${DeferredState[this.state].toLowerCase()}`); this._reject(reason); this._state = DeferredState.REJECTED; } /** * Resolve the promise. Ignore if not pending. */ resolvePending(val) { if (this._state === DeferredState.PENDING) this.resolve(val); } /** * Reject the promise. Ignore if not pending. */ rejectPending(reason) { if (this._state === DeferredState.PENDING) this.reject(reason); } } exports.Deferred = Deferred; /***/ }), /***/ 36826: /***/ (function(__unused_webpack_module, exports) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DuplexStreamingCall = void 0; /** * A duplex streaming RPC call. This means that the clients sends an * arbitrary amount of messages to the server, while at the same time, * the server sends an arbitrary amount of messages to the client. */ class DuplexStreamingCall { constructor(method, requestHeaders, request, headers, response, status, trailers) { this.method = method; this.requestHeaders = requestHeaders; this.requests = request; this.headers = headers; this.responses = response; this.status = status; this.trailers = trailers; } /** * Instead of awaiting the response status and trailers, you can * just as well await this call itself to receive the server outcome. * Note that it may still be valid to send more request messages. */ then(onfulfilled, onrejected) { return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } promiseFinished() { return __awaiter(this, void 0, void 0, function* () { let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); return { method: this.method, requestHeaders: this.requestHeaders, headers, status, trailers, }; }); } } exports.DuplexStreamingCall = DuplexStreamingCall; /***/ }), /***/ 44420: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Public API of the rpc runtime. 
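/* Hedged consumer-side sketch: this re-export surface matches what the
   @protobuf-ts/runtime-rpc package publishes (the package name is an
   assumption about where this bundled code originates):

     const { RpcError } = require("@protobuf-ts/runtime-rpc");
     const err = new RpcError("object not found", "NOT_FOUND", { resource: "widget/42" });
     console.log(err.name, err.code, err.meta); // "RpcError" "NOT_FOUND" { resource: "widget/42" }
*/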
// Note: we do not use `export * from ...` to help tree shakers, // webpack verbose output hints that this should be useful Object.defineProperty(exports, "__esModule", ({ value: true })); var service_type_1 = __nccwpck_require__(56892); Object.defineProperty(exports, "ServiceType", ({ enumerable: true, get: function () { return service_type_1.ServiceType; } })); var reflection_info_1 = __nccwpck_require__(62496); Object.defineProperty(exports, "readMethodOptions", ({ enumerable: true, get: function () { return reflection_info_1.readMethodOptions; } })); Object.defineProperty(exports, "readMethodOption", ({ enumerable: true, get: function () { return reflection_info_1.readMethodOption; } })); Object.defineProperty(exports, "readServiceOption", ({ enumerable: true, get: function () { return reflection_info_1.readServiceOption; } })); var rpc_error_1 = __nccwpck_require__(78636); Object.defineProperty(exports, "RpcError", ({ enumerable: true, get: function () { return rpc_error_1.RpcError; } })); var rpc_options_1 = __nccwpck_require__(28576); Object.defineProperty(exports, "mergeRpcOptions", ({ enumerable: true, get: function () { return rpc_options_1.mergeRpcOptions; } })); var rpc_output_stream_1 = __nccwpck_require__(72726); Object.defineProperty(exports, "RpcOutputStreamController", ({ enumerable: true, get: function () { return rpc_output_stream_1.RpcOutputStreamController; } })); var test_transport_1 = __nccwpck_require__(79122); Object.defineProperty(exports, "TestTransport", ({ enumerable: true, get: function () { return test_transport_1.TestTransport; } })); var deferred_1 = __nccwpck_require__(71409); Object.defineProperty(exports, "Deferred", ({ enumerable: true, get: function () { return deferred_1.Deferred; } })); Object.defineProperty(exports, "DeferredState", ({ enumerable: true, get: function () { return deferred_1.DeferredState; } })); var duplex_streaming_call_1 = __nccwpck_require__(36826); Object.defineProperty(exports, "DuplexStreamingCall", ({ enumerable: true, get: function () { return duplex_streaming_call_1.DuplexStreamingCall; } })); var client_streaming_call_1 = __nccwpck_require__(37889); Object.defineProperty(exports, "ClientStreamingCall", ({ enumerable: true, get: function () { return client_streaming_call_1.ClientStreamingCall; } })); var server_streaming_call_1 = __nccwpck_require__(46173); Object.defineProperty(exports, "ServerStreamingCall", ({ enumerable: true, get: function () { return server_streaming_call_1.ServerStreamingCall; } })); var unary_call_1 = __nccwpck_require__(29288); Object.defineProperty(exports, "UnaryCall", ({ enumerable: true, get: function () { return unary_call_1.UnaryCall; } })); var rpc_interceptor_1 = __nccwpck_require__(52849); Object.defineProperty(exports, "stackIntercept", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackIntercept; } })); Object.defineProperty(exports, "stackDuplexStreamingInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackDuplexStreamingInterceptors; } })); Object.defineProperty(exports, "stackClientStreamingInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackClientStreamingInterceptors; } })); Object.defineProperty(exports, "stackServerStreamingInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackServerStreamingInterceptors; } })); Object.defineProperty(exports, "stackUnaryInterceptors", ({ enumerable: true, get: function () { return rpc_interceptor_1.stackUnaryInterceptors; } })); var 
server_call_context_1 = __nccwpck_require__(43352); Object.defineProperty(exports, "ServerCallContextController", ({ enumerable: true, get: function () { return server_call_context_1.ServerCallContextController; } })); /***/ }), /***/ 62496: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.readServiceOption = exports.readMethodOption = exports.readMethodOptions = exports.normalizeMethodInfo = void 0; const runtime_1 = __nccwpck_require__(68886); /** * Turns PartialMethodInfo into MethodInfo. */ function normalizeMethodInfo(method, service) { var _a, _b, _c; let m = method; m.service = service; m.localName = (_a = m.localName) !== null && _a !== void 0 ? _a : runtime_1.lowerCamelCase(m.name); // noinspection PointlessBooleanExpressionJS m.serverStreaming = !!m.serverStreaming; // noinspection PointlessBooleanExpressionJS m.clientStreaming = !!m.clientStreaming; m.options = (_b = m.options) !== null && _b !== void 0 ? _b : {}; m.idempotency = (_c = m.idempotency) !== null && _c !== void 0 ? _c : undefined; return m; } exports.normalizeMethodInfo = normalizeMethodInfo; /** * Read custom method options from a generated service client. * * @deprecated use readMethodOption() */ function readMethodOptions(service, methodName, extensionName, extensionType) { var _a; const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : undefined; } exports.readMethodOptions = readMethodOptions; function readMethodOption(service, methodName, extensionName, extensionType) { var _a; const options = (_a = service.methods.find((m, i) => m.localName === methodName || i === methodName)) === null || _a === void 0 ? void 0 : _a.options; if (!options) { return undefined; } const optionVal = options[extensionName]; if (optionVal === undefined) { return optionVal; } return extensionType ? extensionType.fromJson(optionVal) : optionVal; } exports.readMethodOption = readMethodOption; function readServiceOption(service, extensionName, extensionType) { const options = service.options; if (!options) { return undefined; } const optionVal = options[extensionName]; if (optionVal === undefined) { return optionVal; } return extensionType ? extensionType.fromJson(optionVal) : optionVal; } exports.readServiceOption = readServiceOption; /***/ }), /***/ 78636: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.RpcError = void 0; /** * An error that occurred while calling a RPC method. */ class RpcError extends Error { constructor(message, code = 'UNKNOWN', meta) { super(message); this.name = 'RpcError'; // see https://www.typescriptlang.org/docs/handbook/release-notes/typescript-2-2.html#example Object.setPrototypeOf(this, new.target.prototype); this.code = code; this.meta = meta !== null && meta !== void 0 ? 
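// Added sketch (illustrative): a transport, server handler or interceptor would signal
// failure with an RpcError carrying a gRPC-style status code and optional metadata;
// the code string and meta key below are assumptions, nothing here prescribes them:
//   throw new RpcError("user not found", "NOT_FOUND", { requestId: "abc-123" });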
meta : {}; } toString() { const l = [this.name + ': ' + this.message]; if (this.code) { l.push(''); l.push('Code: ' + this.code); } if (this.serviceName && this.methodName) { l.push('Method: ' + this.serviceName + '/' + this.methodName); } let m = Object.entries(this.meta); if (m.length) { l.push(''); l.push('Meta:'); for (let [k, v] of m) { l.push(` ${k}: ${v}`); } } return l.join('\n'); } } exports.RpcError = RpcError; /***/ }), /***/ 52849: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.stackDuplexStreamingInterceptors = exports.stackClientStreamingInterceptors = exports.stackServerStreamingInterceptors = exports.stackUnaryInterceptors = exports.stackIntercept = void 0; const runtime_1 = __nccwpck_require__(68886); /** * Creates a "stack" of of all interceptors specified in the given `RpcOptions`. * Used by generated client implementations. * @internal */ function stackIntercept(kind, transport, method, options, input) { var _a, _b, _c, _d; if (kind == "unary") { let tail = (mtd, inp, opt) => transport.unary(mtd, inp, opt); for (const curr of ((_a = options.interceptors) !== null && _a !== void 0 ? _a : []).filter(i => i.interceptUnary).reverse()) { const next = tail; tail = (mtd, inp, opt) => curr.interceptUnary(next, mtd, inp, opt); } return tail(method, input, options); } if (kind == "serverStreaming") { let tail = (mtd, inp, opt) => transport.serverStreaming(mtd, inp, opt); for (const curr of ((_b = options.interceptors) !== null && _b !== void 0 ? _b : []).filter(i => i.interceptServerStreaming).reverse()) { const next = tail; tail = (mtd, inp, opt) => curr.interceptServerStreaming(next, mtd, inp, opt); } return tail(method, input, options); } if (kind == "clientStreaming") { let tail = (mtd, opt) => transport.clientStreaming(mtd, opt); for (const curr of ((_c = options.interceptors) !== null && _c !== void 0 ? _c : []).filter(i => i.interceptClientStreaming).reverse()) { const next = tail; tail = (mtd, opt) => curr.interceptClientStreaming(next, mtd, opt); } return tail(method, options); } if (kind == "duplex") { let tail = (mtd, opt) => transport.duplex(mtd, opt); for (const curr of ((_d = options.interceptors) !== null && _d !== void 0 ? 
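// Added sketch (illustrative): an interceptor as consumed by stackIntercept() above is
// a plain object whose interceptUnary / interceptServerStreaming / ... method wraps `next`;
// the logging behaviour below is made up:
//   const logging = {
//     interceptUnary(next, method, input, options) {
//       console.log("calling " + method.name);
//       return next(method, input, options);
//     }
//   };
//   // passed to a call via RpcOptions, e.g. { interceptors: [logging] }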
_d : []).filter(i => i.interceptDuplex).reverse()) { const next = tail; tail = (mtd, opt) => curr.interceptDuplex(next, mtd, opt); } return tail(method, options); } runtime_1.assertNever(kind); } exports.stackIntercept = stackIntercept; /** * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ function stackUnaryInterceptors(transport, method, input, options) { return stackIntercept("unary", transport, method, options, input); } exports.stackUnaryInterceptors = stackUnaryInterceptors; /** * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ function stackServerStreamingInterceptors(transport, method, input, options) { return stackIntercept("serverStreaming", transport, method, options, input); } exports.stackServerStreamingInterceptors = stackServerStreamingInterceptors; /** * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ function stackClientStreamingInterceptors(transport, method, options) { return stackIntercept("clientStreaming", transport, method, options); } exports.stackClientStreamingInterceptors = stackClientStreamingInterceptors; /** * @deprecated replaced by `stackIntercept()`, still here to support older generated code */ function stackDuplexStreamingInterceptors(transport, method, options) { return stackIntercept("duplex", transport, method, options); } exports.stackDuplexStreamingInterceptors = stackDuplexStreamingInterceptors; /***/ }), /***/ 28576: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.mergeRpcOptions = void 0; const runtime_1 = __nccwpck_require__(68886); /** * Merges custom RPC options with defaults. Returns a new instance and keeps * the "defaults" and the "options" unmodified. * * Merges `RpcMetadata` "meta", overwriting values from "defaults" with * values from "options". Does not append values to existing entries. * * Merges "jsonOptions", including "jsonOptions.typeRegistry", by creating * a new array that contains types from "options.jsonOptions.typeRegistry" * first, then types from "defaults.jsonOptions.typeRegistry". * * Merges "binaryOptions". * * Merges "interceptors" by creating a new array that contains interceptors * from "defaults" first, then interceptors from "options". * * Works with objects that extend `RpcOptions`, but only if the added * properties are of type Date, primitive like string, boolean, or Array * of primitives. If you have other property types, you have to merge them * yourself. */ function mergeRpcOptions(defaults, options) { if (!options) return defaults; let o = {}; copy(defaults, o); copy(options, o); for (let key of Object.keys(options)) { let val = options[key]; switch (key) { case "jsonOptions": o.jsonOptions = runtime_1.mergeJsonOptions(defaults.jsonOptions, o.jsonOptions); break; case "binaryOptions": o.binaryOptions = runtime_1.mergeBinaryOptions(defaults.binaryOptions, o.binaryOptions); break; case "meta": o.meta = {}; copy(defaults.meta, o.meta); copy(options.meta, o.meta); break; case "interceptors": o.interceptors = defaults.interceptors ? 
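// Added sketch (illustrative): per the contract documented above, "meta" entries from
// `options` overwrite those from `defaults`, and arrays are copied; values are made up:
//   const merged = mergeRpcOptions(
//     { meta: { traceId: "1" }, interceptors: [logging] },
//     { meta: { traceId: "2" }, timeout: 5000 }
//   );
//   // merged.meta.traceId === "2"; merged.interceptors is a fresh copy of [logging];
//   // merged.timeout === 5000; `defaults` and `options` are left untouched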
defaults.interceptors.concat(val) : val.concat(); break; } } return o; } exports.mergeRpcOptions = mergeRpcOptions; function copy(a, into) { if (!a) return; let c = into; for (let [k, v] of Object.entries(a)) { if (v instanceof Date) c[k] = new Date(v.getTime()); else if (Array.isArray(v)) c[k] = v.concat(); else c[k] = v; } } /***/ }), /***/ 72726: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.RpcOutputStreamController = void 0; const deferred_1 = __nccwpck_require__(71409); const runtime_1 = __nccwpck_require__(68886); /** * A `RpcOutputStream` that you control. */ class RpcOutputStreamController { constructor() { this._lis = { nxt: [], msg: [], err: [], cmp: [], }; this._closed = false; } // --- RpcOutputStream callback API onNext(callback) { return this.addLis(callback, this._lis.nxt); } onMessage(callback) { return this.addLis(callback, this._lis.msg); } onError(callback) { return this.addLis(callback, this._lis.err); } onComplete(callback) { return this.addLis(callback, this._lis.cmp); } addLis(callback, list) { list.push(callback); return () => { let i = list.indexOf(callback); if (i >= 0) list.splice(i, 1); }; } // remove all listeners clearLis() { for (let l of Object.values(this._lis)) l.splice(0, l.length); } // --- Controller API /** * Is this stream already closed by a completion or error? */ get closed() { return this._closed !== false; } /** * Emit message, close with error, or close successfully, but only one * at a time. * Can be used to wrap a stream by using the other stream's `onNext`. */ notifyNext(message, error, complete) { runtime_1.assert((message ? 1 : 0) + (error ? 1 : 0) + (complete ? 1 : 0) <= 1, 'only one emission at a time'); if (message) this.notifyMessage(message); if (error) this.notifyError(error); if (complete) this.notifyComplete(); } /** * Emits a new message. Throws if stream is closed. * * Triggers onNext and onMessage callbacks. */ notifyMessage(message) { runtime_1.assert(!this.closed, 'stream is closed'); this.pushIt({ value: message, done: false }); this._lis.msg.forEach(l => l(message)); this._lis.nxt.forEach(l => l(message, undefined, false)); } /** * Closes the stream with an error. Throws if stream is closed. * * Triggers onNext and onError callbacks. */ notifyError(error) { runtime_1.assert(!this.closed, 'stream is closed'); this._closed = error; this.pushIt(error); this._lis.err.forEach(l => l(error)); this._lis.nxt.forEach(l => l(undefined, error, false)); this.clearLis(); } /** * Closes the stream successfully. Throws if stream is closed. * * Triggers onNext and onComplete callbacks. */ notifyComplete() { runtime_1.assert(!this.closed, 'stream is closed'); this._closed = true; this.pushIt({ value: null, done: true }); this._lis.cmp.forEach(l => l()); this._lis.nxt.forEach(l => l(undefined, undefined, true)); this.clearLis(); } /** * Creates an async iterator (that can be used with `for await {...}`) * to consume the stream. * * Some things to note: * - If an error occurs, the `for await` will throw it. * - If an error occurred before the `for await` was started, `for await` * will re-throw it. * - If the stream is already complete, the `for await` will be empty. * - If your `for await` consumes slower than the stream produces, * for example because you are relaying messages in a slow operation, * messages are queued. 
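 *
 * Added sketch (illustrative, not part of the original docs) of the consumer and
 * producer sides; the message shape is arbitrary:
 *
 *   const ctrl = new RpcOutputStreamController();
 *   (async () => {                        // consumer: start iterating first
 *     for await (const msg of ctrl) {
 *       console.log(msg.text);
 *     }
 *   })();
 *   ctrl.notifyMessage({ text: "hi" });   // producer: emit, then close
 *   ctrl.notifyComplete();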
*/ [Symbol.asyncIterator]() { // init the iterator state, enabling pushIt() if (!this._itState) { this._itState = { q: [] }; } // if we are closed, we are definitely not receiving any more messages. // but we can't let the iterator get stuck. we want to either: // a) finish the new iterator immediately, because we are completed // b) reject the new iterator, because we errored if (this._closed === true) this.pushIt({ value: null, done: true }); else if (this._closed !== false) this.pushIt(this._closed); // the async iterator return { next: () => { let state = this._itState; runtime_1.assert(state, "bad state"); // if we don't have a state here, code is broken // there should be no pending result. // did the consumer call next() before we resolved our previous result promise? runtime_1.assert(!state.p, "iterator contract broken"); // did we produce faster than the iterator consumed? // return the oldest result from the queue. let first = state.q.shift(); if (first) return ("value" in first) ? Promise.resolve(first) : Promise.reject(first); // we have no result ATM, but we promise one. // as soon as we have a result, we must resolve promise. state.p = new deferred_1.Deferred(); return state.p.promise; }, }; } // "push" a new iterator result. // this either resolves a pending promise, or enqueues the result. pushIt(result) { let state = this._itState; if (!state) return; // is the consumer waiting for us? if (state.p) { // yes, consumer is waiting for this promise. const p = state.p; runtime_1.assert(p.state == deferred_1.DeferredState.PENDING, "iterator contract broken"); // resolve the promise ("value" in result) ? p.resolve(result) : p.reject(result); // must cleanup, otherwise iterator.next() would pick it up again. delete state.p; } else { // we are producing faster than the iterator consumes. // push result onto queue. state.q.push(result); } } } exports.RpcOutputStreamController = RpcOutputStreamController; /***/ }), /***/ 43352: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ServerCallContextController = void 0; class ServerCallContextController { constructor(method, headers, deadline, sendResponseHeadersFn, defaultStatus = { code: 'OK', detail: '' }) { this._cancelled = false; this._listeners = []; this.method = method; this.headers = headers; this.deadline = deadline; this.trailers = {}; this._sendRH = sendResponseHeadersFn; this.status = defaultStatus; } /** * Set the call cancelled. * * Invokes all callbacks registered with onCancel() and * sets `cancelled = true`. */ notifyCancelled() { if (!this._cancelled) { this._cancelled = true; for (let l of this._listeners) { l(); } } } /** * Send response headers. */ sendResponseHeaders(data) { this._sendRH(data); } /** * Is the call cancelled? * * When the client closes the connection before the server * is done, the call is cancelled. * * If you want to cancel a request on the server, throw a * RpcError with the CANCELLED status code. */ get cancelled() { return this._cancelled; } /** * Add a callback for cancellation. 
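 *
 * Added sketch (illustrative): inside a server method implementation, a long-running
 * operation can register for cancellation; `watchJob` is hypothetical:
 *
 *   const remove = context.onCancel(() => watchJob.abort());
 *   // ... do the work ...
 *   remove();   // detach the listener again when done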
*/ onCancel(callback) { const l = this._listeners; l.push(callback); return () => { let i = l.indexOf(callback); if (i >= 0) l.splice(i, 1); }; } } exports.ServerCallContextController = ServerCallContextController; /***/ }), /***/ 46173: /***/ (function(__unused_webpack_module, exports) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ServerStreamingCall = void 0; /** * A server streaming RPC call. The client provides exactly one input message * but the server may respond with 0, 1, or more messages. */ class ServerStreamingCall { constructor(method, requestHeaders, request, headers, response, status, trailers) { this.method = method; this.requestHeaders = requestHeaders; this.request = request; this.headers = headers; this.responses = response; this.status = status; this.trailers = trailers; } /** * Instead of awaiting the response status and trailers, you can * just as well await this call itself to receive the server outcome. * You should first setup some listeners to the `request` to * see the actual messages the server replied with. */ then(onfulfilled, onrejected) { return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } promiseFinished() { return __awaiter(this, void 0, void 0, function* () { let [headers, status, trailers] = yield Promise.all([this.headers, this.status, this.trailers]); return { method: this.method, requestHeaders: this.requestHeaders, request: this.request, headers, status, trailers, }; }); } } exports.ServerStreamingCall = ServerStreamingCall; /***/ }), /***/ 56892: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ServiceType = void 0; const reflection_info_1 = __nccwpck_require__(62496); class ServiceType { constructor(typeName, methods, options) { this.typeName = typeName; this.methods = methods.map(i => reflection_info_1.normalizeMethodInfo(i, this)); this.options = options !== null && options !== void 0 ? options : {}; } } exports.ServiceType = ServiceType; /***/ }), /***/ 79122: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
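// Added sketch (illustrative): consuming the ServerStreamingCall defined above --
// attach listeners to `responses` before awaiting the call itself; the generated
// client method is hypothetical:
//   const call = client.listItems({ filter: "*" });
//   call.responses.onMessage(item => console.log(item));
//   const { status, trailers } = await call;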
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.TestTransport = void 0; const rpc_error_1 = __nccwpck_require__(78636); const runtime_1 = __nccwpck_require__(68886); const rpc_output_stream_1 = __nccwpck_require__(72726); const rpc_options_1 = __nccwpck_require__(28576); const unary_call_1 = __nccwpck_require__(29288); const server_streaming_call_1 = __nccwpck_require__(46173); const client_streaming_call_1 = __nccwpck_require__(37889); const duplex_streaming_call_1 = __nccwpck_require__(36826); /** * Transport for testing. */ class TestTransport { /** * Initialize with mock data. Omitted fields have default value. */ constructor(data) { /** * Suppress warning / error about uncaught rejections of * "status" and "trailers". */ this.suppressUncaughtRejections = true; this.headerDelay = 10; this.responseDelay = 50; this.betweenResponseDelay = 10; this.afterResponseDelay = 10; this.data = data !== null && data !== void 0 ? data : {}; } /** * Sent message(s) during the last operation. */ get sentMessages() { if (this.lastInput instanceof TestInputStream) { return this.lastInput.sent; } else if (typeof this.lastInput == "object") { return [this.lastInput.single]; } return []; } /** * Sending message(s) completed? */ get sendComplete() { if (this.lastInput instanceof TestInputStream) { return this.lastInput.completed; } else if (typeof this.lastInput == "object") { return true; } return false; } // Creates a promise for response headers from the mock data. promiseHeaders() { var _a; const headers = (_a = this.data.headers) !== null && _a !== void 0 ? _a : TestTransport.defaultHeaders; return headers instanceof rpc_error_1.RpcError ? Promise.reject(headers) : Promise.resolve(headers); } // Creates a promise for a single, valid, message from the mock data. promiseSingleResponse(method) { if (this.data.response instanceof rpc_error_1.RpcError) { return Promise.reject(this.data.response); } let r; if (Array.isArray(this.data.response)) { runtime_1.assert(this.data.response.length > 0); r = this.data.response[0]; } else if (this.data.response !== undefined) { r = this.data.response; } else { r = method.O.create(); } runtime_1.assert(method.O.is(r)); return Promise.resolve(r); } /** * Pushes response messages from the mock data to the output stream. * If an error response, status or trailers are mocked, the stream is * closed with the respective error. * Otherwise, stream is completed successfully. * * The returned promise resolves when the stream is closed. It should * not reject. If it does, code is broken. */ streamResponses(method, stream, abort) { return __awaiter(this, void 0, void 0, function* () { // normalize "data.response" into an array of valid output messages const messages = []; if (this.data.response === undefined) { messages.push(method.O.create()); } else if (Array.isArray(this.data.response)) { for (let msg of this.data.response) { runtime_1.assert(method.O.is(msg)); messages.push(msg); } } else if (!(this.data.response instanceof rpc_error_1.RpcError)) { runtime_1.assert(method.O.is(this.data.response)); messages.push(this.data.response); } // start the stream with an initial delay. // if the request is cancelled, notify() error and exit. 
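// Added sketch (illustrative): TestTransport is meant to be handed to a generated
// client in unit tests; the client class and message shapes below are assumptions:
//   const transport = new TestTransport({ response: { id: "1" } });
//   const client = new ExampleServiceClient(transport);
//   const { response } = await client.getExample({});
//   // transport.sentMessages now lists the request message(s) the client sent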
try { yield delay(this.responseDelay, abort)(undefined); } catch (error) { stream.notifyError(error); return; } // if error response was mocked, notify() error (stream is now closed with error) and exit. if (this.data.response instanceof rpc_error_1.RpcError) { stream.notifyError(this.data.response); return; } // regular response messages were mocked. notify() them. for (let msg of messages) { stream.notifyMessage(msg); // add a short delay between responses // if the request is cancelled, notify() error and exit. try { yield delay(this.betweenResponseDelay, abort)(undefined); } catch (error) { stream.notifyError(error); return; } } // error status was mocked, notify() error (stream is now closed with error) and exit. if (this.data.status instanceof rpc_error_1.RpcError) { stream.notifyError(this.data.status); return; } // error trailers were mocked, notify() error (stream is now closed with error) and exit. if (this.data.trailers instanceof rpc_error_1.RpcError) { stream.notifyError(this.data.trailers); return; } // stream completed successfully stream.notifyComplete(); }); } // Creates a promise for response status from the mock data. promiseStatus() { var _a; const status = (_a = this.data.status) !== null && _a !== void 0 ? _a : TestTransport.defaultStatus; return status instanceof rpc_error_1.RpcError ? Promise.reject(status) : Promise.resolve(status); } // Creates a promise for response trailers from the mock data. promiseTrailers() { var _a; const trailers = (_a = this.data.trailers) !== null && _a !== void 0 ? _a : TestTransport.defaultTrailers; return trailers instanceof rpc_error_1.RpcError ? Promise.reject(trailers) : Promise.resolve(trailers); } maybeSuppressUncaught(...promise) { if (this.suppressUncaughtRejections) { for (let p of promise) { p.catch(() => { }); } } } mergeOptions(options) { return rpc_options_1.mergeRpcOptions({}, options); } unary(method, input, options) { var _a; const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders() .then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise .catch(_ => { }) .then(delay(this.responseDelay, options.abort)) .then(_ => this.promiseSingleResponse(method)), statusPromise = responsePromise .catch(_ => { }) .then(delay(this.afterResponseDelay, options.abort)) .then(_ => this.promiseStatus()), trailersPromise = responsePromise .catch(_ => { }) .then(delay(this.afterResponseDelay, options.abort)) .then(_ => this.promiseTrailers()); this.maybeSuppressUncaught(statusPromise, trailersPromise); this.lastInput = { single: input }; return new unary_call_1.UnaryCall(method, requestHeaders, input, headersPromise, responsePromise, statusPromise, trailersPromise); } serverStreaming(method, input, options) { var _a; const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? 
_a : {}, headersPromise = this.promiseHeaders() .then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise .then(delay(this.responseDelay, options.abort)) .catch(() => { }) .then(() => this.streamResponses(method, outputStream, options.abort)) .then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise .then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise .then(() => this.promiseTrailers()); this.maybeSuppressUncaught(statusPromise, trailersPromise); this.lastInput = { single: input }; return new server_streaming_call_1.ServerStreamingCall(method, requestHeaders, input, headersPromise, outputStream, statusPromise, trailersPromise); } clientStreaming(method, options) { var _a; const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders() .then(delay(this.headerDelay, options.abort)), responsePromise = headersPromise .catch(_ => { }) .then(delay(this.responseDelay, options.abort)) .then(_ => this.promiseSingleResponse(method)), statusPromise = responsePromise .catch(_ => { }) .then(delay(this.afterResponseDelay, options.abort)) .then(_ => this.promiseStatus()), trailersPromise = responsePromise .catch(_ => { }) .then(delay(this.afterResponseDelay, options.abort)) .then(_ => this.promiseTrailers()); this.maybeSuppressUncaught(statusPromise, trailersPromise); this.lastInput = new TestInputStream(this.data, options.abort); return new client_streaming_call_1.ClientStreamingCall(method, requestHeaders, this.lastInput, headersPromise, responsePromise, statusPromise, trailersPromise); } duplex(method, options) { var _a; const requestHeaders = (_a = options.meta) !== null && _a !== void 0 ? _a : {}, headersPromise = this.promiseHeaders() .then(delay(this.headerDelay, options.abort)), outputStream = new rpc_output_stream_1.RpcOutputStreamController(), responseStreamClosedPromise = headersPromise .then(delay(this.responseDelay, options.abort)) .catch(() => { }) .then(() => this.streamResponses(method, outputStream, options.abort)) .then(delay(this.afterResponseDelay, options.abort)), statusPromise = responseStreamClosedPromise .then(() => this.promiseStatus()), trailersPromise = responseStreamClosedPromise .then(() => this.promiseTrailers()); this.maybeSuppressUncaught(statusPromise, trailersPromise); this.lastInput = new TestInputStream(this.data, options.abort); return new duplex_streaming_call_1.DuplexStreamingCall(method, requestHeaders, this.lastInput, headersPromise, outputStream, statusPromise, trailersPromise); } } exports.TestTransport = TestTransport; TestTransport.defaultHeaders = { responseHeader: "test" }; TestTransport.defaultStatus = { code: "OK", detail: "all good" }; TestTransport.defaultTrailers = { responseTrailer: "test" }; function delay(ms, abort) { return (v) => new Promise((resolve, reject) => { if (abort === null || abort === void 0 ? 
void 0 : abort.aborted) { reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); } else { const id = setTimeout(() => resolve(v), ms); if (abort) { abort.addEventListener("abort", ev => { clearTimeout(id); reject(new rpc_error_1.RpcError("user cancel", "CANCELLED")); }); } } }); } class TestInputStream { constructor(data, abort) { this._completed = false; this._sent = []; this.data = data; this.abort = abort; } get sent() { return this._sent; } get completed() { return this._completed; } send(message) { if (this.data.inputMessage instanceof rpc_error_1.RpcError) { return Promise.reject(this.data.inputMessage); } const delayMs = this.data.inputMessage === undefined ? 10 : this.data.inputMessage; return Promise.resolve(undefined) .then(() => { this._sent.push(message); }) .then(delay(delayMs, this.abort)); } complete() { if (this.data.inputComplete instanceof rpc_error_1.RpcError) { return Promise.reject(this.data.inputComplete); } const delayMs = this.data.inputComplete === undefined ? 10 : this.data.inputComplete; return Promise.resolve(undefined) .then(() => { this._completed = true; }) .then(delay(delayMs, this.abort)); } } /***/ }), /***/ 29288: /***/ (function(__unused_webpack_module, exports) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.UnaryCall = void 0; /** * A unary RPC call. Unary means there is exactly one input message and * exactly one output message unless an error occurred. */ class UnaryCall { constructor(method, requestHeaders, request, headers, response, status, trailers) { this.method = method; this.requestHeaders = requestHeaders; this.request = request; this.headers = headers; this.response = response; this.status = status; this.trailers = trailers; } /** * If you are only interested in the final outcome of this call, * you can await it to receive a `FinishedUnaryCall`. */ then(onfulfilled, onrejected) { return this.promiseFinished().then(value => onfulfilled ? Promise.resolve(onfulfilled(value)) : value, reason => onrejected ? 
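// Added sketch (illustrative): awaiting the UnaryCall defined here yields the finished
// call with headers, response, status and trailers; the generated method is hypothetical:
//   const call = client.getUser({ id: 1 });
//   call.headers.then(h => console.log(h));   // the individual promises stay available
//   const { response, status } = await call;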
Promise.resolve(onrejected(reason)) : Promise.reject(reason)); } promiseFinished() { return __awaiter(this, void 0, void 0, function* () { let [headers, response, status, trailers] = yield Promise.all([this.headers, this.response, this.status, this.trailers]); return { method: this.method, requestHeaders: this.requestHeaders, request: this.request, headers, response, status, trailers }; }); } } exports.UnaryCall = UnaryCall; /***/ }), /***/ 8602: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.assertFloat32 = exports.assertUInt32 = exports.assertInt32 = exports.assertNever = exports.assert = void 0; /** * assert that condition is true or throw error (with message) */ function assert(condition, msg) { if (!condition) { throw new Error(msg); } } exports.assert = assert; /** * assert that value cannot exist = type `never`. throw runtime error if it does. */ function assertNever(value, msg) { throw new Error(msg !== null && msg !== void 0 ? msg : 'Unexpected object: ' + value); } exports.assertNever = assertNever; const FLOAT32_MAX = 3.4028234663852886e+38, FLOAT32_MIN = -3.4028234663852886e+38, UINT32_MAX = 0xFFFFFFFF, INT32_MAX = 0X7FFFFFFF, INT32_MIN = -0X80000000; function assertInt32(arg) { if (typeof arg !== "number") throw new Error('invalid int 32: ' + typeof arg); if (!Number.isInteger(arg) || arg > INT32_MAX || arg < INT32_MIN) throw new Error('invalid int 32: ' + arg); } exports.assertInt32 = assertInt32; function assertUInt32(arg) { if (typeof arg !== "number") throw new Error('invalid uint 32: ' + typeof arg); if (!Number.isInteger(arg) || arg > UINT32_MAX || arg < 0) throw new Error('invalid uint 32: ' + arg); } exports.assertUInt32 = assertUInt32; function assertFloat32(arg) { if (typeof arg !== "number") throw new Error('invalid float 32: ' + typeof arg); if (!Number.isFinite(arg)) return; if (arg > FLOAT32_MAX || arg < FLOAT32_MIN) throw new Error('invalid float 32: ' + arg); } exports.assertFloat32 = assertFloat32; /***/ }), /***/ 26335: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.base64encode = exports.base64decode = void 0; // lookup table from base64 character to byte let encTable = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/'.split(''); // lookup table from base64 character *code* to byte because lookup by number is fast let decTable = []; for (let i = 0; i < encTable.length; i++) decTable[encTable[i].charCodeAt(0)] = i; // support base64url variants decTable["-".charCodeAt(0)] = encTable.indexOf("+"); decTable["_".charCodeAt(0)] = encTable.indexOf("/"); /** * Decodes a base64 string to a byte array. 
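 *
 * Added illustration (the original list of accepted input forms continues below);
 * the sample string decodes to the bytes of "hello":
 *
 *   const bytes = base64decode("aGVsbG8=");   // Uint8Array(5)
 *   base64encode(bytes);                      // "aGVsbG8=" again, padding included
 *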
* * - ignores white-space, including line breaks and tabs * - allows inner padding (can decode concatenated base64 strings) * - does not require padding * - understands base64url encoding: * "-" instead of "+", * "_" instead of "/", * no padding */ function base64decode(base64Str) { // estimate byte size, not accounting for inner padding and whitespace let es = base64Str.length * 3 / 4; // if (es % 3 !== 0) // throw new Error('invalid base64 string'); if (base64Str[base64Str.length - 2] == '=') es -= 2; else if (base64Str[base64Str.length - 1] == '=') es -= 1; let bytes = new Uint8Array(es), bytePos = 0, // position in byte array groupPos = 0, // position in base64 group b, // current byte p = 0 // previous byte ; for (let i = 0; i < base64Str.length; i++) { b = decTable[base64Str.charCodeAt(i)]; if (b === undefined) { // noinspection FallThroughInSwitchStatementJS switch (base64Str[i]) { case '=': groupPos = 0; // reset state when padding found case '\n': case '\r': case '\t': case ' ': continue; // skip white-space, and padding default: throw Error(`invalid base64 string.`); } } switch (groupPos) { case 0: p = b; groupPos = 1; break; case 1: bytes[bytePos++] = p << 2 | (b & 48) >> 4; p = b; groupPos = 2; break; case 2: bytes[bytePos++] = (p & 15) << 4 | (b & 60) >> 2; p = b; groupPos = 3; break; case 3: bytes[bytePos++] = (p & 3) << 6 | b; groupPos = 0; break; } } if (groupPos == 1) throw Error(`invalid base64 string.`); return bytes.subarray(0, bytePos); } exports.base64decode = base64decode; /** * Encodes a byte array to a base64 string. * Adds padding at the end. * Does not insert newlines. */ function base64encode(bytes) { let base64 = '', groupPos = 0, // position in base64 group b, // current byte p = 0; // carry over from previous byte for (let i = 0; i < bytes.length; i++) { b = bytes[i]; switch (groupPos) { case 0: base64 += encTable[b >> 2]; p = (b & 3) << 4; groupPos = 1; break; case 1: base64 += encTable[p | b >> 4]; p = (b & 15) << 2; groupPos = 2; break; case 2: base64 += encTable[p | b >> 6]; base64 += encTable[b & 63]; groupPos = 0; break; } } // padding required? if (groupPos) { base64 += encTable[p]; base64 += '='; if (groupPos == 1) base64 += '='; } return base64; } exports.base64encode = base64encode; /***/ }), /***/ 54816: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.WireType = exports.mergeBinaryOptions = exports.UnknownFieldHandler = void 0; /** * This handler implements the default behaviour for unknown fields. * When reading data, unknown fields are stored on the message, in a * symbol property. * When writing data, the symbol property is queried and unknown fields * are serialized into the output again. */ var UnknownFieldHandler; (function (UnknownFieldHandler) { /** * The symbol used to store unknown fields for a message. * The property must conform to `UnknownFieldContainer`. */ UnknownFieldHandler.symbol = Symbol.for("protobuf-ts/unknown"); /** * Store an unknown field during binary read directly on the message. * This method is compatible with `BinaryReadOptions.readUnknownField`. */ UnknownFieldHandler.onRead = (typeName, message, fieldNo, wireType, data) => { let container = is(message) ? message[UnknownFieldHandler.symbol] : message[UnknownFieldHandler.symbol] = []; container.push({ no: fieldNo, wireType, data }); }; /** * Write unknown fields stored for the message to the writer. * This method is compatible with `BinaryWriteOptions.writeUnknownFields`. 
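 *
 * Added sketch (illustrative): after a binary read that kept unknown fields (the
 * default), they can be inspected on the message; `msg` is assumed:
 *
 *   for (const { no, wireType, data } of UnknownFieldHandler.list(msg)) {
 *     console.log("unknown field #" + no + ", " + data.length + " bytes");
 *   }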
*/ UnknownFieldHandler.onWrite = (typeName, message, writer) => { for (let { no, wireType, data } of UnknownFieldHandler.list(message)) writer.tag(no, wireType).raw(data); }; /** * List unknown fields stored for the message. * Note that there may be multiples fields with the same number. */ UnknownFieldHandler.list = (message, fieldNo) => { if (is(message)) { let all = message[UnknownFieldHandler.symbol]; return fieldNo ? all.filter(uf => uf.no == fieldNo) : all; } return []; }; /** * Returns the last unknown field by field number. */ UnknownFieldHandler.last = (message, fieldNo) => UnknownFieldHandler.list(message, fieldNo).slice(-1)[0]; const is = (message) => message && Array.isArray(message[UnknownFieldHandler.symbol]); })(UnknownFieldHandler = exports.UnknownFieldHandler || (exports.UnknownFieldHandler = {})); /** * Merges binary write or read options. Later values override earlier values. */ function mergeBinaryOptions(a, b) { return Object.assign(Object.assign({}, a), b); } exports.mergeBinaryOptions = mergeBinaryOptions; /** * Protobuf binary format wire types. * * A wire type provides just enough information to find the length of the * following value. * * See https://developers.google.com/protocol-buffers/docs/encoding#structure */ var WireType; (function (WireType) { /** * Used for int32, int64, uint32, uint64, sint32, sint64, bool, enum */ WireType[WireType["Varint"] = 0] = "Varint"; /** * Used for fixed64, sfixed64, double. * Always 8 bytes with little-endian byte order. */ WireType[WireType["Bit64"] = 1] = "Bit64"; /** * Used for string, bytes, embedded messages, packed repeated fields * * Only repeated numeric types (types which use the varint, 32-bit, * or 64-bit wire types) can be packed. In proto3, such fields are * packed by default. */ WireType[WireType["LengthDelimited"] = 2] = "LengthDelimited"; /** * Used for groups * @deprecated */ WireType[WireType["StartGroup"] = 3] = "StartGroup"; /** * Used for groups * @deprecated */ WireType[WireType["EndGroup"] = 4] = "EndGroup"; /** * Used for fixed32, sfixed32, float. * Always 4 bytes with little-endian byte order. */ WireType[WireType["Bit32"] = 5] = "Bit32"; })(WireType = exports.WireType || (exports.WireType = {})); /***/ }), /***/ 92889: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.BinaryReader = exports.binaryReadOptions = void 0; const binary_format_contract_1 = __nccwpck_require__(54816); const pb_long_1 = __nccwpck_require__(61753); const goog_varint_1 = __nccwpck_require__(93223); const defaultsRead = { readUnknownField: true, readerFactory: bytes => new BinaryReader(bytes), }; /** * Make options for reading binary data form partial options. */ function binaryReadOptions(options) { return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; } exports.binaryReadOptions = binaryReadOptions; class BinaryReader { constructor(buf, textDecoder) { this.varint64 = goog_varint_1.varint64read; // dirty cast for `this` /** * Read a `uint32` field, an unsigned 32 bit varint. */ this.uint32 = goog_varint_1.varint32read; // dirty cast for `this` and access to protected `buf` this.buf = buf; this.len = buf.length; this.pos = 0; this.view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength); this.textDecoder = textDecoder !== null && textDecoder !== void 0 ? textDecoder : new TextDecoder("utf-8", { fatal: true, ignoreBOM: true, }); } /** * Reads a tag - field number and wire type. 
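 *
 * Added sketch (illustrative): walking a serialized message field by field;
 * `bytes` is an assumed Uint8Array:
 *
 *   const reader = new BinaryReader(bytes);
 *   while (reader.pos < reader.len) {
 *     const [fieldNo, wireType] = reader.tag();
 *     reader.skip(wireType);   // or read the value via int32(), string(), ...
 *   }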
*/ tag() { let tag = this.uint32(), fieldNo = tag >>> 3, wireType = tag & 7; if (fieldNo <= 0 || wireType < 0 || wireType > 5) throw new Error("illegal tag: field no " + fieldNo + " wire type " + wireType); return [fieldNo, wireType]; } /** * Skip one element on the wire and return the skipped data. * Supports WireType.StartGroup since v2.0.0-alpha.23. */ skip(wireType) { let start = this.pos; // noinspection FallThroughInSwitchStatementJS switch (wireType) { case binary_format_contract_1.WireType.Varint: while (this.buf[this.pos++] & 0x80) { // ignore } break; case binary_format_contract_1.WireType.Bit64: this.pos += 4; case binary_format_contract_1.WireType.Bit32: this.pos += 4; break; case binary_format_contract_1.WireType.LengthDelimited: let len = this.uint32(); this.pos += len; break; case binary_format_contract_1.WireType.StartGroup: // From descriptor.proto: Group type is deprecated, not supported in proto3. // But we must still be able to parse and treat as unknown. let t; while ((t = this.tag()[1]) !== binary_format_contract_1.WireType.EndGroup) { this.skip(t); } break; default: throw new Error("cant skip wire type " + wireType); } this.assertBounds(); return this.buf.subarray(start, this.pos); } /** * Throws error if position in byte array is out of range. */ assertBounds() { if (this.pos > this.len) throw new RangeError("premature EOF"); } /** * Read a `int32` field, a signed 32 bit varint. */ int32() { return this.uint32() | 0; } /** * Read a `sint32` field, a signed, zigzag-encoded 32-bit varint. */ sint32() { let zze = this.uint32(); // decode zigzag return (zze >>> 1) ^ -(zze & 1); } /** * Read a `int64` field, a signed 64-bit varint. */ int64() { return new pb_long_1.PbLong(...this.varint64()); } /** * Read a `uint64` field, an unsigned 64-bit varint. */ uint64() { return new pb_long_1.PbULong(...this.varint64()); } /** * Read a `sint64` field, a signed, zig-zag-encoded 64-bit varint. */ sint64() { let [lo, hi] = this.varint64(); // decode zig zag let s = -(lo & 1); lo = ((lo >>> 1 | (hi & 1) << 31) ^ s); hi = (hi >>> 1 ^ s); return new pb_long_1.PbLong(lo, hi); } /** * Read a `bool` field, a variant. */ bool() { let [lo, hi] = this.varint64(); return lo !== 0 || hi !== 0; } /** * Read a `fixed32` field, an unsigned, fixed-length 32-bit integer. */ fixed32() { return this.view.getUint32((this.pos += 4) - 4, true); } /** * Read a `sfixed32` field, a signed, fixed-length 32-bit integer. */ sfixed32() { return this.view.getInt32((this.pos += 4) - 4, true); } /** * Read a `fixed64` field, an unsigned, fixed-length 64 bit integer. */ fixed64() { return new pb_long_1.PbULong(this.sfixed32(), this.sfixed32()); } /** * Read a `fixed64` field, a signed, fixed-length 64-bit integer. */ sfixed64() { return new pb_long_1.PbLong(this.sfixed32(), this.sfixed32()); } /** * Read a `float` field, 32-bit floating point number. */ float() { return this.view.getFloat32((this.pos += 4) - 4, true); } /** * Read a `double` field, a 64-bit floating point number. */ double() { return this.view.getFloat64((this.pos += 8) - 8, true); } /** * Read a `bytes` field, length-delimited arbitrary data. */ bytes() { let len = this.uint32(); let start = this.pos; this.pos += len; this.assertBounds(); return this.buf.subarray(start, start + len); } /** * Read a `string` field, length-delimited data converted to UTF-8 text. 
*/ string() { return this.textDecoder.decode(this.bytes()); } } exports.BinaryReader = BinaryReader; /***/ }), /***/ 23957: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.BinaryWriter = exports.binaryWriteOptions = void 0; const pb_long_1 = __nccwpck_require__(61753); const goog_varint_1 = __nccwpck_require__(93223); const assert_1 = __nccwpck_require__(8602); const defaultsWrite = { writeUnknownFields: true, writerFactory: () => new BinaryWriter(), }; /** * Make options for writing binary data form partial options. */ function binaryWriteOptions(options) { return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; } exports.binaryWriteOptions = binaryWriteOptions; class BinaryWriter { constructor(textEncoder) { /** * Previous fork states. */ this.stack = []; this.textEncoder = textEncoder !== null && textEncoder !== void 0 ? textEncoder : new TextEncoder(); this.chunks = []; this.buf = []; } /** * Return all bytes written and reset this writer. */ finish() { this.chunks.push(new Uint8Array(this.buf)); // flush the buffer let len = 0; for (let i = 0; i < this.chunks.length; i++) len += this.chunks[i].length; let bytes = new Uint8Array(len); let offset = 0; for (let i = 0; i < this.chunks.length; i++) { bytes.set(this.chunks[i], offset); offset += this.chunks[i].length; } this.chunks = []; return bytes; } /** * Start a new fork for length-delimited data like a message * or a packed repeated field. * * Must be joined later with `join()`. */ fork() { this.stack.push({ chunks: this.chunks, buf: this.buf }); this.chunks = []; this.buf = []; return this; } /** * Join the last fork. Write its length and bytes, then * return to the previous state. */ join() { // get chunk of fork let chunk = this.finish(); // restore previous state let prev = this.stack.pop(); if (!prev) throw new Error('invalid state, fork stack empty'); this.chunks = prev.chunks; this.buf = prev.buf; // write length of chunk as varint this.uint32(chunk.byteLength); return this.raw(chunk); } /** * Writes a tag (field number and wire type). * * Equivalent to `uint32( (fieldNo << 3 | type) >>> 0 )`. * * Generated code should compute the tag ahead of time and call `uint32()`. */ tag(fieldNo, type) { return this.uint32((fieldNo << 3 | type) >>> 0); } /** * Write a chunk of raw bytes. */ raw(chunk) { if (this.buf.length) { this.chunks.push(new Uint8Array(this.buf)); this.buf = []; } this.chunks.push(chunk); return this; } /** * Write a `uint32` value, an unsigned 32 bit varint. */ uint32(value) { assert_1.assertUInt32(value); // write value as varint 32, inlined for speed while (value > 0x7f) { this.buf.push((value & 0x7f) | 0x80); value = value >>> 7; } this.buf.push(value); return this; } /** * Write a `int32` value, a signed 32 bit varint. */ int32(value) { assert_1.assertInt32(value); goog_varint_1.varint32write(value, this.buf); return this; } /** * Write a `bool` value, a variant. */ bool(value) { this.buf.push(value ? 1 : 0); return this; } /** * Write a `bytes` value, length-delimited arbitrary data. */ bytes(value) { this.uint32(value.byteLength); // write length of chunk as varint return this.raw(value); } /** * Write a `string` value, length-delimited data converted to UTF-8 text. 
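 *
 * Added sketch (illustrative): hand-encoding a message with field 1 = varint 150
 * and field 2 = string "hi" (field numbers are arbitrary):
 *
 *   const bytes = new BinaryWriter()
 *     .tag(1, WireType.Varint).int32(150)
 *     .tag(2, WireType.LengthDelimited).string("hi")
 *     .finish();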
*/ string(value) { let chunk = this.textEncoder.encode(value); this.uint32(chunk.byteLength); // write length of chunk as varint return this.raw(chunk); } /** * Write a `float` value, 32-bit floating point number. */ float(value) { assert_1.assertFloat32(value); let chunk = new Uint8Array(4); new DataView(chunk.buffer).setFloat32(0, value, true); return this.raw(chunk); } /** * Write a `double` value, a 64-bit floating point number. */ double(value) { let chunk = new Uint8Array(8); new DataView(chunk.buffer).setFloat64(0, value, true); return this.raw(chunk); } /** * Write a `fixed32` value, an unsigned, fixed-length 32-bit integer. */ fixed32(value) { assert_1.assertUInt32(value); let chunk = new Uint8Array(4); new DataView(chunk.buffer).setUint32(0, value, true); return this.raw(chunk); } /** * Write a `sfixed32` value, a signed, fixed-length 32-bit integer. */ sfixed32(value) { assert_1.assertInt32(value); let chunk = new Uint8Array(4); new DataView(chunk.buffer).setInt32(0, value, true); return this.raw(chunk); } /** * Write a `sint32` value, a signed, zigzag-encoded 32-bit varint. */ sint32(value) { assert_1.assertInt32(value); // zigzag encode value = ((value << 1) ^ (value >> 31)) >>> 0; goog_varint_1.varint32write(value, this.buf); return this; } /** * Write a `fixed64` value, a signed, fixed-length 64-bit integer. */ sfixed64(value) { let chunk = new Uint8Array(8); let view = new DataView(chunk.buffer); let long = pb_long_1.PbLong.from(value); view.setInt32(0, long.lo, true); view.setInt32(4, long.hi, true); return this.raw(chunk); } /** * Write a `fixed64` value, an unsigned, fixed-length 64 bit integer. */ fixed64(value) { let chunk = new Uint8Array(8); let view = new DataView(chunk.buffer); let long = pb_long_1.PbULong.from(value); view.setInt32(0, long.lo, true); view.setInt32(4, long.hi, true); return this.raw(chunk); } /** * Write a `int64` value, a signed 64-bit varint. */ int64(value) { let long = pb_long_1.PbLong.from(value); goog_varint_1.varint64write(long.lo, long.hi, this.buf); return this; } /** * Write a `sint64` value, a signed, zig-zag-encoded 64-bit varint. */ sint64(value) { let long = pb_long_1.PbLong.from(value), // zigzag encode sign = long.hi >> 31, lo = (long.lo << 1) ^ sign, hi = ((long.hi << 1) | (long.lo >>> 31)) ^ sign; goog_varint_1.varint64write(lo, hi, this.buf); return this; } /** * Write a `uint64` value, an unsigned 64-bit varint. */ uint64(value) { let long = pb_long_1.PbULong.from(value); goog_varint_1.varint64write(long.lo, long.hi, this.buf); return this; } } exports.BinaryWriter = BinaryWriter; /***/ }), /***/ 70257: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.listEnumNumbers = exports.listEnumNames = exports.listEnumValues = exports.isEnumObject = void 0; /** * Is this a lookup object generated by Typescript, for a Typescript enum * generated by protobuf-ts? * * - No `const enum` (enum must not be inlined, we need reverse mapping). * - No string enum (we need int32 for protobuf). * - Must have a value for 0 (otherwise, we would need to support custom default values). */ function isEnumObject(arg) { if (typeof arg != 'object' || arg === null) { return false; } if (!arg.hasOwnProperty(0)) { return false; } for (let k of Object.keys(arg)) { let num = parseInt(k); if (!Number.isNaN(num)) { // is there a name for the number? let nam = arg[num]; if (nam === undefined) return false; // does the name resolve back to the number? 
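// Added sketch (illustrative): for a TypeScript enum generated by protobuf-ts, e.g.
// enum Status { OK = 0, FAILED = 1 }, the helpers defined below behave as follows:
//   listEnumValues(Status)  -> [{ name: "OK", number: 0 }, { name: "FAILED", number: 1 }]
//   listEnumNames(Status)   -> ["OK", "FAILED"]
//   listEnumNumbers(Status) -> [0, 1]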
if (arg[nam] !== num) return false; } else { // is there a number for the name? let num = arg[k]; if (num === undefined) return false; // is it a string enum? if (typeof num !== 'number') return false; // do we know the number? if (arg[num] === undefined) return false; } } return true; } exports.isEnumObject = isEnumObject; /** * Lists all values of a Typescript enum, as an array of objects with a "name" * property and a "number" property. * * Note that it is possible that a number appears more than once, because it is * possible to have aliases in an enum. * * Throws if the enum does not adhere to the rules of enums generated by * protobuf-ts. See `isEnumObject()`. */ function listEnumValues(enumObject) { if (!isEnumObject(enumObject)) throw new Error("not a typescript enum object"); let values = []; for (let [name, number] of Object.entries(enumObject)) if (typeof number == "number") values.push({ name, number }); return values; } exports.listEnumValues = listEnumValues; /** * Lists the names of a Typescript enum. * * Throws if the enum does not adhere to the rules of enums generated by * protobuf-ts. See `isEnumObject()`. */ function listEnumNames(enumObject) { return listEnumValues(enumObject).map(val => val.name); } exports.listEnumNames = listEnumNames; /** * Lists the numbers of a Typescript enum. * * Throws if the enum does not adhere to the rules of enums generated by * protobuf-ts. See `isEnumObject()`. */ function listEnumNumbers(enumObject) { return listEnumValues(enumObject) .map(val => val.number) .filter((num, index, arr) => arr.indexOf(num) == index); } exports.listEnumNumbers = listEnumNumbers; /***/ }), /***/ 93223: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright 2008 Google Inc. All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above // copyright notice, this list of conditions and the following disclaimer // in the documentation and/or other materials provided with the // distribution. // * Neither the name of Google Inc. nor the names of its // contributors may be used to endorse or promote products derived from // this software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. // // Code generated by the Protocol Buffer compiler is owned by the owner // of the input file used when generating it. This code is not // standalone and requires a support library to be linked with it. This // support library is itself covered by the above license. 
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.varint32read = exports.varint32write = exports.int64toString = exports.int64fromString = exports.varint64write = exports.varint64read = void 0; /** * Read a 64 bit varint as two JS numbers. * * Returns tuple: * [0]: low bits * [0]: high bits * * Copyright 2008 Google Inc. All rights reserved. * * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L175 */ function varint64read() { let lowBits = 0; let highBits = 0; for (let shift = 0; shift < 28; shift += 7) { let b = this.buf[this.pos++]; lowBits |= (b & 0x7F) << shift; if ((b & 0x80) == 0) { this.assertBounds(); return [lowBits, highBits]; } } let middleByte = this.buf[this.pos++]; // last four bits of the first 32 bit number lowBits |= (middleByte & 0x0F) << 28; // 3 upper bits are part of the next 32 bit number highBits = (middleByte & 0x70) >> 4; if ((middleByte & 0x80) == 0) { this.assertBounds(); return [lowBits, highBits]; } for (let shift = 3; shift <= 31; shift += 7) { let b = this.buf[this.pos++]; highBits |= (b & 0x7F) << shift; if ((b & 0x80) == 0) { this.assertBounds(); return [lowBits, highBits]; } } throw new Error('invalid varint'); } exports.varint64read = varint64read; /** * Write a 64 bit varint, given as two JS numbers, to the given bytes array. * * Copyright 2008 Google Inc. All rights reserved. * * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/writer.js#L344 */ function varint64write(lo, hi, bytes) { for (let i = 0; i < 28; i = i + 7) { const shift = lo >>> i; const hasNext = !((shift >>> 7) == 0 && hi == 0); const byte = (hasNext ? shift | 0x80 : shift) & 0xFF; bytes.push(byte); if (!hasNext) { return; } } const splitBits = ((lo >>> 28) & 0x0F) | ((hi & 0x07) << 4); const hasMoreBits = !((hi >> 3) == 0); bytes.push((hasMoreBits ? splitBits | 0x80 : splitBits) & 0xFF); if (!hasMoreBits) { return; } for (let i = 3; i < 31; i = i + 7) { const shift = hi >>> i; const hasNext = !((shift >>> 7) == 0); const byte = (hasNext ? shift | 0x80 : shift) & 0xFF; bytes.push(byte); if (!hasNext) { return; } } bytes.push((hi >>> 31) & 0x01); } exports.varint64write = varint64write; // constants for binary math const TWO_PWR_32_DBL = (1 << 16) * (1 << 16); /** * Parse decimal string of 64 bit integer value as two JS numbers. * * Returns tuple: * [0]: minus sign? * [1]: low bits * [2]: high bits * * Copyright 2008 Google Inc. */ function int64fromString(dec) { // Check for minus sign. let minus = dec[0] == '-'; if (minus) dec = dec.slice(1); // Work 6 decimal digits at a time, acting like we're converting base 1e6 // digits to binary. This is safe to do with floating point math because // Number.isSafeInteger(ALL_32_BITS * 1e6) == true. const base = 1e6; let lowBits = 0; let highBits = 0; function add1e6digit(begin, end) { // Note: Number('') is 0. const digit1e6 = Number(dec.slice(begin, end)); highBits *= base; lowBits = lowBits * base + digit1e6; // Carry bits from lowBits to highBits if (lowBits >= TWO_PWR_32_DBL) { highBits = highBits + ((lowBits / TWO_PWR_32_DBL) | 0); lowBits = lowBits % TWO_PWR_32_DBL; } } add1e6digit(-24, -18); add1e6digit(-18, -12); add1e6digit(-12, -6); add1e6digit(-6); return [minus, lowBits, highBits]; } exports.int64fromString = int64fromString; /** * Format 64 bit integer value (as two JS numbers) to decimal string. * * Copyright 2008 Google Inc. 
*/ function int64toString(bitsLow, bitsHigh) { // Skip the expensive conversion if the number is small enough to use the // built-in conversions. if ((bitsHigh >>> 0) <= 0x1FFFFF) { return '' + (TWO_PWR_32_DBL * bitsHigh + (bitsLow >>> 0)); } // What this code is doing is essentially converting the input number from // base-2 to base-1e7, which allows us to represent the 64-bit range with // only 3 (very large) digits. Those digits are then trivial to convert to // a base-10 string. // The magic numbers used here are - // 2^24 = 16777216 = (1,6777216) in base-1e7. // 2^48 = 281474976710656 = (2,8147497,6710656) in base-1e7. // Split 32:32 representation into 16:24:24 representation so our // intermediate digits don't overflow. let low = bitsLow & 0xFFFFFF; let mid = (((bitsLow >>> 24) | (bitsHigh << 8)) >>> 0) & 0xFFFFFF; let high = (bitsHigh >> 16) & 0xFFFF; // Assemble our three base-1e7 digits, ignoring carries. The maximum // value in a digit at this step is representable as a 48-bit integer, which // can be stored in a 64-bit floating point number. let digitA = low + (mid * 6777216) + (high * 6710656); let digitB = mid + (high * 8147497); let digitC = (high * 2); // Apply carries from A to B and from B to C. let base = 10000000; if (digitA >= base) { digitB += Math.floor(digitA / base); digitA %= base; } if (digitB >= base) { digitC += Math.floor(digitB / base); digitB %= base; } // Convert base-1e7 digits to base-10, with optional leading zeroes. function decimalFrom1e7(digit1e7, needLeadingZeros) { let partial = digit1e7 ? String(digit1e7) : ''; if (needLeadingZeros) { return '0000000'.slice(partial.length) + partial; } return partial; } return decimalFrom1e7(digitC, /*needLeadingZeros=*/ 0) + decimalFrom1e7(digitB, /*needLeadingZeros=*/ digitC) + // If the final 1e7 digit didn't need leading zeros, we would have // returned via the trivial code path at the top. decimalFrom1e7(digitA, /*needLeadingZeros=*/ 1); } exports.int64toString = int64toString; /** * Write a 32 bit varint, signed or unsigned. Same as `varint64write(0, value, bytes)` * * Copyright 2008 Google Inc. All rights reserved. * * See https://github.com/protocolbuffers/protobuf/blob/1b18833f4f2a2f681f4e4a25cdf3b0a43115ec26/js/binary/encoder.js#L144 */ function varint32write(value, bytes) { if (value >= 0) { // write value as varint 32 while (value > 0x7f) { bytes.push((value & 0x7f) | 0x80); value = value >>> 7; } bytes.push(value); } else { for (let i = 0; i < 9; i++) { bytes.push(value & 127 | 128); value = value >> 7; } bytes.push(1); } } exports.varint32write = varint32write; /** * Read an unsigned 32 bit varint. 
* * See https://github.com/protocolbuffers/protobuf/blob/8a71927d74a4ce34efe2d8769fda198f52d20d12/js/experimental/runtime/kernel/buffer_decoder.js#L220 */ function varint32read() { let b = this.buf[this.pos++]; let result = b & 0x7F; if ((b & 0x80) == 0) { this.assertBounds(); return result; } b = this.buf[this.pos++]; result |= (b & 0x7F) << 7; if ((b & 0x80) == 0) { this.assertBounds(); return result; } b = this.buf[this.pos++]; result |= (b & 0x7F) << 14; if ((b & 0x80) == 0) { this.assertBounds(); return result; } b = this.buf[this.pos++]; result |= (b & 0x7F) << 21; if ((b & 0x80) == 0) { this.assertBounds(); return result; } // Extract only last 4 bits b = this.buf[this.pos++]; result |= (b & 0x0F) << 28; for (let readBytes = 5; ((b & 0x80) !== 0) && readBytes < 10; readBytes++) b = this.buf[this.pos++]; if ((b & 0x80) != 0) throw new Error('invalid varint'); this.assertBounds(); // Result can have 32 bits, convert it to unsigned return result >>> 0; } exports.varint32read = varint32read; /***/ }), /***/ 68886: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Public API of the protobuf-ts runtime. // Note: we do not use `export * from ...` to help tree shakers, // webpack verbose output hints that this should be useful Object.defineProperty(exports, "__esModule", ({ value: true })); // Convenience JSON typings and corresponding type guards var json_typings_1 = __nccwpck_require__(49999); Object.defineProperty(exports, "typeofJsonValue", ({ enumerable: true, get: function () { return json_typings_1.typeofJsonValue; } })); Object.defineProperty(exports, "isJsonObject", ({ enumerable: true, get: function () { return json_typings_1.isJsonObject; } })); // Base 64 encoding var base64_1 = __nccwpck_require__(26335); Object.defineProperty(exports, "base64decode", ({ enumerable: true, get: function () { return base64_1.base64decode; } })); Object.defineProperty(exports, "base64encode", ({ enumerable: true, get: function () { return base64_1.base64encode; } })); // UTF8 encoding var protobufjs_utf8_1 = __nccwpck_require__(58950); Object.defineProperty(exports, "utf8read", ({ enumerable: true, get: function () { return protobufjs_utf8_1.utf8read; } })); // Binary format contracts, options for reading and writing, for example var binary_format_contract_1 = __nccwpck_require__(54816); Object.defineProperty(exports, "WireType", ({ enumerable: true, get: function () { return binary_format_contract_1.WireType; } })); Object.defineProperty(exports, "mergeBinaryOptions", ({ enumerable: true, get: function () { return binary_format_contract_1.mergeBinaryOptions; } })); Object.defineProperty(exports, "UnknownFieldHandler", ({ enumerable: true, get: function () { return binary_format_contract_1.UnknownFieldHandler; } })); // Standard IBinaryReader implementation var binary_reader_1 = __nccwpck_require__(92889); Object.defineProperty(exports, "BinaryReader", ({ enumerable: true, get: function () { return binary_reader_1.BinaryReader; } })); Object.defineProperty(exports, "binaryReadOptions", ({ enumerable: true, get: function () { return binary_reader_1.binaryReadOptions; } })); // Standard IBinaryWriter implementation var binary_writer_1 = __nccwpck_require__(23957); Object.defineProperty(exports, "BinaryWriter", ({ enumerable: true, get: function () { return binary_writer_1.BinaryWriter; } })); Object.defineProperty(exports, "binaryWriteOptions", ({ enumerable: true, get: function () { return binary_writer_1.binaryWriteOptions; } })); // Int64 and UInt64 
implementations required for the binary format var pb_long_1 = __nccwpck_require__(61753); Object.defineProperty(exports, "PbLong", ({ enumerable: true, get: function () { return pb_long_1.PbLong; } })); Object.defineProperty(exports, "PbULong", ({ enumerable: true, get: function () { return pb_long_1.PbULong; } })); // JSON format contracts, options for reading and writing, for example var json_format_contract_1 = __nccwpck_require__(29367); Object.defineProperty(exports, "jsonReadOptions", ({ enumerable: true, get: function () { return json_format_contract_1.jsonReadOptions; } })); Object.defineProperty(exports, "jsonWriteOptions", ({ enumerable: true, get: function () { return json_format_contract_1.jsonWriteOptions; } })); Object.defineProperty(exports, "mergeJsonOptions", ({ enumerable: true, get: function () { return json_format_contract_1.mergeJsonOptions; } })); // Message type contract var message_type_contract_1 = __nccwpck_require__(43785); Object.defineProperty(exports, "MESSAGE_TYPE", ({ enumerable: true, get: function () { return message_type_contract_1.MESSAGE_TYPE; } })); // Message type implementation via reflection var message_type_1 = __nccwpck_require__(15106); Object.defineProperty(exports, "MessageType", ({ enumerable: true, get: function () { return message_type_1.MessageType; } })); // Reflection info, generated by the plugin, exposed to the user, used by reflection ops var reflection_info_1 = __nccwpck_require__(67910); Object.defineProperty(exports, "ScalarType", ({ enumerable: true, get: function () { return reflection_info_1.ScalarType; } })); Object.defineProperty(exports, "LongType", ({ enumerable: true, get: function () { return reflection_info_1.LongType; } })); Object.defineProperty(exports, "RepeatType", ({ enumerable: true, get: function () { return reflection_info_1.RepeatType; } })); Object.defineProperty(exports, "normalizeFieldInfo", ({ enumerable: true, get: function () { return reflection_info_1.normalizeFieldInfo; } })); Object.defineProperty(exports, "readFieldOptions", ({ enumerable: true, get: function () { return reflection_info_1.readFieldOptions; } })); Object.defineProperty(exports, "readFieldOption", ({ enumerable: true, get: function () { return reflection_info_1.readFieldOption; } })); Object.defineProperty(exports, "readMessageOption", ({ enumerable: true, get: function () { return reflection_info_1.readMessageOption; } })); // Message operations via reflection var reflection_type_check_1 = __nccwpck_require__(25167); Object.defineProperty(exports, "ReflectionTypeCheck", ({ enumerable: true, get: function () { return reflection_type_check_1.ReflectionTypeCheck; } })); var reflection_create_1 = __nccwpck_require__(75726); Object.defineProperty(exports, "reflectionCreate", ({ enumerable: true, get: function () { return reflection_create_1.reflectionCreate; } })); var reflection_scalar_default_1 = __nccwpck_require__(19526); Object.defineProperty(exports, "reflectionScalarDefault", ({ enumerable: true, get: function () { return reflection_scalar_default_1.reflectionScalarDefault; } })); var reflection_merge_partial_1 = __nccwpck_require__(98044); Object.defineProperty(exports, "reflectionMergePartial", ({ enumerable: true, get: function () { return reflection_merge_partial_1.reflectionMergePartial; } })); var reflection_equals_1 = __nccwpck_require__(4827); Object.defineProperty(exports, "reflectionEquals", ({ enumerable: true, get: function () { return reflection_equals_1.reflectionEquals; } })); var reflection_binary_reader_1 = 
__nccwpck_require__(89611); Object.defineProperty(exports, "ReflectionBinaryReader", ({ enumerable: true, get: function () { return reflection_binary_reader_1.ReflectionBinaryReader; } })); var reflection_binary_writer_1 = __nccwpck_require__(66907); Object.defineProperty(exports, "ReflectionBinaryWriter", ({ enumerable: true, get: function () { return reflection_binary_writer_1.ReflectionBinaryWriter; } })); var reflection_json_reader_1 = __nccwpck_require__(46790); Object.defineProperty(exports, "ReflectionJsonReader", ({ enumerable: true, get: function () { return reflection_json_reader_1.ReflectionJsonReader; } })); var reflection_json_writer_1 = __nccwpck_require__(11094); Object.defineProperty(exports, "ReflectionJsonWriter", ({ enumerable: true, get: function () { return reflection_json_writer_1.ReflectionJsonWriter; } })); var reflection_contains_message_type_1 = __nccwpck_require__(59946); Object.defineProperty(exports, "containsMessageType", ({ enumerable: true, get: function () { return reflection_contains_message_type_1.containsMessageType; } })); // Oneof helpers var oneof_1 = __nccwpck_require__(18063); Object.defineProperty(exports, "isOneofGroup", ({ enumerable: true, get: function () { return oneof_1.isOneofGroup; } })); Object.defineProperty(exports, "setOneofValue", ({ enumerable: true, get: function () { return oneof_1.setOneofValue; } })); Object.defineProperty(exports, "getOneofValue", ({ enumerable: true, get: function () { return oneof_1.getOneofValue; } })); Object.defineProperty(exports, "clearOneofValue", ({ enumerable: true, get: function () { return oneof_1.clearOneofValue; } })); Object.defineProperty(exports, "getSelectedOneofValue", ({ enumerable: true, get: function () { return oneof_1.getSelectedOneofValue; } })); // Enum object type guard and reflection util, may be interesting to the user. 
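/*
 * Illustrative sketch (assumed enum name, not from the original source): for a
 * TypeScript enum such as `enum Status { OK = 0, ERROR = 1 }`, the compiler
 * emits the lookup object { 0: "OK", 1: "ERROR", OK: 0, ERROR: 1 }, so:
 *
 *   isEnumObject(Status)    // true
 *   listEnumValues(Status)  // [{ name: "OK", number: 0 }, { name: "ERROR", number: 1 }]
 *   listEnumNames(Status)   // ["OK", "ERROR"]
 *   listEnumNumbers(Status) // [0, 1]
 */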
var enum_object_1 = __nccwpck_require__(70257); Object.defineProperty(exports, "listEnumValues", ({ enumerable: true, get: function () { return enum_object_1.listEnumValues; } })); Object.defineProperty(exports, "listEnumNames", ({ enumerable: true, get: function () { return enum_object_1.listEnumNames; } })); Object.defineProperty(exports, "listEnumNumbers", ({ enumerable: true, get: function () { return enum_object_1.listEnumNumbers; } })); Object.defineProperty(exports, "isEnumObject", ({ enumerable: true, get: function () { return enum_object_1.isEnumObject; } })); // lowerCamelCase() is exported for plugin, rpc-runtime and other rpc packages var lower_camel_case_1 = __nccwpck_require__(4073); Object.defineProperty(exports, "lowerCamelCase", ({ enumerable: true, get: function () { return lower_camel_case_1.lowerCamelCase; } })); // assertion functions are exported for plugin, may also be useful to user var assert_1 = __nccwpck_require__(8602); Object.defineProperty(exports, "assert", ({ enumerable: true, get: function () { return assert_1.assert; } })); Object.defineProperty(exports, "assertNever", ({ enumerable: true, get: function () { return assert_1.assertNever; } })); Object.defineProperty(exports, "assertInt32", ({ enumerable: true, get: function () { return assert_1.assertInt32; } })); Object.defineProperty(exports, "assertUInt32", ({ enumerable: true, get: function () { return assert_1.assertUInt32; } })); Object.defineProperty(exports, "assertFloat32", ({ enumerable: true, get: function () { return assert_1.assertFloat32; } })); /***/ }), /***/ 29367: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.mergeJsonOptions = exports.jsonWriteOptions = exports.jsonReadOptions = void 0; const defaultsWrite = { emitDefaultValues: false, enumAsInteger: false, useProtoFieldName: false, prettySpaces: 0, }, defaultsRead = { ignoreUnknownFields: false, }; /** * Make options for reading JSON data from partial options. */ function jsonReadOptions(options) { return options ? Object.assign(Object.assign({}, defaultsRead), options) : defaultsRead; } exports.jsonReadOptions = jsonReadOptions; /** * Make options for writing JSON data from partial options. */ function jsonWriteOptions(options) { return options ? Object.assign(Object.assign({}, defaultsWrite), options) : defaultsWrite; } exports.jsonWriteOptions = jsonWriteOptions; /** * Merges JSON write or read options. Later values override earlier values. Type registries are merged. */ function mergeJsonOptions(a, b) { var _a, _b; let c = Object.assign(Object.assign({}, a), b); c.typeRegistry = [...((_a = a === null || a === void 0 ? void 0 : a.typeRegistry) !== null && _a !== void 0 ? _a : []), ...((_b = b === null || b === void 0 ? void 0 : b.typeRegistry) !== null && _b !== void 0 ? _b : [])]; return c; } exports.mergeJsonOptions = mergeJsonOptions; /***/ }), /***/ 49999: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isJsonObject = exports.typeofJsonValue = void 0; /** * Get the type of a JSON value. * Distinguishes between array, null and object. */ function typeofJsonValue(value) { let t = typeof value; if (t == "object") { if (Array.isArray(value)) return "array"; if (value === null) return "null"; } return t; } exports.typeofJsonValue = typeofJsonValue; /** * Is this a JSON object (instead of an array or null)? 
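 *
 * Illustrative examples (assumed usage, not from the original source):
 *
 * ```ts
 * isJsonObject({ a: 1 }) // true
 * isJsonObject([1, 2])   // false - arrays are excluded
 * isJsonObject(null)     // false - null is excluded
 * ```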
*/ function isJsonObject(value) { return value !== null && typeof value == "object" && !Array.isArray(value); } exports.isJsonObject = isJsonObject; /***/ }), /***/ 4073: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.lowerCamelCase = void 0; /** * Converts snake_case to lowerCamelCase. * * Should behave like protoc: * https://github.com/protocolbuffers/protobuf/blob/e8ae137c96444ea313485ed1118c5e43b2099cf1/src/google/protobuf/compiler/java/java_helpers.cc#L118 */ function lowerCamelCase(snakeCase) { let capNext = false; const sb = []; for (let i = 0; i < snakeCase.length; i++) { let next = snakeCase.charAt(i); if (next == '_') { capNext = true; } else if (/\d/.test(next)) { sb.push(next); capNext = true; } else if (capNext) { sb.push(next.toUpperCase()); capNext = false; } else if (i == 0) { sb.push(next.toLowerCase()); } else { sb.push(next); } } return sb.join(''); } exports.lowerCamelCase = lowerCamelCase; /***/ }), /***/ 43785: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MESSAGE_TYPE = void 0; /** * The symbol used as a key on message objects to store the message type. * * Note that this is an experimental feature - it is here to stay, but * implementation details may change without notice. */ exports.MESSAGE_TYPE = Symbol.for("protobuf-ts/message-type"); /***/ }), /***/ 15106: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MessageType = void 0; const message_type_contract_1 = __nccwpck_require__(43785); const reflection_info_1 = __nccwpck_require__(67910); const reflection_type_check_1 = __nccwpck_require__(25167); const reflection_json_reader_1 = __nccwpck_require__(46790); const reflection_json_writer_1 = __nccwpck_require__(11094); const reflection_binary_reader_1 = __nccwpck_require__(89611); const reflection_binary_writer_1 = __nccwpck_require__(66907); const reflection_create_1 = __nccwpck_require__(75726); const reflection_merge_partial_1 = __nccwpck_require__(98044); const json_typings_1 = __nccwpck_require__(49999); const json_format_contract_1 = __nccwpck_require__(29367); const reflection_equals_1 = __nccwpck_require__(4827); const binary_writer_1 = __nccwpck_require__(23957); const binary_reader_1 = __nccwpck_require__(92889); const baseDescriptors = Object.getOwnPropertyDescriptors(Object.getPrototypeOf({})); /** * This standard message type provides reflection-based * operations to work with a message. */ class MessageType { constructor(name, fields, options) { this.defaultCheckDepth = 16; this.typeName = name; this.fields = fields.map(reflection_info_1.normalizeFieldInfo); this.options = options !== null && options !== void 0 ? 
options : {}; this.messagePrototype = Object.create(null, Object.assign(Object.assign({}, baseDescriptors), { [message_type_contract_1.MESSAGE_TYPE]: { value: this } })); this.refTypeCheck = new reflection_type_check_1.ReflectionTypeCheck(this); this.refJsonReader = new reflection_json_reader_1.ReflectionJsonReader(this); this.refJsonWriter = new reflection_json_writer_1.ReflectionJsonWriter(this); this.refBinReader = new reflection_binary_reader_1.ReflectionBinaryReader(this); this.refBinWriter = new reflection_binary_writer_1.ReflectionBinaryWriter(this); } create(value) { let message = reflection_create_1.reflectionCreate(this); if (value !== undefined) { reflection_merge_partial_1.reflectionMergePartial(this, message, value); } return message; } /** * Clone the message. * * Unknown fields are discarded. */ clone(message) { let copy = this.create(); reflection_merge_partial_1.reflectionMergePartial(this, copy, message); return copy; } /** * Determines whether two message of the same type have the same field values. * Checks for deep equality, traversing repeated fields, oneof groups, maps * and messages recursively. * Will also return true if both messages are `undefined`. */ equals(a, b) { return reflection_equals_1.reflectionEquals(this, a, b); } /** * Is the given value assignable to our message type * and contains no [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? */ is(arg, depth = this.defaultCheckDepth) { return this.refTypeCheck.is(arg, depth, false); } /** * Is the given value assignable to our message type, * regardless of [excess properties](https://www.typescriptlang.org/docs/handbook/interfaces.html#excess-property-checks)? */ isAssignable(arg, depth = this.defaultCheckDepth) { return this.refTypeCheck.is(arg, depth, true); } /** * Copy partial data into the target message. */ mergePartial(target, source) { reflection_merge_partial_1.reflectionMergePartial(this, target, source); } /** * Create a new message from binary format. */ fromBinary(data, options) { let opt = binary_reader_1.binaryReadOptions(options); return this.internalBinaryRead(opt.readerFactory(data), data.byteLength, opt); } /** * Read a new message from a JSON value. */ fromJson(json, options) { return this.internalJsonRead(json, json_format_contract_1.jsonReadOptions(options)); } /** * Read a new message from a JSON string. * This is equivalent to `T.fromJson(JSON.parse(json))`. */ fromJsonString(json, options) { let value = JSON.parse(json); return this.fromJson(value, options); } /** * Write the message to canonical JSON value. */ toJson(message, options) { return this.internalJsonWrite(message, json_format_contract_1.jsonWriteOptions(options)); } /** * Convert the message to canonical JSON string. * This is equivalent to `JSON.stringify(T.toJson(t))` */ toJsonString(message, options) { var _a; let value = this.toJson(message, options); return JSON.stringify(value, null, (_a = options === null || options === void 0 ? void 0 : options.prettySpaces) !== null && _a !== void 0 ? _a : 0); } /** * Write the message to binary format. */ toBinary(message, options) { let opt = binary_writer_1.binaryWriteOptions(options); return this.internalBinaryWrite(message, opt.writerFactory(), opt).finish(); } /** * This is an internal method. If you just want to read a message from * JSON, use `fromJson()` or `fromJsonString()`. * * Reads JSON value and merges the fields into the target * according to protobuf rules. 
If the target is omitted, * a new instance is created first. */ internalJsonRead(json, options, target) { if (json !== null && typeof json == "object" && !Array.isArray(json)) { let message = target !== null && target !== void 0 ? target : this.create(); this.refJsonReader.read(json, message, options); return message; } throw new Error(`Unable to parse message ${this.typeName} from JSON ${json_typings_1.typeofJsonValue(json)}.`); } /** * This is an internal method. If you just want to write a message * to JSON, use `toJson()` or `toJsonString(). * * Writes JSON value and returns it. */ internalJsonWrite(message, options) { return this.refJsonWriter.write(message, options); } /** * This is an internal method. If you just want to write a message * in binary format, use `toBinary()`. * * Serializes the message in binary format and appends it to the given * writer. Returns passed writer. */ internalBinaryWrite(message, writer, options) { this.refBinWriter.write(message, writer, options); return writer; } /** * This is an internal method. If you just want to read a message from * binary data, use `fromBinary()`. * * Reads data from binary format and merges the fields into * the target according to protobuf rules. If the target is * omitted, a new instance is created first. */ internalBinaryRead(reader, length, options, target) { let message = target !== null && target !== void 0 ? target : this.create(); this.refBinReader.read(reader, message, options, length); return message; } } exports.MessageType = MessageType; /***/ }), /***/ 18063: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getSelectedOneofValue = exports.clearOneofValue = exports.setUnknownOneofValue = exports.setOneofValue = exports.getOneofValue = exports.isOneofGroup = void 0; /** * Is the given value a valid oneof group? * * We represent protobuf `oneof` as algebraic data types (ADT) in generated * code. But when working with messages of unknown type, the ADT does not * help us. * * This type guard checks if the given object adheres to the ADT rules, which * are as follows: * * 1) Must be an object. * * 2) Must have a "oneofKind" discriminator property. * * 3) If "oneofKind" is `undefined`, no member field is selected. The object * must not have any other properties. * * 4) If "oneofKind" is a `string`, the member field with this name is * selected. * * 5) If a member field is selected, the object must have a second property * with this name. The property must not be `undefined`. * * 6) No extra properties are allowed. The object has either one property * (no selection) or two properties (selection). * */ function isOneofGroup(any) { if (typeof any != 'object' || any === null || !any.hasOwnProperty('oneofKind')) { return false; } switch (typeof any.oneofKind) { case "string": if (any[any.oneofKind] === undefined) return false; return Object.keys(any).length == 2; case "undefined": return Object.keys(any).length == 1; default: return false; } } exports.isOneofGroup = isOneofGroup; /** * Returns the value of the given field in a oneof group. 
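 *
 * Illustrative sketch (hypothetical message shape, not from the original source):
 *
 * ```ts
 * // with message.result = { oneofKind: "error", error: "boom" }
 * getOneofValue(message.result, "error") // "boom"
 * getOneofValue(message.result, "value") // undefined - "value" is not selected
 * ```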
*/ function getOneofValue(oneof, kind) { return oneof[kind]; } exports.getOneofValue = getOneofValue; function setOneofValue(oneof, kind, value) { if (oneof.oneofKind !== undefined) { delete oneof[oneof.oneofKind]; } oneof.oneofKind = kind; if (value !== undefined) { oneof[kind] = value; } } exports.setOneofValue = setOneofValue; function setUnknownOneofValue(oneof, kind, value) { if (oneof.oneofKind !== undefined) { delete oneof[oneof.oneofKind]; } oneof.oneofKind = kind; if (value !== undefined && kind !== undefined) { oneof[kind] = value; } } exports.setUnknownOneofValue = setUnknownOneofValue; /** * Removes the selected field in a oneof group. * * Note that the recommended way to modify a oneof group is to set * a new object: * * ```ts * message.result = { oneofKind: undefined }; * ``` */ function clearOneofValue(oneof) { if (oneof.oneofKind !== undefined) { delete oneof[oneof.oneofKind]; } oneof.oneofKind = undefined; } exports.clearOneofValue = clearOneofValue; /** * Returns the selected value of the given oneof group. * * Not that the recommended way to access a oneof group is to check * the "oneofKind" property and let TypeScript narrow down the union * type for you: * * ```ts * if (message.result.oneofKind === "error") { * message.result.error; // string * } * ``` * * In the rare case you just need the value, and do not care about * which protobuf field is selected, you can use this function * for convenience. */ function getSelectedOneofValue(oneof) { if (oneof.oneofKind === undefined) { return undefined; } return oneof[oneof.oneofKind]; } exports.getSelectedOneofValue = getSelectedOneofValue; /***/ }), /***/ 61753: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.PbLong = exports.PbULong = exports.detectBi = void 0; const goog_varint_1 = __nccwpck_require__(93223); let BI; function detectBi() { const dv = new DataView(new ArrayBuffer(8)); const ok = globalThis.BigInt !== undefined && typeof dv.getBigInt64 === "function" && typeof dv.getBigUint64 === "function" && typeof dv.setBigInt64 === "function" && typeof dv.setBigUint64 === "function"; BI = ok ? { MIN: BigInt("-9223372036854775808"), MAX: BigInt("9223372036854775807"), UMIN: BigInt("0"), UMAX: BigInt("18446744073709551615"), C: BigInt, V: dv, } : undefined; } exports.detectBi = detectBi; detectBi(); function assertBi(bi) { if (!bi) throw new Error("BigInt unavailable, see https://github.com/timostamm/protobuf-ts/blob/v1.0.8/MANUAL.md#bigint-support"); } // used to validate from(string) input (when bigint is unavailable) const RE_DECIMAL_STR = /^-?[0-9]+$/; // constants for binary math const TWO_PWR_32_DBL = 0x100000000; const HALF_2_PWR_32 = 0x080000000; // base class for PbLong and PbULong provides shared code class SharedPbLong { /** * Create a new instance with the given bits. */ constructor(lo, hi) { this.lo = lo | 0; this.hi = hi | 0; } /** * Is this instance equal to 0? */ isZero() { return this.lo == 0 && this.hi == 0; } /** * Convert to a native number. */ toNumber() { let result = this.hi * TWO_PWR_32_DBL + (this.lo >>> 0); if (!Number.isSafeInteger(result)) throw new Error("cannot convert to safe number"); return result; } } /** * 64-bit unsigned integer as two 32-bit values. * Converts between `string`, `number` and `bigint` representations. */ class PbULong extends SharedPbLong { /** * Create instance from a `string`, `number` or `bigint`. 
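 *
 * Illustrative examples (assumed usage, not from the original source):
 *
 * ```ts
 * PbULong.from(0)            // PbULong.ZERO
 * PbULong.from("4294967296") // lo: 0, hi: 1 (2^32)
 * PbULong.from(-1)           // throws - signed value for ulong
 * ```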
*/ static from(value) { if (BI) // noinspection FallThroughInSwitchStatementJS switch (typeof value) { case "string": if (value == "0") return this.ZERO; if (value == "") throw new Error('string is no integer'); value = BI.C(value); case "number": if (value === 0) return this.ZERO; value = BI.C(value); case "bigint": if (!value) return this.ZERO; if (value < BI.UMIN) throw new Error('signed value for ulong'); if (value > BI.UMAX) throw new Error('ulong too large'); BI.V.setBigUint64(0, value, true); return new PbULong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); } else switch (typeof value) { case "string": if (value == "0") return this.ZERO; value = value.trim(); if (!RE_DECIMAL_STR.test(value)) throw new Error('string is no integer'); let [minus, lo, hi] = goog_varint_1.int64fromString(value); if (minus) throw new Error('signed value for ulong'); return new PbULong(lo, hi); case "number": if (value == 0) return this.ZERO; if (!Number.isSafeInteger(value)) throw new Error('number is no integer'); if (value < 0) throw new Error('signed value for ulong'); return new PbULong(value, value / TWO_PWR_32_DBL); } throw new Error('unknown value ' + typeof value); } /** * Convert to decimal string. */ toString() { return BI ? this.toBigInt().toString() : goog_varint_1.int64toString(this.lo, this.hi); } /** * Convert to native bigint. */ toBigInt() { assertBi(BI); BI.V.setInt32(0, this.lo, true); BI.V.setInt32(4, this.hi, true); return BI.V.getBigUint64(0, true); } } exports.PbULong = PbULong; /** * ulong 0 singleton. */ PbULong.ZERO = new PbULong(0, 0); /** * 64-bit signed integer as two 32-bit values. * Converts between `string`, `number` and `bigint` representations. */ class PbLong extends SharedPbLong { /** * Create instance from a `string`, `number` or `bigint`. */ static from(value) { if (BI) // noinspection FallThroughInSwitchStatementJS switch (typeof value) { case "string": if (value == "0") return this.ZERO; if (value == "") throw new Error('string is no integer'); value = BI.C(value); case "number": if (value === 0) return this.ZERO; value = BI.C(value); case "bigint": if (!value) return this.ZERO; if (value < BI.MIN) throw new Error('signed long too small'); if (value > BI.MAX) throw new Error('signed long too large'); BI.V.setBigInt64(0, value, true); return new PbLong(BI.V.getInt32(0, true), BI.V.getInt32(4, true)); } else switch (typeof value) { case "string": if (value == "0") return this.ZERO; value = value.trim(); if (!RE_DECIMAL_STR.test(value)) throw new Error('string is no integer'); let [minus, lo, hi] = goog_varint_1.int64fromString(value); if (minus) { if (hi > HALF_2_PWR_32 || (hi == HALF_2_PWR_32 && lo != 0)) throw new Error('signed long too small'); } else if (hi >= HALF_2_PWR_32) throw new Error('signed long too large'); let pbl = new PbLong(lo, hi); return minus ? pbl.negate() : pbl; case "number": if (value == 0) return this.ZERO; if (!Number.isSafeInteger(value)) throw new Error('number is no integer'); return value > 0 ? new PbLong(value, value / TWO_PWR_32_DBL) : new PbLong(-value, -value / TWO_PWR_32_DBL).negate(); } throw new Error('unknown value ' + typeof value); } /** * Do we have a minus sign? */ isNegative() { return (this.hi & HALF_2_PWR_32) !== 0; } /** * Negate two's complement. * Invert all the bits and add one to the result. */ negate() { let hi = ~this.hi, lo = this.lo; if (lo) lo = ~lo + 1; else hi += 1; return new PbLong(lo, hi); } /** * Convert to decimal string. 
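 *
 * Illustrative examples (assumed usage, not from the original source):
 *
 * ```ts
 * PbLong.from(-1).toString()    // "-1"
 * PbLong.from("-10").toString() // "-10" (falls back to int64toString() when BigInt is unavailable)
 * ```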
*/ toString() { if (BI) return this.toBigInt().toString(); if (this.isNegative()) { let n = this.negate(); return '-' + goog_varint_1.int64toString(n.lo, n.hi); } return goog_varint_1.int64toString(this.lo, this.hi); } /** * Convert to native bigint. */ toBigInt() { assertBi(BI); BI.V.setInt32(0, this.lo, true); BI.V.setInt32(4, this.hi, true); return BI.V.getBigInt64(0, true); } } exports.PbLong = PbLong; /** * long 0 singleton. */ PbLong.ZERO = new PbLong(0, 0); /***/ }), /***/ 58950: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) 2016, Daniel Wirtz All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions are // met: // // * Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // * Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // * Neither the name of its author, nor the names of its contributors // may be used to endorse or promote products derived from this software // without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS // "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT // LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR // A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT // OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT // LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, // DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY // THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT // (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE // OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.utf8read = void 0; const fromCharCodes = (chunk) => String.fromCharCode.apply(String, chunk); /** * @deprecated This function will no longer be exported with the next major * release, since protobuf-ts has switch to TextDecoder API. If you need this * function, please migrate to @protobufjs/utf8. For context, see * https://github.com/timostamm/protobuf-ts/issues/184 * * Reads UTF8 bytes as a string. 
* * See [protobufjs / utf8](https://github.com/protobufjs/protobuf.js/blob/9893e35b854621cce64af4bf6be2cff4fb892796/lib/utf8/index.js#L40) * * Copyright (c) 2016, Daniel Wirtz */ function utf8read(bytes) { if (bytes.length < 1) return ""; let pos = 0, // position in bytes parts = [], chunk = [], i = 0, // char offset t; // temporary let len = bytes.length; while (pos < len) { t = bytes[pos++]; if (t < 128) chunk[i++] = t; else if (t > 191 && t < 224) chunk[i++] = (t & 31) << 6 | bytes[pos++] & 63; else if (t > 239 && t < 365) { t = ((t & 7) << 18 | (bytes[pos++] & 63) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63) - 0x10000; chunk[i++] = 0xD800 + (t >> 10); chunk[i++] = 0xDC00 + (t & 1023); } else chunk[i++] = (t & 15) << 12 | (bytes[pos++] & 63) << 6 | bytes[pos++] & 63; if (i > 8191) { parts.push(fromCharCodes(chunk)); i = 0; } } if (parts.length) { if (i) parts.push(fromCharCodes(chunk.slice(0, i))); return parts.join(""); } return fromCharCodes(chunk.slice(0, i)); } exports.utf8read = utf8read; /***/ }), /***/ 89611: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ReflectionBinaryReader = void 0; const binary_format_contract_1 = __nccwpck_require__(54816); const reflection_info_1 = __nccwpck_require__(67910); const reflection_long_convert_1 = __nccwpck_require__(63402); const reflection_scalar_default_1 = __nccwpck_require__(19526); /** * Reads proto3 messages in binary format using reflection information. * * https://developers.google.com/protocol-buffers/docs/encoding */ class ReflectionBinaryReader { constructor(info) { this.info = info; } prepare() { var _a; if (!this.fieldNoToField) { const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; this.fieldNoToField = new Map(fieldsInput.map(field => [field.no, field])); } } /** * Reads a message from binary format into the target message. * * Repeated fields are appended. Map entries are added, overwriting * existing keys. * * If a message field is already present, it will be merged with the * new data. */ read(reader, message, options, length) { this.prepare(); const end = length === undefined ? reader.len : reader.pos + length; while (reader.pos < end) { // read the tag and find the field const [fieldNo, wireType] = reader.tag(), field = this.fieldNoToField.get(fieldNo); if (!field) { let u = options.readUnknownField; if (u == "throw") throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) for ${this.info.typeName}`); let d = reader.skip(wireType); if (u !== false) (u === true ? binary_format_contract_1.UnknownFieldHandler.onRead : u)(this.info.typeName, message, fieldNo, wireType, d); continue; } // target object for the field we are reading let target = message, repeated = field.repeat, localName = field.localName; // if field is member of oneof ADT, use ADT as target if (field.oneof) { target = target[field.oneof]; // if other oneof member selected, set new ADT if (target.oneofKind !== localName) target = message[field.oneof] = { oneofKind: localName }; } // we have handled oneof above, we just have read the value into `target[localName]` switch (field.kind) { case "scalar": case "enum": let T = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; let L = field.kind == "scalar" ? 
field.L : undefined; if (repeated) { let arr = target[localName]; // safe to assume presence of array, oneof cannot contain repeated values if (wireType == binary_format_contract_1.WireType.LengthDelimited && T != reflection_info_1.ScalarType.STRING && T != reflection_info_1.ScalarType.BYTES) { let e = reader.uint32() + reader.pos; while (reader.pos < e) arr.push(this.scalar(reader, T, L)); } else arr.push(this.scalar(reader, T, L)); } else target[localName] = this.scalar(reader, T, L); break; case "message": if (repeated) { let arr = target[localName]; // safe to assume presence of array, oneof cannot contain repeated values let msg = field.T().internalBinaryRead(reader, reader.uint32(), options); arr.push(msg); } else target[localName] = field.T().internalBinaryRead(reader, reader.uint32(), options, target[localName]); break; case "map": let [mapKey, mapVal] = this.mapEntry(field, reader, options); // safe to assume presence of map object, oneof cannot contain repeated values target[localName][mapKey] = mapVal; break; } } } /** * Read a map field, expecting key field = 1, value field = 2 */ mapEntry(field, reader, options) { let length = reader.uint32(); let end = reader.pos + length; let key = undefined; // javascript only allows number or string for object properties let val = undefined; while (reader.pos < end) { let [fieldNo, wireType] = reader.tag(); switch (fieldNo) { case 1: if (field.K == reflection_info_1.ScalarType.BOOL) key = reader.bool().toString(); else // long types are read as string, number types are okay as number key = this.scalar(reader, field.K, reflection_info_1.LongType.STRING); break; case 2: switch (field.V.kind) { case "scalar": val = this.scalar(reader, field.V.T, field.V.L); break; case "enum": val = reader.int32(); break; case "message": val = field.V.T().internalBinaryRead(reader, reader.uint32(), options); break; } break; default: throw new Error(`Unknown field ${fieldNo} (wire type ${wireType}) in map entry for ${this.info.typeName}#${field.name}`); } } if (key === undefined) { let keyRaw = reflection_scalar_default_1.reflectionScalarDefault(field.K); key = field.K == reflection_info_1.ScalarType.BOOL ? 
keyRaw.toString() : keyRaw; } if (val === undefined) switch (field.V.kind) { case "scalar": val = reflection_scalar_default_1.reflectionScalarDefault(field.V.T, field.V.L); break; case "enum": val = 0; break; case "message": val = field.V.T().create(); break; } return [key, val]; } scalar(reader, type, longType) { switch (type) { case reflection_info_1.ScalarType.INT32: return reader.int32(); case reflection_info_1.ScalarType.STRING: return reader.string(); case reflection_info_1.ScalarType.BOOL: return reader.bool(); case reflection_info_1.ScalarType.DOUBLE: return reader.double(); case reflection_info_1.ScalarType.FLOAT: return reader.float(); case reflection_info_1.ScalarType.INT64: return reflection_long_convert_1.reflectionLongConvert(reader.int64(), longType); case reflection_info_1.ScalarType.UINT64: return reflection_long_convert_1.reflectionLongConvert(reader.uint64(), longType); case reflection_info_1.ScalarType.FIXED64: return reflection_long_convert_1.reflectionLongConvert(reader.fixed64(), longType); case reflection_info_1.ScalarType.FIXED32: return reader.fixed32(); case reflection_info_1.ScalarType.BYTES: return reader.bytes(); case reflection_info_1.ScalarType.UINT32: return reader.uint32(); case reflection_info_1.ScalarType.SFIXED32: return reader.sfixed32(); case reflection_info_1.ScalarType.SFIXED64: return reflection_long_convert_1.reflectionLongConvert(reader.sfixed64(), longType); case reflection_info_1.ScalarType.SINT32: return reader.sint32(); case reflection_info_1.ScalarType.SINT64: return reflection_long_convert_1.reflectionLongConvert(reader.sint64(), longType); } } } exports.ReflectionBinaryReader = ReflectionBinaryReader; /***/ }), /***/ 66907: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ReflectionBinaryWriter = void 0; const binary_format_contract_1 = __nccwpck_require__(54816); const reflection_info_1 = __nccwpck_require__(67910); const assert_1 = __nccwpck_require__(8602); const pb_long_1 = __nccwpck_require__(61753); /** * Writes proto3 messages in binary format using reflection information. * * https://developers.google.com/protocol-buffers/docs/encoding */ class ReflectionBinaryWriter { constructor(info) { this.info = info; } prepare() { if (!this.fields) { const fieldsInput = this.info.fields ? this.info.fields.concat() : []; this.fields = fieldsInput.sort((a, b) => a.no - b.no); } } /** * Writes the message to binary format. */ write(message, writer, options) { this.prepare(); for (const field of this.fields) { let value, // this will be our field value, whether it is member of a oneof or not emitDefault, // whether we emit the default value (only true for oneof members) repeated = field.repeat, localName = field.localName; // handle oneof ADT if (field.oneof) { const group = message[field.oneof]; if (group.oneofKind !== localName) continue; // if field is not selected, skip value = group[localName]; emitDefault = true; } else { value = message[localName]; emitDefault = false; } // we have handled oneof above. we just have to honor `emitDefault`. switch (field.kind) { case "scalar": case "enum": let T = field.kind == "enum" ? 
reflection_info_1.ScalarType.INT32 : field.T; if (repeated) { assert_1.assert(Array.isArray(value)); if (repeated == reflection_info_1.RepeatType.PACKED) this.packed(writer, T, field.no, value); else for (const item of value) this.scalar(writer, T, field.no, item, true); } else if (value === undefined) assert_1.assert(field.opt); else this.scalar(writer, T, field.no, value, emitDefault || field.opt); break; case "message": if (repeated) { assert_1.assert(Array.isArray(value)); for (const item of value) this.message(writer, options, field.T(), field.no, item); } else { this.message(writer, options, field.T(), field.no, value); } break; case "map": assert_1.assert(typeof value == 'object' && value !== null); for (const [key, val] of Object.entries(value)) this.mapEntry(writer, options, field, key, val); break; } } let u = options.writeUnknownFields; if (u !== false) (u === true ? binary_format_contract_1.UnknownFieldHandler.onWrite : u)(this.info.typeName, message, writer); } mapEntry(writer, options, field, key, value) { writer.tag(field.no, binary_format_contract_1.WireType.LengthDelimited); writer.fork(); // javascript only allows number or string for object properties // we convert from our representation to the protobuf type let keyValue = key; switch (field.K) { case reflection_info_1.ScalarType.INT32: case reflection_info_1.ScalarType.FIXED32: case reflection_info_1.ScalarType.UINT32: case reflection_info_1.ScalarType.SFIXED32: case reflection_info_1.ScalarType.SINT32: keyValue = Number.parseInt(key); break; case reflection_info_1.ScalarType.BOOL: assert_1.assert(key == 'true' || key == 'false'); keyValue = key == 'true'; break; } // write key, expecting key field number = 1 this.scalar(writer, field.K, 1, keyValue, true); // write value, expecting value field number = 2 switch (field.V.kind) { case 'scalar': this.scalar(writer, field.V.T, 2, value, true); break; case 'enum': this.scalar(writer, reflection_info_1.ScalarType.INT32, 2, value, true); break; case 'message': this.message(writer, options, field.V.T(), 2, value); break; } writer.join(); } message(writer, options, handler, fieldNo, value) { if (value === undefined) return; handler.internalBinaryWrite(value, writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited).fork(), options); writer.join(); } /** * Write a single scalar value. */ scalar(writer, type, fieldNo, value, emitDefault) { let [wireType, method, isDefault] = this.scalarInfo(type, value); if (!isDefault || emitDefault) { writer.tag(fieldNo, wireType); writer[method](value); } } /** * Write an array of scalar values in packed format. */ packed(writer, type, fieldNo, value) { if (!value.length) return; assert_1.assert(type !== reflection_info_1.ScalarType.BYTES && type !== reflection_info_1.ScalarType.STRING); // write tag writer.tag(fieldNo, binary_format_contract_1.WireType.LengthDelimited); // begin length-delimited writer.fork(); // write values without tags let [, method,] = this.scalarInfo(type); for (let i = 0; i < value.length; i++) writer[method](value[i]); // end length delimited writer.join(); } /** * Get information for writing a scalar value. * * Returns tuple: * [0]: appropriate WireType * [1]: name of the appropriate method of IBinaryWriter * [2]: whether the given value is a default value * * If argument `value` is omitted, [2] is always false. 
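 *
 * Illustrative sketch (assumed behaviour, derived from the switch below):
 *
 * ```ts
 * this.scalarInfo(ScalarType.INT32, 0)      // [WireType.Varint, "int32", true] - 0 is the default
 * this.scalarInfo(ScalarType.STRING, "abc") // [WireType.LengthDelimited, "string", false]
 * ```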
*/ scalarInfo(type, value) { let t = binary_format_contract_1.WireType.Varint; let m; let i = value === undefined; let d = value === 0; switch (type) { case reflection_info_1.ScalarType.INT32: m = "int32"; break; case reflection_info_1.ScalarType.STRING: d = i || !value.length; t = binary_format_contract_1.WireType.LengthDelimited; m = "string"; break; case reflection_info_1.ScalarType.BOOL: d = value === false; m = "bool"; break; case reflection_info_1.ScalarType.UINT32: m = "uint32"; break; case reflection_info_1.ScalarType.DOUBLE: t = binary_format_contract_1.WireType.Bit64; m = "double"; break; case reflection_info_1.ScalarType.FLOAT: t = binary_format_contract_1.WireType.Bit32; m = "float"; break; case reflection_info_1.ScalarType.INT64: d = i || pb_long_1.PbLong.from(value).isZero(); m = "int64"; break; case reflection_info_1.ScalarType.UINT64: d = i || pb_long_1.PbULong.from(value).isZero(); m = "uint64"; break; case reflection_info_1.ScalarType.FIXED64: d = i || pb_long_1.PbULong.from(value).isZero(); t = binary_format_contract_1.WireType.Bit64; m = "fixed64"; break; case reflection_info_1.ScalarType.BYTES: d = i || !value.byteLength; t = binary_format_contract_1.WireType.LengthDelimited; m = "bytes"; break; case reflection_info_1.ScalarType.FIXED32: t = binary_format_contract_1.WireType.Bit32; m = "fixed32"; break; case reflection_info_1.ScalarType.SFIXED32: t = binary_format_contract_1.WireType.Bit32; m = "sfixed32"; break; case reflection_info_1.ScalarType.SFIXED64: d = i || pb_long_1.PbLong.from(value).isZero(); t = binary_format_contract_1.WireType.Bit64; m = "sfixed64"; break; case reflection_info_1.ScalarType.SINT32: m = "sint32"; break; case reflection_info_1.ScalarType.SINT64: d = i || pb_long_1.PbLong.from(value).isZero(); m = "sint64"; break; } return [t, m, i || d]; } } exports.ReflectionBinaryWriter = ReflectionBinaryWriter; /***/ }), /***/ 59946: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.containsMessageType = void 0; const message_type_contract_1 = __nccwpck_require__(43785); /** * Check if the provided object is a proto message. * * Note that this is an experimental feature - it is here to stay, but * implementation details may change without notice. */ function containsMessageType(msg) { return msg[message_type_contract_1.MESSAGE_TYPE] != null; } exports.containsMessageType = containsMessageType; /***/ }), /***/ 75726: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.reflectionCreate = void 0; const reflection_scalar_default_1 = __nccwpck_require__(19526); const message_type_contract_1 = __nccwpck_require__(43785); /** * Creates an instance of the generic message, using the field * information. */ function reflectionCreate(type) { /** * This ternary can be removed in the next major version. * The `Object.create()` code path utilizes a new `messagePrototype` * property on the `IMessageType` which has this same `MESSAGE_TYPE` * non-enumerable property on it. Doing it this way means that we only * pay the cost of `Object.defineProperty()` once per `IMessageType` * class of once per "instance". The falsy code path is only provided * for backwards compatibility in cases where the runtime library is * updated without also updating the generated code. */ const msg = type.messagePrototype ? 
Object.create(type.messagePrototype) : Object.defineProperty({}, message_type_contract_1.MESSAGE_TYPE, { value: type }); for (let field of type.fields) { let name = field.localName; if (field.opt) continue; if (field.oneof) msg[field.oneof] = { oneofKind: undefined }; else if (field.repeat) msg[name] = []; else switch (field.kind) { case "scalar": msg[name] = reflection_scalar_default_1.reflectionScalarDefault(field.T, field.L); break; case "enum": // we require 0 to be default value for all enums msg[name] = 0; break; case "map": msg[name] = {}; break; } } return msg; } exports.reflectionCreate = reflectionCreate; /***/ }), /***/ 4827: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.reflectionEquals = void 0; const reflection_info_1 = __nccwpck_require__(67910); /** * Determines whether two message of the same type have the same field values. * Checks for deep equality, traversing repeated fields, oneof groups, maps * and messages recursively. * Will also return true if both messages are `undefined`. */ function reflectionEquals(info, a, b) { if (a === b) return true; if (!a || !b) return false; for (let field of info.fields) { let localName = field.localName; let val_a = field.oneof ? a[field.oneof][localName] : a[localName]; let val_b = field.oneof ? b[field.oneof][localName] : b[localName]; switch (field.kind) { case "enum": case "scalar": let t = field.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.T; if (!(field.repeat ? repeatedPrimitiveEq(t, val_a, val_b) : primitiveEq(t, val_a, val_b))) return false; break; case "map": if (!(field.V.kind == "message" ? repeatedMsgEq(field.V.T(), objectValues(val_a), objectValues(val_b)) : repeatedPrimitiveEq(field.V.kind == "enum" ? reflection_info_1.ScalarType.INT32 : field.V.T, objectValues(val_a), objectValues(val_b)))) return false; break; case "message": let T = field.T(); if (!(field.repeat ? repeatedMsgEq(T, val_a, val_b) : T.equals(val_a, val_b))) return false; break; } } return true; } exports.reflectionEquals = reflectionEquals; const objectValues = Object.values; function primitiveEq(type, a, b) { if (a === b) return true; if (type !== reflection_info_1.ScalarType.BYTES) return false; let ba = a; let bb = b; if (ba.length !== bb.length) return false; for (let i = 0; i < ba.length; i++) if (ba[i] != bb[i]) return false; return true; } function repeatedPrimitiveEq(type, a, b) { if (a.length !== b.length) return false; for (let i = 0; i < a.length; i++) if (!primitiveEq(type, a[i], b[i])) return false; return true; } function repeatedMsgEq(type, a, b) { if (a.length !== b.length) return false; for (let i = 0; i < a.length; i++) if (!type.equals(a[i], b[i])) return false; return true; } /***/ }), /***/ 67910: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.readMessageOption = exports.readFieldOption = exports.readFieldOptions = exports.normalizeFieldInfo = exports.RepeatType = exports.LongType = exports.ScalarType = void 0; const lower_camel_case_1 = __nccwpck_require__(4073); /** * Scalar value types. This is a subset of field types declared by protobuf * enum google.protobuf.FieldDescriptorProto.Type The types GROUP and MESSAGE * are omitted, but the numerical values are identical. */ var ScalarType; (function (ScalarType) { // 0 is reserved for errors. // Order is weird for historical reasons. 
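// Illustrative note (not from the original source): each assignment below builds a
// two-way lookup, e.g. ScalarType.DOUBLE === 1 and ScalarType[1] === "DOUBLE",
// which is the reverse mapping that isEnumObject() relies on.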
ScalarType[ScalarType["DOUBLE"] = 1] = "DOUBLE"; ScalarType[ScalarType["FLOAT"] = 2] = "FLOAT"; // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT64 if // negative values are likely. ScalarType[ScalarType["INT64"] = 3] = "INT64"; ScalarType[ScalarType["UINT64"] = 4] = "UINT64"; // Not ZigZag encoded. Negative numbers take 10 bytes. Use TYPE_SINT32 if // negative values are likely. ScalarType[ScalarType["INT32"] = 5] = "INT32"; ScalarType[ScalarType["FIXED64"] = 6] = "FIXED64"; ScalarType[ScalarType["FIXED32"] = 7] = "FIXED32"; ScalarType[ScalarType["BOOL"] = 8] = "BOOL"; ScalarType[ScalarType["STRING"] = 9] = "STRING"; // Tag-delimited aggregate. // Group type is deprecated and not supported in proto3. However, Proto3 // implementations should still be able to parse the group wire format and // treat group fields as unknown fields. // TYPE_GROUP = 10, // TYPE_MESSAGE = 11, // Length-delimited aggregate. // New in version 2. ScalarType[ScalarType["BYTES"] = 12] = "BYTES"; ScalarType[ScalarType["UINT32"] = 13] = "UINT32"; // TYPE_ENUM = 14, ScalarType[ScalarType["SFIXED32"] = 15] = "SFIXED32"; ScalarType[ScalarType["SFIXED64"] = 16] = "SFIXED64"; ScalarType[ScalarType["SINT32"] = 17] = "SINT32"; ScalarType[ScalarType["SINT64"] = 18] = "SINT64"; })(ScalarType = exports.ScalarType || (exports.ScalarType = {})); /** * JavaScript representation of 64 bit integral types. Equivalent to the * field option "jstype". * * By default, protobuf-ts represents 64 bit types as `bigint`. * * You can change the default behaviour by enabling the plugin parameter * `long_type_string`, which will represent 64 bit types as `string`. * * Alternatively, you can change the behaviour for individual fields * with the field option "jstype": * * ```protobuf * uint64 my_field = 1 [jstype = JS_STRING]; * uint64 other_field = 2 [jstype = JS_NUMBER]; * ``` */ var LongType; (function (LongType) { /** * Use JavaScript `bigint`. * * Field option `[jstype = JS_NORMAL]`. */ LongType[LongType["BIGINT"] = 0] = "BIGINT"; /** * Use JavaScript `string`. * * Field option `[jstype = JS_STRING]`. */ LongType[LongType["STRING"] = 1] = "STRING"; /** * Use JavaScript `number`. * * Large values will loose precision. * * Field option `[jstype = JS_NUMBER]`. */ LongType[LongType["NUMBER"] = 2] = "NUMBER"; })(LongType = exports.LongType || (exports.LongType = {})); /** * Protobuf 2.1.0 introduced packed repeated fields. * Setting the field option `[packed = true]` enables packing. * * In proto3, all repeated fields are packed by default. * Setting the field option `[packed = false]` disables packing. * * Packed repeated fields are encoded with a single tag, * then a length-delimiter, then the element values. * * Unpacked repeated fields are encoded with a tag and * value for each element. * * `bytes` and `string` cannot be packed. */ var RepeatType; (function (RepeatType) { /** * The field is not repeated. */ RepeatType[RepeatType["NO"] = 0] = "NO"; /** * The field is repeated and should be packed. * Invalid for `bytes` and `string`, they cannot be packed. */ RepeatType[RepeatType["PACKED"] = 1] = "PACKED"; /** * The field is repeated but should not be packed. * The only valid repeat type for repeated `bytes` and `string`. */ RepeatType[RepeatType["UNPACKED"] = 2] = "UNPACKED"; })(RepeatType = exports.RepeatType || (exports.RepeatType = {})); /** * Turns PartialFieldInfo into FieldInfo. */ function normalizeFieldInfo(field) { var _a, _b, _c, _d; field.localName = (_a = field.localName) !== null && _a !== void 0 ? 
_a : lower_camel_case_1.lowerCamelCase(field.name); field.jsonName = (_b = field.jsonName) !== null && _b !== void 0 ? _b : lower_camel_case_1.lowerCamelCase(field.name); field.repeat = (_c = field.repeat) !== null && _c !== void 0 ? _c : RepeatType.NO; field.opt = (_d = field.opt) !== null && _d !== void 0 ? _d : (field.repeat ? false : field.oneof ? false : field.kind == "message"); return field; } exports.normalizeFieldInfo = normalizeFieldInfo; /** * Read custom field options from a generated message type. * * @deprecated use readFieldOption() */ function readFieldOptions(messageType, fieldName, extensionName, extensionType) { var _a; const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; return options && options[extensionName] ? extensionType.fromJson(options[extensionName]) : undefined; } exports.readFieldOptions = readFieldOptions; function readFieldOption(messageType, fieldName, extensionName, extensionType) { var _a; const options = (_a = messageType.fields.find((m, i) => m.localName == fieldName || i == fieldName)) === null || _a === void 0 ? void 0 : _a.options; if (!options) { return undefined; } const optionVal = options[extensionName]; if (optionVal === undefined) { return optionVal; } return extensionType ? extensionType.fromJson(optionVal) : optionVal; } exports.readFieldOption = readFieldOption; function readMessageOption(messageType, extensionName, extensionType) { const options = messageType.options; const optionVal = options[extensionName]; if (optionVal === undefined) { return optionVal; } return extensionType ? extensionType.fromJson(optionVal) : optionVal; } exports.readMessageOption = readMessageOption; /***/ }), /***/ 46790: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ReflectionJsonReader = void 0; const json_typings_1 = __nccwpck_require__(49999); const base64_1 = __nccwpck_require__(26335); const reflection_info_1 = __nccwpck_require__(67910); const pb_long_1 = __nccwpck_require__(61753); const assert_1 = __nccwpck_require__(8602); const reflection_long_convert_1 = __nccwpck_require__(63402); /** * Reads proto3 messages in canonical JSON format using reflection information. * * https://developers.google.com/protocol-buffers/docs/proto3#json */ class ReflectionJsonReader { constructor(info) { this.info = info; } prepare() { var _a; if (this.fMap === undefined) { this.fMap = {}; const fieldsInput = (_a = this.info.fields) !== null && _a !== void 0 ? _a : []; for (const field of fieldsInput) { this.fMap[field.name] = field; this.fMap[field.jsonName] = field; this.fMap[field.localName] = field; } } } // Cannot parse JSON for #. assert(condition, fieldName, jsonValue) { if (!condition) { let what = json_typings_1.typeofJsonValue(jsonValue); if (what == "number" || what == "boolean") what = jsonValue.toString(); throw new Error(`Cannot parse JSON ${what} for ${this.info.typeName}#${fieldName}`); } } /** * Reads a message from canonical JSON format into the target message. * * Repeated fields are appended. Map entries are added, overwriting * existing keys. * * If a message field is already present, it will be merged with the * new data. 
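 *
 * A hedged usage sketch (the message type and target below are placeholders,
 * not part of this runtime):
 *
 * ```javascript
 * const reader = new ReflectionJsonReader(MyMessage); // MyMessage: generated message type / reflection info
 * const target = MyMessage.create();                  // start from default values
 * reader.read(JSON.parse('{"myField": 42}'), target, { ignoreUnknownFields: true });
 * ```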
*/ read(input, message, options) { this.prepare(); const oneofsHandled = []; for (const [jsonKey, jsonValue] of Object.entries(input)) { const field = this.fMap[jsonKey]; if (!field) { if (!options.ignoreUnknownFields) throw new Error(`Found unknown field while reading ${this.info.typeName} from JSON format. JSON key: ${jsonKey}`); continue; } const localName = field.localName; // handle oneof ADT let target; // this will be the target for the field value, whether it is member of a oneof or not if (field.oneof) { if (jsonValue === null && (field.kind !== 'enum' || field.T()[0] !== 'google.protobuf.NullValue')) { continue; } // since json objects are unordered by specification, it is not possible to take the last of multiple oneofs if (oneofsHandled.includes(field.oneof)) throw new Error(`Multiple members of the oneof group "${field.oneof}" of ${this.info.typeName} are present in JSON.`); oneofsHandled.push(field.oneof); target = message[field.oneof] = { oneofKind: localName }; } else { target = message; } // we have handled oneof above. we just have read the value into `target`. if (field.kind == 'map') { if (jsonValue === null) { continue; } // check input this.assert(json_typings_1.isJsonObject(jsonValue), field.name, jsonValue); // our target to put map entries into const fieldObj = target[localName]; // read entries for (const [jsonObjKey, jsonObjValue] of Object.entries(jsonValue)) { this.assert(jsonObjValue !== null, field.name + " map value", null); // read value let val; switch (field.V.kind) { case "message": val = field.V.T().internalJsonRead(jsonObjValue, options); break; case "enum": val = this.enum(field.V.T(), jsonObjValue, field.name, options.ignoreUnknownFields); if (val === false) continue; break; case "scalar": val = this.scalar(jsonObjValue, field.V.T, field.V.L, field.name); break; } this.assert(val !== undefined, field.name + " map value", jsonObjValue); // read key let key = jsonObjKey; if (field.K == reflection_info_1.ScalarType.BOOL) key = key == "true" ? true : key == "false" ? false : key; key = this.scalar(key, field.K, reflection_info_1.LongType.STRING, field.name).toString(); fieldObj[key] = val; } } else if (field.repeat) { if (jsonValue === null) continue; // check input this.assert(Array.isArray(jsonValue), field.name, jsonValue); // our target to put array entries into const fieldArr = target[localName]; // read array entries for (const jsonItem of jsonValue) { this.assert(jsonItem !== null, field.name, null); let val; switch (field.kind) { case "message": val = field.T().internalJsonRead(jsonItem, options); break; case "enum": val = this.enum(field.T(), jsonItem, field.name, options.ignoreUnknownFields); if (val === false) continue; break; case "scalar": val = this.scalar(jsonItem, field.T, field.L, field.name); break; } this.assert(val !== undefined, field.name, jsonValue); fieldArr.push(val); } } else { switch (field.kind) { case "message": if (jsonValue === null && field.T().typeName != 'google.protobuf.Value') { this.assert(field.oneof === undefined, field.name + " (oneof member)", null); continue; } target[localName] = field.T().internalJsonRead(jsonValue, options, target[localName]); break; case "enum": let val = this.enum(field.T(), jsonValue, field.name, options.ignoreUnknownFields); if (val === false) continue; target[localName] = val; break; case "scalar": target[localName] = this.scalar(jsonValue, field.T, field.L, field.name); break; } } } } /** * Returns `false` for unrecognized string representations. 
* * google.protobuf.NullValue accepts only JSON `null` (or the old `"NULL_VALUE"`). */ enum(type, json, fieldName, ignoreUnknownFields) { if (type[0] == 'google.protobuf.NullValue') assert_1.assert(json === null || json === "NULL_VALUE", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} only accepts null.`); if (json === null) // we require 0 to be default value for all enums return 0; switch (typeof json) { case "number": assert_1.assert(Number.isInteger(json), `Unable to parse field ${this.info.typeName}#${fieldName}, enum can only be integral number, got ${json}.`); return json; case "string": let localEnumName = json; if (type[2] && json.substring(0, type[2].length) === type[2]) // lookup without the shared prefix localEnumName = json.substring(type[2].length); let enumNumber = type[1][localEnumName]; if (typeof enumNumber === 'undefined' && ignoreUnknownFields) { return false; } assert_1.assert(typeof enumNumber == "number", `Unable to parse field ${this.info.typeName}#${fieldName}, enum ${type[0]} has no value for "${json}".`); return enumNumber; } assert_1.assert(false, `Unable to parse field ${this.info.typeName}#${fieldName}, cannot parse enum value from ${typeof json}".`); } scalar(json, type, longType, fieldName) { let e; try { switch (type) { // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". // Either numbers or strings are accepted. Exponent notation is also accepted. case reflection_info_1.ScalarType.DOUBLE: case reflection_info_1.ScalarType.FLOAT: if (json === null) return .0; if (json === "NaN") return Number.NaN; if (json === "Infinity") return Number.POSITIVE_INFINITY; if (json === "-Infinity") return Number.NEGATIVE_INFINITY; if (json === "") { e = "empty string"; break; } if (typeof json == "string" && json.trim().length !== json.length) { e = "extra whitespace"; break; } if (typeof json != "string" && typeof json != "number") { break; } let float = Number(json); if (Number.isNaN(float)) { e = "not a number"; break; } if (!Number.isFinite(float)) { // infinity and -infinity are handled by string representation above, so this is an error e = "too large or small"; break; } if (type == reflection_info_1.ScalarType.FLOAT) assert_1.assertFloat32(float); return float; // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. case reflection_info_1.ScalarType.INT32: case reflection_info_1.ScalarType.FIXED32: case reflection_info_1.ScalarType.SFIXED32: case reflection_info_1.ScalarType.SINT32: case reflection_info_1.ScalarType.UINT32: if (json === null) return 0; let int32; if (typeof json == "number") int32 = json; else if (json === "") e = "empty string"; else if (typeof json == "string") { if (json.trim().length !== json.length) e = "extra whitespace"; else int32 = Number(json); } if (int32 === undefined) break; if (type == reflection_info_1.ScalarType.UINT32) assert_1.assertUInt32(int32); else assert_1.assertInt32(int32); return int32; // int64, fixed64, uint64: JSON value will be a decimal string. Either numbers or strings are accepted. 
case reflection_info_1.ScalarType.INT64: case reflection_info_1.ScalarType.SFIXED64: case reflection_info_1.ScalarType.SINT64: if (json === null) return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); if (typeof json != "number" && typeof json != "string") break; return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.from(json), longType); case reflection_info_1.ScalarType.FIXED64: case reflection_info_1.ScalarType.UINT64: if (json === null) return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); if (typeof json != "number" && typeof json != "string") break; return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.from(json), longType); // bool: case reflection_info_1.ScalarType.BOOL: if (json === null) return false; if (typeof json !== "boolean") break; return json; // string: case reflection_info_1.ScalarType.STRING: if (json === null) return ""; if (typeof json !== "string") { e = "extra whitespace"; break; } try { encodeURIComponent(json); } catch (e) { e = "invalid UTF8"; break; } return json; // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. // Either standard or URL-safe base64 encoding with/without paddings are accepted. case reflection_info_1.ScalarType.BYTES: if (json === null || json === "") return new Uint8Array(0); if (typeof json !== 'string') break; return base64_1.base64decode(json); } } catch (error) { e = error.message; } this.assert(false, fieldName + (e ? " - " + e : ""), json); } } exports.ReflectionJsonReader = ReflectionJsonReader; /***/ }), /***/ 11094: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ReflectionJsonWriter = void 0; const base64_1 = __nccwpck_require__(26335); const pb_long_1 = __nccwpck_require__(61753); const reflection_info_1 = __nccwpck_require__(67910); const assert_1 = __nccwpck_require__(8602); /** * Writes proto3 messages in canonical JSON format using reflection * information. * * https://developers.google.com/protocol-buffers/docs/proto3#json */ class ReflectionJsonWriter { constructor(info) { var _a; this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; } /** * Converts the message to a JSON object, based on the field descriptors. */ write(message, options) { const json = {}, source = message; for (const field of this.fields) { // field is not part of a oneof, simply write as is if (!field.oneof) { let jsonValue = this.field(field, source[field.localName], options); if (jsonValue !== undefined) json[options.useProtoFieldName ? field.name : field.jsonName] = jsonValue; continue; } // field is part of a oneof const group = source[field.oneof]; if (group.oneofKind !== field.localName) continue; // not selected, skip const opt = field.kind == 'scalar' || field.kind == 'enum' ? Object.assign(Object.assign({}, options), { emitDefaultValues: true }) : options; let jsonValue = this.field(field, group[field.localName], opt); assert_1.assert(jsonValue !== undefined); json[options.useProtoFieldName ? 
field.name : field.jsonName] = jsonValue; } return json; } field(field, value, options) { let jsonValue = undefined; if (field.kind == 'map') { assert_1.assert(typeof value == "object" && value !== null); const jsonObj = {}; switch (field.V.kind) { case "scalar": for (const [entryKey, entryValue] of Object.entries(value)) { const val = this.scalar(field.V.T, entryValue, field.name, false, true); assert_1.assert(val !== undefined); jsonObj[entryKey.toString()] = val; // JSON standard allows only (double quoted) string as property key } break; case "message": const messageType = field.V.T(); for (const [entryKey, entryValue] of Object.entries(value)) { const val = this.message(messageType, entryValue, field.name, options); assert_1.assert(val !== undefined); jsonObj[entryKey.toString()] = val; // JSON standard allows only (double quoted) string as property key } break; case "enum": const enumInfo = field.V.T(); for (const [entryKey, entryValue] of Object.entries(value)) { assert_1.assert(entryValue === undefined || typeof entryValue == 'number'); const val = this.enum(enumInfo, entryValue, field.name, false, true, options.enumAsInteger); assert_1.assert(val !== undefined); jsonObj[entryKey.toString()] = val; // JSON standard allows only (double quoted) string as property key } break; } if (options.emitDefaultValues || Object.keys(jsonObj).length > 0) jsonValue = jsonObj; } else if (field.repeat) { assert_1.assert(Array.isArray(value)); const jsonArr = []; switch (field.kind) { case "scalar": for (let i = 0; i < value.length; i++) { const val = this.scalar(field.T, value[i], field.name, field.opt, true); assert_1.assert(val !== undefined); jsonArr.push(val); } break; case "enum": const enumInfo = field.T(); for (let i = 0; i < value.length; i++) { assert_1.assert(value[i] === undefined || typeof value[i] == 'number'); const val = this.enum(enumInfo, value[i], field.name, field.opt, true, options.enumAsInteger); assert_1.assert(val !== undefined); jsonArr.push(val); } break; case "message": const messageType = field.T(); for (let i = 0; i < value.length; i++) { const val = this.message(messageType, value[i], field.name, options); assert_1.assert(val !== undefined); jsonArr.push(val); } break; } // add converted array to json output if (options.emitDefaultValues || jsonArr.length > 0 || options.emitDefaultValues) jsonValue = jsonArr; } else { switch (field.kind) { case "scalar": jsonValue = this.scalar(field.T, value, field.name, field.opt, options.emitDefaultValues); break; case "enum": jsonValue = this.enum(field.T(), value, field.name, field.opt, options.emitDefaultValues, options.enumAsInteger); break; case "message": jsonValue = this.message(field.T(), value, field.name, options); break; } } return jsonValue; } /** * Returns `null` as the default for google.protobuf.NullValue. */ enum(type, value, fieldName, optional, emitDefaultValues, enumAsInteger) { if (type[0] == 'google.protobuf.NullValue') return !emitDefaultValues && !optional ? 
undefined : null; if (value === undefined) { assert_1.assert(optional); return undefined; } if (value === 0 && !emitDefaultValues && !optional) // we require 0 to be default value for all enums return undefined; assert_1.assert(typeof value == 'number'); assert_1.assert(Number.isInteger(value)); if (enumAsInteger || !type[1].hasOwnProperty(value)) // if we don't now the enum value, just return the number return value; if (type[2]) // restore the dropped prefix return type[2] + type[1][value]; return type[1][value]; } message(type, value, fieldName, options) { if (value === undefined) return options.emitDefaultValues ? null : undefined; return type.internalJsonWrite(value, options); } scalar(type, value, fieldName, optional, emitDefaultValues) { if (value === undefined) { assert_1.assert(optional); return undefined; } const ed = emitDefaultValues || optional; // noinspection FallThroughInSwitchStatementJS switch (type) { // int32, fixed32, uint32: JSON value will be a decimal number. Either numbers or strings are accepted. case reflection_info_1.ScalarType.INT32: case reflection_info_1.ScalarType.SFIXED32: case reflection_info_1.ScalarType.SINT32: if (value === 0) return ed ? 0 : undefined; assert_1.assertInt32(value); return value; case reflection_info_1.ScalarType.FIXED32: case reflection_info_1.ScalarType.UINT32: if (value === 0) return ed ? 0 : undefined; assert_1.assertUInt32(value); return value; // float, double: JSON value will be a number or one of the special string values "NaN", "Infinity", and "-Infinity". // Either numbers or strings are accepted. Exponent notation is also accepted. case reflection_info_1.ScalarType.FLOAT: assert_1.assertFloat32(value); case reflection_info_1.ScalarType.DOUBLE: if (value === 0) return ed ? 0 : undefined; assert_1.assert(typeof value == 'number'); if (Number.isNaN(value)) return 'NaN'; if (value === Number.POSITIVE_INFINITY) return 'Infinity'; if (value === Number.NEGATIVE_INFINITY) return '-Infinity'; return value; // string: case reflection_info_1.ScalarType.STRING: if (value === "") return ed ? '' : undefined; assert_1.assert(typeof value == 'string'); return value; // bool: case reflection_info_1.ScalarType.BOOL: if (value === false) return ed ? false : undefined; assert_1.assert(typeof value == 'boolean'); return value; // JSON value will be a decimal string. Either numbers or strings are accepted. case reflection_info_1.ScalarType.UINT64: case reflection_info_1.ScalarType.FIXED64: assert_1.assert(typeof value == 'number' || typeof value == 'string' || typeof value == 'bigint'); let ulong = pb_long_1.PbULong.from(value); if (ulong.isZero() && !ed) return undefined; return ulong.toString(); // JSON value will be a decimal string. Either numbers or strings are accepted. case reflection_info_1.ScalarType.INT64: case reflection_info_1.ScalarType.SFIXED64: case reflection_info_1.ScalarType.SINT64: assert_1.assert(typeof value == 'number' || typeof value == 'string' || typeof value == 'bigint'); let long = pb_long_1.PbLong.from(value); if (long.isZero() && !ed) return undefined; return long.toString(); // bytes: JSON value will be the data encoded as a string using standard base64 encoding with paddings. // Either standard or URL-safe base64 encoding with/without paddings are accepted. case reflection_info_1.ScalarType.BYTES: assert_1.assert(value instanceof Uint8Array); if (!value.byteLength) return ed ? 
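/* Illustrative sketch (not part of the original module): how the writer above is
 * typically used. `MyMessage` and `message` are placeholders; the option names
 * (emitDefaultValues, enumAsInteger, useProtoFieldName) are the ones read by the
 * code above.
 *
 *   const writer = new ReflectionJsonWriter(MyMessage);
 *   const json = writer.write(message, {
 *     emitDefaultValues: false,   // omit zero/empty values, as in scalar() above
 *     enumAsInteger: false,       // write enum names rather than numbers
 *     useProtoFieldName: false    // use lowerCamelCase JSON names
 *   });
 *   JSON.stringify(json);
 */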
"" : undefined; return base64_1.base64encode(value); } } } exports.ReflectionJsonWriter = ReflectionJsonWriter; /***/ }), /***/ 63402: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.reflectionLongConvert = void 0; const reflection_info_1 = __nccwpck_require__(67910); /** * Utility method to convert a PbLong or PbUlong to a JavaScript * representation during runtime. * * Works with generated field information, `undefined` is equivalent * to `STRING`. */ function reflectionLongConvert(long, type) { switch (type) { case reflection_info_1.LongType.BIGINT: return long.toBigInt(); case reflection_info_1.LongType.NUMBER: return long.toNumber(); default: // case undefined: // case LongType.STRING: return long.toString(); } } exports.reflectionLongConvert = reflectionLongConvert; /***/ }), /***/ 98044: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.reflectionMergePartial = void 0; /** * Copy partial data into the target message. * * If a singular scalar or enum field is present in the source, it * replaces the field in the target. * * If a singular message field is present in the source, it is merged * with the target field by calling mergePartial() of the responsible * message type. * * If a repeated field is present in the source, its values replace * all values in the target array, removing extraneous values. * Repeated message fields are copied, not merged. * * If a map field is present in the source, entries are added to the * target map, replacing entries with the same key. Entries that only * exist in the target remain. Entries with message values are copied, * not merged. * * Note that this function differs from protobuf merge semantics, * which appends repeated fields. */ function reflectionMergePartial(info, target, source) { let fieldValue, // the field value we are working with input = source, output; // where we want our field value to go for (let field of info.fields) { let name = field.localName; if (field.oneof) { const group = input[field.oneof]; // this is the oneof`s group in the source if ((group === null || group === void 0 ? 
void 0 : group.oneofKind) == undefined) { // the user is free to omit continue; // we skip this field, and all other members too } fieldValue = group[name]; // our value comes from the the oneof group of the source output = target[field.oneof]; // and our output is the oneof group of the target output.oneofKind = group.oneofKind; // always update discriminator if (fieldValue == undefined) { delete output[name]; // remove any existing value continue; // skip further work on field } } else { fieldValue = input[name]; // we are using the source directly output = target; // we want our field value to go directly into the target if (fieldValue == undefined) { continue; // skip further work on field, existing value is used as is } } if (field.repeat) output[name].length = fieldValue.length; // resize target array to match source array // now we just work with `fieldValue` and `output` to merge the value switch (field.kind) { case "scalar": case "enum": if (field.repeat) for (let i = 0; i < fieldValue.length; i++) output[name][i] = fieldValue[i]; // not a reference type else output[name] = fieldValue; // not a reference type break; case "message": let T = field.T(); if (field.repeat) for (let i = 0; i < fieldValue.length; i++) output[name][i] = T.create(fieldValue[i]); else if (output[name] === undefined) output[name] = T.create(fieldValue); // nothing to merge with else T.mergePartial(output[name], fieldValue); break; case "map": // Map and repeated fields are simply overwritten, not appended or merged switch (field.V.kind) { case "scalar": case "enum": Object.assign(output[name], fieldValue); // elements are not reference types break; case "message": let T = field.V.T(); for (let k of Object.keys(fieldValue)) output[name][k] = T.create(fieldValue[k]); break; } break; } } } exports.reflectionMergePartial = reflectionMergePartial; /***/ }), /***/ 19526: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.reflectionScalarDefault = void 0; const reflection_info_1 = __nccwpck_require__(67910); const reflection_long_convert_1 = __nccwpck_require__(63402); const pb_long_1 = __nccwpck_require__(61753); /** * Creates the default value for a scalar type. 
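 *
 * For example, with the defaults implemented below (LongType.STRING is the
 * default representation for 64 bit types):
 *
 * ```javascript
 * reflectionScalarDefault(ScalarType.STRING)                  // -> ""
 * reflectionScalarDefault(ScalarType.BOOL)                    // -> false
 * reflectionScalarDefault(ScalarType.UINT64)                  // -> "0"
 * reflectionScalarDefault(ScalarType.UINT64, LongType.BIGINT) // -> 0n
 * ```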
*/ function reflectionScalarDefault(type, longType = reflection_info_1.LongType.STRING) { switch (type) { case reflection_info_1.ScalarType.BOOL: return false; case reflection_info_1.ScalarType.UINT64: case reflection_info_1.ScalarType.FIXED64: return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbULong.ZERO, longType); case reflection_info_1.ScalarType.INT64: case reflection_info_1.ScalarType.SFIXED64: case reflection_info_1.ScalarType.SINT64: return reflection_long_convert_1.reflectionLongConvert(pb_long_1.PbLong.ZERO, longType); case reflection_info_1.ScalarType.DOUBLE: case reflection_info_1.ScalarType.FLOAT: return 0.0; case reflection_info_1.ScalarType.BYTES: return new Uint8Array(0); case reflection_info_1.ScalarType.STRING: return ""; default: // case ScalarType.INT32: // case ScalarType.UINT32: // case ScalarType.SINT32: // case ScalarType.FIXED32: // case ScalarType.SFIXED32: return 0; } } exports.reflectionScalarDefault = reflectionScalarDefault; /***/ }), /***/ 25167: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ReflectionTypeCheck = void 0; const reflection_info_1 = __nccwpck_require__(67910); const oneof_1 = __nccwpck_require__(18063); // noinspection JSMethodCanBeStatic class ReflectionTypeCheck { constructor(info) { var _a; this.fields = (_a = info.fields) !== null && _a !== void 0 ? _a : []; } prepare() { if (this.data) return; const req = [], known = [], oneofs = []; for (let field of this.fields) { if (field.oneof) { if (!oneofs.includes(field.oneof)) { oneofs.push(field.oneof); req.push(field.oneof); known.push(field.oneof); } } else { known.push(field.localName); switch (field.kind) { case "scalar": case "enum": if (!field.opt || field.repeat) req.push(field.localName); break; case "message": if (field.repeat) req.push(field.localName); break; case "map": req.push(field.localName); break; } } } this.data = { req, known, oneofs: Object.values(oneofs) }; } /** * Is the argument a valid message as specified by the * reflection information? * * Checks all field types recursively. The `depth` * specifies how deep into the structure the check will be. * * With a depth of 0, only the presence of fields * is checked. * * With a depth of 1 or more, the field types are checked. * * With a depth of 2 or more, the members of map, repeated * and message fields are checked. * * Message fields will be checked recursively with depth - 1. * * The number of map entries / repeated values being checked * is < depth. */ is(message, depth, allowExcessProperties = false) { if (depth < 0) return true; if (message === null || message === undefined || typeof message != 'object') return false; this.prepare(); let keys = Object.keys(message), data = this.data; // if a required field is missing in arg, this cannot be a T if (keys.length < data.req.length || data.req.some(n => !keys.includes(n))) return false; if (!allowExcessProperties) { // if the arg contains a key we dont know, this is not a literal T if (keys.some(k => !data.known.includes(k))) return false; } // "With a depth of 0, only the presence and absence of fields is checked." // "With a depth of 1 or more, the field types are checked." 
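/* Illustrative sketch (not part of the original module): what the `depth` parameter
 * documented above means in practice. `check` and `msg` are placeholders.
 *
 *   const check = new ReflectionTypeCheck(MyMessage);
 *   check.is(msg, 0);   // only verifies that the expected field keys are present
 *   check.is(msg, 1);   // additionally verifies field types
 *   check.is(msg, 16);  // also descends into map / repeated / message fields,
 *                       // checking fewer than `depth` elements per collection
 */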
if (depth < 1) { return true; } // check oneof group for (const name of data.oneofs) { const group = message[name]; if (!oneof_1.isOneofGroup(group)) return false; if (group.oneofKind === undefined) continue; const field = this.fields.find(f => f.localName === group.oneofKind); if (!field) return false; // we found no field, but have a kind, something is wrong if (!this.field(group[group.oneofKind], field, allowExcessProperties, depth)) return false; } // check types for (const field of this.fields) { if (field.oneof !== undefined) continue; if (!this.field(message[field.localName], field, allowExcessProperties, depth)) return false; } return true; } field(arg, field, allowExcessProperties, depth) { let repeated = field.repeat; switch (field.kind) { case "scalar": if (arg === undefined) return field.opt; if (repeated) return this.scalars(arg, field.T, depth, field.L); return this.scalar(arg, field.T, field.L); case "enum": if (arg === undefined) return field.opt; if (repeated) return this.scalars(arg, reflection_info_1.ScalarType.INT32, depth); return this.scalar(arg, reflection_info_1.ScalarType.INT32); case "message": if (arg === undefined) return true; if (repeated) return this.messages(arg, field.T(), allowExcessProperties, depth); return this.message(arg, field.T(), allowExcessProperties, depth); case "map": if (typeof arg != 'object' || arg === null) return false; if (depth < 2) return true; if (!this.mapKeys(arg, field.K, depth)) return false; switch (field.V.kind) { case "scalar": return this.scalars(Object.values(arg), field.V.T, depth, field.V.L); case "enum": return this.scalars(Object.values(arg), reflection_info_1.ScalarType.INT32, depth); case "message": return this.messages(Object.values(arg), field.V.T(), allowExcessProperties, depth); } break; } return true; } message(arg, type, allowExcessProperties, depth) { if (allowExcessProperties) { return type.isAssignable(arg, depth); } return type.is(arg, depth); } messages(arg, type, allowExcessProperties, depth) { if (!Array.isArray(arg)) return false; if (depth < 2) return true; if (allowExcessProperties) { for (let i = 0; i < arg.length && i < depth; i++) if (!type.isAssignable(arg[i], depth - 1)) return false; } else { for (let i = 0; i < arg.length && i < depth; i++) if (!type.is(arg[i], depth - 1)) return false; } return true; } scalar(arg, type, longType) { let argType = typeof arg; switch (type) { case reflection_info_1.ScalarType.UINT64: case reflection_info_1.ScalarType.FIXED64: case reflection_info_1.ScalarType.INT64: case reflection_info_1.ScalarType.SFIXED64: case reflection_info_1.ScalarType.SINT64: switch (longType) { case reflection_info_1.LongType.BIGINT: return argType == "bigint"; case reflection_info_1.LongType.NUMBER: return argType == "number" && !isNaN(arg); default: return argType == "string"; } case reflection_info_1.ScalarType.BOOL: return argType == 'boolean'; case reflection_info_1.ScalarType.STRING: return argType == 'string'; case reflection_info_1.ScalarType.BYTES: return arg instanceof Uint8Array; case reflection_info_1.ScalarType.DOUBLE: case reflection_info_1.ScalarType.FLOAT: return argType == 'number' && !isNaN(arg); default: // case ScalarType.UINT32: // case ScalarType.FIXED32: // case ScalarType.INT32: // case ScalarType.SINT32: // case ScalarType.SFIXED32: return argType == 'number' && Number.isInteger(arg); } } scalars(arg, type, depth, longType) { if (!Array.isArray(arg)) return false; if (depth < 2) return true; if (Array.isArray(arg)) for (let i = 0; i < arg.length && i < depth; i++) if 
(!this.scalar(arg[i], type, longType)) return false; return true; } mapKeys(map, type, depth) { let keys = Object.keys(map); switch (type) { case reflection_info_1.ScalarType.INT32: case reflection_info_1.ScalarType.FIXED32: case reflection_info_1.ScalarType.SFIXED32: case reflection_info_1.ScalarType.SINT32: case reflection_info_1.ScalarType.UINT32: return this.scalars(keys.slice(0, depth).map(k => parseInt(k)), type, depth); case reflection_info_1.ScalarType.BOOL: return this.scalars(keys.slice(0, depth).map(k => k == 'true' ? true : k == 'false' ? false : k), type, depth); default: return this.scalars(keys, type, depth, reflection_info_1.LongType.STRING); } } } exports.ReflectionTypeCheck = ReflectionTypeCheck; /***/ }), /***/ 48662: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function once(emitter, name, { signal } = {}) { return new Promise((resolve, reject) => { function cleanup() { signal === null || signal === void 0 ? void 0 : signal.removeEventListener('abort', cleanup); emitter.removeListener(name, onEvent); emitter.removeListener('error', onError); } function onEvent(...args) { cleanup(); resolve(args); } function onError(err) { cleanup(); reject(err); } signal === null || signal === void 0 ? void 0 : signal.addEventListener('abort', cleanup); emitter.on(name, onEvent); emitter.on('error', onError); }); } exports["default"] = once; //# sourceMappingURL=index.js.map /***/ }), /***/ 17413: /***/ ((module, exports, __nccwpck_require__) => { "use strict"; /** * @author Toru Nagashima * See LICENSE file in root directory for full license. */ Object.defineProperty(exports, "__esModule", ({ value: true })); var eventTargetShim = __nccwpck_require__(16577); /** * The signal class. * @see https://dom.spec.whatwg.org/#abortsignal */ class AbortSignal extends eventTargetShim.EventTarget { /** * AbortSignal cannot be constructed directly. */ constructor() { super(); throw new TypeError("AbortSignal cannot be constructed directly"); } /** * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise. */ get aborted() { const aborted = abortedFlags.get(this); if (typeof aborted !== "boolean") { throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? "null" : typeof this}`); } return aborted; } } eventTargetShim.defineEventAttribute(AbortSignal.prototype, "abort"); /** * Create an AbortSignal object. */ function createAbortSignal() { const signal = Object.create(AbortSignal.prototype); eventTargetShim.EventTarget.call(signal); abortedFlags.set(signal, false); return signal; } /** * Abort a given signal. */ function abortSignal(signal) { if (abortedFlags.get(signal) !== false) { return; } abortedFlags.set(signal, true); signal.dispatchEvent({ type: "abort" }); } /** * Aborted flag for each instances. */ const abortedFlags = new WeakMap(); // Properties should be enumerable. Object.defineProperties(AbortSignal.prototype, { aborted: { enumerable: true }, }); // `toString()` should return `"[object AbortSignal]"` if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, { configurable: true, value: "AbortSignal", }); } /** * The AbortController. * @see https://dom.spec.whatwg.org/#abortcontroller */ class AbortController { /** * Initialize this controller. 
*/ constructor() { signals.set(this, createAbortSignal()); } /** * Returns the `AbortSignal` object associated with this object. */ get signal() { return getSignal(this); } /** * Abort and signal to any observers that the associated activity is to be aborted. */ abort() { abortSignal(getSignal(this)); } } /** * Associated signals. */ const signals = new WeakMap(); /** * Get the associated signal of a given controller. */ function getSignal(controller) { const signal = signals.get(controller); if (signal == null) { throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? "null" : typeof controller}`); } return signal; } // Properties should be enumerable. Object.defineProperties(AbortController.prototype, { signal: { enumerable: true }, abort: { enumerable: true }, }); if (typeof Symbol === "function" && typeof Symbol.toStringTag === "symbol") { Object.defineProperty(AbortController.prototype, Symbol.toStringTag, { configurable: true, value: "AbortController", }); } exports.AbortController = AbortController; exports.AbortSignal = AbortSignal; exports["default"] = AbortController; module.exports = AbortController module.exports.AbortController = module.exports["default"] = AbortController module.exports.AbortSignal = AbortSignal //# sourceMappingURL=abort-controller.js.map /***/ }), /***/ 15183: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.req = exports.json = exports.toBuffer = void 0; const http = __importStar(__nccwpck_require__(58611)); const https = __importStar(__nccwpck_require__(65692)); async function toBuffer(stream) { let length = 0; const chunks = []; for await (const chunk of stream) { length += chunk.length; chunks.push(chunk); } return Buffer.concat(chunks, length); } exports.toBuffer = toBuffer; // eslint-disable-next-line @typescript-eslint/no-explicit-any async function json(stream) { const buf = await toBuffer(stream); const str = buf.toString('utf8'); try { return JSON.parse(str); } catch (_err) { const err = _err; err.message += ` (input: ${str})`; throw err; } } exports.json = json; function req(url, opts = {}) { const href = typeof url === 'string' ? url : url.href; const req = (href.startsWith('https:') ? 
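/* Illustrative sketch (not part of the original module): the stream helpers above,
 * with a placeholder readable stream. `json()` layers JSON.parse on top of
 * `toBuffer()` and annotates parse errors with the offending input.
 *
 *   const body = await toBuffer(someReadableStream);   // Buffer with the full stream contents
 *   const parsed = await json(someReadableStream);     // parsed JSON, or a descriptive error
 */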
https : http).request(url, opts); const promise = new Promise((resolve, reject) => { req .once('response', resolve) .once('error', reject) .end(); }); req.then = promise.then.bind(promise); return req; } exports.req = req; //# sourceMappingURL=helpers.js.map /***/ }), /***/ 98894: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Agent = void 0; const net = __importStar(__nccwpck_require__(69278)); const http = __importStar(__nccwpck_require__(58611)); const https_1 = __nccwpck_require__(65692); __exportStar(__nccwpck_require__(15183), exports); const INTERNAL = Symbol('AgentBaseInternalState'); class Agent extends http.Agent { constructor(opts) { super(opts); this[INTERNAL] = {}; } /** * Determine whether this is an `http` or `https` request. */ isSecureEndpoint(options) { if (options) { // First check the `secureEndpoint` property explicitly, since this // means that a parent `Agent` is "passing through" to this instance. // eslint-disable-next-line @typescript-eslint/no-explicit-any if (typeof options.secureEndpoint === 'boolean') { return options.secureEndpoint; } // If no explicit `secure` endpoint, check if `protocol` property is // set. This will usually be the case since using a full string URL // or `URL` instance should be the most common usage. if (typeof options.protocol === 'string') { return options.protocol === 'https:'; } } // Finally, if no `protocol` property was set, then fall back to // checking the stack trace of the current call stack, and try to // detect the "https" module. const { stack } = new Error(); if (typeof stack !== 'string') return false; return stack .split('\n') .some((l) => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); } // In order to support async signatures in `connect()` and Node's native // connection pooling in `http.Agent`, the array of sockets for each origin // has to be updated synchronously. This is so the length of the array is // accurate when `addRequest()` is next called. We achieve this by creating a // fake socket and adding it to `sockets[origin]` and incrementing // `totalSocketCount`. 
incrementSockets(name) { // If `maxSockets` and `maxTotalSockets` are both Infinity then there is no // need to create a fake socket because Node.js native connection pooling // will never be invoked. if (this.maxSockets === Infinity && this.maxTotalSockets === Infinity) { return null; } // All instances of `sockets` are expected TypeScript errors. The // alternative is to add it as a private property of this class but that // will break TypeScript subclassing. if (!this.sockets[name]) { // @ts-expect-error `sockets` is readonly in `@types/node` this.sockets[name] = []; } const fakeSocket = new net.Socket({ writable: false }); this.sockets[name].push(fakeSocket); // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` this.totalSocketCount++; return fakeSocket; } decrementSockets(name, socket) { if (!this.sockets[name] || socket === null) { return; } const sockets = this.sockets[name]; const index = sockets.indexOf(socket); if (index !== -1) { sockets.splice(index, 1); // @ts-expect-error `totalSocketCount` isn't defined in `@types/node` this.totalSocketCount--; if (sockets.length === 0) { // @ts-expect-error `sockets` is readonly in `@types/node` delete this.sockets[name]; } } } // In order to properly update the socket pool, we need to call `getName()` on // the core `https.Agent` if it is a secureEndpoint. getName(options) { const secureEndpoint = typeof options.secureEndpoint === 'boolean' ? options.secureEndpoint : this.isSecureEndpoint(options); if (secureEndpoint) { // @ts-expect-error `getName()` isn't defined in `@types/node` return https_1.Agent.prototype.getName.call(this, options); } // @ts-expect-error `getName()` isn't defined in `@types/node` return super.getName(options); } createSocket(req, options, cb) { const connectOpts = { ...options, secureEndpoint: this.isSecureEndpoint(options), }; const name = this.getName(connectOpts); const fakeSocket = this.incrementSockets(name); Promise.resolve() .then(() => this.connect(req, connectOpts)) .then((socket) => { this.decrementSockets(name, fakeSocket); if (socket instanceof http.Agent) { try { // @ts-expect-error `addRequest()` isn't defined in `@types/node` return socket.addRequest(req, connectOpts); } catch (err) { return cb(err); } } this[INTERNAL].currentSocket = socket; // @ts-expect-error `createSocket()` isn't defined in `@types/node` super.createSocket(req, options, cb); }, (err) => { this.decrementSockets(name, fakeSocket); cb(err); }); } createConnection() { const socket = this[INTERNAL].currentSocket; this[INTERNAL].currentSocket = undefined; if (!socket) { throw new Error('No socket was returned in the `connect()` function'); } return socket; } get defaultPort() { return (this[INTERNAL].defaultPort ?? (this.protocol === 'https:' ? 443 : 80)); } set defaultPort(v) { if (this[INTERNAL]) { this[INTERNAL].defaultPort = v; } } get protocol() { return (this[INTERNAL].protocol ?? (this.isSecureEndpoint() ? 
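/* Illustrative sketch (not part of the original module): the subclass contract
 * implied by createSocket() above. A concrete agent extends Agent and implements
 * connect(), returning a socket (or another http.Agent) synchronously or via a
 * Promise; the host/port options below are the usual request options and purely
 * illustrative.
 *
 *   class MyAgent extends Agent {
 *     connect(req, opts) {
 *       // opts also carries `secureEndpoint`, set by createSocket() above
 *       return net.connect({ host: opts.host, port: opts.port });
 *     }
 *   }
 */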
'https:' : 'http:')); } set protocol(v) { if (this[INTERNAL]) { this[INTERNAL].protocol = v; } } } exports.Agent = Agent; //# sourceMappingURL=index.js.map /***/ }), /***/ 26251: /***/ ((module) => { "use strict"; const arrify = value => { if (value === null || value === undefined) { return []; } if (Array.isArray(value)) { return value; } if (typeof value === 'string') { return [value]; } if (typeof value[Symbol.iterator] === 'function') { return [...value]; } return [value]; }; module.exports = arrify; /***/ }), /***/ 45195: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { // Packages var retrier = __nccwpck_require__(5546); function retry(fn, opts) { function run(resolve, reject) { var options = opts || {}; var op; // Default `randomize` to true if (!('randomize' in options)) { options.randomize = true; } op = retrier.operation(options); // We allow the user to abort retrying // this makes sense in the cases where // knowledge is obtained that retrying // would be futile (e.g.: auth errors) function bail(err) { reject(err || new Error('Aborted')); } function onError(err, num) { if (err.bail) { bail(err); return; } if (!op.retry(err)) { reject(op.mainError()); } else if (options.onRetry) { options.onRetry(err, num); } } function runAttempt(num) { var val; try { val = fn(bail, num); } catch (err) { onError(err, num); return; } Promise.resolve(val) .then(resolve) .catch(function catchIt(err) { onError(err, num); }); } op.attempt(runAttempt); } return new Promise(run); } module.exports = retry; /***/ }), /***/ 31324: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = { parallel : __nccwpck_require__(83857), serial : __nccwpck_require__(31054), serialOrdered : __nccwpck_require__(53961) }; /***/ }), /***/ 24818: /***/ ((module) => { // API module.exports = abort; /** * Aborts leftover active jobs * * @param {object} state - current state object */ function abort(state) { Object.keys(state.jobs).forEach(clean.bind(state)); // reset leftover jobs state.jobs = {}; } /** * Cleans up leftover job by invoking abort function for the provided job id * * @this state * @param {string|number} key - job id to abort */ function clean(key) { if (typeof this.jobs[key] == 'function') { this.jobs[key](); } } /***/ }), /***/ 78452: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var defer = __nccwpck_require__(29200); // API module.exports = async; /** * Runs provided callback asynchronously * even if callback itself is not * * @param {function} callback - callback to invoke * @returns {function} - augmented callback */ function async(callback) { var isAsync = false; // check if async happened defer(function() { isAsync = true; }); return function async_callback(err, result) { if (isAsync) { callback(err, result); } else { defer(function nextTick_callback() { callback(err, result); }); } }; } /***/ }), /***/ 29200: /***/ ((module) => { module.exports = defer; /** * Runs provided function on next iteration of the event loop * * @param {function} fn - function to run */ function defer(fn) { var nextTick = typeof setImmediate == 'function' ? setImmediate : ( typeof process == 'object' && typeof process.nextTick == 'function' ? 
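/* Illustrative sketch (not part of the original module): using the retry wrapper
 * from module 45195 above. The attempt function receives (bail, attemptNumber);
 * calling bail(err) stops retrying immediately. Options other than `randomize`
 * and `onRetry` are passed straight through to the underlying retrier; `retries`
 * below is shown as a typical value, not a guarantee of this bundle.
 *
 *   const result = await retry(async (bail, attempt) => {
 *     const res = await fetchSomething();        // placeholder async operation
 *     if (res.status === 401) {
 *       bail(new Error('unauthorized'));         // give up: retrying will not help
 *       return;
 *     }
 *     return res.body;
 *   }, { retries: 3, onRetry: (err, attempt) => console.warn('attempt ' + attempt + ' failed') });
 */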
process.nextTick : null ); if (nextTick) { nextTick(fn); } else { setTimeout(fn, 0); } } /***/ }), /***/ 24902: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var async = __nccwpck_require__(78452) , abort = __nccwpck_require__(24818) ; // API module.exports = iterate; /** * Iterates over each job object * * @param {array|object} list - array or object (named list) to iterate over * @param {function} iterator - iterator to run * @param {object} state - current job status * @param {function} callback - invoked when all elements processed */ function iterate(list, iterator, state, callback) { // store current index var key = state['keyedList'] ? state['keyedList'][state.index] : state.index; state.jobs[key] = runJob(iterator, key, list[key], function(error, output) { // don't repeat yourself // skip secondary callbacks if (!(key in state.jobs)) { return; } // clean up jobs delete state.jobs[key]; if (error) { // don't process rest of the results // stop still active jobs // and reset the list abort(state); } else { state.results[key] = output; } // return salvaged results callback(error, state.results); }); } /** * Runs iterator over provided job element * * @param {function} iterator - iterator to invoke * @param {string|number} key - key/index of the element in the list of jobs * @param {mixed} item - job description * @param {function} callback - invoked after iterator is done with the job * @returns {function|mixed} - job abort function or something else */ function runJob(iterator, key, item, callback) { var aborter; // allow shortcut if iterator expects only two arguments if (iterator.length == 2) { aborter = iterator(item, async(callback)); } // otherwise go with full three arguments else { aborter = iterator(item, key, async(callback)); } return aborter; } /***/ }), /***/ 81721: /***/ ((module) => { // API module.exports = state; /** * Creates initial state object * for iteration over list * * @param {array|object} list - list to iterate over * @param {function|null} sortMethod - function to use for keys sort, * or `null` to keep them as is * @returns {object} - initial state object */ function state(list, sortMethod) { var isNamedList = !Array.isArray(list) , initState = { index : 0, keyedList: isNamedList || sortMethod ? Object.keys(list) : null, jobs : {}, results : isNamedList ? {} : [], size : isNamedList ? Object.keys(list).length : list.length } ; if (sortMethod) { // sort array keys based on it's values // sort object's keys just on own merit initState.keyedList.sort(isNamedList ? 
sortMethod : function(a, b) { return sortMethod(list[a], list[b]); }); } return initState; } /***/ }), /***/ 33351: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var abort = __nccwpck_require__(24818) , async = __nccwpck_require__(78452) ; // API module.exports = terminator; /** * Terminates jobs in the attached state context * * @this AsyncKitState# * @param {function} callback - final callback to invoke after termination */ function terminator(callback) { if (!Object.keys(this.jobs).length) { return; } // fast forward iteration index this.index = this.size; // abort jobs abort(this); // send back results we have so far async(callback)(null, this.results); } /***/ }), /***/ 83857: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var iterate = __nccwpck_require__(24902) , initState = __nccwpck_require__(81721) , terminator = __nccwpck_require__(33351) ; // Public API module.exports = parallel; /** * Runs iterator over provided array elements in parallel * * @param {array|object} list - array or object (named list) to iterate over * @param {function} iterator - iterator to run * @param {function} callback - invoked when all elements processed * @returns {function} - jobs terminator */ function parallel(list, iterator, callback) { var state = initState(list); while (state.index < (state['keyedList'] || list).length) { iterate(list, iterator, state, function(error, result) { if (error) { callback(error, result); return; } // looks like it's the last one if (Object.keys(state.jobs).length === 0) { callback(null, state.results); return; } }); state.index++; } return terminator.bind(state, callback); } /***/ }), /***/ 31054: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var serialOrdered = __nccwpck_require__(53961); // Public API module.exports = serial; /** * Runs iterator over provided array elements in series * * @param {array|object} list - array or object (named list) to iterate over * @param {function} iterator - iterator to run * @param {function} callback - invoked when all elements processed * @returns {function} - jobs terminator */ function serial(list, iterator, callback) { return serialOrdered(list, iterator, null, callback); } /***/ }), /***/ 53961: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var iterate = __nccwpck_require__(24902) , initState = __nccwpck_require__(81721) , terminator = __nccwpck_require__(33351) ; // Public API module.exports = serialOrdered; // sorting helpers module.exports.ascending = ascending; module.exports.descending = descending; /** * Runs iterator over provided sorted array elements in series * * @param {array|object} list - array or object (named list) to iterate over * @param {function} iterator - iterator to run * @param {function} sortMethod - custom sort function * @param {function} callback - invoked when all elements processed * @returns {function} - jobs terminator */ function serialOrdered(list, iterator, sortMethod, callback) { var state = initState(list, sortMethod); iterate(list, iterator, state, function iteratorHandler(error, result) { if (error) { callback(error, result); return; } state.index++; // are we there yet? 
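/* Illustrative sketch (not part of the original module): driving the parallel/serial
 * runners above. The iterator may take (item, callback) or (item, key, callback),
 * as dispatched in runJob(); results are keyed like the input list.
 *
 *   parallel(['a.txt', 'b.txt'], function (file, cb) {
 *     readFileSomehow(file, cb);                 // placeholder async job: cb(err, contents)
 *   }, function (err, results) {
 *     // results holds the contents at the same indexes as the input
 *     // (or under the same keys, for a named list)
 *   });
 */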
if (state.index < (state['keyedList'] || list).length) { iterate(list, iterator, state, iteratorHandler); return; } // done here callback(null, state.results); }); return terminator.bind(state, callback); } /* * -- Sort methods */ /** * sort helper to sort array elements in ascending order * * @param {mixed} a - an item to compare * @param {mixed} b - an item to compare * @returns {number} - comparison result */ function ascending(a, b) { return a < b ? -1 : a > b ? 1 : 0; } /** * sort helper to sort array elements in descending order * * @param {mixed} a - an item to compare * @param {mixed} b - an item to compare * @returns {number} - comparison result */ function descending(a, b) { return -1 * ascending(a, b); } /***/ }), /***/ 45416: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var CombinedStream = __nccwpck_require__(35630); var util = __nccwpck_require__(39023); var path = __nccwpck_require__(16928); var http = __nccwpck_require__(58611); var https = __nccwpck_require__(65692); var parseUrl = (__nccwpck_require__(87016).parse); var fs = __nccwpck_require__(79896); var Stream = (__nccwpck_require__(2203).Stream); var mime = __nccwpck_require__(14096); var asynckit = __nccwpck_require__(31324); var setToStringTag = __nccwpck_require__(88700); var hasOwn = __nccwpck_require__(54076); var populate = __nccwpck_require__(54957); /** * Create readable "multipart/form-data" streams. * Can be used to submit forms * and file uploads to other web applications. * * @constructor * @param {object} options - Properties to be added/overriden for FormData and CombinedStream */ function FormData(options) { if (!(this instanceof FormData)) { return new FormData(options); } this._overheadLength = 0; this._valueLength = 0; this._valuesToMeasure = []; CombinedStream.call(this); options = options || {}; // eslint-disable-line no-param-reassign for (var option in options) { // eslint-disable-line no-restricted-syntax this[option] = options[option]; } } // make it a Stream util.inherits(FormData, CombinedStream); FormData.LINE_BREAK = '\r\n'; FormData.DEFAULT_CONTENT_TYPE = 'application/octet-stream'; FormData.prototype.append = function (field, value, options) { options = options || {}; // eslint-disable-line no-param-reassign // allow filename as single option if (typeof options === 'string') { options = { filename: options }; // eslint-disable-line no-param-reassign } var append = CombinedStream.prototype.append.bind(this); // all that streamy business can't handle numbers if (typeof value === 'number' || value == null) { value = String(value); // eslint-disable-line no-param-reassign } // https://github.com/felixge/node-form-data/issues/38 if (Array.isArray(value)) { /* * Please convert your array into string * the way web server expects it */ this._error(new Error('Arrays are not supported.')); return; } var header = this._multiPartHeader(field, value, options); var footer = this._multiPartFooter(); append(header); append(value); append(footer); // pass along options.knownLength this._trackLength(header, value, options); }; FormData.prototype._trackLength = function (header, value, options) { var valueLength = 0; /* * used w/ getLengthSync(), when length is known. * e.g. for streaming directly from a remote server, * w/ a known file a size, and not wanting to wait for * incoming file to finish to get its size. 
*/ if (options.knownLength != null) { valueLength += Number(options.knownLength); } else if (Buffer.isBuffer(value)) { valueLength = value.length; } else if (typeof value === 'string') { valueLength = Buffer.byteLength(value); } this._valueLength += valueLength; // @check why add CRLF? does this account for custom/multiple CRLFs? this._overheadLength += Buffer.byteLength(header) + FormData.LINE_BREAK.length; // empty or either doesn't have path or not an http response or not a stream if (!value || (!value.path && !(value.readable && hasOwn(value, 'httpVersion')) && !(value instanceof Stream))) { return; } // no need to bother with the length if (!options.knownLength) { this._valuesToMeasure.push(value); } }; FormData.prototype._lengthRetriever = function (value, callback) { if (hasOwn(value, 'fd')) { // take read range into a account // `end` = Infinity –> read file till the end // // TODO: Looks like there is bug in Node fs.createReadStream // it doesn't respect `end` options without `start` options // Fix it when node fixes it. // https://github.com/joyent/node/issues/7819 if (value.end != undefined && value.end != Infinity && value.start != undefined) { // when end specified // no need to calculate range // inclusive, starts with 0 callback(null, value.end + 1 - (value.start ? value.start : 0)); // eslint-disable-line callback-return // not that fast snoopy } else { // still need to fetch file size from fs fs.stat(value.path, function (err, stat) { if (err) { callback(err); return; } // update final size based on the range options var fileSize = stat.size - (value.start ? value.start : 0); callback(null, fileSize); }); } // or http response } else if (hasOwn(value, 'httpVersion')) { callback(null, Number(value.headers['content-length'])); // eslint-disable-line callback-return // or request stream http://github.com/mikeal/request } else if (hasOwn(value, 'httpModule')) { // wait till response come back value.on('response', function (response) { value.pause(); callback(null, Number(response.headers['content-length'])); }); value.resume(); // something else } else { callback('Unknown stream'); // eslint-disable-line callback-return } }; FormData.prototype._multiPartHeader = function (field, value, options) { /* * custom header specified (as string)? * it becomes responsible for boundary * (e.g. to handle extra CRLFs on .NET servers) */ if (typeof options.header === 'string') { return options.header; } var contentDisposition = this._getContentDisposition(value, options); var contentType = this._getContentType(value, options); var contents = ''; var headers = { // add custom disposition as third element or keep it two elements if not 'Content-Disposition': ['form-data', 'name="' + field + '"'].concat(contentDisposition || []), // if no content type. allow it to be empty array 'Content-Type': [].concat(contentType || []) }; // allow custom headers. if (typeof options.header === 'object') { populate(headers, options.header); } var header; for (var prop in headers) { // eslint-disable-line no-restricted-syntax if (hasOwn(headers, prop)) { header = headers[prop]; // skip nullish headers. if (header == null) { continue; // eslint-disable-line no-restricted-syntax, no-continue } // convert all headers to arrays. if (!Array.isArray(header)) { header = [header]; } // add non-empty headers. 
if (header.length) { contents += prop + ': ' + header.join('; ') + FormData.LINE_BREAK; } } } return '--' + this.getBoundary() + FormData.LINE_BREAK + contents + FormData.LINE_BREAK; }; FormData.prototype._getContentDisposition = function (value, options) { // eslint-disable-line consistent-return var filename; if (typeof options.filepath === 'string') { // custom filepath for relative paths filename = path.normalize(options.filepath).replace(/\\/g, '/'); } else if (options.filename || (value && (value.name || value.path))) { /* * custom filename take precedence * formidable and the browser add a name property * fs- and request- streams have path property */ filename = path.basename(options.filename || (value && (value.name || value.path))); } else if (value && value.readable && hasOwn(value, 'httpVersion')) { // or try http response filename = path.basename(value.client._httpMessage.path || ''); } if (filename) { return 'filename="' + filename + '"'; } }; FormData.prototype._getContentType = function (value, options) { // use custom content-type above all var contentType = options.contentType; // or try `name` from formidable, browser if (!contentType && value && value.name) { contentType = mime.lookup(value.name); } // or try `path` from fs-, request- streams if (!contentType && value && value.path) { contentType = mime.lookup(value.path); } // or if it's http-reponse if (!contentType && value && value.readable && hasOwn(value, 'httpVersion')) { contentType = value.headers['content-type']; } // or guess it from the filepath or filename if (!contentType && (options.filepath || options.filename)) { contentType = mime.lookup(options.filepath || options.filename); } // fallback to the default content type if `value` is not simple value if (!contentType && value && typeof value === 'object') { contentType = FormData.DEFAULT_CONTENT_TYPE; } return contentType; }; FormData.prototype._multiPartFooter = function () { return function (next) { var footer = FormData.LINE_BREAK; var lastPart = this._streams.length === 0; if (lastPart) { footer += this._lastBoundary(); } next(footer); }.bind(this); }; FormData.prototype._lastBoundary = function () { return '--' + this.getBoundary() + '--' + FormData.LINE_BREAK; }; FormData.prototype.getHeaders = function (userHeaders) { var header; var formHeaders = { 'content-type': 'multipart/form-data; boundary=' + this.getBoundary() }; for (header in userHeaders) { // eslint-disable-line no-restricted-syntax if (hasOwn(userHeaders, header)) { formHeaders[header.toLowerCase()] = userHeaders[header]; } } return formHeaders; }; FormData.prototype.setBoundary = function (boundary) { if (typeof boundary !== 'string') { throw new TypeError('FormData boundary must be a string'); } this._boundary = boundary; }; FormData.prototype.getBoundary = function () { if (!this._boundary) { this._generateBoundary(); } return this._boundary; }; FormData.prototype.getBuffer = function () { var dataBuffer = new Buffer.alloc(0); // eslint-disable-line new-cap var boundary = this.getBoundary(); // Create the form content. Add Line breaks to the end of data. for (var i = 0, len = this._streams.length; i < len; i++) { if (typeof this._streams[i] !== 'function') { // Add content to the buffer. if (Buffer.isBuffer(this._streams[i])) { dataBuffer = Buffer.concat([dataBuffer, this._streams[i]]); } else { dataBuffer = Buffer.concat([dataBuffer, Buffer.from(this._streams[i])]); } // Add break after content. 
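// (the CRLF is skipped only when the queued piece is a boundary string, i.e. '--' followed by the boundary)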
if (typeof this._streams[i] !== 'string' || this._streams[i].substring(2, boundary.length + 2) !== boundary) { dataBuffer = Buffer.concat([dataBuffer, Buffer.from(FormData.LINE_BREAK)]); } } } // Add the footer and return the Buffer object. return Buffer.concat([dataBuffer, Buffer.from(this._lastBoundary())]); }; FormData.prototype._generateBoundary = function () { // This generates a 50 character boundary similar to those used by Firefox. // They are optimized for boyer-moore parsing. var boundary = '--------------------------'; for (var i = 0; i < 24; i++) { boundary += Math.floor(Math.random() * 10).toString(16); } this._boundary = boundary; }; // Note: getLengthSync DOESN'T calculate streams length // As workaround one can calculate file size manually and add it as knownLength option FormData.prototype.getLengthSync = function () { var knownLength = this._overheadLength + this._valueLength; // Don't get confused, there are 3 "internal" streams for each keyval pair so it basically checks if there is any value added to the form if (this._streams.length) { knownLength += this._lastBoundary().length; } // https://github.com/form-data/form-data/issues/40 if (!this.hasKnownLength()) { /* * Some async length retrievers are present * therefore synchronous length calculation is false. * Please use getLength(callback) to get proper length */ this._error(new Error('Cannot calculate proper length in synchronous way.')); } return knownLength; }; // Public API to check if length of added values is known // https://github.com/form-data/form-data/issues/196 // https://github.com/form-data/form-data/issues/262 FormData.prototype.hasKnownLength = function () { var hasKnownLength = true; if (this._valuesToMeasure.length) { hasKnownLength = false; } return hasKnownLength; }; FormData.prototype.getLength = function (cb) { var knownLength = this._overheadLength + this._valueLength; if (this._streams.length) { knownLength += this._lastBoundary().length; } if (!this._valuesToMeasure.length) { process.nextTick(cb.bind(this, null, knownLength)); return; } asynckit.parallel(this._valuesToMeasure, this._lengthRetriever, function (err, values) { if (err) { cb(err); return; } values.forEach(function (length) { knownLength += length; }); cb(null, knownLength); }); }; FormData.prototype.submit = function (params, cb) { var request; var options; var defaults = { method: 'post' }; // parse provided url if it's string or treat it as options object if (typeof params === 'string') { params = parseUrl(params); // eslint-disable-line no-param-reassign /* eslint sort-keys: 0 */ options = populate({ port: params.port, path: params.pathname, host: params.hostname, protocol: params.protocol }, defaults); } else { // use custom params options = populate(params, defaults); // if no port provided use default one if (!options.port) { options.port = options.protocol === 'https:' ? 
443 : 80; } } // put that good code in getHeaders to some use options.headers = this.getHeaders(params.headers); // https if specified, fallback to http in any other case if (options.protocol === 'https:') { request = https.request(options); } else { request = http.request(options); } // get content length and fire away this.getLength(function (err, length) { if (err && err !== 'Unknown stream') { this._error(err); return; } // add content length if (length) { request.setHeader('Content-Length', length); } this.pipe(request); if (cb) { var onResponse; var callback = function (error, responce) { request.removeListener('error', callback); request.removeListener('response', onResponse); return cb.call(this, error, responce); // eslint-disable-line no-invalid-this }; onResponse = callback.bind(this, null); request.on('error', callback); request.on('response', onResponse); } }.bind(this)); return request; }; FormData.prototype._error = function (err) { if (!this.error) { this.error = err; this.pause(); this.emit('error', err); } }; FormData.prototype.toString = function () { return '[object FormData]'; }; setToStringTag(FormData, 'FormData'); // Public API module.exports = FormData; /***/ }), /***/ 54957: /***/ ((module) => { "use strict"; // populates missing values module.exports = function (dst, src) { Object.keys(src).forEach(function (prop) { dst[prop] = dst[prop] || src[prop]; // eslint-disable-line no-param-reassign }); return dst; }; /***/ }), /***/ 59380: /***/ ((module) => { "use strict"; module.exports = balanced; function balanced(a, b, str) { if (a instanceof RegExp) a = maybeMatch(a, str); if (b instanceof RegExp) b = maybeMatch(b, str); var r = range(a, b, str); return r && { start: r[0], end: r[1], pre: str.slice(0, r[0]), body: str.slice(r[0] + a.length, r[1]), post: str.slice(r[1] + b.length) }; } function maybeMatch(reg, str) { var m = str.match(reg); return m ? m[0] : null; } balanced.range = range; function range(a, b, str) { var begs, beg, left, right, result; var ai = str.indexOf(a); var bi = str.indexOf(b, ai + 1); var i = ai; if (ai >= 0 && bi > 0) { if(a===b) { return [ai, bi]; } begs = []; left = str.length; while (i >= 0 && !result) { if (i == ai) { begs.push(i); ai = str.indexOf(a, i + 1); } else if (begs.length == 1) { result = [ begs.pop(), bi ]; } else { beg = begs.pop(); if (beg < left) { left = beg; right = bi; } bi = str.indexOf(b, i + 1); } i = ai < bi && ai >= 0 ? ai : bi; } if (begs.length) { result = [ left, right ]; } } return result; } /***/ }), /***/ 38793: /***/ ((__unused_webpack_module, exports) => { "use strict"; exports.byteLength = byteLength exports.toByteArray = toByteArray exports.fromByteArray = fromByteArray var lookup = [] var revLookup = [] var Arr = typeof Uint8Array !== 'undefined' ? Uint8Array : Array var code = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/' for (var i = 0, len = code.length; i < len; ++i) { lookup[i] = code[i] revLookup[code.charCodeAt(i)] = i } // Support decoding URL-safe base64 strings, as Node.js does. // See: https://en.wikipedia.org/wiki/Base64#URL_applications revLookup['-'.charCodeAt(0)] = 62 revLookup['_'.charCodeAt(0)] = 63 function getLens (b64) { var len = b64.length if (len % 4 > 0) { throw new Error('Invalid string. Length must be a multiple of 4') } // Trim off extra bytes after placeholder bytes are found // See: https://github.com/beatgammit/base64-js/issues/42 var validLen = b64.indexOf('=') if (validLen === -1) validLen = len var placeHoldersLen = validLen === len ? 
0 : 4 - (validLen % 4) return [validLen, placeHoldersLen] } // base64 is 4/3 + up to two characters of the original data function byteLength (b64) { var lens = getLens(b64) var validLen = lens[0] var placeHoldersLen = lens[1] return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen } function _byteLength (b64, validLen, placeHoldersLen) { return ((validLen + placeHoldersLen) * 3 / 4) - placeHoldersLen } function toByteArray (b64) { var tmp var lens = getLens(b64) var validLen = lens[0] var placeHoldersLen = lens[1] var arr = new Arr(_byteLength(b64, validLen, placeHoldersLen)) var curByte = 0 // if there are placeholders, only get up to the last complete 4 chars var len = placeHoldersLen > 0 ? validLen - 4 : validLen var i for (i = 0; i < len; i += 4) { tmp = (revLookup[b64.charCodeAt(i)] << 18) | (revLookup[b64.charCodeAt(i + 1)] << 12) | (revLookup[b64.charCodeAt(i + 2)] << 6) | revLookup[b64.charCodeAt(i + 3)] arr[curByte++] = (tmp >> 16) & 0xFF arr[curByte++] = (tmp >> 8) & 0xFF arr[curByte++] = tmp & 0xFF } if (placeHoldersLen === 2) { tmp = (revLookup[b64.charCodeAt(i)] << 2) | (revLookup[b64.charCodeAt(i + 1)] >> 4) arr[curByte++] = tmp & 0xFF } if (placeHoldersLen === 1) { tmp = (revLookup[b64.charCodeAt(i)] << 10) | (revLookup[b64.charCodeAt(i + 1)] << 4) | (revLookup[b64.charCodeAt(i + 2)] >> 2) arr[curByte++] = (tmp >> 8) & 0xFF arr[curByte++] = tmp & 0xFF } return arr } function tripletToBase64 (num) { return lookup[num >> 18 & 0x3F] + lookup[num >> 12 & 0x3F] + lookup[num >> 6 & 0x3F] + lookup[num & 0x3F] } function encodeChunk (uint8, start, end) { var tmp var output = [] for (var i = start; i < end; i += 3) { tmp = ((uint8[i] << 16) & 0xFF0000) + ((uint8[i + 1] << 8) & 0xFF00) + (uint8[i + 2] & 0xFF) output.push(tripletToBase64(tmp)) } return output.join('') } function fromByteArray (uint8) { var tmp var len = uint8.length var extraBytes = len % 3 // if we have 1 byte left, pad 2 bytes var parts = [] var maxChunkLength = 16383 // must be multiple of 3 // go through the array every three bytes, we'll deal with trailing stuff later for (var i = 0, len2 = len - extraBytes; i < len2; i += maxChunkLength) { parts.push(encodeChunk(uint8, i, (i + maxChunkLength) > len2 ? len2 : (i + maxChunkLength))) } // pad the end with zeros, but make sure to not forget the extra bytes if (extraBytes === 1) { tmp = uint8[len - 1] parts.push( lookup[tmp >> 2] + lookup[(tmp << 4) & 0x3F] + '==' ) } else if (extraBytes === 2) { tmp = (uint8[len - 2] << 8) + uint8[len - 1] parts.push( lookup[tmp >> 10] + lookup[(tmp >> 4) & 0x3F] + lookup[(tmp << 2) & 0x3F] + '=' ) } return parts.join('') } /***/ }), /***/ 52732: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var register = __nccwpck_require__(11063); var addHook = __nccwpck_require__(22027); var removeHook = __nccwpck_require__(59934); // bind with array of arguments: https://stackoverflow.com/a/21792913 var bind = Function.bind; var bindable = bind.bind(bind); function bindApi(hook, state, name) { var removeHookRef = bindable(removeHook, null).apply( null, name ? [state, name] : [state] ); hook.api = { remove: removeHookRef }; hook.remove = removeHookRef; ["before", "error", "after", "wrap"].forEach(function (kind) { var args = name ? 
[state, kind, name] : [state, kind]; hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); }); } function HookSingular() { var singularHookName = "h"; var singularHookState = { registry: {}, }; var singularHook = register.bind(null, singularHookState, singularHookName); bindApi(singularHook, singularHookState, singularHookName); return singularHook; } function HookCollection() { var state = { registry: {}, }; var hook = register.bind(null, state); bindApi(hook, state); return hook; } var collectionHookDeprecationMessageDisplayed = false; function Hook() { if (!collectionHookDeprecationMessageDisplayed) { console.warn( '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' ); collectionHookDeprecationMessageDisplayed = true; } return HookCollection(); } Hook.Singular = HookSingular.bind(); Hook.Collection = HookCollection.bind(); module.exports = Hook; // expose constructors as a named property for TypeScript module.exports.Hook = Hook; module.exports.Singular = Hook.Singular; module.exports.Collection = Hook.Collection; /***/ }), /***/ 22027: /***/ ((module) => { module.exports = addHook; function addHook(state, kind, name, hook) { var orig = hook; if (!state.registry[name]) { state.registry[name] = []; } if (kind === "before") { hook = function (method, options) { return Promise.resolve() .then(orig.bind(null, options)) .then(method.bind(null, options)); }; } if (kind === "after") { hook = function (method, options) { var result; return Promise.resolve() .then(method.bind(null, options)) .then(function (result_) { result = result_; return orig(result, options); }) .then(function () { return result; }); }; } if (kind === "error") { hook = function (method, options) { return Promise.resolve() .then(method.bind(null, options)) .catch(function (error) { return orig(error, options); }); }; } state.registry[name].push({ hook: hook, orig: orig, }); } /***/ }), /***/ 11063: /***/ ((module) => { module.exports = register; function register(state, name, method, options) { if (typeof method !== "function") { throw new Error("method for before hook must be a function"); } if (!options) { options = {}; } if (Array.isArray(name)) { return name.reverse().reduce(function (callback, name) { return register.bind(null, state, name, callback, options); }, method)(); } return Promise.resolve().then(function () { if (!state.registry[name]) { return method(options); } return state.registry[name].reduce(function (method, registered) { return registered.hook.bind(null, method, options); }, method)(); }); } /***/ }), /***/ 59934: /***/ ((module) => { module.exports = removeHook; function removeHook(state, name, method) { if (!state.registry[name]) { return; } var index = state.registry[name] .map(function (registered) { return registered.orig; }) .indexOf(method); if (index === -1) { return; } state.registry[name].splice(index, 1); } /***/ }), /***/ 51259: /***/ (function(module) { ;(function (globalObject) { 'use strict'; /* * bignumber.js v9.3.0 * A JavaScript library for arbitrary-precision arithmetic. * https://github.com/MikeMcl/bignumber.js * Copyright (c) 2025 Michael Mclaughlin * MIT Licensed. 
* * BigNumber.prototype methods | BigNumber methods * | * absoluteValue abs | clone * comparedTo | config set * decimalPlaces dp | DECIMAL_PLACES * dividedBy div | ROUNDING_MODE * dividedToIntegerBy idiv | EXPONENTIAL_AT * exponentiatedBy pow | RANGE * integerValue | CRYPTO * isEqualTo eq | MODULO_MODE * isFinite | POW_PRECISION * isGreaterThan gt | FORMAT * isGreaterThanOrEqualTo gte | ALPHABET * isInteger | isBigNumber * isLessThan lt | maximum max * isLessThanOrEqualTo lte | minimum min * isNaN | random * isNegative | sum * isPositive | * isZero | * minus | * modulo mod | * multipliedBy times | * negated | * plus | * precision sd | * shiftedBy | * squareRoot sqrt | * toExponential | * toFixed | * toFormat | * toFraction | * toJSON | * toNumber | * toPrecision | * toString | * valueOf | * */ var BigNumber, isNumeric = /^-?(?:\d+(?:\.\d*)?|\.\d+)(?:e[+-]?\d+)?$/i, mathceil = Math.ceil, mathfloor = Math.floor, bignumberError = '[BigNumber Error] ', tooManyDigits = bignumberError + 'Number primitive has more than 15 significant digits: ', BASE = 1e14, LOG_BASE = 14, MAX_SAFE_INTEGER = 0x1fffffffffffff, // 2^53 - 1 // MAX_INT32 = 0x7fffffff, // 2^31 - 1 POWS_TEN = [1, 10, 100, 1e3, 1e4, 1e5, 1e6, 1e7, 1e8, 1e9, 1e10, 1e11, 1e12, 1e13], SQRT_BASE = 1e7, // EDITABLE // The limit on the value of DECIMAL_PLACES, TO_EXP_NEG, TO_EXP_POS, MIN_EXP, MAX_EXP, and // the arguments to toExponential, toFixed, toFormat, and toPrecision. MAX = 1E9; // 0 to MAX_INT32 /* * Create and return a BigNumber constructor. */ function clone(configObject) { var div, convertBase, parseNumeric, P = BigNumber.prototype = { constructor: BigNumber, toString: null, valueOf: null }, ONE = new BigNumber(1), //----------------------------- EDITABLE CONFIG DEFAULTS ------------------------------- // The default values below must be integers within the inclusive ranges stated. // The values can also be changed at run-time using BigNumber.set. // The maximum number of decimal places for operations involving division. DECIMAL_PLACES = 20, // 0 to MAX // The rounding mode used when rounding to the above decimal places, and when using // toExponential, toFixed, toFormat and toPrecision, and round (default value). // UP 0 Away from zero. // DOWN 1 Towards zero. // CEIL 2 Towards +Infinity. // FLOOR 3 Towards -Infinity. // HALF_UP 4 Towards nearest neighbour. If equidistant, up. // HALF_DOWN 5 Towards nearest neighbour. If equidistant, down. // HALF_EVEN 6 Towards nearest neighbour. If equidistant, towards even neighbour. // HALF_CEIL 7 Towards nearest neighbour. If equidistant, towards +Infinity. // HALF_FLOOR 8 Towards nearest neighbour. If equidistant, towards -Infinity. ROUNDING_MODE = 4, // 0 to 8 // EXPONENTIAL_AT : [TO_EXP_NEG , TO_EXP_POS] // The exponent value at and beneath which toString returns exponential notation. // Number type: -7 TO_EXP_NEG = -7, // 0 to -MAX // The exponent value at and above which toString returns exponential notation. // Number type: 21 TO_EXP_POS = 21, // 0 to MAX // RANGE : [MIN_EXP, MAX_EXP] // The minimum exponent value, beneath which underflow to zero occurs. // Number type: -324 (5e-324) MIN_EXP = -1e7, // -1 to -MAX // The maximum exponent value, above which overflow to Infinity occurs. // Number type: 308 (1.7976931348623157e+308) // For MAX_EXP > 1e7, e.g. new BigNumber('1e100000000').plus(1) may be slow. MAX_EXP = 1e7, // 1 to MAX // Whether to use cryptographically-secure random number generation, if available. 
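// (enabled at run time via BigNumber.set({ CRYPTO: true }); random() then draws
// from crypto.getRandomValues or crypto.randomBytes, as implemented below)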
CRYPTO = false, // true or false // The modulo mode used when calculating the modulus: a mod n. // The quotient (q = a / n) is calculated according to the corresponding rounding mode. // The remainder (r) is calculated as: r = a - n * q. // // UP 0 The remainder is positive if the dividend is negative, else is negative. // DOWN 1 The remainder has the same sign as the dividend. // This modulo mode is commonly known as 'truncated division' and is // equivalent to (a % n) in JavaScript. // FLOOR 3 The remainder has the same sign as the divisor (Python %). // HALF_EVEN 6 This modulo mode implements the IEEE 754 remainder function. // EUCLID 9 Euclidian division. q = sign(n) * floor(a / abs(n)). // The remainder is always positive. // // The truncated division, floored division, Euclidian division and IEEE 754 remainder // modes are commonly used for the modulus operation. // Although the other rounding modes can also be used, they may not give useful results. MODULO_MODE = 1, // 0 to 9 // The maximum number of significant digits of the result of the exponentiatedBy operation. // If POW_PRECISION is 0, there will be unlimited significant digits. POW_PRECISION = 0, // 0 to MAX // The format specification used by the BigNumber.prototype.toFormat method. FORMAT = { prefix: '', groupSize: 3, secondaryGroupSize: 0, groupSeparator: ',', decimalSeparator: '.', fractionGroupSize: 0, fractionGroupSeparator: '\xA0', // non-breaking space suffix: '' }, // The alphabet used for base conversion. It must be at least 2 characters long, with no '+', // '-', '.', whitespace, or repeated character. // '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ$_' ALPHABET = '0123456789abcdefghijklmnopqrstuvwxyz', alphabetHasNormalDecimalDigits = true; //------------------------------------------------------------------------------------------ // CONSTRUCTOR /* * The BigNumber constructor and exported function. * Create and return a new instance of a BigNumber object. * * v {number|string|BigNumber} A numeric value. * [b] {number} The base of v. Integer, 2 to ALPHABET.length inclusive. */ function BigNumber(v, b) { var alphabet, c, caseChanged, e, i, isNum, len, str, x = this; // Enable constructor call without `new`. if (!(x instanceof BigNumber)) return new BigNumber(v, b); if (b == null) { if (v && v._isBigNumber === true) { x.s = v.s; if (!v.c || v.e > MAX_EXP) { x.c = x.e = null; } else if (v.e < MIN_EXP) { x.c = [x.e = 0]; } else { x.e = v.e; x.c = v.c.slice(); } return; } if ((isNum = typeof v == 'number') && v * 0 == 0) { // Use `1 / n` to handle minus zero also. x.s = 1 / v < 0 ? (v = -v, -1) : 1; // Fast path for integers, where n < 2147483648 (2**31). if (v === ~~v) { for (e = 0, i = v; i >= 10; i /= 10, e++); if (e > MAX_EXP) { x.c = x.e = null; } else { x.e = e; x.c = [v]; } return; } str = String(v); } else { if (!isNumeric.test(str = String(v))) return parseNumeric(x, str, isNum); x.s = str.charCodeAt(0) == 45 ? (str = str.slice(1), -1) : 1; } // Decimal point? if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); // Exponential form? if ((i = str.search(/e/i)) > 0) { // Determine exponent. if (e < 0) e = i; e += +str.slice(i + 1); str = str.substring(0, i); } else if (e < 0) { // Integer. e = str.length; } } else { // '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' intCheck(b, 2, ALPHABET.length, 'Base'); // Allow exponential notation to be used with base 10 argument, while // also rounding to DECIMAL_PLACES as with other bases. 
if (b == 10 && alphabetHasNormalDecimalDigits) { x = new BigNumber(v); return round(x, DECIMAL_PLACES + x.e + 1, ROUNDING_MODE); } str = String(v); if (isNum = typeof v == 'number') { // Avoid potential interpretation of Infinity and NaN as base 44+ values. if (v * 0 != 0) return parseNumeric(x, str, isNum, b); x.s = 1 / v < 0 ? (str = str.slice(1), -1) : 1; // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' if (BigNumber.DEBUG && str.replace(/^0\.0*|\./, '').length > 15) { throw Error (tooManyDigits + v); } } else { x.s = str.charCodeAt(0) === 45 ? (str = str.slice(1), -1) : 1; } alphabet = ALPHABET.slice(0, b); e = i = 0; // Check that str is a valid base b number. // Don't use RegExp, so alphabet can contain special characters. for (len = str.length; i < len; i++) { if (alphabet.indexOf(c = str.charAt(i)) < 0) { if (c == '.') { // If '.' is not the first character and it has not be found before. if (i > e) { e = len; continue; } } else if (!caseChanged) { // Allow e.g. hexadecimal 'FF' as well as 'ff'. if (str == str.toUpperCase() && (str = str.toLowerCase()) || str == str.toLowerCase() && (str = str.toUpperCase())) { caseChanged = true; i = -1; e = 0; continue; } } return parseNumeric(x, String(v), isNum, b); } } // Prevent later check for length on converted number. isNum = false; str = convertBase(str, b, 10, x.s); // Decimal point? if ((e = str.indexOf('.')) > -1) str = str.replace('.', ''); else e = str.length; } // Determine leading zeros. for (i = 0; str.charCodeAt(i) === 48; i++); // Determine trailing zeros. for (len = str.length; str.charCodeAt(--len) === 48;); if (str = str.slice(i, ++len)) { len -= i; // '[BigNumber Error] Number primitive has more than 15 significant digits: {n}' if (isNum && BigNumber.DEBUG && len > 15 && (v > MAX_SAFE_INTEGER || v !== mathfloor(v))) { throw Error (tooManyDigits + (x.s * v)); } // Overflow? if ((e = e - i - 1) > MAX_EXP) { // Infinity. x.c = x.e = null; // Underflow? } else if (e < MIN_EXP) { // Zero. x.c = [x.e = 0]; } else { x.e = e; x.c = []; // Transform base // e is the base 10 exponent. // i is where to slice str to get the first element of the coefficient array. i = (e + 1) % LOG_BASE; if (e < 0) i += LOG_BASE; // i < 1 if (i < len) { if (i) x.c.push(+str.slice(0, i)); for (len -= LOG_BASE; i < len;) { x.c.push(+str.slice(i, i += LOG_BASE)); } i = LOG_BASE - (str = str.slice(i)).length; } else { i -= len; } for (; i--; str += '0'); x.c.push(+str); } } else { // Zero. x.c = [x.e = 0]; } } // CONSTRUCTOR PROPERTIES BigNumber.clone = clone; BigNumber.ROUND_UP = 0; BigNumber.ROUND_DOWN = 1; BigNumber.ROUND_CEIL = 2; BigNumber.ROUND_FLOOR = 3; BigNumber.ROUND_HALF_UP = 4; BigNumber.ROUND_HALF_DOWN = 5; BigNumber.ROUND_HALF_EVEN = 6; BigNumber.ROUND_HALF_CEIL = 7; BigNumber.ROUND_HALF_FLOOR = 8; BigNumber.EUCLID = 9; /* * Configure infrequently-changing library-wide settings. * * Accept an object with the following optional properties (if the value of a property is * a number, it must be an integer within the inclusive range stated): * * DECIMAL_PLACES {number} 0 to MAX * ROUNDING_MODE {number} 0 to 8 * EXPONENTIAL_AT {number|number[]} -MAX to MAX or [-MAX to 0, 0 to MAX] * RANGE {number|number[]} -MAX to MAX (not zero) or [-MAX to -1, 1 to MAX] * CRYPTO {boolean} true or false * MODULO_MODE {number} 0 to 9 * POW_PRECISION {number} 0 to MAX * ALPHABET {string} A string of two or more unique characters which does * not contain '.'. 
* FORMAT {object} An object with some of the following properties: * prefix {string} * groupSize {number} * secondaryGroupSize {number} * groupSeparator {string} * decimalSeparator {string} * fractionGroupSize {number} * fractionGroupSeparator {string} * suffix {string} * * (The values assigned to the above FORMAT object properties are not checked for validity.) * * E.g. * BigNumber.config({ DECIMAL_PLACES : 20, ROUNDING_MODE : 4 }) * * Ignore properties/parameters set to null or undefined, except for ALPHABET. * * Return an object with the properties current values. */ BigNumber.config = BigNumber.set = function (obj) { var p, v; if (obj != null) { if (typeof obj == 'object') { // DECIMAL_PLACES {number} Integer, 0 to MAX inclusive. // '[BigNumber Error] DECIMAL_PLACES {not a primitive number|not an integer|out of range}: {v}' if (obj.hasOwnProperty(p = 'DECIMAL_PLACES')) { v = obj[p]; intCheck(v, 0, MAX, p); DECIMAL_PLACES = v; } // ROUNDING_MODE {number} Integer, 0 to 8 inclusive. // '[BigNumber Error] ROUNDING_MODE {not a primitive number|not an integer|out of range}: {v}' if (obj.hasOwnProperty(p = 'ROUNDING_MODE')) { v = obj[p]; intCheck(v, 0, 8, p); ROUNDING_MODE = v; } // EXPONENTIAL_AT {number|number[]} // Integer, -MAX to MAX inclusive or // [integer -MAX to 0 inclusive, 0 to MAX inclusive]. // '[BigNumber Error] EXPONENTIAL_AT {not a primitive number|not an integer|out of range}: {v}' if (obj.hasOwnProperty(p = 'EXPONENTIAL_AT')) { v = obj[p]; if (v && v.pop) { intCheck(v[0], -MAX, 0, p); intCheck(v[1], 0, MAX, p); TO_EXP_NEG = v[0]; TO_EXP_POS = v[1]; } else { intCheck(v, -MAX, MAX, p); TO_EXP_NEG = -(TO_EXP_POS = v < 0 ? -v : v); } } // RANGE {number|number[]} Non-zero integer, -MAX to MAX inclusive or // [integer -MAX to -1 inclusive, integer 1 to MAX inclusive]. // '[BigNumber Error] RANGE {not a primitive number|not an integer|out of range|cannot be zero}: {v}' if (obj.hasOwnProperty(p = 'RANGE')) { v = obj[p]; if (v && v.pop) { intCheck(v[0], -MAX, -1, p); intCheck(v[1], 1, MAX, p); MIN_EXP = v[0]; MAX_EXP = v[1]; } else { intCheck(v, -MAX, MAX, p); if (v) { MIN_EXP = -(MAX_EXP = v < 0 ? -v : v); } else { throw Error (bignumberError + p + ' cannot be zero: ' + v); } } } // CRYPTO {boolean} true or false. // '[BigNumber Error] CRYPTO not true or false: {v}' // '[BigNumber Error] crypto unavailable' if (obj.hasOwnProperty(p = 'CRYPTO')) { v = obj[p]; if (v === !!v) { if (v) { if (typeof crypto != 'undefined' && crypto && (crypto.getRandomValues || crypto.randomBytes)) { CRYPTO = v; } else { CRYPTO = !v; throw Error (bignumberError + 'crypto unavailable'); } } else { CRYPTO = v; } } else { throw Error (bignumberError + p + ' not true or false: ' + v); } } // MODULO_MODE {number} Integer, 0 to 9 inclusive. // '[BigNumber Error] MODULO_MODE {not a primitive number|not an integer|out of range}: {v}' if (obj.hasOwnProperty(p = 'MODULO_MODE')) { v = obj[p]; intCheck(v, 0, 9, p); MODULO_MODE = v; } // POW_PRECISION {number} Integer, 0 to MAX inclusive. 
// '[BigNumber Error] POW_PRECISION {not a primitive number|not an integer|out of range}: {v}' if (obj.hasOwnProperty(p = 'POW_PRECISION')) { v = obj[p]; intCheck(v, 0, MAX, p); POW_PRECISION = v; } // FORMAT {object} // '[BigNumber Error] FORMAT not an object: {v}' if (obj.hasOwnProperty(p = 'FORMAT')) { v = obj[p]; if (typeof v == 'object') FORMAT = v; else throw Error (bignumberError + p + ' not an object: ' + v); } // ALPHABET {string} // '[BigNumber Error] ALPHABET invalid: {v}' if (obj.hasOwnProperty(p = 'ALPHABET')) { v = obj[p]; // Disallow if less than two characters, // or if it contains '+', '-', '.', whitespace, or a repeated character. if (typeof v == 'string' && !/^.?$|[+\-.\s]|(.).*\1/.test(v)) { alphabetHasNormalDecimalDigits = v.slice(0, 10) == '0123456789'; ALPHABET = v; } else { throw Error (bignumberError + p + ' invalid: ' + v); } } } else { // '[BigNumber Error] Object expected: {v}' throw Error (bignumberError + 'Object expected: ' + obj); } } return { DECIMAL_PLACES: DECIMAL_PLACES, ROUNDING_MODE: ROUNDING_MODE, EXPONENTIAL_AT: [TO_EXP_NEG, TO_EXP_POS], RANGE: [MIN_EXP, MAX_EXP], CRYPTO: CRYPTO, MODULO_MODE: MODULO_MODE, POW_PRECISION: POW_PRECISION, FORMAT: FORMAT, ALPHABET: ALPHABET }; }; /* * Return true if v is a BigNumber instance, otherwise return false. * * If BigNumber.DEBUG is true, throw if a BigNumber instance is not well-formed. * * v {any} * * '[BigNumber Error] Invalid BigNumber: {v}' */ BigNumber.isBigNumber = function (v) { if (!v || v._isBigNumber !== true) return false; if (!BigNumber.DEBUG) return true; var i, n, c = v.c, e = v.e, s = v.s; out: if ({}.toString.call(c) == '[object Array]') { if ((s === 1 || s === -1) && e >= -MAX && e <= MAX && e === mathfloor(e)) { // If the first element is zero, the BigNumber value must be zero. if (c[0] === 0) { if (e === 0 && c.length === 1) return true; break out; } // Calculate number of digits that c[0] should have, based on the exponent. i = (e + 1) % LOG_BASE; if (i < 1) i += LOG_BASE; // Calculate number of digits of c[0]. //if (Math.ceil(Math.log(c[0] + 1) / Math.LN10) == i) { if (String(c[0]).length == i) { for (i = 0; i < c.length; i++) { n = c[i]; if (n < 0 || n >= BASE || n !== mathfloor(n)) break out; } // Last element cannot be zero, unless it is the only element. if (n !== 0) return true; } } // Infinity/NaN } else if (c === null && e === null && (s === null || s === 1 || s === -1)) { return true; } throw Error (bignumberError + 'Invalid BigNumber: ' + v); }; /* * Return a new BigNumber whose value is the maximum of the arguments. * * arguments {number|string|BigNumber} */ BigNumber.maximum = BigNumber.max = function () { return maxOrMin(arguments, -1); }; /* * Return a new BigNumber whose value is the minimum of the arguments. * * arguments {number|string|BigNumber} */ BigNumber.minimum = BigNumber.min = function () { return maxOrMin(arguments, 1); }; /* * Return a new BigNumber with a random value equal to or greater than 0 and less than 1, * and with dp, or DECIMAL_PLACES if dp is omitted, decimal places (or less if trailing * zeros are produced). * * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. * * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp}' * '[BigNumber Error] crypto unavailable' */ BigNumber.random = (function () { var pow2_53 = 0x20000000000000; // Return a 53 bit integer n, where 0 <= n < 9007199254740992. // Check if Math.random() produces more than 32 bits of randomness. 
// If it does, assume at least 53 bits are produced, otherwise assume at least 30 bits. // 0x40000000 is 2^30, 0x800000 is 2^23, 0x1fffff is 2^21 - 1. var random53bitInt = (Math.random() * pow2_53) & 0x1fffff ? function () { return mathfloor(Math.random() * pow2_53); } : function () { return ((Math.random() * 0x40000000 | 0) * 0x800000) + (Math.random() * 0x800000 | 0); }; return function (dp) { var a, b, e, k, v, i = 0, c = [], rand = new BigNumber(ONE); if (dp == null) dp = DECIMAL_PLACES; else intCheck(dp, 0, MAX); k = mathceil(dp / LOG_BASE); if (CRYPTO) { // Browsers supporting crypto.getRandomValues. if (crypto.getRandomValues) { a = crypto.getRandomValues(new Uint32Array(k *= 2)); for (; i < k;) { // 53 bits: // ((Math.pow(2, 32) - 1) * Math.pow(2, 21)).toString(2) // 11111 11111111 11111111 11111111 11100000 00000000 00000000 // ((Math.pow(2, 32) - 1) >>> 11).toString(2) // 11111 11111111 11111111 // 0x20000 is 2^21. v = a[i] * 0x20000 + (a[i + 1] >>> 11); // Rejection sampling: // 0 <= v < 9007199254740992 // Probability that v >= 9e15, is // 7199254740992 / 9007199254740992 ~= 0.0008, i.e. 1 in 1251 if (v >= 9e15) { b = crypto.getRandomValues(new Uint32Array(2)); a[i] = b[0]; a[i + 1] = b[1]; } else { // 0 <= v <= 8999999999999999 // 0 <= (v % 1e14) <= 99999999999999 c.push(v % 1e14); i += 2; } } i = k / 2; // Node.js supporting crypto.randomBytes. } else if (crypto.randomBytes) { // buffer a = crypto.randomBytes(k *= 7); for (; i < k;) { // 0x1000000000000 is 2^48, 0x10000000000 is 2^40 // 0x100000000 is 2^32, 0x1000000 is 2^24 // 11111 11111111 11111111 11111111 11111111 11111111 11111111 // 0 <= v < 9007199254740992 v = ((a[i] & 31) * 0x1000000000000) + (a[i + 1] * 0x10000000000) + (a[i + 2] * 0x100000000) + (a[i + 3] * 0x1000000) + (a[i + 4] << 16) + (a[i + 5] << 8) + a[i + 6]; if (v >= 9e15) { crypto.randomBytes(7).copy(a, i); } else { // 0 <= (v % 1e14) <= 99999999999999 c.push(v % 1e14); i += 7; } } i = k / 7; } else { CRYPTO = false; throw Error (bignumberError + 'crypto unavailable'); } } // Use Math.random. if (!CRYPTO) { for (; i < k;) { v = random53bitInt(); if (v < 9e15) c[i++] = v % 1e14; } } k = c[--i]; dp %= LOG_BASE; // Convert trailing digits to zeros according to dp. if (k && dp) { v = POWS_TEN[LOG_BASE - dp]; c[i] = mathfloor(k / v) * v; } // Remove trailing elements which are zero. for (; c[i] === 0; c.pop(), i--); // Zero? if (i < 0) { c = [e = 0]; } else { // Remove leading elements which are zero and adjust exponent accordingly. for (e = -1 ; c[0] === 0; c.splice(0, 1), e -= LOG_BASE); // Count the digits of the first element of c to determine leading zeros, and... for (i = 1, v = c[0]; v >= 10; v /= 10, i++); // adjust the exponent accordingly. if (i < LOG_BASE) e -= LOG_BASE - i; } rand.e = e; rand.c = c; return rand; }; })(); /* * Return a BigNumber whose value is the sum of the arguments. * * arguments {number|string|BigNumber} */ BigNumber.sum = function () { var i = 1, args = arguments, sum = new BigNumber(args[0]); for (; i < args.length;) sum = sum.plus(args[i++]); return sum; }; // PRIVATE FUNCTIONS // Called by BigNumber and BigNumber.prototype.toString. convertBase = (function () { var decimal = '0123456789'; /* * Convert string of baseIn to an array of numbers of baseOut. * Eg. toBaseOut('255', 10, 16) returns [15, 15]. * Eg. toBaseOut('ff', 16, 10) returns [2, 5, 5]. 
*/ function toBaseOut(str, baseIn, baseOut, alphabet) { var j, arr = [0], arrL, i = 0, len = str.length; for (; i < len;) { for (arrL = arr.length; arrL--; arr[arrL] *= baseIn); arr[0] += alphabet.indexOf(str.charAt(i++)); for (j = 0; j < arr.length; j++) { if (arr[j] > baseOut - 1) { if (arr[j + 1] == null) arr[j + 1] = 0; arr[j + 1] += arr[j] / baseOut | 0; arr[j] %= baseOut; } } } return arr.reverse(); } // Convert a numeric string of baseIn to a numeric string of baseOut. // If the caller is toString, we are converting from base 10 to baseOut. // If the caller is BigNumber, we are converting from baseIn to base 10. return function (str, baseIn, baseOut, sign, callerIsToString) { var alphabet, d, e, k, r, x, xc, y, i = str.indexOf('.'), dp = DECIMAL_PLACES, rm = ROUNDING_MODE; // Non-integer. if (i >= 0) { k = POW_PRECISION; // Unlimited precision. POW_PRECISION = 0; str = str.replace('.', ''); y = new BigNumber(baseIn); x = y.pow(str.length - i); POW_PRECISION = k; // Convert str as if an integer, then restore the fraction part by dividing the // result by its base raised to a power. y.c = toBaseOut(toFixedPoint(coeffToString(x.c), x.e, '0'), 10, baseOut, decimal); y.e = y.c.length; } // Convert the number as integer. xc = toBaseOut(str, baseIn, baseOut, callerIsToString ? (alphabet = ALPHABET, decimal) : (alphabet = decimal, ALPHABET)); // xc now represents str as an integer and converted to baseOut. e is the exponent. e = k = xc.length; // Remove trailing zeros. for (; xc[--k] == 0; xc.pop()); // Zero? if (!xc[0]) return alphabet.charAt(0); // Does str represent an integer? If so, no need for the division. if (i < 0) { --e; } else { x.c = xc; x.e = e; // The sign is needed for correct rounding. x.s = sign; x = div(x, y, dp, rm, baseOut); xc = x.c; r = x.r; e = x.e; } // xc now represents str converted to baseOut. // The index of the rounding digit. d = e + dp + 1; // The rounding digit: the digit to the right of the digit that may be rounded up. i = xc[d]; // Look at the rounding digits and mode to determine whether to round up. k = baseOut / 2; r = r || d < 0 || xc[d + 1] != null; r = rm < 4 ? (i != null || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) : i > k || i == k &&(rm == 4 || r || rm == 6 && xc[d - 1] & 1 || rm == (x.s < 0 ? 8 : 7)); // If the index of the rounding digit is not greater than zero, or xc represents // zero, then the result of the base conversion is zero or, if rounding up, a value // such as 0.00001. if (d < 1 || !xc[0]) { // 1^-dp or 0 str = r ? toFixedPoint(alphabet.charAt(1), -dp, alphabet.charAt(0)) : alphabet.charAt(0); } else { // Truncate xc to the required number of decimal places. xc.length = d; // Round up? if (r) { // Rounding up may mean the previous digit has to be rounded up and so on. for (--baseOut; ++xc[--d] > baseOut;) { xc[d] = 0; if (!d) { ++e; xc = [1].concat(xc); } } } // Determine trailing zeros. for (k = xc.length; !xc[--k];); // E.g. [4, 11, 15] becomes 4bf. for (i = 0, str = ''; i <= k; str += alphabet.charAt(xc[i++])); // Add leading zeros, decimal point and trailing zeros as required. str = toFixedPoint(str, e, alphabet.charAt(0)); } // The caller will add the sign. return str; }; })(); // Perform division in the specified base. Called by div and convertBase. div = (function () { // Assume non-zero x and k. 
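// multiply() below does long multiplication of the base-1e14 limb array x by a
// small integer k: each limb and k are split into high/low halves around
// SQRT_BASE (1e7) so intermediate products stay inside the safe-integer range,
// and the carry is propagated limb by limb.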
function multiply(x, k, base) { var m, temp, xlo, xhi, carry = 0, i = x.length, klo = k % SQRT_BASE, khi = k / SQRT_BASE | 0; for (x = x.slice(); i--;) { xlo = x[i] % SQRT_BASE; xhi = x[i] / SQRT_BASE | 0; m = khi * xlo + xhi * klo; temp = klo * xlo + ((m % SQRT_BASE) * SQRT_BASE) + carry; carry = (temp / base | 0) + (m / SQRT_BASE | 0) + khi * xhi; x[i] = temp % base; } if (carry) x = [carry].concat(x); return x; } function compare(a, b, aL, bL) { var i, cmp; if (aL != bL) { cmp = aL > bL ? 1 : -1; } else { for (i = cmp = 0; i < aL; i++) { if (a[i] != b[i]) { cmp = a[i] > b[i] ? 1 : -1; break; } } } return cmp; } function subtract(a, b, aL, base) { var i = 0; // Subtract b from a. for (; aL--;) { a[aL] -= i; i = a[aL] < b[aL] ? 1 : 0; a[aL] = i * base + a[aL] - b[aL]; } // Remove leading zeros. for (; !a[0] && a.length > 1; a.splice(0, 1)); } // x: dividend, y: divisor. return function (x, y, dp, rm, base) { var cmp, e, i, more, n, prod, prodL, q, qc, rem, remL, rem0, xi, xL, yc0, yL, yz, s = x.s == y.s ? 1 : -1, xc = x.c, yc = y.c; // Either NaN, Infinity or 0? if (!xc || !xc[0] || !yc || !yc[0]) { return new BigNumber( // Return NaN if either NaN, or both Infinity or 0. !x.s || !y.s || (xc ? yc && xc[0] == yc[0] : !yc) ? NaN : // Return ±0 if x is ±0 or y is ±Infinity, or return ±Infinity as y is ±0. xc && xc[0] == 0 || !yc ? s * 0 : s / 0 ); } q = new BigNumber(s); qc = q.c = []; e = x.e - y.e; s = dp + e + 1; if (!base) { base = BASE; e = bitFloor(x.e / LOG_BASE) - bitFloor(y.e / LOG_BASE); s = s / LOG_BASE | 0; } // Result exponent may be one less then the current value of e. // The coefficients of the BigNumbers from convertBase may have trailing zeros. for (i = 0; yc[i] == (xc[i] || 0); i++); if (yc[i] > (xc[i] || 0)) e--; if (s < 0) { qc.push(1); more = true; } else { xL = xc.length; yL = yc.length; i = 0; s += 2; // Normalise xc and yc so highest order digit of yc is >= base / 2. n = mathfloor(base / (yc[0] + 1)); // Not necessary, but to handle odd bases where yc[0] == (base / 2) - 1. // if (n > 1 || n++ == 1 && yc[0] < base / 2) { if (n > 1) { yc = multiply(yc, n, base); xc = multiply(xc, n, base); yL = yc.length; xL = xc.length; } xi = yL; rem = xc.slice(0, yL); remL = rem.length; // Add zeros to make remainder as long as divisor. for (; remL < yL; rem[remL++] = 0); yz = yc.slice(); yz = [0].concat(yz); yc0 = yc[0]; if (yc[1] >= base / 2) yc0++; // Not necessary, but to prevent trial digit n > base, when using base 3. // else if (base == 3 && yc0 == 1) yc0 = 1 + 1e-15; do { n = 0; // Compare divisor and remainder. cmp = compare(yc, rem, yL, remL); // If divisor < remainder. if (cmp < 0) { // Calculate trial digit, n. rem0 = rem[0]; if (yL != remL) rem0 = rem0 * base + (rem[1] || 0); // n is how many times the divisor goes into the current remainder. n = mathfloor(rem0 / yc0); // Algorithm: // product = divisor multiplied by trial digit (n). // Compare product and remainder. // If product is greater than remainder: // Subtract divisor from product, decrement trial digit. // Subtract product from remainder. // If product was less than remainder at the last compare: // Compare new remainder and divisor. // If remainder is greater than divisor: // Subtract divisor from remainder, increment trial digit. if (n > 1) { // n may be > base only when base is 3. if (n >= base) n = base - 1; // product = divisor * trial digit. prod = multiply(yc, n, base); prodL = prod.length; remL = rem.length; // Compare product and remainder. // If product > remainder then trial digit n too high. 
// n is 1 too high about 5% of the time, and is not known to have // ever been more than 1 too high. while (compare(prod, rem, prodL, remL) == 1) { n--; // Subtract divisor from product. subtract(prod, yL < prodL ? yz : yc, prodL, base); prodL = prod.length; cmp = 1; } } else { // n is 0 or 1, cmp is -1. // If n is 0, there is no need to compare yc and rem again below, // so change cmp to 1 to avoid it. // If n is 1, leave cmp as -1, so yc and rem are compared again. if (n == 0) { // divisor < remainder, so n must be at least 1. cmp = n = 1; } // product = divisor prod = yc.slice(); prodL = prod.length; } if (prodL < remL) prod = [0].concat(prod); // Subtract product from remainder. subtract(rem, prod, remL, base); remL = rem.length; // If product was < remainder. if (cmp == -1) { // Compare divisor and new remainder. // If divisor < new remainder, subtract divisor from remainder. // Trial digit n too low. // n is 1 too low about 5% of the time, and very rarely 2 too low. while (compare(yc, rem, yL, remL) < 1) { n++; // Subtract divisor from remainder. subtract(rem, yL < remL ? yz : yc, remL, base); remL = rem.length; } } } else if (cmp === 0) { n++; rem = [0]; } // else cmp === 1 and n will be 0 // Add the next digit, n, to the result array. qc[i++] = n; // Update the remainder. if (rem[0]) { rem[remL++] = xc[xi] || 0; } else { rem = [xc[xi]]; remL = 1; } } while ((xi++ < xL || rem[0] != null) && s--); more = rem[0] != null; // Leading zero? if (!qc[0]) qc.splice(0, 1); } if (base == BASE) { // To calculate q.e, first get the number of digits of qc[0]. for (i = 1, s = qc[0]; s >= 10; s /= 10, i++); round(q, dp + (q.e = i + e * LOG_BASE - 1) + 1, rm, more); // Caller is convertBase. } else { q.e = e; q.r = +more; } return q; }; })(); /* * Return a string representing the value of BigNumber n in fixed-point or exponential * notation rounded to the specified decimal places or significant digits. * * n: a BigNumber. * i: the index of the last digit required (i.e. the digit that may be rounded up). * rm: the rounding mode. * id: 1 (toExponential) or 2 (toPrecision). */ function format(n, i, rm, id) { var c0, e, ne, len, str; if (rm == null) rm = ROUNDING_MODE; else intCheck(rm, 0, 8); if (!n.c) return n.toString(); c0 = n.c[0]; ne = n.e; if (i == null) { str = coeffToString(n.c); str = id == 1 || id == 2 && (ne <= TO_EXP_NEG || ne >= TO_EXP_POS) ? toExponential(str, ne) : toFixedPoint(str, ne, '0'); } else { n = round(new BigNumber(n), i, rm); // n.e may have changed if the value was rounded up. e = n.e; str = coeffToString(n.c); len = str.length; // toPrecision returns exponential notation if the number of significant digits // specified is less than the number of digits necessary to represent the integer // part of the value in fixed-point notation. // Exponential notation. if (id == 1 || id == 2 && (i <= e || e <= TO_EXP_NEG)) { // Append zeros? for (; len < i; str += '0', len++); str = toExponential(str, e); // Fixed-point notation. } else { i -= ne; str = toFixedPoint(str, e, '0'); // Append zeros? if (e + 1 > len) { if (--i > 0) for (str += '.'; i--; str += '0'); } else { i += e - len; if (i > 0) { if (e + 1 == len) str += '.'; for (; i--; str += '0'); } } } } return n.s < 0 && c0 ? '-' + str : str; } // Handle BigNumber.max and BigNumber.min. // If any number is NaN, return NaN. 
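// Illustrative example: BigNumber.max(2, '3.5', 1) yields a BigNumber equal to
// 3.5, while BigNumber.min(2, NaN) yields NaN.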
function maxOrMin(args, n) { var k, y, i = 1, x = new BigNumber(args[0]); for (; i < args.length; i++) { y = new BigNumber(args[i]); if (!y.s || (k = compare(x, y)) === n || k === 0 && x.s === n) { x = y; } } return x; } /* * Strip trailing zeros, calculate base 10 exponent and check against MIN_EXP and MAX_EXP. * Called by minus, plus and times. */ function normalise(n, c, e) { var i = 1, j = c.length; // Remove trailing zeros. for (; !c[--j]; c.pop()); // Calculate the base 10 exponent. First get the number of digits of c[0]. for (j = c[0]; j >= 10; j /= 10, i++); // Overflow? if ((e = i + e * LOG_BASE - 1) > MAX_EXP) { // Infinity. n.c = n.e = null; // Underflow? } else if (e < MIN_EXP) { // Zero. n.c = [n.e = 0]; } else { n.e = e; n.c = c; } return n; } // Handle values that fail the validity test in BigNumber. parseNumeric = (function () { var basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i, dotAfter = /^([^.]+)\.$/, dotBefore = /^\.([^.]+)$/, isInfinityOrNaN = /^-?(Infinity|NaN)$/, whitespaceOrPlus = /^\s*\+(?=[\w.])|^\s+|\s+$/g; return function (x, str, isNum, b) { var base, s = isNum ? str : str.replace(whitespaceOrPlus, ''); // No exception on ±Infinity or NaN. if (isInfinityOrNaN.test(s)) { x.s = isNaN(s) ? null : s < 0 ? -1 : 1; } else { if (!isNum) { // basePrefix = /^(-?)0([xbo])(?=\w[\w.]*$)/i s = s.replace(basePrefix, function (m, p1, p2) { base = (p2 = p2.toLowerCase()) == 'x' ? 16 : p2 == 'b' ? 2 : 8; return !b || b == base ? p1 : m; }); if (b) { base = b; // E.g. '1.' to '1', '.1' to '0.1' s = s.replace(dotAfter, '$1').replace(dotBefore, '0.$1'); } if (str != s) return new BigNumber(s, base); } // '[BigNumber Error] Not a number: {n}' // '[BigNumber Error] Not a base {b} number: {n}' if (BigNumber.DEBUG) { throw Error (bignumberError + 'Not a' + (b ? ' base ' + b : '') + ' number: ' + str); } // NaN x.s = null; } x.c = x.e = null; } })(); /* * Round x to sd significant digits using rounding mode rm. Check for over/under-flow. * If r is truthy, it is known that there are more digits after the rounding digit. */ function round(x, sd, rm, r) { var d, i, j, k, n, ni, rd, xc = x.c, pows10 = POWS_TEN; // if x is not Infinity or NaN... if (xc) { // rd is the rounding digit, i.e. the digit after the digit that may be rounded up. // n is a base 1e14 number, the value of the element of array x.c containing rd. // ni is the index of n within x.c. // d is the number of digits of n. // i is the index of rd within n including leading zeros. // j is the actual index of rd within n (if < 0, rd is a leading zero). out: { // Get the number of digits of the first element of xc. for (d = 1, k = xc[0]; k >= 10; k /= 10, d++); i = sd - d; // If the rounding digit is in the first element of xc... if (i < 0) { i += LOG_BASE; j = sd; n = xc[ni = 0]; // Get the rounding digit at index j of n. rd = mathfloor(n / pows10[d - j - 1] % 10); } else { ni = mathceil((i + 1) / LOG_BASE); if (ni >= xc.length) { if (r) { // Needed by sqrt. for (; xc.length <= ni; xc.push(0)); n = rd = 0; d = 1; i %= LOG_BASE; j = i - LOG_BASE + 1; } else { break out; } } else { n = k = xc[ni]; // Get the number of digits of n. for (d = 1; k >= 10; k /= 10, d++); // Get the index of rd within n. i %= LOG_BASE; // Get the index of rd within n, adjusted for leading zeros. // The number of leading zeros of n is given by LOG_BASE - d. j = i - LOG_BASE + d; // Get the rounding digit at index j of n. rd = j < 0 ? 0 : mathfloor(n / pows10[d - j - 1] % 10); } } r = r || sd < 0 || // Are there any non-zero digits after the rounding digit? 
// The expression n % pows10[d - j - 1] returns all digits of n to the right // of the digit at j, e.g. if n is 908714 and j is 2, the expression gives 714. xc[ni + 1] != null || (j < 0 ? n : n % pows10[d - j - 1]); r = rm < 4 ? (rd || r) && (rm == 0 || rm == (x.s < 0 ? 3 : 2)) : rd > 5 || rd == 5 && (rm == 4 || r || rm == 6 && // Check whether the digit to the left of the rounding digit is odd. ((i > 0 ? j > 0 ? n / pows10[d - j] : 0 : xc[ni - 1]) % 10) & 1 || rm == (x.s < 0 ? 8 : 7)); if (sd < 1 || !xc[0]) { xc.length = 0; if (r) { // Convert sd to decimal places. sd -= x.e + 1; // 1, 0.1, 0.01, 0.001, 0.0001 etc. xc[0] = pows10[(LOG_BASE - sd % LOG_BASE) % LOG_BASE]; x.e = -sd || 0; } else { // Zero. xc[0] = x.e = 0; } return x; } // Remove excess digits. if (i == 0) { xc.length = ni; k = 1; ni--; } else { xc.length = ni + 1; k = pows10[LOG_BASE - i]; // E.g. 56700 becomes 56000 if 7 is the rounding digit. // j > 0 means i > number of leading zeros of n. xc[ni] = j > 0 ? mathfloor(n / pows10[d - j] % pows10[j]) * k : 0; } // Round up? if (r) { for (; ;) { // If the digit to be rounded up is in the first element of xc... if (ni == 0) { // i will be the length of xc[0] before k is added. for (i = 1, j = xc[0]; j >= 10; j /= 10, i++); j = xc[0] += k; for (k = 1; j >= 10; j /= 10, k++); // if i != k the length has increased. if (i != k) { x.e++; if (xc[0] == BASE) xc[0] = 1; } break; } else { xc[ni] += k; if (xc[ni] != BASE) break; xc[ni--] = 0; k = 1; } } } // Remove trailing zeros. for (i = xc.length; xc[--i] === 0; xc.pop()); } // Overflow? Infinity. if (x.e > MAX_EXP) { x.c = x.e = null; // Underflow? Zero. } else if (x.e < MIN_EXP) { x.c = [x.e = 0]; } } return x; } function valueOf(n) { var str, e = n.e; if (e === null) return n.toString(); str = coeffToString(n.c); str = e <= TO_EXP_NEG || e >= TO_EXP_POS ? toExponential(str, e) : toFixedPoint(str, e, '0'); return n.s < 0 ? '-' + str : str; } // PROTOTYPE/INSTANCE METHODS /* * Return a new BigNumber whose value is the absolute value of this BigNumber. */ P.absoluteValue = P.abs = function () { var x = new BigNumber(this); if (x.s < 0) x.s = 1; return x; }; /* * Return * 1 if the value of this BigNumber is greater than the value of BigNumber(y, b), * -1 if the value of this BigNumber is less than the value of BigNumber(y, b), * 0 if they have the same value, * or null if the value of either is NaN. */ P.comparedTo = function (y, b) { return compare(this, new BigNumber(y, b)); }; /* * If dp is undefined or null or true or false, return the number of decimal places of the * value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. * * Otherwise, if dp is a number, return a new BigNumber whose value is the value of this * BigNumber rounded to a maximum of dp decimal places using rounding mode rm, or * ROUNDING_MODE if rm is omitted. * * [dp] {number} Decimal places: integer, 0 to MAX inclusive. * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. * * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' */ P.decimalPlaces = P.dp = function (dp, rm) { var c, n, v, x = this; if (dp != null) { intCheck(dp, 0, MAX); if (rm == null) rm = ROUNDING_MODE; else intCheck(rm, 0, 8); return round(new BigNumber(x), dp + x.e + 1, rm); } if (!(c = x.c)) return null; n = ((v = c.length - 1) - bitFloor(this.e / LOG_BASE)) * LOG_BASE; // Subtract the number of trailing zeros of the last number. 
if (v = c[v]) for (; v % 10 == 0; v /= 10, n--); if (n < 0) n = 0; return n; }; /* * n / 0 = I * n / N = N * n / I = 0 * 0 / n = 0 * 0 / 0 = N * 0 / N = N * 0 / I = 0 * N / n = N * N / 0 = N * N / N = N * N / I = N * I / n = I * I / 0 = I * I / N = N * I / I = N * * Return a new BigNumber whose value is the value of this BigNumber divided by the value of * BigNumber(y, b), rounded according to DECIMAL_PLACES and ROUNDING_MODE. */ P.dividedBy = P.div = function (y, b) { return div(this, new BigNumber(y, b), DECIMAL_PLACES, ROUNDING_MODE); }; /* * Return a new BigNumber whose value is the integer part of dividing the value of this * BigNumber by the value of BigNumber(y, b). */ P.dividedToIntegerBy = P.idiv = function (y, b) { return div(this, new BigNumber(y, b), 0, 1); }; /* * Return a BigNumber whose value is the value of this BigNumber exponentiated by n. * * If m is present, return the result modulo m. * If n is negative round according to DECIMAL_PLACES and ROUNDING_MODE. * If POW_PRECISION is non-zero and m is not present, round to POW_PRECISION using ROUNDING_MODE. * * The modular power operation works efficiently when x, n, and m are integers, otherwise it * is equivalent to calculating x.exponentiatedBy(n).modulo(m) with a POW_PRECISION of 0. * * n {number|string|BigNumber} The exponent. An integer. * [m] {number|string|BigNumber} The modulus. * * '[BigNumber Error] Exponent not an integer: {n}' */ P.exponentiatedBy = P.pow = function (n, m) { var half, isModExp, i, k, more, nIsBig, nIsNeg, nIsOdd, y, x = this; n = new BigNumber(n); // Allow NaN and ±Infinity, but not other non-integers. if (n.c && !n.isInteger()) { throw Error (bignumberError + 'Exponent not an integer: ' + valueOf(n)); } if (m != null) m = new BigNumber(m); // Exponent of MAX_SAFE_INTEGER is 15. nIsBig = n.e > 14; // If x is NaN, ±Infinity, ±0 or ±1, or n is ±Infinity, NaN or ±0. if (!x.c || !x.c[0] || x.c[0] == 1 && !x.e && x.c.length == 1 || !n.c || !n.c[0]) { // The sign of the result of pow when x is negative depends on the evenness of n. // If +n overflows to ±Infinity, the evenness of n would be not be known. y = new BigNumber(Math.pow(+valueOf(x), nIsBig ? n.s * (2 - isOdd(n)) : +valueOf(n))); return m ? y.mod(m) : y; } nIsNeg = n.s < 0; if (m) { // x % m returns NaN if abs(m) is zero, or m is NaN. if (m.c ? !m.c[0] : !m.s) return new BigNumber(NaN); isModExp = !nIsNeg && x.isInteger() && m.isInteger(); if (isModExp) x = x.mod(m); // Overflow to ±Infinity: >=2**1e10 or >=1.0000024**1e15. // Underflow to ±0: <=0.79**1e10 or <=0.9999975**1e15. } else if (n.e > 9 && (x.e > 0 || x.e < -1 || (x.e == 0 // [1, 240000000] ? x.c[0] > 1 || nIsBig && x.c[1] >= 24e7 // [80000000000000] [99999750000000] : x.c[0] < 8e13 || nIsBig && x.c[0] <= 9999975e7))) { // If x is negative and n is odd, k = -0, else k = 0. k = x.s < 0 && isOdd(n) ? -0 : 0; // If x >= 1, k = ±Infinity. if (x.e > -1) k = 1 / k; // If n is negative return ±0, else return ±Infinity. return new BigNumber(nIsNeg ? 1 / k : k); } else if (POW_PRECISION) { // Truncating each coefficient array to a length of k after each multiplication // equates to truncating significant digits to POW_PRECISION + [28, 41], // i.e. there will be a minimum of 28 guard digits retained. k = mathceil(POW_PRECISION / LOG_BASE + 2); } if (nIsBig) { half = new BigNumber(0.5); if (nIsNeg) n.s = 1; nIsOdd = isOdd(n); } else { i = Math.abs(+valueOf(n)); nIsOdd = i % 2; } y = new BigNumber(ONE); // Performs 54 loop iterations for n of 9007199254740991. 
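// Square-and-multiply: y picks up a factor of x whenever the remaining exponent
// is odd, the exponent is halved each pass (as a plain number, or via BigNumber
// halving when it is too large for one), and x is squared for the next pass.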
for (; ;) { if (nIsOdd) { y = y.times(x); if (!y.c) break; if (k) { if (y.c.length > k) y.c.length = k; } else if (isModExp) { y = y.mod(m); //y = y.minus(div(y, m, 0, MODULO_MODE).times(m)); } } if (i) { i = mathfloor(i / 2); if (i === 0) break; nIsOdd = i % 2; } else { n = n.times(half); round(n, n.e + 1, 1); if (n.e > 14) { nIsOdd = isOdd(n); } else { i = +valueOf(n); if (i === 0) break; nIsOdd = i % 2; } } x = x.times(x); if (k) { if (x.c && x.c.length > k) x.c.length = k; } else if (isModExp) { x = x.mod(m); //x = x.minus(div(x, m, 0, MODULO_MODE).times(m)); } } if (isModExp) return y; if (nIsNeg) y = ONE.div(y); return m ? y.mod(m) : k ? round(y, POW_PRECISION, ROUNDING_MODE, more) : y; }; /* * Return a new BigNumber whose value is the value of this BigNumber rounded to an integer * using rounding mode rm, or ROUNDING_MODE if rm is omitted. * * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. * * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {rm}' */ P.integerValue = function (rm) { var n = new BigNumber(this); if (rm == null) rm = ROUNDING_MODE; else intCheck(rm, 0, 8); return round(n, n.e + 1, rm); }; /* * Return true if the value of this BigNumber is equal to the value of BigNumber(y, b), * otherwise return false. */ P.isEqualTo = P.eq = function (y, b) { return compare(this, new BigNumber(y, b)) === 0; }; /* * Return true if the value of this BigNumber is a finite number, otherwise return false. */ P.isFinite = function () { return !!this.c; }; /* * Return true if the value of this BigNumber is greater than the value of BigNumber(y, b), * otherwise return false. */ P.isGreaterThan = P.gt = function (y, b) { return compare(this, new BigNumber(y, b)) > 0; }; /* * Return true if the value of this BigNumber is greater than or equal to the value of * BigNumber(y, b), otherwise return false. */ P.isGreaterThanOrEqualTo = P.gte = function (y, b) { return (b = compare(this, new BigNumber(y, b))) === 1 || b === 0; }; /* * Return true if the value of this BigNumber is an integer, otherwise return false. */ P.isInteger = function () { return !!this.c && bitFloor(this.e / LOG_BASE) > this.c.length - 2; }; /* * Return true if the value of this BigNumber is less than the value of BigNumber(y, b), * otherwise return false. */ P.isLessThan = P.lt = function (y, b) { return compare(this, new BigNumber(y, b)) < 0; }; /* * Return true if the value of this BigNumber is less than or equal to the value of * BigNumber(y, b), otherwise return false. */ P.isLessThanOrEqualTo = P.lte = function (y, b) { return (b = compare(this, new BigNumber(y, b))) === -1 || b === 0; }; /* * Return true if the value of this BigNumber is NaN, otherwise return false. */ P.isNaN = function () { return !this.s; }; /* * Return true if the value of this BigNumber is negative, otherwise return false. */ P.isNegative = function () { return this.s < 0; }; /* * Return true if the value of this BigNumber is positive, otherwise return false. */ P.isPositive = function () { return this.s > 0; }; /* * Return true if the value of this BigNumber is 0 or -0, otherwise return false. */ P.isZero = function () { return !!this.c && this.c[0] == 0; }; /* * n - 0 = n * n - N = N * n - I = -I * 0 - n = -n * 0 - 0 = 0 * 0 - N = N * 0 - I = -I * N - n = N * N - 0 = N * N - N = N * N - I = N * I - n = I * I - 0 = I * I - N = N * I - I = N * * Return a new BigNumber whose value is the value of this BigNumber minus the value of * BigNumber(y, b). 
*/ P.minus = function (y, b) { var i, j, t, xLTy, x = this, a = x.s; y = new BigNumber(y, b); b = y.s; // Either NaN? if (!a || !b) return new BigNumber(NaN); // Signs differ? if (a != b) { y.s = -b; return x.plus(y); } var xe = x.e / LOG_BASE, ye = y.e / LOG_BASE, xc = x.c, yc = y.c; if (!xe || !ye) { // Either Infinity? if (!xc || !yc) return xc ? (y.s = -b, y) : new BigNumber(yc ? x : NaN); // Either zero? if (!xc[0] || !yc[0]) { // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. return yc[0] ? (y.s = -b, y) : new BigNumber(xc[0] ? x : // IEEE 754 (2008) 6.3: n - n = -0 when rounding to -Infinity ROUNDING_MODE == 3 ? -0 : 0); } } xe = bitFloor(xe); ye = bitFloor(ye); xc = xc.slice(); // Determine which is the bigger number. if (a = xe - ye) { if (xLTy = a < 0) { a = -a; t = xc; } else { ye = xe; t = yc; } t.reverse(); // Prepend zeros to equalise exponents. for (b = a; b--; t.push(0)); t.reverse(); } else { // Exponents equal. Check digit by digit. j = (xLTy = (a = xc.length) < (b = yc.length)) ? a : b; for (a = b = 0; b < j; b++) { if (xc[b] != yc[b]) { xLTy = xc[b] < yc[b]; break; } } } // x < y? Point xc to the array of the bigger number. if (xLTy) { t = xc; xc = yc; yc = t; y.s = -y.s; } b = (j = yc.length) - (i = xc.length); // Append zeros to xc if shorter. // No need to add zeros to yc if shorter as subtract only needs to start at yc.length. if (b > 0) for (; b--; xc[i++] = 0); b = BASE - 1; // Subtract yc from xc. for (; j > a;) { if (xc[--j] < yc[j]) { for (i = j; i && !xc[--i]; xc[i] = b); --xc[i]; xc[j] += BASE; } xc[j] -= yc[j]; } // Remove leading zeros and adjust exponent accordingly. for (; xc[0] == 0; xc.splice(0, 1), --ye); // Zero? if (!xc[0]) { // Following IEEE 754 (2008) 6.3, // n - n = +0 but n - n = -0 when rounding towards -Infinity. y.s = ROUNDING_MODE == 3 ? -1 : 1; y.c = [y.e = 0]; return y; } // No need to check for Infinity as +x - +y != Infinity && -x - -y != Infinity // for finite x and y. return normalise(y, xc, ye); }; /* * n % 0 = N * n % N = N * n % I = n * 0 % n = 0 * -0 % n = -0 * 0 % 0 = N * 0 % N = N * 0 % I = 0 * N % n = N * N % 0 = N * N % N = N * N % I = N * I % n = N * I % 0 = N * I % N = N * I % I = N * * Return a new BigNumber whose value is the value of this BigNumber modulo the value of * BigNumber(y, b). The result depends on the value of MODULO_MODE. */ P.modulo = P.mod = function (y, b) { var q, s, x = this; y = new BigNumber(y, b); // Return NaN if x is Infinity or NaN, or y is NaN or zero. if (!x.c || !y.s || y.c && !y.c[0]) { return new BigNumber(NaN); // Return x if y is Infinity or x is zero. } else if (!y.c || x.c && !x.c[0]) { return new BigNumber(x); } if (MODULO_MODE == 9) { // Euclidian division: q = sign(y) * floor(x / abs(y)) // r = x - qy where 0 <= r < abs(y) s = y.s; y.s = 1; q = div(x, y, 0, 3); y.s = s; q.s *= s; } else { q = div(x, y, 0, MODULO_MODE); } y = x.minus(q.times(y)); // To match JavaScript %, ensure sign of zero is sign of dividend. if (!y.c[0] && MODULO_MODE == 1) y.s = x.s; return y; }; /* * n * 0 = 0 * n * N = N * n * I = I * 0 * n = 0 * 0 * 0 = 0 * 0 * N = N * 0 * I = N * N * n = N * N * 0 = N * N * N = N * N * I = N * I * n = I * I * 0 = N * I * N = N * I * I = I * * Return a new BigNumber whose value is the value of this BigNumber multiplied by the value * of BigNumber(y, b). */ P.multipliedBy = P.times = function (y, b) { var c, e, i, j, k, m, xcL, xlo, xhi, ycL, ylo, yhi, zc, base, sqrtBase, x = this, xc = x.c, yc = (y = new BigNumber(y, b)).c; // Either NaN, ±Infinity or ±0? 
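// The multiplication loop further below is schoolbook long multiplication over
// base-BASE limbs. Each limb is split into a high and a low half using SQRT_BASE
// so that every partial product stays within double-precision safe-integer range
// and no precision is lost; carries are propagated into zc, the result
// coefficient array, which is then normalised with the combined exponent e.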
if (!xc || !yc || !xc[0] || !yc[0]) { // Return NaN if either is NaN, or one is 0 and the other is Infinity. if (!x.s || !y.s || xc && !xc[0] && !yc || yc && !yc[0] && !xc) { y.c = y.e = y.s = null; } else { y.s *= x.s; // Return ±Infinity if either is ±Infinity. if (!xc || !yc) { y.c = y.e = null; // Return ±0 if either is ±0. } else { y.c = [0]; y.e = 0; } } return y; } e = bitFloor(x.e / LOG_BASE) + bitFloor(y.e / LOG_BASE); y.s *= x.s; xcL = xc.length; ycL = yc.length; // Ensure xc points to longer array and xcL to its length. if (xcL < ycL) { zc = xc; xc = yc; yc = zc; i = xcL; xcL = ycL; ycL = i; } // Initialise the result array with zeros. for (i = xcL + ycL, zc = []; i--; zc.push(0)); base = BASE; sqrtBase = SQRT_BASE; for (i = ycL; --i >= 0;) { c = 0; ylo = yc[i] % sqrtBase; yhi = yc[i] / sqrtBase | 0; for (k = xcL, j = i + k; j > i;) { xlo = xc[--k] % sqrtBase; xhi = xc[k] / sqrtBase | 0; m = yhi * xlo + xhi * ylo; xlo = ylo * xlo + ((m % sqrtBase) * sqrtBase) + zc[j] + c; c = (xlo / base | 0) + (m / sqrtBase | 0) + yhi * xhi; zc[j--] = xlo % base; } zc[j] = c; } if (c) { ++e; } else { zc.splice(0, 1); } return normalise(y, zc, e); }; /* * Return a new BigNumber whose value is the value of this BigNumber negated, * i.e. multiplied by -1. */ P.negated = function () { var x = new BigNumber(this); x.s = -x.s || null; return x; }; /* * n + 0 = n * n + N = N * n + I = I * 0 + n = n * 0 + 0 = 0 * 0 + N = N * 0 + I = I * N + n = N * N + 0 = N * N + N = N * N + I = N * I + n = I * I + 0 = I * I + N = N * I + I = I * * Return a new BigNumber whose value is the value of this BigNumber plus the value of * BigNumber(y, b). */ P.plus = function (y, b) { var t, x = this, a = x.s; y = new BigNumber(y, b); b = y.s; // Either NaN? if (!a || !b) return new BigNumber(NaN); // Signs differ? if (a != b) { y.s = -b; return x.minus(y); } var xe = x.e / LOG_BASE, ye = y.e / LOG_BASE, xc = x.c, yc = y.c; if (!xe || !ye) { // Return ±Infinity if either ±Infinity. if (!xc || !yc) return new BigNumber(a / 0); // Either zero? // Return y if y is non-zero, x if x is non-zero, or zero if both are zero. if (!xc[0] || !yc[0]) return yc[0] ? y : new BigNumber(xc[0] ? x : a * 0); } xe = bitFloor(xe); ye = bitFloor(ye); xc = xc.slice(); // Prepend zeros to equalise exponents. Faster to use reverse then do unshifts. if (a = xe - ye) { if (a > 0) { ye = xe; t = yc; } else { a = -a; t = xc; } t.reverse(); for (; a--; t.push(0)); t.reverse(); } a = xc.length; b = yc.length; // Point xc to the longer array, and b to the shorter length. if (a - b < 0) { t = yc; yc = xc; xc = t; b = a; } // Only start adding at yc.length - 1 as the further digits of xc can be ignored. for (a = 0; b;) { a = (xc[--b] = xc[b] + yc[b] + a) / BASE | 0; xc[b] = BASE === xc[b] ? 0 : xc[b] % BASE; } if (a) { xc = [a].concat(xc); ++ye; } // No need to check for zero, as +x + +y != 0 && -x + -y != 0 // ye = MAX_EXP + 1 possible return normalise(y, xc, ye); }; /* * If sd is undefined or null or true or false, return the number of significant digits of * the value of this BigNumber, or null if the value of this BigNumber is ±Infinity or NaN. * If sd is true include integer-part trailing zeros in the count. * * Otherwise, if sd is a number, return a new BigNumber whose value is the value of this * BigNumber rounded to a maximum of sd significant digits using rounding mode rm, or * ROUNDING_MODE if rm is omitted. * * sd {number|boolean} number: significant digits: integer, 1 to MAX inclusive. 
* boolean: whether to count integer-part trailing zeros: true or false. * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. * * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' */ P.precision = P.sd = function (sd, rm) { var c, n, v, x = this; if (sd != null && sd !== !!sd) { intCheck(sd, 1, MAX); if (rm == null) rm = ROUNDING_MODE; else intCheck(rm, 0, 8); return round(new BigNumber(x), sd, rm); } if (!(c = x.c)) return null; v = c.length - 1; n = v * LOG_BASE + 1; if (v = c[v]) { // Subtract the number of trailing zeros of the last element. for (; v % 10 == 0; v /= 10, n--); // Add the number of digits of the first element. for (v = c[0]; v >= 10; v /= 10, n++); } if (sd && x.e + 1 > n) n = x.e + 1; return n; }; /* * Return a new BigNumber whose value is the value of this BigNumber shifted by k places * (powers of 10). Shift to the right if n > 0, and to the left if n < 0. * * k {number} Integer, -MAX_SAFE_INTEGER to MAX_SAFE_INTEGER inclusive. * * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {k}' */ P.shiftedBy = function (k) { intCheck(k, -MAX_SAFE_INTEGER, MAX_SAFE_INTEGER); return this.times('1e' + k); }; /* * sqrt(-n) = N * sqrt(N) = N * sqrt(-I) = N * sqrt(I) = I * sqrt(0) = 0 * sqrt(-0) = -0 * * Return a new BigNumber whose value is the square root of the value of this BigNumber, * rounded according to DECIMAL_PLACES and ROUNDING_MODE. */ P.squareRoot = P.sqrt = function () { var m, n, r, rep, t, x = this, c = x.c, s = x.s, e = x.e, dp = DECIMAL_PLACES + 4, half = new BigNumber('0.5'); // Negative/NaN/Infinity/zero? if (s !== 1 || !c || !c[0]) { return new BigNumber(!s || s < 0 && (!c || c[0]) ? NaN : c ? x : 1 / 0); } // Initial estimate. s = Math.sqrt(+valueOf(x)); // Math.sqrt underflow/overflow? // Pass x to Math.sqrt as integer, then adjust the exponent of the result. if (s == 0 || s == 1 / 0) { n = coeffToString(c); if ((n.length + e) % 2 == 0) n += '0'; s = Math.sqrt(+n); e = bitFloor((e + 1) / 2) - (e < 0 || e % 2); if (s == 1 / 0) { n = '5e' + e; } else { n = s.toExponential(); n = n.slice(0, n.indexOf('e') + 1) + e; } r = new BigNumber(n); } else { r = new BigNumber(s + ''); } // Check for zero. // r could be zero if MIN_EXP is changed after the this value was created. // This would cause a division by zero (x/t) and hence Infinity below, which would cause // coeffToString to throw. if (r.c[0]) { e = r.e; s = e + dp; if (s < 3) s = 0; // Newton-Raphson iteration. for (; ;) { t = r; r = half.times(t.plus(div(x, t, dp, 1))); if (coeffToString(t.c).slice(0, s) === (n = coeffToString(r.c)).slice(0, s)) { // The exponent of r may here be one less than the final result exponent, // e.g 0.0009999 (e-4) --> 0.001 (e-3), so adjust s so the rounding digits // are indexed correctly. if (r.e < e) --s; n = n.slice(s - 3, s + 1); // The 4th rounding digit may be in error by -1 so if the 4 rounding digits // are 9999 or 4999 (i.e. approaching a rounding boundary) continue the // iteration. if (n == '9999' || !rep && n == '4999') { // On the first iteration only, check to see if rounding up gives the // exact result as the nines may infinitely repeat. if (!rep) { round(t, t.e + DECIMAL_PLACES + 2, 0); if (t.times(t).eq(x)) { r = t; break; } } dp += 4; s += 4; rep = 1; } else { // If rounding digits are null, 0{0,4} or 50{0,3}, check for exact // result. If not, then there are further digits and m will be truthy. if (!+n || !+n.slice(1) && n.charAt(0) == '5') { // Truncate to the first rounding digit. 
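// (The surrounding loop is the Newton-Raphson refinement r = (r + x/r) / 2 carried
// out with DECIMAL_PLACES + 4 guard digits; iteration stops once two successive
// estimates agree in the first s digits, with the 9999/4999 checks above guarding
// against a result that sits on a rounding boundary.)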
round(r, r.e + DECIMAL_PLACES + 2, 1); m = !r.times(r).eq(x); } break; } } } } return round(r, r.e + DECIMAL_PLACES + 1, ROUNDING_MODE, m); }; /* * Return a string representing the value of this BigNumber in exponential notation and * rounded using ROUNDING_MODE to dp fixed decimal places. * * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. * * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' */ P.toExponential = function (dp, rm) { if (dp != null) { intCheck(dp, 0, MAX); dp++; } return format(this, dp, rm, 1); }; /* * Return a string representing the value of this BigNumber in fixed-point notation rounding * to dp fixed decimal places using rounding mode rm, or ROUNDING_MODE if rm is omitted. * * Note: as with JavaScript's number type, (-0).toFixed(0) is '0', * but e.g. (-0.00001).toFixed(0) is '-0'. * * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. * * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' */ P.toFixed = function (dp, rm) { if (dp != null) { intCheck(dp, 0, MAX); dp = dp + this.e + 1; } return format(this, dp, rm); }; /* * Return a string representing the value of this BigNumber in fixed-point notation rounded * using rm or ROUNDING_MODE to dp decimal places, and formatted according to the properties * of the format or FORMAT object (see BigNumber.set). * * The formatting object may contain some or all of the properties shown below. * * FORMAT = { * prefix: '', * groupSize: 3, * secondaryGroupSize: 0, * groupSeparator: ',', * decimalSeparator: '.', * fractionGroupSize: 0, * fractionGroupSeparator: '\xA0', // non-breaking space * suffix: '' * }; * * [dp] {number} Decimal places. Integer, 0 to MAX inclusive. * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. * [format] {object} Formatting options. See FORMAT pbject above. * * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {dp|rm}' * '[BigNumber Error] Argument not an object: {format}' */ P.toFormat = function (dp, rm, format) { var str, x = this; if (format == null) { if (dp != null && rm && typeof rm == 'object') { format = rm; rm = null; } else if (dp && typeof dp == 'object') { format = dp; dp = rm = null; } else { format = FORMAT; } } else if (typeof format != 'object') { throw Error (bignumberError + 'Argument not an object: ' + format); } str = x.toFixed(dp, rm); if (x.c) { var i, arr = str.split('.'), g1 = +format.groupSize, g2 = +format.secondaryGroupSize, groupSeparator = format.groupSeparator || '', intPart = arr[0], fractionPart = arr[1], isNeg = x.s < 0, intDigits = isNeg ? intPart.slice(1) : intPart, len = intDigits.length; if (g2) { i = g1; g1 = g2; g2 = i; len -= i; } if (g1 > 0 && len > 0) { i = len % g1 || g1; intPart = intDigits.substr(0, i); for (; i < len; i += g1) intPart += groupSeparator + intDigits.substr(i, g1); if (g2 > 0) intPart += groupSeparator + intDigits.slice(i); if (isNeg) intPart = '-' + intPart; } str = fractionPart ? intPart + (format.decimalSeparator || '') + ((g2 = +format.fractionGroupSize) ? 
fractionPart.replace(new RegExp('\\d{' + g2 + '}\\B', 'g'), '$&' + (format.fractionGroupSeparator || '')) : fractionPart) : intPart; } return (format.prefix || '') + str + (format.suffix || ''); }; /* * Return an array of two BigNumbers representing the value of this BigNumber as a simple * fraction with an integer numerator and an integer denominator. * The denominator will be a positive non-zero value less than or equal to the specified * maximum denominator. If a maximum denominator is not specified, the denominator will be * the lowest value necessary to represent the number exactly. * * [md] {number|string|BigNumber} Integer >= 1, or Infinity. The maximum denominator. * * '[BigNumber Error] Argument {not an integer|out of range} : {md}' */ P.toFraction = function (md) { var d, d0, d1, d2, e, exp, n, n0, n1, q, r, s, x = this, xc = x.c; if (md != null) { n = new BigNumber(md); // Throw if md is less than one or is not an integer, unless it is Infinity. if (!n.isInteger() && (n.c || n.s !== 1) || n.lt(ONE)) { throw Error (bignumberError + 'Argument ' + (n.isInteger() ? 'out of range: ' : 'not an integer: ') + valueOf(n)); } } if (!xc) return new BigNumber(x); d = new BigNumber(ONE); n1 = d0 = new BigNumber(ONE); d1 = n0 = new BigNumber(ONE); s = coeffToString(xc); // Determine initial denominator. // d is a power of 10 and the minimum max denominator that specifies the value exactly. e = d.e = s.length - x.e - 1; d.c[0] = POWS_TEN[(exp = e % LOG_BASE) < 0 ? LOG_BASE + exp : exp]; md = !md || n.comparedTo(d) > 0 ? (e > 0 ? d : n1) : n; exp = MAX_EXP; MAX_EXP = 1 / 0; n = new BigNumber(s); // n0 = d1 = 0 n0.c[0] = 0; for (; ;) { q = div(n, d, 0, 1); d2 = d0.plus(q.times(d1)); if (d2.comparedTo(md) == 1) break; d0 = d1; d1 = d2; n1 = n0.plus(q.times(d2 = n1)); n0 = d2; d = n.minus(q.times(d2 = d)); n = d2; } d2 = div(md.minus(d0), d1, 0, 1); n0 = n0.plus(d2.times(n1)); d0 = d0.plus(d2.times(d1)); n0.s = n1.s = x.s; e = e * 2; // Determine which fraction is closer to x, n0/d0 or n1/d1 r = div(n1, d1, e, ROUNDING_MODE).minus(x).abs().comparedTo( div(n0, d0, e, ROUNDING_MODE).minus(x).abs()) < 1 ? [n1, d1] : [n0, d0]; MAX_EXP = exp; return r; }; /* * Return the value of this BigNumber converted to a number primitive. */ P.toNumber = function () { return +valueOf(this); }; /* * Return a string representing the value of this BigNumber rounded to sd significant digits * using rounding mode rm or ROUNDING_MODE. If sd is less than the number of digits * necessary to represent the integer part of the value in fixed-point notation, then use * exponential notation. * * [sd] {number} Significant digits. Integer, 1 to MAX inclusive. * [rm] {number} Rounding mode. Integer, 0 to 8 inclusive. * * '[BigNumber Error] Argument {not a primitive number|not an integer|out of range}: {sd|rm}' */ P.toPrecision = function (sd, rm) { if (sd != null) intCheck(sd, 1, MAX); return format(this, sd, rm, 2); }; /* * Return a string representing the value of this BigNumber in base b, or base 10 if b is * omitted. If a base is specified, including base 10, round according to DECIMAL_PLACES and * ROUNDING_MODE. If a base is not specified, and this BigNumber has a positive exponent * that is equal to or greater than TO_EXP_POS, or a negative exponent equal to or less than * TO_EXP_NEG, return exponential notation. * * [b] {number} Integer, 2 to ALPHABET.length inclusive. 
* * '[BigNumber Error] Base {not a primitive number|not an integer|out of range}: {b}' */ P.toString = function (b) { var str, n = this, s = n.s, e = n.e; // Infinity or NaN? if (e === null) { if (s) { str = 'Infinity'; if (s < 0) str = '-' + str; } else { str = 'NaN'; } } else { if (b == null) { str = e <= TO_EXP_NEG || e >= TO_EXP_POS ? toExponential(coeffToString(n.c), e) : toFixedPoint(coeffToString(n.c), e, '0'); } else if (b === 10 && alphabetHasNormalDecimalDigits) { n = round(new BigNumber(n), DECIMAL_PLACES + e + 1, ROUNDING_MODE); str = toFixedPoint(coeffToString(n.c), n.e, '0'); } else { intCheck(b, 2, ALPHABET.length, 'Base'); str = convertBase(toFixedPoint(coeffToString(n.c), e, '0'), 10, b, s, true); } if (s < 0 && n.c[0]) str = '-' + str; } return str; }; /* * Return as toString, but do not accept a base argument, and include the minus sign for * negative zero. */ P.valueOf = P.toJSON = function () { return valueOf(this); }; P._isBigNumber = true; if (configObject != null) BigNumber.set(configObject); return BigNumber; } // PRIVATE HELPER FUNCTIONS // These functions don't need access to variables, // e.g. DECIMAL_PLACES, in the scope of the `clone` function above. function bitFloor(n) { var i = n | 0; return n > 0 || n === i ? i : i - 1; } // Return a coefficient array as a string of base 10 digits. function coeffToString(a) { var s, z, i = 1, j = a.length, r = a[0] + ''; for (; i < j;) { s = a[i++] + ''; z = LOG_BASE - s.length; for (; z--; s = '0' + s); r += s; } // Determine trailing zeros. for (j = r.length; r.charCodeAt(--j) === 48;); return r.slice(0, j + 1 || 1); } // Compare the value of BigNumbers x and y. function compare(x, y) { var a, b, xc = x.c, yc = y.c, i = x.s, j = y.s, k = x.e, l = y.e; // Either NaN? if (!i || !j) return null; a = xc && !xc[0]; b = yc && !yc[0]; // Either zero? if (a || b) return a ? b ? 0 : -j : i; // Signs differ? if (i != j) return i; a = i < 0; b = k == l; // Either Infinity? if (!xc || !yc) return b ? 0 : !xc ^ a ? 1 : -1; // Compare exponents. if (!b) return k > l ^ a ? 1 : -1; j = (k = xc.length) < (l = yc.length) ? k : l; // Compare digit by digit. for (i = 0; i < j; i++) if (xc[i] != yc[i]) return xc[i] > yc[i] ^ a ? 1 : -1; // Compare lengths. return k == l ? 0 : k > l ^ a ? 1 : -1; } /* * Check that n is a primitive number, an integer, and in range, otherwise throw. */ function intCheck(n, min, max, name) { if (n < min || n > max || n !== mathfloor(n)) { throw Error (bignumberError + (name || 'Argument') + (typeof n == 'number' ? n < min || n > max ? ' out of range: ' : ' not an integer: ' : ' not a primitive number: ') + String(n)); } } // Assumes finite n. function isOdd(n) { var k = n.c.length - 1; return bitFloor(n.e / LOG_BASE) == k && n.c[k] % 2 != 0; } function toExponential(str, e) { return (str.length > 1 ? str.charAt(0) + '.' + str.slice(1) : str) + (e < 0 ? 'e' : 'e+') + e; } function toFixedPoint(str, e, z) { var len, zs; // Negative exponent? if (e < 0) { // Prepend zeros. for (zs = z + '.'; ++e; zs += z); str = zs + str; // Positive exponent } else { len = str.length; // Append zeros. if (++e > len) { for (zs = z, e -= len; --e; zs += z); str += zs; } else if (e < len) { str = str.slice(0, e) + '.' + str.slice(e); } } return str; } // EXPORT BigNumber = clone(); BigNumber['default'] = BigNumber.BigNumber = BigNumber; // AMD. if (typeof define == 'function' && define.amd) { define(function () { return BigNumber; }); // Node.js and other environments that support module.exports. 
} else if ( true && module.exports) { module.exports = BigNumber; // Browser. } else { if (!globalObject) { globalObject = typeof self != 'undefined' && self ? self : window; } globalObject.BigNumber = BigNumber; } })(this); /***/ }), /***/ 94691: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var concatMap = __nccwpck_require__(97087); var balanced = __nccwpck_require__(59380); module.exports = expandTop; var escSlash = '\0SLASH'+Math.random()+'\0'; var escOpen = '\0OPEN'+Math.random()+'\0'; var escClose = '\0CLOSE'+Math.random()+'\0'; var escComma = '\0COMMA'+Math.random()+'\0'; var escPeriod = '\0PERIOD'+Math.random()+'\0'; function numeric(str) { return parseInt(str, 10) == str ? parseInt(str, 10) : str.charCodeAt(0); } function escapeBraces(str) { return str.split('\\\\').join(escSlash) .split('\\{').join(escOpen) .split('\\}').join(escClose) .split('\\,').join(escComma) .split('\\.').join(escPeriod); } function unescapeBraces(str) { return str.split(escSlash).join('\\') .split(escOpen).join('{') .split(escClose).join('}') .split(escComma).join(',') .split(escPeriod).join('.'); } // Basically just str.split(","), but handling cases // where we have nested braced sections, which should be // treated as individual members, like {a,{b,c},d} function parseCommaParts(str) { if (!str) return ['']; var parts = []; var m = balanced('{', '}', str); if (!m) return str.split(','); var pre = m.pre; var body = m.body; var post = m.post; var p = pre.split(','); p[p.length-1] += '{' + body + '}'; var postParts = parseCommaParts(post); if (post.length) { p[p.length-1] += postParts.shift(); p.push.apply(p, postParts); } parts.push.apply(parts, p); return parts; } function expandTop(str) { if (!str) return []; // I don't know why Bash 4.3 does this, but it does. // Anything starting with {} will have the first two bytes preserved // but *only* at the top level, so {},a}b will not expand to anything, // but a{},b}c will be expanded to [a}c,abc]. // One could argue that this is a bug in Bash, but since the goal of // this module is to match Bash's rules, we escape a leading {} if (str.substr(0, 2) === '{}') { str = '\\{\\}' + str.substr(2); } return expand(escapeBraces(str), true).map(unescapeBraces); } function identity(e) { return e; } function embrace(str) { return '{' + str + '}'; } function isPadded(el) { return /^-?0\d/.test(el); } function lte(i, y) { return i <= y; } function gte(i, y) { return i >= y; } function expand(str, isTop) { var expansions = []; var m = balanced('{', '}', str); if (!m || /\$$/.test(m.pre)) return [str]; var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); var isSequence = isNumericSequence || isAlphaSequence; var isOptions = m.body.indexOf(',') >= 0; if (!isSequence && !isOptions) { // {a},b} if (m.post.match(/,.*\}/)) { str = m.pre + '{' + m.body + escClose + m.post; return expand(str); } return [str]; } var n; if (isSequence) { n = m.body.split(/\.\./); } else { n = parseCommaParts(m.body); if (n.length === 1) { // x{{a,b}}y ==> x{a}y x{b}y n = expand(n[0], false).map(embrace); if (n.length === 1) { var post = m.post.length ? expand(m.post, false) : ['']; return post.map(function(p) { return m.pre + n[0] + p; }); } } } // at this point, n is the parts, and we know it's not a comma set // with a single entry. // no need to expand pre, since it is guaranteed to be free of brace-sets var pre = m.pre; var post = m.post.length ? 
expand(m.post, false) : ['']; var N; if (isSequence) { var x = numeric(n[0]); var y = numeric(n[1]); var width = Math.max(n[0].length, n[1].length) var incr = n.length == 3 ? Math.abs(numeric(n[2])) : 1; var test = lte; var reverse = y < x; if (reverse) { incr *= -1; test = gte; } var pad = n.some(isPadded); N = []; for (var i = x; test(i, y); i += incr) { var c; if (isAlphaSequence) { c = String.fromCharCode(i); if (c === '\\') c = ''; } else { c = String(i); if (pad) { var need = width - c.length; if (need > 0) { var z = new Array(need + 1).join('0'); if (i < 0) c = '-' + z + c.slice(1); else c = z + c; } } } N.push(c); } } else { N = concatMap(n, function(el) { return expand(el, false) }); } for (var j = 0; j < N.length; j++) { for (var k = 0; k < post.length; k++) { var expansion = pre + N[j] + post[k]; if (!isTop || isSequence || expansion) expansions.push(expansion); } } return expansions; } /***/ }), /***/ 39732: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /*jshint node:true */ var Buffer = (__nccwpck_require__(20181).Buffer); // browserify var SlowBuffer = (__nccwpck_require__(20181).SlowBuffer); module.exports = bufferEq; function bufferEq(a, b) { // shortcutting on type is necessary for correctness if (!Buffer.isBuffer(a) || !Buffer.isBuffer(b)) { return false; } // buffer sizes should be well-known information, so despite this // shortcutting, it doesn't leak any information about the *contents* of the // buffers. if (a.length !== b.length) { return false; } var c = 0; for (var i = 0; i < a.length; i++) { /*jshint bitwise:false */ c |= a[i] ^ b[i]; // XOR } return c === 0; } bufferEq.install = function() { Buffer.prototype.equal = SlowBuffer.prototype.equal = function equal(that) { return bufferEq(this, that); }; }; var origBufEqual = Buffer.prototype.equal; var origSlowBufEqual = SlowBuffer.prototype.equal; bufferEq.restore = function() { Buffer.prototype.equal = origBufEqual; SlowBuffer.prototype.equal = origSlowBufEqual; }; /***/ }), /***/ 22639: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var bind = __nccwpck_require__(37564); var $apply = __nccwpck_require__(33945); var $call = __nccwpck_require__(88093); var $reflectApply = __nccwpck_require__(31330); /** @type {import('./actualApply')} */ module.exports = $reflectApply || bind.call($call, $apply); /***/ }), /***/ 33945: /***/ ((module) => { "use strict"; /** @type {import('./functionApply')} */ module.exports = Function.prototype.apply; /***/ }), /***/ 88093: /***/ ((module) => { "use strict"; /** @type {import('./functionCall')} */ module.exports = Function.prototype.call; /***/ }), /***/ 88705: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var bind = __nccwpck_require__(37564); var $TypeError = __nccwpck_require__(73314); var $call = __nccwpck_require__(88093); var $actualApply = __nccwpck_require__(22639); /** @type {(args: [Function, thisArg?: unknown, ...args: unknown[]]) => Function} TODO FIXME, find a way to use import('.') */ module.exports = function callBindBasic(args) { if (args.length < 1 || typeof args[0] !== 'function') { throw new $TypeError('a function is required'); } return $actualApply(bind, $call, args); }; /***/ }), /***/ 31330: /***/ ((module) => { "use strict"; /** @type {import('./reflectApply')} */ module.exports = typeof Reflect !== 'undefined' && Reflect && Reflect.apply; /***/ }), /***/ 35630: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var util = 
__nccwpck_require__(39023); var Stream = (__nccwpck_require__(2203).Stream); var DelayedStream = __nccwpck_require__(72710); module.exports = CombinedStream; function CombinedStream() { this.writable = false; this.readable = true; this.dataSize = 0; this.maxDataSize = 2 * 1024 * 1024; this.pauseStreams = true; this._released = false; this._streams = []; this._currentStream = null; this._insideLoop = false; this._pendingNext = false; } util.inherits(CombinedStream, Stream); CombinedStream.create = function(options) { var combinedStream = new this(); options = options || {}; for (var option in options) { combinedStream[option] = options[option]; } return combinedStream; }; CombinedStream.isStreamLike = function(stream) { return (typeof stream !== 'function') && (typeof stream !== 'string') && (typeof stream !== 'boolean') && (typeof stream !== 'number') && (!Buffer.isBuffer(stream)); }; CombinedStream.prototype.append = function(stream) { var isStreamLike = CombinedStream.isStreamLike(stream); if (isStreamLike) { if (!(stream instanceof DelayedStream)) { var newStream = DelayedStream.create(stream, { maxDataSize: Infinity, pauseStream: this.pauseStreams, }); stream.on('data', this._checkDataSize.bind(this)); stream = newStream; } this._handleErrors(stream); if (this.pauseStreams) { stream.pause(); } } this._streams.push(stream); return this; }; CombinedStream.prototype.pipe = function(dest, options) { Stream.prototype.pipe.call(this, dest, options); this.resume(); return dest; }; CombinedStream.prototype._getNext = function() { this._currentStream = null; if (this._insideLoop) { this._pendingNext = true; return; // defer call } this._insideLoop = true; try { do { this._pendingNext = false; this._realGetNext(); } while (this._pendingNext); } finally { this._insideLoop = false; } }; CombinedStream.prototype._realGetNext = function() { var stream = this._streams.shift(); if (typeof stream == 'undefined') { this.end(); return; } if (typeof stream !== 'function') { this._pipeNext(stream); return; } var getStream = stream; getStream(function(stream) { var isStreamLike = CombinedStream.isStreamLike(stream); if (isStreamLike) { stream.on('data', this._checkDataSize.bind(this)); this._handleErrors(stream); } this._pipeNext(stream); }.bind(this)); }; CombinedStream.prototype._pipeNext = function(stream) { this._currentStream = stream; var isStreamLike = CombinedStream.isStreamLike(stream); if (isStreamLike) { stream.on('end', this._getNext.bind(this)); stream.pipe(this, {end: false}); return; } var value = stream; this.write(value); this._getNext(); }; CombinedStream.prototype._handleErrors = function(stream) { var self = this; stream.on('error', function(err) { self._emitError(err); }); }; CombinedStream.prototype.write = function(data) { this.emit('data', data); }; CombinedStream.prototype.pause = function() { if (!this.pauseStreams) { return; } if(this.pauseStreams && this._currentStream && typeof(this._currentStream.pause) == 'function') this._currentStream.pause(); this.emit('pause'); }; CombinedStream.prototype.resume = function() { if (!this._released) { this._released = true; this.writable = true; this._getNext(); } if(this.pauseStreams && this._currentStream && typeof(this._currentStream.resume) == 'function') this._currentStream.resume(); this.emit('resume'); }; CombinedStream.prototype.end = function() { this._reset(); this.emit('end'); }; CombinedStream.prototype.destroy = function() { this._reset(); this.emit('close'); }; CombinedStream.prototype._reset = function() { this.writable = 
false; this._streams = []; this._currentStream = null; }; CombinedStream.prototype._checkDataSize = function() { this._updateDataSize(); if (this.dataSize <= this.maxDataSize) { return; } var message = 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.'; this._emitError(new Error(message)); }; CombinedStream.prototype._updateDataSize = function() { this.dataSize = 0; var self = this; this._streams.forEach(function(stream) { if (!stream.dataSize) { return; } self.dataSize += stream.dataSize; }); if (this._currentStream && this._currentStream.dataSize) { this.dataSize += this._currentStream.dataSize; } }; CombinedStream.prototype._emitError = function(err) { this._reset(); this.emit('error', err); }; /***/ }), /***/ 97087: /***/ ((module) => { module.exports = function (xs, fn) { var res = []; for (var i = 0; i < xs.length; i++) { var x = fn(xs[i], i); if (isArray(x)) res.push.apply(res, x); else res.push(x); } return res; }; var isArray = Array.isArray || function (xs) { return Object.prototype.toString.call(xs) === '[object Array]'; }; /***/ }), /***/ 6110: /***/ ((module, exports, __nccwpck_require__) => { /* eslint-env browser */ /** * This is the web browser implementation of `debug()`. */ exports.formatArgs = formatArgs; exports.save = save; exports.load = load; exports.useColors = useColors; exports.storage = localstorage(); exports.destroy = (() => { let warned = false; return () => { if (!warned) { warned = true; console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.'); } }; })(); /** * Colors. */ exports.colors = [ '#0000CC', '#0000FF', '#0033CC', '#0033FF', '#0066CC', '#0066FF', '#0099CC', '#0099FF', '#00CC00', '#00CC33', '#00CC66', '#00CC99', '#00CCCC', '#00CCFF', '#3300CC', '#3300FF', '#3333CC', '#3333FF', '#3366CC', '#3366FF', '#3399CC', '#3399FF', '#33CC00', '#33CC33', '#33CC66', '#33CC99', '#33CCCC', '#33CCFF', '#6600CC', '#6600FF', '#6633CC', '#6633FF', '#66CC00', '#66CC33', '#9900CC', '#9900FF', '#9933CC', '#9933FF', '#99CC00', '#99CC33', '#CC0000', '#CC0033', '#CC0066', '#CC0099', '#CC00CC', '#CC00FF', '#CC3300', '#CC3333', '#CC3366', '#CC3399', '#CC33CC', '#CC33FF', '#CC6600', '#CC6633', '#CC9900', '#CC9933', '#CCCC00', '#CCCC33', '#FF0000', '#FF0033', '#FF0066', '#FF0099', '#FF00CC', '#FF00FF', '#FF3300', '#FF3333', '#FF3366', '#FF3399', '#FF33CC', '#FF33FF', '#FF6600', '#FF6633', '#FF9900', '#FF9933', '#FFCC00', '#FFCC33' ]; /** * Currently only WebKit-based Web Inspectors, Firefox >= v31, * and the Firebug extension (any Firefox version) are known * to support "%c" CSS customizations. * * TODO: add a `localStorage` variable to explicitly enable/disable colors */ // eslint-disable-next-line complexity function useColors() { // NB: In an Electron preload script, document will be defined but not fully // initialized. Since we know we're in Chrome, we'll just detect this case // explicitly if (typeof window !== 'undefined' && window.process && (window.process.type === 'renderer' || window.process.__nwjs)) { return true; } // Internet Explorer and Edge do not support colors. if (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/(edge|trident)\/(\d+)/)) { return false; } let m; // Is webkit? 
http://stackoverflow.com/a/16459606/376773 // document is undefined in react-native: https://github.com/facebook/react-native/pull/1632 // eslint-disable-next-line no-return-assign return (typeof document !== 'undefined' && document.documentElement && document.documentElement.style && document.documentElement.style.WebkitAppearance) || // Is firebug? http://stackoverflow.com/a/398120/376773 (typeof window !== 'undefined' && window.console && (window.console.firebug || (window.console.exception && window.console.table))) || // Is firefox >= v31? // https://developer.mozilla.org/en-US/docs/Tools/Web_Console#Styling_messages (typeof navigator !== 'undefined' && navigator.userAgent && (m = navigator.userAgent.toLowerCase().match(/firefox\/(\d+)/)) && parseInt(m[1], 10) >= 31) || // Double check webkit in userAgent just in case we are in a worker (typeof navigator !== 'undefined' && navigator.userAgent && navigator.userAgent.toLowerCase().match(/applewebkit\/(\d+)/)); } /** * Colorize log arguments if enabled. * * @api public */ function formatArgs(args) { args[0] = (this.useColors ? '%c' : '') + this.namespace + (this.useColors ? ' %c' : ' ') + args[0] + (this.useColors ? '%c ' : ' ') + '+' + module.exports.humanize(this.diff); if (!this.useColors) { return; } const c = 'color: ' + this.color; args.splice(1, 0, c, 'color: inherit'); // The final "%c" is somewhat tricky, because there could be other // arguments passed either before or after the %c, so we need to // figure out the correct index to insert the CSS into let index = 0; let lastC = 0; args[0].replace(/%[a-zA-Z%]/g, match => { if (match === '%%') { return; } index++; if (match === '%c') { // We only are interested in the *last* %c // (the user may have provided their own) lastC = index; } }); args.splice(lastC, 0, c); } /** * Invokes `console.debug()` when available. * No-op when `console.debug` is not a "function". * If `console.debug` is not available, falls back * to `console.log`. * * @api public */ exports.log = console.debug || console.log || (() => {}); /** * Save `namespaces`. * * @param {String} namespaces * @api private */ function save(namespaces) { try { if (namespaces) { exports.storage.setItem('debug', namespaces); } else { exports.storage.removeItem('debug'); } } catch (error) { // Swallow // XXX (@Qix-) should we be logging these? } } /** * Load `namespaces`. * * @return {String} returns the previously persisted debug modes * @api private */ function load() { let r; try { r = exports.storage.getItem('debug'); } catch (error) { // Swallow // XXX (@Qix-) should we be logging these? } // If debug isn't set in LS, and we're in Electron, try to load $DEBUG if (!r && typeof process !== 'undefined' && 'env' in process) { r = process.env.DEBUG; } return r; } /** * Localstorage attempts to return the localstorage. * * This is necessary because safari throws * when a user disables cookies/localstorage * and you attempt to access it. * * @return {LocalStorage} * @api private */ function localstorage() { try { // TVMLKit (Apple TV JS Runtime) does not have a window object, just localStorage in the global context // The Browser also has localStorage in the global context. return localStorage; } catch (error) { // Swallow // XXX (@Qix-) should we be logging these? } } module.exports = __nccwpck_require__(40897)(exports); const {formatters} = module.exports; /** * Map %j to `JSON.stringify()`, since no Web Inspectors do that by default. 
*/ formatters.j = function (v) { try { return JSON.stringify(v); } catch (error) { return '[UnexpectedJSONParseError]: ' + error.message; } }; /***/ }), /***/ 40897: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** * This is the common logic for both the Node.js and web browser * implementations of `debug()`. */ function setup(env) { createDebug.debug = createDebug; createDebug.default = createDebug; createDebug.coerce = coerce; createDebug.disable = disable; createDebug.enable = enable; createDebug.enabled = enabled; createDebug.humanize = __nccwpck_require__(70744); createDebug.destroy = destroy; Object.keys(env).forEach(key => { createDebug[key] = env[key]; }); /** * The currently active debug mode names, and names to skip. */ createDebug.names = []; createDebug.skips = []; /** * Map of special "%n" handling functions, for the debug "format" argument. * * Valid key names are a single, lower or upper-case letter, i.e. "n" and "N". */ createDebug.formatters = {}; /** * Selects a color for a debug namespace * @param {String} namespace The namespace string for the debug instance to be colored * @return {Number|String} An ANSI color code for the given namespace * @api private */ function selectColor(namespace) { let hash = 0; for (let i = 0; i < namespace.length; i++) { hash = ((hash << 5) - hash) + namespace.charCodeAt(i); hash |= 0; // Convert to 32bit integer } return createDebug.colors[Math.abs(hash) % createDebug.colors.length]; } createDebug.selectColor = selectColor; /** * Create a debugger with the given `namespace`. * * @param {String} namespace * @return {Function} * @api public */ function createDebug(namespace) { let prevTime; let enableOverride = null; let namespacesCache; let enabledCache; function debug(...args) { // Disabled? if (!debug.enabled) { return; } const self = debug; // Set `diff` timestamp const curr = Number(new Date()); const ms = curr - (prevTime || curr); self.diff = ms; self.prev = prevTime; self.curr = curr; prevTime = curr; args[0] = createDebug.coerce(args[0]); if (typeof args[0] !== 'string') { // Anything else let's inspect with %O args.unshift('%O'); } // Apply any `formatters` transformations let index = 0; args[0] = args[0].replace(/%([a-zA-Z%])/g, (match, format) => { // If we encounter an escaped % then don't increase the array index if (match === '%%') { return '%'; } index++; const formatter = createDebug.formatters[format]; if (typeof formatter === 'function') { const val = args[index]; match = formatter.call(self, val); // Now we need to remove `args[index]` since it's inlined in the `format` args.splice(index, 1); index--; } return match; }); // Apply env-specific formatting (colors, etc.) createDebug.formatArgs.call(self, args); const logFn = self.log || createDebug.log; logFn.apply(self, args); } debug.namespace = namespace; debug.useColors = createDebug.useColors(); debug.color = createDebug.selectColor(namespace); debug.extend = extend; debug.destroy = createDebug.destroy; // XXX Temporary. Will be removed in the next major release. 
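/*
 * Illustrative usage sketch (assumes the public debug() API assembled here; the
 * namespace strings are example values):
 *
 *   const log = createDebug('app:db');
 *   log('connected');                 // printed only if the namespace is enabled
 *   createDebug.enable('app:*');      // enable via code, or via the DEBUG env var
 *                                     // in Node.js / localStorage.debug in browsers
 */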
Object.defineProperty(debug, 'enabled', { enumerable: true, configurable: false, get: () => { if (enableOverride !== null) { return enableOverride; } if (namespacesCache !== createDebug.namespaces) { namespacesCache = createDebug.namespaces; enabledCache = createDebug.enabled(namespace); } return enabledCache; }, set: v => { enableOverride = v; } }); // Env-specific initialization logic for debug instances if (typeof createDebug.init === 'function') { createDebug.init(debug); } return debug; } function extend(namespace, delimiter) { const newDebug = createDebug(this.namespace + (typeof delimiter === 'undefined' ? ':' : delimiter) + namespace); newDebug.log = this.log; return newDebug; } /** * Enables a debug mode by namespaces. This can include modes * separated by a colon and wildcards. * * @param {String} namespaces * @api public */ function enable(namespaces) { createDebug.save(namespaces); createDebug.namespaces = namespaces; createDebug.names = []; createDebug.skips = []; const split = (typeof namespaces === 'string' ? namespaces : '') .trim() .replace(' ', ',') .split(',') .filter(Boolean); for (const ns of split) { if (ns[0] === '-') { createDebug.skips.push(ns.slice(1)); } else { createDebug.names.push(ns); } } } /** * Checks if the given string matches a namespace template, honoring * asterisks as wildcards. * * @param {String} search * @param {String} template * @return {Boolean} */ function matchesTemplate(search, template) { let searchIndex = 0; let templateIndex = 0; let starIndex = -1; let matchIndex = 0; while (searchIndex < search.length) { if (templateIndex < template.length && (template[templateIndex] === search[searchIndex] || template[templateIndex] === '*')) { // Match character or proceed with wildcard if (template[templateIndex] === '*') { starIndex = templateIndex; matchIndex = searchIndex; templateIndex++; // Skip the '*' } else { searchIndex++; templateIndex++; } } else if (starIndex !== -1) { // eslint-disable-line no-negated-condition // Backtrack to the last '*' and try to match more characters templateIndex = starIndex + 1; matchIndex++; searchIndex = matchIndex; } else { return false; // No match } } // Handle trailing '*' in template while (templateIndex < template.length && template[templateIndex] === '*') { templateIndex++; } return templateIndex === template.length; } /** * Disable debug output. * * @return {String} namespaces * @api public */ function disable() { const namespaces = [ ...createDebug.names, ...createDebug.skips.map(namespace => '-' + namespace) ].join(','); createDebug.enable(''); return namespaces; } /** * Returns true if the given mode name is enabled, false otherwise. * * @param {String} name * @return {Boolean} * @api public */ function enabled(name) { for (const skip of createDebug.skips) { if (matchesTemplate(name, skip)) { return false; } } for (const ns of createDebug.names) { if (matchesTemplate(name, ns)) { return true; } } return false; } /** * Coerce `val`. * * @param {Mixed} val * @return {Mixed} * @api private */ function coerce(val) { if (val instanceof Error) { return val.stack || val.message; } return val; } /** * XXX DO NOT USE. This is a temporary stub function. * XXX It WILL be removed in the next major release. */ function destroy() { console.warn('Instance method `debug.destroy()` is deprecated and no longer does anything. 
It will be removed in the next major version of `debug`.'); } createDebug.enable(createDebug.load()); return createDebug; } module.exports = setup; /***/ }), /***/ 2830: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** * Detect Electron renderer / nwjs process, which is node, but we should * treat as a browser. */ if (typeof process === 'undefined' || process.type === 'renderer' || process.browser === true || process.__nwjs) { module.exports = __nccwpck_require__(6110); } else { module.exports = __nccwpck_require__(95108); } /***/ }), /***/ 95108: /***/ ((module, exports, __nccwpck_require__) => { /** * Module dependencies. */ const tty = __nccwpck_require__(52018); const util = __nccwpck_require__(39023); /** * This is the Node.js implementation of `debug()`. */ exports.init = init; exports.log = log; exports.formatArgs = formatArgs; exports.save = save; exports.load = load; exports.useColors = useColors; exports.destroy = util.deprecate( () => {}, 'Instance method `debug.destroy()` is deprecated and no longer does anything. It will be removed in the next major version of `debug`.' ); /** * Colors. */ exports.colors = [6, 2, 3, 4, 5, 1]; try { // Optional dependency (as in, doesn't need to be installed, NOT like optionalDependencies in package.json) // eslint-disable-next-line import/no-extraneous-dependencies const supportsColor = __nccwpck_require__(60075); if (supportsColor && (supportsColor.stderr || supportsColor).level >= 2) { exports.colors = [ 20, 21, 26, 27, 32, 33, 38, 39, 40, 41, 42, 43, 44, 45, 56, 57, 62, 63, 68, 69, 74, 75, 76, 77, 78, 79, 80, 81, 92, 93, 98, 99, 112, 113, 128, 129, 134, 135, 148, 149, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 178, 179, 184, 185, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 214, 215, 220, 221 ]; } } catch (error) { // Swallow - we only care if `supports-color` is available; it doesn't have to be. } /** * Build up the default `inspectOpts` object from the environment variables. * * $ DEBUG_COLORS=no DEBUG_DEPTH=10 DEBUG_SHOW_HIDDEN=enabled node script.js */ exports.inspectOpts = Object.keys(process.env).filter(key => { return /^debug_/i.test(key); }).reduce((obj, key) => { // Camel-case const prop = key .substring(6) .toLowerCase() .replace(/_([a-z])/g, (_, k) => { return k.toUpperCase(); }); // Coerce string value into JS value let val = process.env[key]; if (/^(yes|on|true|enabled)$/i.test(val)) { val = true; } else if (/^(no|off|false|disabled)$/i.test(val)) { val = false; } else if (val === 'null') { val = null; } else { val = Number(val); } obj[prop] = val; return obj; }, {}); /** * Is stdout a TTY? Colored output is enabled when `true`. */ function useColors() { return 'colors' in exports.inspectOpts ? Boolean(exports.inspectOpts.colors) : tty.isatty(process.stderr.fd); } /** * Adds ANSI color escape codes if enabled. * * @api public */ function formatArgs(args) { const {namespace: name, useColors} = this; if (useColors) { const c = this.color; const colorCode = '\u001B[3' + (c < 8 ? c : '8;5;' + c); const prefix = ` ${colorCode};1m${name} \u001B[0m`; args[0] = prefix + args[0].split('\n').join('\n' + prefix); args.push(colorCode + 'm+' + module.exports.humanize(this.diff) + '\u001B[0m'); } else { args[0] = getDate() + name + ' ' + args[0]; } } function getDate() { if (exports.inspectOpts.hideDate) { return ''; } return new Date().toISOString() + ' '; } /** * Invokes `util.formatWithOptions()` with the specified arguments and writes to stderr. 
*/ function log(...args) { return process.stderr.write(util.formatWithOptions(exports.inspectOpts, ...args) + '\n'); } /** * Save `namespaces`. * * @param {String} namespaces * @api private */ function save(namespaces) { if (namespaces) { process.env.DEBUG = namespaces; } else { // If you set a process.env field to null or undefined, it gets cast to the // string 'null' or 'undefined'. Just delete instead. delete process.env.DEBUG; } } /** * Load `namespaces`. * * @return {String} returns the previously persisted debug modes * @api private */ function load() { return process.env.DEBUG; } /** * Init logic for `debug` instances. * * Create a new `inspectOpts` object in case `useColors` is set * differently for a particular `debug` instance. */ function init(debug) { debug.inspectOpts = {}; const keys = Object.keys(exports.inspectOpts); for (let i = 0; i < keys.length; i++) { debug.inspectOpts[keys[i]] = exports.inspectOpts[keys[i]]; } } module.exports = __nccwpck_require__(40897)(exports); const {formatters} = module.exports; /** * Map %o to `util.inspect()`, all on a single line. */ formatters.o = function (v) { this.inspectOpts.colors = this.useColors; return util.inspect(v, this.inspectOpts) .split('\n') .map(str => str.trim()) .join(' '); }; /** * Map %O to `util.inspect()`, allowing multiple lines if needed. */ formatters.O = function (v) { this.inspectOpts.colors = this.useColors; return util.inspect(v, this.inspectOpts); }; /***/ }), /***/ 72710: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Stream = (__nccwpck_require__(2203).Stream); var util = __nccwpck_require__(39023); module.exports = DelayedStream; function DelayedStream() { this.source = null; this.dataSize = 0; this.maxDataSize = 1024 * 1024; this.pauseStream = true; this._maxDataSizeExceeded = false; this._released = false; this._bufferedEvents = []; } util.inherits(DelayedStream, Stream); DelayedStream.create = function(source, options) { var delayedStream = new this(); options = options || {}; for (var option in options) { delayedStream[option] = options[option]; } delayedStream.source = source; var realEmit = source.emit; source.emit = function() { delayedStream._handleEmit(arguments); return realEmit.apply(source, arguments); }; source.on('error', function() {}); if (delayedStream.pauseStream) { source.pause(); } return delayedStream; }; Object.defineProperty(DelayedStream.prototype, 'readable', { configurable: true, enumerable: true, get: function() { return this.source.readable; } }); DelayedStream.prototype.setEncoding = function() { return this.source.setEncoding.apply(this.source, arguments); }; DelayedStream.prototype.resume = function() { if (!this._released) { this.release(); } this.source.resume(); }; DelayedStream.prototype.pause = function() { this.source.pause(); }; DelayedStream.prototype.release = function() { this._released = true; this._bufferedEvents.forEach(function(args) { this.emit.apply(this, args); }.bind(this)); this._bufferedEvents = []; }; DelayedStream.prototype.pipe = function() { var r = Stream.prototype.pipe.apply(this, arguments); this.resume(); return r; }; DelayedStream.prototype._handleEmit = function(args) { if (this._released) { this.emit.apply(this, args); return; } if (args[0] === 'data') { this.dataSize += args[1].length; this._checkIfMaxDataSizeExceeded(); } this._bufferedEvents.push(args); }; DelayedStream.prototype._checkIfMaxDataSizeExceeded = function() { if (this._maxDataSizeExceeded) { return; } if (this.dataSize <= this.maxDataSize) { return; } 
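// Reaching this point means the buffered data has just exceeded maxDataSize and no
// overflow error has been emitted yet, so flag the overflow and emit a single error.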
this._maxDataSizeExceeded = true; var message = 'DelayedStream#maxDataSize of ' + this.maxDataSize + ' bytes exceeded.' this.emit('error', new Error(message)); }; /***/ }), /***/ 14150: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); class Deprecation extends Error { constructor(message) { super(message); // Maintains proper stack trace (only available on V8) /* istanbul ignore next */ if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } this.name = 'Deprecation'; } } exports.Deprecation = Deprecation; /***/ }), /***/ 95129: /***/ ((module) => { "use strict"; function _process (v, mod) { var i var r if (typeof mod === 'function') { r = mod(v) if (r !== undefined) { v = r } } else if (Array.isArray(mod)) { for (i = 0; i < mod.length; i++) { r = mod[i](v) if (r !== undefined) { v = r } } } return v } function parseKey (key, val) { // detect negative index notation if (key[0] === '-' && Array.isArray(val) && /^-\d+$/.test(key)) { return val.length + parseInt(key, 10) } return key } function isIndex (k) { return /^\d+$/.test(k) } function isObject (val) { return Object.prototype.toString.call(val) === '[object Object]' } function isArrayOrObject (val) { return Object(val) === val } function isEmptyObject (val) { return Object.keys(val).length === 0 } var blacklist = ['__proto__', 'prototype', 'constructor'] var blacklistFilter = function (part) { return blacklist.indexOf(part) === -1 } function parsePath (path, sep) { if (path.indexOf('[') >= 0) { path = path.replace(/\[/g, sep).replace(/]/g, '') } var parts = path.split(sep) var check = parts.filter(blacklistFilter) if (check.length !== parts.length) { throw Error('Refusing to update blacklisted property ' + path) } return parts } var hasOwnProperty = Object.prototype.hasOwnProperty function DotObject (separator, override, useArray, useBrackets) { if (!(this instanceof DotObject)) { return new DotObject(separator, override, useArray, useBrackets) } if (typeof override === 'undefined') override = false if (typeof useArray === 'undefined') useArray = true if (typeof useBrackets === 'undefined') useBrackets = true this.separator = separator || '.' this.override = override this.useArray = useArray this.useBrackets = useBrackets this.keepArray = false // contains touched arrays this.cleanup = [] } var dotDefault = new DotObject('.', false, true, true) function wrap (method) { return function () { return dotDefault[method].apply(dotDefault, arguments) } } DotObject.prototype._fill = function (a, obj, v, mod) { var k = a.shift() if (a.length > 0) { obj[k] = obj[k] || (this.useArray && isIndex(a[0]) ? [] : {}) if (!isArrayOrObject(obj[k])) { if (this.override) { obj[k] = {} } else { if (!(isArrayOrObject(v) && isEmptyObject(v))) { throw new Error( 'Trying to redefine `' + k + '` which is a ' + typeof obj[k] ) } return } } this._fill(a, obj[k], v, mod) } else { if (!this.override && isArrayOrObject(obj[k]) && !isEmptyObject(obj[k])) { if (!(isArrayOrObject(v) && isEmptyObject(v))) { throw new Error("Trying to redefine non-empty obj['" + k + "']") } return } obj[k] = _process(v, mod) } } /** * * Converts an object with dotted-key/value pairs to it's expanded version * * Optionally transformed by a set of modifiers. 
* * Usage: * * var row = { * 'nr': 200, * 'doc.name': ' My Document ' * } * * var mods = { * 'doc.name': [_s.trim, _s.underscored] * } * * dot.object(row, mods) * * @param {Object} obj * @param {Object} mods */ DotObject.prototype.object = function (obj, mods) { var self = this Object.keys(obj).forEach(function (k) { var mod = mods === undefined ? null : mods[k] // normalize array notation. var ok = parsePath(k, self.separator).join(self.separator) if (ok.indexOf(self.separator) !== -1) { self._fill(ok.split(self.separator), obj, obj[k], mod) delete obj[k] } else { obj[k] = _process(obj[k], mod) } }) return obj } /** * @param {String} path dotted path * @param {String} v value to be set * @param {Object} obj object to be modified * @param {Function|Array} mod optional modifier */ DotObject.prototype.str = function (path, v, obj, mod) { var ok = parsePath(path, this.separator).join(this.separator) if (path.indexOf(this.separator) !== -1) { this._fill(ok.split(this.separator), obj, v, mod) } else { obj[path] = _process(v, mod) } return obj } /** * * Pick a value from an object using dot notation. * * Optionally remove the value * * @param {String} path * @param {Object} obj * @param {Boolean} remove */ DotObject.prototype.pick = function (path, obj, remove, reindexArray) { var i var keys var val var key var cp keys = parsePath(path, this.separator) for (i = 0; i < keys.length; i++) { key = parseKey(keys[i], obj) if (obj && typeof obj === 'object' && key in obj) { if (i === keys.length - 1) { if (remove) { val = obj[key] if (reindexArray && Array.isArray(obj)) { obj.splice(key, 1) } else { delete obj[key] } if (Array.isArray(obj)) { cp = keys.slice(0, -1).join('.') if (this.cleanup.indexOf(cp) === -1) { this.cleanup.push(cp) } } return val } else { return obj[key] } } else { obj = obj[key] } } else { return undefined } } if (remove && Array.isArray(obj)) { obj = obj.filter(function (n) { return n !== undefined }) } return obj } /** * * Delete value from an object using dot notation. * * @param {String} path * @param {Object} obj * @return {any} The removed value */ DotObject.prototype.delete = function (path, obj) { return this.remove(path, obj, true) } /** * * Remove value from an object using dot notation. * * Will remove multiple items if path is an array. * In this case array indexes will be retained until all * removals have been processed. * * Use dot.delete() to automatically re-index arrays. * * @param {String|Array} path * @param {Object} obj * @param {Boolean} reindexArray * @return {any} The removed value */ DotObject.prototype.remove = function (path, obj, reindexArray) { var i this.cleanup = [] if (Array.isArray(path)) { for (i = 0; i < path.length; i++) { this.pick(path[i], obj, true, reindexArray) } if (!reindexArray) { this._cleanup(obj) } return obj } else { return this.pick(path, obj, true, reindexArray) } } DotObject.prototype._cleanup = function (obj) { var ret var i var keys var root if (this.cleanup.length) { for (i = 0; i < this.cleanup.length; i++) { keys = this.cleanup[i].split('.') root = keys.splice(0, -1).join('.') ret = root ? 
this.pick(root, obj) : obj ret = ret[keys[0]].filter(function (v) { return v !== undefined }) this.set(this.cleanup[i], ret, obj) } this.cleanup = [] } } /** * Alias method for `dot.remove` * * Note: this is not an alias for dot.delete() * * @param {String|Array} path * @param {Object} obj * @param {Boolean} reindexArray * @return {any} The removed value */ DotObject.prototype.del = DotObject.prototype.remove /** * * Move a property from one place to the other. * * If the source path does not exist (undefined) * the target property will not be set. * * @param {String} source * @param {String} target * @param {Object} obj * @param {Function|Array} mods * @param {Boolean} merge */ DotObject.prototype.move = function (source, target, obj, mods, merge) { if (typeof mods === 'function' || Array.isArray(mods)) { this.set(target, _process(this.pick(source, obj, true), mods), obj, merge) } else { merge = mods this.set(target, this.pick(source, obj, true), obj, merge) } return obj } /** * * Transfer a property from one object to another object. * * If the source path does not exist (undefined) * the property on the other object will not be set. * * @param {String} source * @param {String} target * @param {Object} obj1 * @param {Object} obj2 * @param {Function|Array} mods * @param {Boolean} merge */ DotObject.prototype.transfer = function ( source, target, obj1, obj2, mods, merge ) { if (typeof mods === 'function' || Array.isArray(mods)) { this.set( target, _process(this.pick(source, obj1, true), mods), obj2, merge ) } else { merge = mods this.set(target, this.pick(source, obj1, true), obj2, merge) } return obj2 } /** * * Copy a property from one object to another object. * * If the source path does not exist (undefined) * the property on the other object will not be set. * * @param {String} source * @param {String} target * @param {Object} obj1 * @param {Object} obj2 * @param {Function|Array} mods * @param {Boolean} merge */ DotObject.prototype.copy = function (source, target, obj1, obj2, mods, merge) { if (typeof mods === 'function' || Array.isArray(mods)) { this.set( target, _process( // clone what is picked JSON.parse(JSON.stringify(this.pick(source, obj1, false))), mods ), obj2, merge ) } else { merge = mods this.set(target, this.pick(source, obj1, false), obj2, merge) } return obj2 } /** * * Set a property on an object using dot notation. * * @param {String} path * @param {any} val * @param {Object} obj * @param {Boolean} merge */ DotObject.prototype.set = function (path, val, obj, merge) { var i var k var keys var key // Do not operate if the value is undefined. 
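/*
 * Illustrative sketch (comment only, not executed by this bundle): behaviour
 * of the `set` method defined here, called through the wrapped static helpers
 * attached to DotObject further below. The object and values are assumptions
 * for the example, not taken from this file.
 *
 *   const obj = { user: { meta: { a: 1 } } };
 *   DotObject.set('user.roles[0]', 'admin', obj);
 *   // -> obj.user.roles is ['admin']  (a numeric path segment creates an array)
 *   DotObject.set('user.meta', { b: 2 }, obj, true);
 *   // merge=true: user.meta becomes { a: 1, b: 2 } instead of being replaced
 */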
if (typeof val === 'undefined') { return obj } keys = parsePath(path, this.separator) for (i = 0; i < keys.length; i++) { key = keys[i] if (i === keys.length - 1) { if (merge && isObject(val) && isObject(obj[key])) { for (k in val) { if (hasOwnProperty.call(val, k)) { obj[key][k] = val[k] } } } else if (merge && Array.isArray(obj[key]) && Array.isArray(val)) { for (var j = 0; j < val.length; j++) { obj[keys[i]].push(val[j]) } } else { obj[key] = val } } else if ( // force the value to be an object !hasOwnProperty.call(obj, key) || (!isObject(obj[key]) && !Array.isArray(obj[key])) ) { // initialize as array if next key is numeric if (/^\d+$/.test(keys[i + 1])) { obj[key] = [] } else { obj[key] = {} } } obj = obj[key] } return obj } /** * * Transform an object * * Usage: * * var obj = { * "id": 1, * "some": { * "thing": "else" * } * } * * var transform = { * "id": "nr", * "some.thing": "name" * } * * var tgt = dot.transform(transform, obj) * * @param {Object} recipe Transform recipe * @param {Object} obj Object to be transformed * @param {Array} mods modifiers for the target */ DotObject.prototype.transform = function (recipe, obj, tgt) { obj = obj || {} tgt = tgt || {} Object.keys(recipe).forEach( function (key) { this.set(recipe[key], this.pick(key, obj), tgt) }.bind(this) ) return tgt } /** * * Convert object to dotted-key/value pair * * Usage: * * var tgt = dot.dot(obj) * * or * * var tgt = {} * dot.dot(obj, tgt) * * @param {Object} obj source object * @param {Object} tgt target object * @param {Array} path path array (internal) */ DotObject.prototype.dot = function (obj, tgt, path) { tgt = tgt || {} path = path || [] var isArray = Array.isArray(obj) Object.keys(obj).forEach( function (key) { var index = isArray && this.useBrackets ? '[' + key + ']' : key if ( isArrayOrObject(obj[key]) && ((isObject(obj[key]) && !isEmptyObject(obj[key])) || (Array.isArray(obj[key]) && !this.keepArray && obj[key].length !== 0)) ) { if (isArray && this.useBrackets) { var previousKey = path[path.length - 1] || '' return this.dot( obj[key], tgt, path.slice(0, -1).concat(previousKey + index) ) } else { return this.dot(obj[key], tgt, path.concat(index)) } } else { if (isArray && this.useBrackets) { tgt[path.join(this.separator).concat('[' + key + ']')] = obj[key] } else { tgt[path.concat(index).join(this.separator)] = obj[key] } } }.bind(this) ) return tgt } DotObject.pick = wrap('pick') DotObject.move = wrap('move') DotObject.transfer = wrap('transfer') DotObject.transform = wrap('transform') DotObject.copy = wrap('copy') DotObject.object = wrap('object') DotObject.str = wrap('str') DotObject.set = wrap('set') DotObject.delete = wrap('delete') DotObject.del = DotObject.remove = wrap('remove') DotObject.dot = wrap('dot'); ['override', 'overwrite'].forEach(function (prop) { Object.defineProperty(DotObject, prop, { get: function () { return dotDefault.override }, set: function (val) { dotDefault.override = !!val } }) }); ['useArray', 'keepArray', 'useBrackets'].forEach(function (prop) { Object.defineProperty(DotObject, prop, { get: function () { return dotDefault[prop] }, set: function (val) { dotDefault[prop] = val } }) }) DotObject._process = _process module.exports = DotObject /***/ }), /***/ 26669: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var callBind = __nccwpck_require__(88705); var gOPD = __nccwpck_require__(33170); var hasProtoAccessor; try { // eslint-disable-next-line no-extra-parens, no-proto hasProtoAccessor = /** @type {{ __proto__?: typeof Array.prototype }} 
*/ ([]).__proto__ === Array.prototype; } catch (e) { if (!e || typeof e !== 'object' || !('code' in e) || e.code !== 'ERR_PROTO_ACCESS') { throw e; } } // eslint-disable-next-line no-extra-parens var desc = !!hasProtoAccessor && gOPD && gOPD(Object.prototype, /** @type {keyof typeof Object.prototype} */ ('__proto__')); var $Object = Object; var $getPrototypeOf = $Object.getPrototypeOf; /** @type {import('./get')} */ module.exports = desc && typeof desc.get === 'function' ? callBind([desc.get]) : typeof $getPrototypeOf === 'function' ? /** @type {import('./get')} */ function getDunder(value) { // eslint-disable-next-line eqeqeq return $getPrototypeOf(value == null ? value : $Object(value)); } : false; /***/ }), /***/ 29112: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var stream = __nccwpck_require__(86131) var eos = __nccwpck_require__(31424) var inherits = __nccwpck_require__(39598) var shift = __nccwpck_require__(34633) var SIGNAL_FLUSH = (Buffer.from && Buffer.from !== Uint8Array.from) ? Buffer.from([0]) : new Buffer([0]) var onuncork = function(self, fn) { if (self._corked) self.once('uncork', fn) else fn() } var autoDestroy = function (self, err) { if (self._autoDestroy) self.destroy(err) } var destroyer = function(self, end) { return function(err) { if (err) autoDestroy(self, err.message === 'premature close' ? null : err) else if (end && !self._ended) self.end() } } var end = function(ws, fn) { if (!ws) return fn() if (ws._writableState && ws._writableState.finished) return fn() if (ws._writableState) return ws.end(fn) ws.end() fn() } var noop = function() {} var toStreams2 = function(rs) { return new (stream.Readable)({objectMode:true, highWaterMark:16}).wrap(rs) } var Duplexify = function(writable, readable, opts) { if (!(this instanceof Duplexify)) return new Duplexify(writable, readable, opts) stream.Duplex.call(this, opts) this._writable = null this._readable = null this._readable2 = null this._autoDestroy = !opts || opts.autoDestroy !== false this._forwardDestroy = !opts || opts.destroy !== false this._forwardEnd = !opts || opts.end !== false this._corked = 1 // start corked this._ondrain = null this._drained = false this._forwarding = false this._unwrite = null this._unread = null this._ended = false this.destroyed = false if (writable) this.setWritable(writable) if (readable) this.setReadable(readable) } inherits(Duplexify, stream.Duplex) Duplexify.obj = function(writable, readable, opts) { if (!opts) opts = {} opts.objectMode = true opts.highWaterMark = 16 return new Duplexify(writable, readable, opts) } Duplexify.prototype.cork = function() { if (++this._corked === 1) this.emit('cork') } Duplexify.prototype.uncork = function() { if (this._corked && --this._corked === 0) this.emit('uncork') } Duplexify.prototype.setWritable = function(writable) { if (this._unwrite) this._unwrite() if (this.destroyed) { if (writable && writable.destroy) writable.destroy() return } if (writable === null || writable === false) { this.end() return } var self = this var unend = eos(writable, {writable:true, readable:false}, destroyer(this, this._forwardEnd)) var ondrain = function() { var ondrain = self._ondrain self._ondrain = null if (ondrain) ondrain() } var clear = function() { self._writable.removeListener('drain', ondrain) unend() } if (this._unwrite) process.nextTick(ondrain) // force a drain on stream reset to avoid livelocks this._writable = writable this._writable.on('drain', ondrain) this._unwrite = clear this.uncork() // always uncork setWritable } 
Duplexify.prototype.setReadable = function(readable) { if (this._unread) this._unread() if (this.destroyed) { if (readable && readable.destroy) readable.destroy() return } if (readable === null || readable === false) { this.push(null) this.resume() return } var self = this var unend = eos(readable, {writable:false, readable:true}, destroyer(this)) var onreadable = function() { self._forward() } var onend = function() { self.push(null) } var clear = function() { self._readable2.removeListener('readable', onreadable) self._readable2.removeListener('end', onend) unend() } this._drained = true this._readable = readable this._readable2 = readable._readableState ? readable : toStreams2(readable) this._readable2.on('readable', onreadable) this._readable2.on('end', onend) this._unread = clear this._forward() } Duplexify.prototype._read = function() { this._drained = true this._forward() } Duplexify.prototype._forward = function() { if (this._forwarding || !this._readable2 || !this._drained) return this._forwarding = true var data while (this._drained && (data = shift(this._readable2)) !== null) { if (this.destroyed) continue this._drained = this.push(data) } this._forwarding = false } Duplexify.prototype.destroy = function(err, cb) { if (!cb) cb = noop if (this.destroyed) return cb(null) this.destroyed = true var self = this process.nextTick(function() { self._destroy(err) cb(null) }) } Duplexify.prototype._destroy = function(err) { if (err) { var ondrain = this._ondrain this._ondrain = null if (ondrain) ondrain(err) else this.emit('error', err) } if (this._forwardDestroy) { if (this._readable && this._readable.destroy) this._readable.destroy() if (this._writable && this._writable.destroy) this._writable.destroy() } this.emit('close') } Duplexify.prototype._write = function(data, enc, cb) { if (this.destroyed) return if (this._corked) return onuncork(this, this._write.bind(this, data, enc, cb)) if (data === SIGNAL_FLUSH) return this._finish(cb) if (!this._writable) return cb() if (this._writable.write(data) === false) this._ondrain = cb else if (!this.destroyed) cb() } Duplexify.prototype._finish = function(cb) { var self = this this.emit('preend') onuncork(this, function() { end(self._forwardEnd && self._writable, function() { // haxx to not emit prefinish twice if (self._writableState.prefinished === false) self._writableState.prefinished = true self.emit('prefinish') onuncork(self, cb) }) }) } Duplexify.prototype.end = function(data, enc, cb) { if (typeof data === 'function') return this.end(null, null, data) if (typeof enc === 'function') return this.end(data, null, enc) this._ended = true if (data) this.write(data) if (!this._writableState.ending && !this._writableState.destroyed) this.write(SIGNAL_FLUSH) return stream.Writable.prototype.end.call(this, cb) } module.exports = Duplexify /***/ }), /***/ 325: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var Buffer = (__nccwpck_require__(93058).Buffer); var getParamBytesForAlg = __nccwpck_require__(5028); var MAX_OCTET = 0x80, CLASS_UNIVERSAL = 0, PRIMITIVE_BIT = 0x20, TAG_SEQ = 0x10, TAG_INT = 0x02, ENCODED_TAG_SEQ = (TAG_SEQ | PRIMITIVE_BIT) | (CLASS_UNIVERSAL << 6), ENCODED_TAG_INT = TAG_INT | (CLASS_UNIVERSAL << 6); function base64Url(base64) { return base64 .replace(/=/g, '') .replace(/\+/g, '-') .replace(/\//g, '_'); } function signatureAsBuffer(signature) { if (Buffer.isBuffer(signature)) { return signature; } else if ('string' === typeof signature) { return Buffer.from(signature, 'base64'); } throw new 
TypeError('ECDSA signature must be a Base64 string or a Buffer'); } function derToJose(signature, alg) { signature = signatureAsBuffer(signature); var paramBytes = getParamBytesForAlg(alg); // the DER encoded param should at most be the param size, plus a padding // zero, since due to being a signed integer var maxEncodedParamLength = paramBytes + 1; var inputLength = signature.length; var offset = 0; if (signature[offset++] !== ENCODED_TAG_SEQ) { throw new Error('Could not find expected "seq"'); } var seqLength = signature[offset++]; if (seqLength === (MAX_OCTET | 1)) { seqLength = signature[offset++]; } if (inputLength - offset < seqLength) { throw new Error('"seq" specified length of "' + seqLength + '", only "' + (inputLength - offset) + '" remaining'); } if (signature[offset++] !== ENCODED_TAG_INT) { throw new Error('Could not find expected "int" for "r"'); } var rLength = signature[offset++]; if (inputLength - offset - 2 < rLength) { throw new Error('"r" specified length of "' + rLength + '", only "' + (inputLength - offset - 2) + '" available'); } if (maxEncodedParamLength < rLength) { throw new Error('"r" specified length of "' + rLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); } var rOffset = offset; offset += rLength; if (signature[offset++] !== ENCODED_TAG_INT) { throw new Error('Could not find expected "int" for "s"'); } var sLength = signature[offset++]; if (inputLength - offset !== sLength) { throw new Error('"s" specified length of "' + sLength + '", expected "' + (inputLength - offset) + '"'); } if (maxEncodedParamLength < sLength) { throw new Error('"s" specified length of "' + sLength + '", max of "' + maxEncodedParamLength + '" is acceptable'); } var sOffset = offset; offset += sLength; if (offset !== inputLength) { throw new Error('Expected to consume entire buffer, but "' + (inputLength - offset) + '" bytes remain'); } var rPadding = paramBytes - rLength, sPadding = paramBytes - sLength; var dst = Buffer.allocUnsafe(rPadding + rLength + sPadding + sLength); for (offset = 0; offset < rPadding; ++offset) { dst[offset] = 0; } signature.copy(dst, offset, rOffset + Math.max(-rPadding, 0), rOffset + rLength); offset = paramBytes; for (var o = offset; offset < o + sPadding; ++offset) { dst[offset] = 0; } signature.copy(dst, offset, sOffset + Math.max(-sPadding, 0), sOffset + sLength); dst = dst.toString('base64'); dst = base64Url(dst); return dst; } function countPadding(buf, start, stop) { var padding = 0; while (start + padding < stop && buf[start + padding] === 0) { ++padding; } var needsSign = buf[start + padding] >= MAX_OCTET; if (needsSign) { --padding; } return padding; } function joseToDer(signature, alg) { signature = signatureAsBuffer(signature); var paramBytes = getParamBytesForAlg(alg); var signatureBytes = signature.length; if (signatureBytes !== paramBytes * 2) { throw new TypeError('"' + alg + '" signatures must be "' + paramBytes * 2 + '" bytes, saw "' + signatureBytes + '"'); } var rPadding = countPadding(signature, 0, paramBytes); var sPadding = countPadding(signature, paramBytes, signature.length); var rLength = paramBytes - rPadding; var sLength = paramBytes - sPadding; var rsBytes = 1 + 1 + rLength + 1 + 1 + sLength; var shortLength = rsBytes < MAX_OCTET; var dst = Buffer.allocUnsafe((shortLength ? 2 : 3) + rsBytes); var offset = 0; dst[offset++] = ENCODED_TAG_SEQ; if (shortLength) { // Bit 8 has value "0" // bits 7-1 give the length. 
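/*
 * Worked example (illustrative, values assumed): for ES256 paramBytes is 32,
 * so with no extra sign padding rsBytes = 1 + 1 + 32 + 1 + 1 + 32 = 68, which
 * is below 0x80 (128) and takes the short-form length written just below. For
 * ES512 (paramBytes 66) rsBytes can reach 1 + 1 + 67 + 1 + 1 + 67 = 138, so
 * the long form in the else-branch (0x81 marker plus one length octet) is
 * used instead.
 */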
dst[offset++] = rsBytes; } else { // Bit 8 of first octet has value "1" // bits 7-1 give the number of additional length octets. dst[offset++] = MAX_OCTET | 1; // length, base 256 dst[offset++] = rsBytes & 0xff; } dst[offset++] = ENCODED_TAG_INT; dst[offset++] = rLength; if (rPadding < 0) { dst[offset++] = 0; offset += signature.copy(dst, offset, 0, paramBytes); } else { offset += signature.copy(dst, offset, rPadding, paramBytes); } dst[offset++] = ENCODED_TAG_INT; dst[offset++] = sLength; if (sPadding < 0) { dst[offset++] = 0; signature.copy(dst, offset, paramBytes); } else { signature.copy(dst, offset, paramBytes + sPadding); } return dst; } module.exports = { derToJose: derToJose, joseToDer: joseToDer }; /***/ }), /***/ 5028: /***/ ((module) => { "use strict"; function getParamSize(keySize) { var result = ((keySize / 8) | 0) + (keySize % 8 === 0 ? 0 : 1); return result; } var paramBytesForAlg = { ES256: getParamSize(256), ES384: getParamSize(384), ES512: getParamSize(521) }; function getParamBytesForAlg(alg) { var paramBytes = paramBytesForAlg[alg]; if (paramBytes) { return paramBytes; } throw new Error('Unknown algorithm "' + alg + '"'); } module.exports = getParamBytesForAlg; /***/ }), /***/ 31424: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var once = __nccwpck_require__(55560); var noop = function() {}; var qnt = global.Bare ? queueMicrotask : process.nextTick.bind(process); var isRequest = function(stream) { return stream.setHeader && typeof stream.abort === 'function'; }; var isChildProcess = function(stream) { return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 }; var eos = function(stream, opts, callback) { if (typeof opts === 'function') return eos(stream, null, opts); if (!opts) opts = {}; callback = once(callback || noop); var ws = stream._writableState; var rs = stream._readableState; var readable = opts.readable || (opts.readable !== false && stream.readable); var writable = opts.writable || (opts.writable !== false && stream.writable); var cancelled = false; var onlegacyfinish = function() { if (!stream.writable) onfinish(); }; var onfinish = function() { writable = false; if (!readable) callback.call(stream); }; var onend = function() { readable = false; if (!writable) callback.call(stream); }; var onexit = function(exitCode) { callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); }; var onerror = function(err) { callback.call(stream, err); }; var onclose = function() { qnt(onclosenexttick); }; var onclosenexttick = function() { if (cancelled) return; if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); }; var onrequest = function() { stream.req.on('finish', onfinish); }; if (isRequest(stream)) { stream.on('complete', onfinish); stream.on('abort', onclose); if (stream.req) onrequest(); else stream.on('request', onrequest); } else if (writable && !ws) { // legacy streams stream.on('end', onlegacyfinish); stream.on('close', onlegacyfinish); } if (isChildProcess(stream)) stream.on('exit', onexit); stream.on('end', onend); stream.on('finish', onfinish); if (opts.error !== false) stream.on('error', onerror); stream.on('close', onclose); return function() { cancelled = true; stream.removeListener('complete', onfinish); stream.removeListener('abort', onclose); stream.removeListener('request', onrequest); if (stream.req) stream.req.removeListener('finish', onfinish); stream.removeListener('end', onlegacyfinish); stream.removeListener('close', onlegacyfinish); stream.removeListener('finish', onfinish); stream.removeListener('exit', onexit); stream.removeListener('end', onend); stream.removeListener('error', onerror); stream.removeListener('close', onclose); }; }; module.exports = eos; /***/ }), /***/ 79094: /***/ ((module) => { "use strict"; /** @type {import('.')} */ var $defineProperty = Object.defineProperty || false; if ($defineProperty) { try { $defineProperty({}, 'a', { value: 1 }); } catch (e) { // IE 8 has a broken defineProperty $defineProperty = false; } } module.exports = $defineProperty; /***/ }), /***/ 33056: /***/ ((module) => { "use strict"; /** @type {import('./eval')} */ module.exports = EvalError; /***/ }), /***/ 31620: /***/ ((module) => { "use strict"; /** @type {import('.')} */ module.exports = Error; /***/ }), /***/ 14585: /***/ ((module) => { "use strict"; /** @type {import('./range')} */ module.exports = RangeError; /***/ }), /***/ 46905: /***/ ((module) => { "use strict"; /** @type {import('./ref')} */ module.exports = ReferenceError; /***/ }), /***/ 80105: /***/ ((module) => { "use strict"; /** @type {import('./syntax')} */ module.exports = SyntaxError; /***/ }), /***/ 73314: /***/ ((module) => { "use strict"; /** @type {import('./type')} */ module.exports = TypeError; /***/ }), /***/ 32578: /***/ ((module) => { "use strict"; /** @type {import('./uri')} */ module.exports = URIError; /***/ }), /***/ 95399: /***/ ((module) => { "use strict"; /** @type {import('.')} */ module.exports = Object; /***/ }), /***/ 88700: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var GetIntrinsic = __nccwpck_require__(60470); var $defineProperty = GetIntrinsic('%Object.defineProperty%', true); var hasToStringTag = __nccwpck_require__(85479)(); var hasOwn = __nccwpck_require__(54076); var $TypeError = __nccwpck_require__(73314); var toStringTag = hasToStringTag ? 
Symbol.toStringTag : null; /** @type {import('.')} */ module.exports = function setToStringTag(object, value) { var overrideIfSet = arguments.length > 2 && !!arguments[2] && arguments[2].force; var nonConfigurable = arguments.length > 2 && !!arguments[2] && arguments[2].nonConfigurable; if ( (typeof overrideIfSet !== 'undefined' && typeof overrideIfSet !== 'boolean') || (typeof nonConfigurable !== 'undefined' && typeof nonConfigurable !== 'boolean') ) { throw new $TypeError('if provided, the `overrideIfSet` and `nonConfigurable` options must be booleans'); } if (toStringTag && (overrideIfSet || !hasOwn(object, toStringTag))) { if ($defineProperty) { $defineProperty(object, toStringTag, { configurable: !nonConfigurable, enumerable: false, value: value, writable: false }); } else { object[toStringTag] = value; // eslint-disable-line no-param-reassign } } }; /***/ }), /***/ 16577: /***/ ((module, exports) => { "use strict"; /** * @author Toru Nagashima * @copyright 2015 Toru Nagashima. All rights reserved. * See LICENSE file in root directory for full license. */ Object.defineProperty(exports, "__esModule", ({ value: true })); /** * @typedef {object} PrivateData * @property {EventTarget} eventTarget The event target. * @property {{type:string}} event The original event object. * @property {number} eventPhase The current event phase. * @property {EventTarget|null} currentTarget The current event target. * @property {boolean} canceled The flag to prevent default. * @property {boolean} stopped The flag to stop propagation. * @property {boolean} immediateStopped The flag to stop propagation immediately. * @property {Function|null} passiveListener The listener if the current listener is passive. Otherwise this is null. * @property {number} timeStamp The unix time. * @private */ /** * Private data for event wrappers. * @type {WeakMap} * @private */ const privateData = new WeakMap(); /** * Cache for wrapper classes. * @type {WeakMap} * @private */ const wrappers = new WeakMap(); /** * Get private data. * @param {Event} event The event object to get private data. * @returns {PrivateData} The private data of the event. * @private */ function pd(event) { const retv = privateData.get(event); console.assert( retv != null, "'this' is expected an Event object, but got", event ); return retv } /** * https://dom.spec.whatwg.org/#set-the-canceled-flag * @param data {PrivateData} private data. */ function setCancelFlag(data) { if (data.passiveListener != null) { if ( typeof console !== "undefined" && typeof console.error === "function" ) { console.error( "Unable to preventDefault inside passive event listener invocation.", data.passiveListener ); } return } if (!data.event.cancelable) { return } data.canceled = true; if (typeof data.event.preventDefault === "function") { data.event.preventDefault(); } } /** * @see https://dom.spec.whatwg.org/#interface-event * @private */ /** * The event wrapper. * @constructor * @param {EventTarget} eventTarget The event target of this dispatching. * @param {Event|{type:string}} event The original event to wrap. 
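 *
 * Illustrative sketch (not part of the original comment; `someTarget` is a
 * placeholder and wrapEvent() is the module-internal helper defined further
 * below):
 *
 *   const wrapped = wrapEvent(someTarget, { type: "message", cancelable: true });
 *   wrapped.preventDefault();           // sets the internal "canceled" flag
 *   wrapped.defaultPrevented === true;  // reflected through the defaultPrevented getter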
*/ function Event(eventTarget, event) { privateData.set(this, { eventTarget, event, eventPhase: 2, currentTarget: eventTarget, canceled: false, stopped: false, immediateStopped: false, passiveListener: null, timeStamp: event.timeStamp || Date.now(), }); // https://heycam.github.io/webidl/#Unforgeable Object.defineProperty(this, "isTrusted", { value: false, enumerable: true }); // Define accessors const keys = Object.keys(event); for (let i = 0; i < keys.length; ++i) { const key = keys[i]; if (!(key in this)) { Object.defineProperty(this, key, defineRedirectDescriptor(key)); } } } // Should be enumerable, but class methods are not enumerable. Event.prototype = { /** * The type of this event. * @type {string} */ get type() { return pd(this).event.type }, /** * The target of this event. * @type {EventTarget} */ get target() { return pd(this).eventTarget }, /** * The target of this event. * @type {EventTarget} */ get currentTarget() { return pd(this).currentTarget }, /** * @returns {EventTarget[]} The composed path of this event. */ composedPath() { const currentTarget = pd(this).currentTarget; if (currentTarget == null) { return [] } return [currentTarget] }, /** * Constant of NONE. * @type {number} */ get NONE() { return 0 }, /** * Constant of CAPTURING_PHASE. * @type {number} */ get CAPTURING_PHASE() { return 1 }, /** * Constant of AT_TARGET. * @type {number} */ get AT_TARGET() { return 2 }, /** * Constant of BUBBLING_PHASE. * @type {number} */ get BUBBLING_PHASE() { return 3 }, /** * The target of this event. * @type {number} */ get eventPhase() { return pd(this).eventPhase }, /** * Stop event bubbling. * @returns {void} */ stopPropagation() { const data = pd(this); data.stopped = true; if (typeof data.event.stopPropagation === "function") { data.event.stopPropagation(); } }, /** * Stop event bubbling. * @returns {void} */ stopImmediatePropagation() { const data = pd(this); data.stopped = true; data.immediateStopped = true; if (typeof data.event.stopImmediatePropagation === "function") { data.event.stopImmediatePropagation(); } }, /** * The flag to be bubbling. * @type {boolean} */ get bubbles() { return Boolean(pd(this).event.bubbles) }, /** * The flag to be cancelable. * @type {boolean} */ get cancelable() { return Boolean(pd(this).event.cancelable) }, /** * Cancel this event. * @returns {void} */ preventDefault() { setCancelFlag(pd(this)); }, /** * The flag to indicate cancellation state. * @type {boolean} */ get defaultPrevented() { return pd(this).canceled }, /** * The flag to be composed. * @type {boolean} */ get composed() { return Boolean(pd(this).event.composed) }, /** * The unix time of this event. * @type {number} */ get timeStamp() { return pd(this).timeStamp }, /** * The target of this event. * @type {EventTarget} * @deprecated */ get srcElement() { return pd(this).eventTarget }, /** * The flag to stop event bubbling. * @type {boolean} * @deprecated */ get cancelBubble() { return pd(this).stopped }, set cancelBubble(value) { if (!value) { return } const data = pd(this); data.stopped = true; if (typeof data.event.cancelBubble === "boolean") { data.event.cancelBubble = true; } }, /** * The flag to indicate cancellation state. * @type {boolean} * @deprecated */ get returnValue() { return !pd(this).canceled }, set returnValue(value) { if (!value) { setCancelFlag(pd(this)); } }, /** * Initialize this event object. But do nothing under event dispatching. * @param {string} type The event type. * @param {boolean} [bubbles=false] The flag to be possible to bubble up. 
* @param {boolean} [cancelable=false] The flag to be possible to cancel. * @deprecated */ initEvent() { // Do nothing. }, }; // `constructor` is not enumerable. Object.defineProperty(Event.prototype, "constructor", { value: Event, configurable: true, writable: true, }); // Ensure `event instanceof window.Event` is `true`. if (typeof window !== "undefined" && typeof window.Event !== "undefined") { Object.setPrototypeOf(Event.prototype, window.Event.prototype); // Make association for wrappers. wrappers.set(window.Event.prototype, Event); } /** * Get the property descriptor to redirect a given property. * @param {string} key Property name to define property descriptor. * @returns {PropertyDescriptor} The property descriptor to redirect the property. * @private */ function defineRedirectDescriptor(key) { return { get() { return pd(this).event[key] }, set(value) { pd(this).event[key] = value; }, configurable: true, enumerable: true, } } /** * Get the property descriptor to call a given method property. * @param {string} key Property name to define property descriptor. * @returns {PropertyDescriptor} The property descriptor to call the method property. * @private */ function defineCallDescriptor(key) { return { value() { const event = pd(this).event; return event[key].apply(event, arguments) }, configurable: true, enumerable: true, } } /** * Define new wrapper class. * @param {Function} BaseEvent The base wrapper class. * @param {Object} proto The prototype of the original event. * @returns {Function} The defined wrapper class. * @private */ function defineWrapper(BaseEvent, proto) { const keys = Object.keys(proto); if (keys.length === 0) { return BaseEvent } /** CustomEvent */ function CustomEvent(eventTarget, event) { BaseEvent.call(this, eventTarget, event); } CustomEvent.prototype = Object.create(BaseEvent.prototype, { constructor: { value: CustomEvent, configurable: true, writable: true }, }); // Define accessors. for (let i = 0; i < keys.length; ++i) { const key = keys[i]; if (!(key in BaseEvent.prototype)) { const descriptor = Object.getOwnPropertyDescriptor(proto, key); const isFunc = typeof descriptor.value === "function"; Object.defineProperty( CustomEvent.prototype, key, isFunc ? defineCallDescriptor(key) : defineRedirectDescriptor(key) ); } } return CustomEvent } /** * Get the wrapper class of a given prototype. * @param {Object} proto The prototype of the original event to get its wrapper. * @returns {Function} The wrapper class. * @private */ function getWrapper(proto) { if (proto == null || proto === Object.prototype) { return Event } let wrapper = wrappers.get(proto); if (wrapper == null) { wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto); wrappers.set(proto, wrapper); } return wrapper } /** * Wrap a given event to management a dispatching. * @param {EventTarget} eventTarget The event target of this dispatching. * @param {Object} event The event to wrap. * @returns {Event} The wrapper instance. * @private */ function wrapEvent(eventTarget, event) { const Wrapper = getWrapper(Object.getPrototypeOf(event)); return new Wrapper(eventTarget, event) } /** * Get the immediateStopped flag of a given event. * @param {Event} event The event to get. * @returns {boolean} The flag to stop propagation immediately. * @private */ function isStopped(event) { return pd(event).immediateStopped } /** * Set the current event phase of a given event. * @param {Event} event The event to set current target. * @param {number} eventPhase New event phase. 
* @returns {void} * @private */ function setEventPhase(event, eventPhase) { pd(event).eventPhase = eventPhase; } /** * Set the current target of a given event. * @param {Event} event The event to set current target. * @param {EventTarget|null} currentTarget New current target. * @returns {void} * @private */ function setCurrentTarget(event, currentTarget) { pd(event).currentTarget = currentTarget; } /** * Set a passive listener of a given event. * @param {Event} event The event to set current target. * @param {Function|null} passiveListener New passive listener. * @returns {void} * @private */ function setPassiveListener(event, passiveListener) { pd(event).passiveListener = passiveListener; } /** * @typedef {object} ListenerNode * @property {Function} listener * @property {1|2|3} listenerType * @property {boolean} passive * @property {boolean} once * @property {ListenerNode|null} next * @private */ /** * @type {WeakMap>} * @private */ const listenersMap = new WeakMap(); // Listener types const CAPTURE = 1; const BUBBLE = 2; const ATTRIBUTE = 3; /** * Check whether a given value is an object or not. * @param {any} x The value to check. * @returns {boolean} `true` if the value is an object. */ function isObject(x) { return x !== null && typeof x === "object" //eslint-disable-line no-restricted-syntax } /** * Get listeners. * @param {EventTarget} eventTarget The event target to get. * @returns {Map} The listeners. * @private */ function getListeners(eventTarget) { const listeners = listenersMap.get(eventTarget); if (listeners == null) { throw new TypeError( "'this' is expected an EventTarget object, but got another value." ) } return listeners } /** * Get the property descriptor for the event attribute of a given event. * @param {string} eventName The event name to get property descriptor. * @returns {PropertyDescriptor} The property descriptor. * @private */ function defineEventAttributeDescriptor(eventName) { return { get() { const listeners = getListeners(this); let node = listeners.get(eventName); while (node != null) { if (node.listenerType === ATTRIBUTE) { return node.listener } node = node.next; } return null }, set(listener) { if (typeof listener !== "function" && !isObject(listener)) { listener = null; // eslint-disable-line no-param-reassign } const listeners = getListeners(this); // Traverse to the tail while removing old value. let prev = null; let node = listeners.get(eventName); while (node != null) { if (node.listenerType === ATTRIBUTE) { // Remove old value. if (prev !== null) { prev.next = node.next; } else if (node.next !== null) { listeners.set(eventName, node.next); } else { listeners.delete(eventName); } } else { prev = node; } node = node.next; } // Add new value. if (listener !== null) { const newNode = { listener, listenerType: ATTRIBUTE, passive: false, once: false, next: null, }; if (prev === null) { listeners.set(eventName, newNode); } else { prev.next = newNode; } } }, configurable: true, enumerable: true, } } /** * Define an event attribute (e.g. `eventTarget.onclick`). * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite. * @param {string} eventName The event name to define. * @returns {void} */ function defineEventAttribute(eventTargetPrototype, eventName) { Object.defineProperty( eventTargetPrototype, `on${eventName}`, defineEventAttributeDescriptor(eventName) ); } /** * Define a custom EventTarget with event attributes. * @param {string[]} eventNames Event names for event attributes. 
* @returns {EventTarget} The custom EventTarget. * @private */ function defineCustomEventTarget(eventNames) { /** CustomEventTarget */ function CustomEventTarget() { EventTarget.call(this); } CustomEventTarget.prototype = Object.create(EventTarget.prototype, { constructor: { value: CustomEventTarget, configurable: true, writable: true, }, }); for (let i = 0; i < eventNames.length; ++i) { defineEventAttribute(CustomEventTarget.prototype, eventNames[i]); } return CustomEventTarget } /** * EventTarget. * * - This is constructor if no arguments. * - This is a function which returns a CustomEventTarget constructor if there are arguments. * * For example: * * class A extends EventTarget {} * class B extends EventTarget("message") {} * class C extends EventTarget("message", "error") {} * class D extends EventTarget(["message", "error"]) {} */ function EventTarget() { /*eslint-disable consistent-return */ if (this instanceof EventTarget) { listenersMap.set(this, new Map()); return } if (arguments.length === 1 && Array.isArray(arguments[0])) { return defineCustomEventTarget(arguments[0]) } if (arguments.length > 0) { const types = new Array(arguments.length); for (let i = 0; i < arguments.length; ++i) { types[i] = arguments[i]; } return defineCustomEventTarget(types) } throw new TypeError("Cannot call a class as a function") /*eslint-enable consistent-return */ } // Should be enumerable, but class methods are not enumerable. EventTarget.prototype = { /** * Add a given listener to this event target. * @param {string} eventName The event name to add. * @param {Function} listener The listener to add. * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. * @returns {void} */ addEventListener(eventName, listener, options) { if (listener == null) { return } if (typeof listener !== "function" && !isObject(listener)) { throw new TypeError("'listener' should be a function or an object.") } const listeners = getListeners(this); const optionsIsObj = isObject(options); const capture = optionsIsObj ? Boolean(options.capture) : Boolean(options); const listenerType = capture ? CAPTURE : BUBBLE; const newNode = { listener, listenerType, passive: optionsIsObj && Boolean(options.passive), once: optionsIsObj && Boolean(options.once), next: null, }; // Set it as the first node if the first node is null. let node = listeners.get(eventName); if (node === undefined) { listeners.set(eventName, newNode); return } // Traverse to the tail while checking duplication.. let prev = null; while (node != null) { if ( node.listener === listener && node.listenerType === listenerType ) { // Should ignore duplication. return } prev = node; node = node.next; } // Add it. prev.next = newNode; }, /** * Remove a given listener from this event target. * @param {string} eventName The event name to remove. * @param {Function} listener The listener to remove. * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener. * @returns {void} */ removeEventListener(eventName, listener, options) { if (listener == null) { return } const listeners = getListeners(this); const capture = isObject(options) ? Boolean(options.capture) : Boolean(options); const listenerType = capture ? 
CAPTURE : BUBBLE; let prev = null; let node = listeners.get(eventName); while (node != null) { if ( node.listener === listener && node.listenerType === listenerType ) { if (prev !== null) { prev.next = node.next; } else if (node.next !== null) { listeners.set(eventName, node.next); } else { listeners.delete(eventName); } return } prev = node; node = node.next; } }, /** * Dispatch a given event. * @param {Event|{type:string}} event The event to dispatch. * @returns {boolean} `false` if canceled. */ dispatchEvent(event) { if (event == null || typeof event.type !== "string") { throw new TypeError('"event.type" should be a string.') } // If listeners aren't registered, terminate. const listeners = getListeners(this); const eventName = event.type; let node = listeners.get(eventName); if (node == null) { return true } // Since we cannot rewrite several properties, so wrap object. const wrappedEvent = wrapEvent(this, event); // This doesn't process capturing phase and bubbling phase. // This isn't participating in a tree. let prev = null; while (node != null) { // Remove this listener if it's once if (node.once) { if (prev !== null) { prev.next = node.next; } else if (node.next !== null) { listeners.set(eventName, node.next); } else { listeners.delete(eventName); } } else { prev = node; } // Call this listener setPassiveListener( wrappedEvent, node.passive ? node.listener : null ); if (typeof node.listener === "function") { try { node.listener.call(this, wrappedEvent); } catch (err) { if ( typeof console !== "undefined" && typeof console.error === "function" ) { console.error(err); } } } else if ( node.listenerType !== ATTRIBUTE && typeof node.listener.handleEvent === "function" ) { node.listener.handleEvent(wrappedEvent); } // Break if `event.stopImmediatePropagation` was called. if (isStopped(wrappedEvent)) { break } node = node.next; } setPassiveListener(wrappedEvent, null); setEventPhase(wrappedEvent, 0); setCurrentTarget(wrappedEvent, null); return !wrappedEvent.defaultPrevented }, }; // `constructor` is not enumerable. Object.defineProperty(EventTarget.prototype, "constructor", { value: EventTarget, configurable: true, writable: true, }); // Ensure `eventTarget instanceof window.EventTarget` is `true`. 
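/*
 * Illustrative usage sketch for the EventTarget shim defined above (comment
 * only; the Uploader class and event names are assumptions, not exports added
 * by this bundle):
 *
 *   class Uploader extends EventTarget("progress", "done") {}
 *   const u = new Uploader();
 *   u.onprogress = (e) => console.log("progress", e.loaded);  // event attribute
 *   u.addEventListener("done", () => console.log("finished"), { once: true });
 *   u.dispatchEvent({ type: "progress", loaded: 10 });
 *   u.dispatchEvent({ type: "done" });
 */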
if ( typeof window !== "undefined" && typeof window.EventTarget !== "undefined" ) { Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype); } exports.defineEventAttribute = defineEventAttribute; exports.EventTarget = EventTarget; exports["default"] = EventTarget; module.exports = EventTarget module.exports.EventTarget = module.exports["default"] = EventTarget module.exports.defineEventAttribute = defineEventAttribute //# sourceMappingURL=event-target-shim.js.map /***/ }), /***/ 23860: /***/ ((module) => { "use strict"; var hasOwn = Object.prototype.hasOwnProperty; var toStr = Object.prototype.toString; var defineProperty = Object.defineProperty; var gOPD = Object.getOwnPropertyDescriptor; var isArray = function isArray(arr) { if (typeof Array.isArray === 'function') { return Array.isArray(arr); } return toStr.call(arr) === '[object Array]'; }; var isPlainObject = function isPlainObject(obj) { if (!obj || toStr.call(obj) !== '[object Object]') { return false; } var hasOwnConstructor = hasOwn.call(obj, 'constructor'); var hasIsPrototypeOf = obj.constructor && obj.constructor.prototype && hasOwn.call(obj.constructor.prototype, 'isPrototypeOf'); // Not own constructor property must be Object if (obj.constructor && !hasOwnConstructor && !hasIsPrototypeOf) { return false; } // Own properties are enumerated firstly, so to speed up, // if last one is own, then all properties are own. var key; for (key in obj) { /**/ } return typeof key === 'undefined' || hasOwn.call(obj, key); }; // If name is '__proto__', and Object.defineProperty is available, define __proto__ as an own property on target var setProperty = function setProperty(target, options) { if (defineProperty && options.name === '__proto__') { defineProperty(target, options.name, { enumerable: true, configurable: true, value: options.newValue, writable: true }); } else { target[options.name] = options.newValue; } }; // Return undefined instead of __proto__ if '__proto__' is not an own property var getProperty = function getProperty(obj, name) { if (name === '__proto__') { if (!hasOwn.call(obj, name)) { return void 0; } else if (gOPD) { // In early versions of node, obj['__proto__'] is buggy when obj has // __proto__ as an own property. Object.getOwnPropertyDescriptor() works. return gOPD(obj, name).value; } } return obj[name]; }; module.exports = function extend() { var options, name, src, copy, copyIsArray, clone; var target = arguments[0]; var i = 1; var length = arguments.length; var deep = false; // Handle a deep copy situation if (typeof target === 'boolean') { deep = target; target = arguments[1] || {}; // skip the boolean and the target i = 2; } if (target == null || (typeof target !== 'object' && typeof target !== 'function')) { target = {}; } for (; i < length; ++i) { options = arguments[i]; // Only deal with non-null/undefined values if (options != null) { // Extend the base object for (name in options) { src = getProperty(target, name); copy = getProperty(options, name); // Prevent never-ending loop if (target !== copy) { // Recurse if we're merging plain objects or arrays if (deep && copy && (isPlainObject(copy) || (copyIsArray = isArray(copy)))) { if (copyIsArray) { copyIsArray = false; clone = src && isArray(src) ? src : []; } else { clone = src && isPlainObject(src) ? 
src : {}; } // Never move original objects, clone them setProperty(target, { name: name, newValue: extend(deep, clone, copy) }); // Don't bring in undefined values } else if (typeof copy !== 'undefined') { setProperty(target, { name: name, newValue: copy }); } } } } } // Return the modified object return target; }; /***/ }), /***/ 39741: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const validator = __nccwpck_require__(39433); const XMLParser = __nccwpck_require__(79844); const XMLBuilder = __nccwpck_require__(80659); module.exports = { XMLParser: XMLParser, XMLValidator: validator, XMLBuilder: XMLBuilder } /***/ }), /***/ 30812: /***/ ((module) => { function getIgnoreAttributesFn(ignoreAttributes) { if (typeof ignoreAttributes === 'function') { return ignoreAttributes } if (Array.isArray(ignoreAttributes)) { return (attrName) => { for (const pattern of ignoreAttributes) { if (typeof pattern === 'string' && attrName === pattern) { return true } if (pattern instanceof RegExp && pattern.test(attrName)) { return true } } } } return () => false } module.exports = getIgnoreAttributesFn /***/ }), /***/ 47019: /***/ ((__unused_webpack_module, exports) => { "use strict"; const nameStartChar = ':A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD'; const nameChar = nameStartChar + '\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040'; const nameRegexp = '[' + nameStartChar + '][' + nameChar + ']*' const regexName = new RegExp('^' + nameRegexp + '$'); const getAllMatches = function(string, regex) { const matches = []; let match = regex.exec(string); while (match) { const allmatches = []; allmatches.startIndex = regex.lastIndex - match[0].length; const len = match.length; for (let index = 0; index < len; index++) { allmatches.push(match[index]); } matches.push(allmatches); match = regex.exec(string); } return matches; }; const isName = function(string) { const match = regexName.exec(string); return !(match === null || typeof match === 'undefined'); }; exports.isExist = function(v) { return typeof v !== 'undefined'; }; exports.isEmptyObject = function(obj) { return Object.keys(obj).length === 0; }; /** * Copy all the properties of a into b. 
* @param {*} target * @param {*} a */ exports.merge = function(target, a, arrayMode) { if (a) { const keys = Object.keys(a); // will return an array of own properties const len = keys.length; //don't make it inline for (let i = 0; i < len; i++) { if (arrayMode === 'strict') { target[keys[i]] = [ a[keys[i]] ]; } else { target[keys[i]] = a[keys[i]]; } } } }; /* exports.merge =function (b,a){ return Object.assign(b,a); } */ exports.getValue = function(v) { if (exports.isExist(v)) { return v; } else { return ''; } }; // const fakeCall = function(a) {return a;}; // const fakeCallNoReturn = function() {}; exports.isName = isName; exports.getAllMatches = getAllMatches; exports.nameRegexp = nameRegexp; /***/ }), /***/ 39433: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; const util = __nccwpck_require__(47019); const defaultOptions = { allowBooleanAttributes: false, //A tag can have attributes without any value unpairedTags: [] }; //const tagsPattern = new RegExp("<\\/?([\\w:\\-_\.]+)\\s*\/?>","g"); exports.validate = function (xmlData, options) { options = Object.assign({}, defaultOptions, options); //xmlData = xmlData.replace(/(\r\n|\n|\r)/gm,"");//make it single line //xmlData = xmlData.replace(/(^\s*<\?xml.*?\?>)/g,"");//Remove XML starting tag //xmlData = xmlData.replace(/()/g,"");//Remove DOCTYPE const tags = []; let tagFound = false; //indicates that the root tag has been closed (aka. depth 0 has been reached) let reachedRoot = false; if (xmlData[0] === '\ufeff') { // check for byte order mark (BOM) xmlData = xmlData.substr(1); } for (let i = 0; i < xmlData.length; i++) { if (xmlData[i] === '<' && xmlData[i+1] === '?') { i+=2; i = readPI(xmlData,i); if (i.err) return i; }else if (xmlData[i] === '<') { //starting of tag //read until you reach to '>' avoiding any '>' in attribute value let tagStartPos = i; i++; if (xmlData[i] === '!') { i = readCommentAndCDATA(xmlData, i); continue; } else { let closingTag = false; if (xmlData[i] === '/') { //closing tag closingTag = true; i++; } //read tagname let tagName = ''; for (; i < xmlData.length && xmlData[i] !== '>' && xmlData[i] !== ' ' && xmlData[i] !== '\t' && xmlData[i] !== '\n' && xmlData[i] !== '\r'; i++ ) { tagName += xmlData[i]; } tagName = tagName.trim(); //console.log(tagName); if (tagName[tagName.length - 1] === '/') { //self closing tag without attributes tagName = tagName.substring(0, tagName.length - 1); //continue; i--; } if (!validateTagName(tagName)) { let msg; if (tagName.trim().length === 0) { msg = "Invalid space after '<'."; } else { msg = "Tag '"+tagName+"' is an invalid name."; } return getErrorObject('InvalidTag', msg, getLineNumberForPosition(xmlData, i)); } const result = readAttributeStr(xmlData, i); if (result === false) { return getErrorObject('InvalidAttr', "Attributes for '"+tagName+"' have open quote.", getLineNumberForPosition(xmlData, i)); } let attrStr = result.value; i = result.index; if (attrStr[attrStr.length - 1] === '/') { //self closing tag const attrStrStart = i - attrStr.length; attrStr = attrStr.substring(0, attrStr.length - 1); const isValid = validateAttributeString(attrStr, options); if (isValid === true) { tagFound = true; //continue; //text may presents after self closing tag } else { //the result from the nested function returns the position of the error within the attribute //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute //this gives us the 
absolute index in the entire xml, which we can use to find the line at last return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, attrStrStart + isValid.err.line)); } } else if (closingTag) { if (!result.tagClosed) { return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' doesn't have proper closing.", getLineNumberForPosition(xmlData, i)); } else if (attrStr.trim().length > 0) { return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' can't have attributes or invalid starting.", getLineNumberForPosition(xmlData, tagStartPos)); } else if (tags.length === 0) { return getErrorObject('InvalidTag', "Closing tag '"+tagName+"' has not been opened.", getLineNumberForPosition(xmlData, tagStartPos)); } else { const otg = tags.pop(); if (tagName !== otg.tagName) { let openPos = getLineNumberForPosition(xmlData, otg.tagStartPos); return getErrorObject('InvalidTag', "Expected closing tag '"+otg.tagName+"' (opened in line "+openPos.line+", col "+openPos.col+") instead of closing tag '"+tagName+"'.", getLineNumberForPosition(xmlData, tagStartPos)); } //when there are no more tags, we reached the root level. if (tags.length == 0) { reachedRoot = true; } } } else { const isValid = validateAttributeString(attrStr, options); if (isValid !== true) { //the result from the nested function returns the position of the error within the attribute //in order to get the 'true' error line, we need to calculate the position where the attribute begins (i - attrStr.length) and then add the position within the attribute //this gives us the absolute index in the entire xml, which we can use to find the line at last return getErrorObject(isValid.err.code, isValid.err.msg, getLineNumberForPosition(xmlData, i - attrStr.length + isValid.err.line)); } //if the root level has been reached before ... 
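//e.g. "<a></a><b></b>" trips the check below with 'Multiple possible root nodes found.'
//(illustrative input, not taken from the original source)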
if (reachedRoot === true) { return getErrorObject('InvalidXml', 'Multiple possible root nodes found.', getLineNumberForPosition(xmlData, i)); } else if(options.unpairedTags.indexOf(tagName) !== -1){ //don't push into stack } else { tags.push({tagName, tagStartPos}); } tagFound = true; } //skip tag text value //It may include comments and CDATA value for (i++; i < xmlData.length; i++) { if (xmlData[i] === '<') { if (xmlData[i + 1] === '!') { //comment or CADATA i++; i = readCommentAndCDATA(xmlData, i); continue; } else if (xmlData[i+1] === '?') { i = readPI(xmlData, ++i); if (i.err) return i; } else{ break; } } else if (xmlData[i] === '&') { const afterAmp = validateAmpersand(xmlData, i); if (afterAmp == -1) return getErrorObject('InvalidChar', "char '&' is not expected.", getLineNumberForPosition(xmlData, i)); i = afterAmp; }else{ if (reachedRoot === true && !isWhiteSpace(xmlData[i])) { return getErrorObject('InvalidXml', "Extra text at the end", getLineNumberForPosition(xmlData, i)); } } } //end of reading tag text value if (xmlData[i] === '<') { i--; } } } else { if ( isWhiteSpace(xmlData[i])) { continue; } return getErrorObject('InvalidChar', "char '"+xmlData[i]+"' is not expected.", getLineNumberForPosition(xmlData, i)); } } if (!tagFound) { return getErrorObject('InvalidXml', 'Start tag expected.', 1); }else if (tags.length == 1) { return getErrorObject('InvalidTag', "Unclosed tag '"+tags[0].tagName+"'.", getLineNumberForPosition(xmlData, tags[0].tagStartPos)); }else if (tags.length > 0) { return getErrorObject('InvalidXml', "Invalid '"+ JSON.stringify(tags.map(t => t.tagName), null, 4).replace(/\r?\n/g, '')+ "' found.", {line: 1, col: 1}); } return true; }; function isWhiteSpace(char){ return char === ' ' || char === '\t' || char === '\n' || char === '\r'; } /** * Read Processing insstructions and skip * @param {*} xmlData * @param {*} i */ function readPI(xmlData, i) { const start = i; for (; i < xmlData.length; i++) { if (xmlData[i] == '?' || xmlData[i] == ' ') { //tagname const tagname = xmlData.substr(start, i - start); if (i > 5 && tagname === 'xml') { return getErrorObject('InvalidXml', 'XML declaration allowed only at the start of the document.', getLineNumberForPosition(xmlData, i)); } else if (xmlData[i] == '?' 
&& xmlData[i + 1] == '>') { //check if valid attribut string i++; break; } else { continue; } } } return i; } function readCommentAndCDATA(xmlData, i) { if (xmlData.length > i + 5 && xmlData[i + 1] === '-' && xmlData[i + 2] === '-') { //comment for (i += 3; i < xmlData.length; i++) { if (xmlData[i] === '-' && xmlData[i + 1] === '-' && xmlData[i + 2] === '>') { i += 2; break; } } } else if ( xmlData.length > i + 8 && xmlData[i + 1] === 'D' && xmlData[i + 2] === 'O' && xmlData[i + 3] === 'C' && xmlData[i + 4] === 'T' && xmlData[i + 5] === 'Y' && xmlData[i + 6] === 'P' && xmlData[i + 7] === 'E' ) { let angleBracketsCount = 1; for (i += 8; i < xmlData.length; i++) { if (xmlData[i] === '<') { angleBracketsCount++; } else if (xmlData[i] === '>') { angleBracketsCount--; if (angleBracketsCount === 0) { break; } } } } else if ( xmlData.length > i + 9 && xmlData[i + 1] === '[' && xmlData[i + 2] === 'C' && xmlData[i + 3] === 'D' && xmlData[i + 4] === 'A' && xmlData[i + 5] === 'T' && xmlData[i + 6] === 'A' && xmlData[i + 7] === '[' ) { for (i += 8; i < xmlData.length; i++) { if (xmlData[i] === ']' && xmlData[i + 1] === ']' && xmlData[i + 2] === '>') { i += 2; break; } } } return i; } const doubleQuote = '"'; const singleQuote = "'"; /** * Keep reading xmlData until '<' is found outside the attribute value. * @param {string} xmlData * @param {number} i */ function readAttributeStr(xmlData, i) { let attrStr = ''; let startChar = ''; let tagClosed = false; for (; i < xmlData.length; i++) { if (xmlData[i] === doubleQuote || xmlData[i] === singleQuote) { if (startChar === '') { startChar = xmlData[i]; } else if (startChar !== xmlData[i]) { //if vaue is enclosed with double quote then single quotes are allowed inside the value and vice versa } else { startChar = ''; } } else if (xmlData[i] === '>') { if (startChar === '') { tagClosed = true; break; } } attrStr += xmlData[i]; } if (startChar !== '') { return false; } return { value: attrStr, index: i, tagClosed: tagClosed }; } /** * Select all the attributes whether valid or invalid. 
*/ const validAttrStrRegxp = new RegExp('(\\s*)([^\\s=]+)(\\s*=)?(\\s*([\'"])(([\\s\\S])*?)\\5)?', 'g'); //attr, ="sd", a="amit's", a="sd"b="saf", ab cd="" function validateAttributeString(attrStr, options) { //console.log("start:"+attrStr+":end"); //if(attrStr.trim().length === 0) return true; //empty string const matches = util.getAllMatches(attrStr, validAttrStrRegxp); const attrNames = {}; for (let i = 0; i < matches.length; i++) { if (matches[i][1].length === 0) { //nospace before attribute name: a="sd"b="saf" return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' has no space in starting.", getPositionFromMatch(matches[i])) } else if (matches[i][3] !== undefined && matches[i][4] === undefined) { return getErrorObject('InvalidAttr', "Attribute '"+matches[i][2]+"' is without value.", getPositionFromMatch(matches[i])); } else if (matches[i][3] === undefined && !options.allowBooleanAttributes) { //independent attribute: ab return getErrorObject('InvalidAttr', "boolean attribute '"+matches[i][2]+"' is not allowed.", getPositionFromMatch(matches[i])); } /* else if(matches[i][6] === undefined){//attribute without value: ab= return { err: { code:"InvalidAttr",msg:"attribute " + matches[i][2] + " has no value assigned."}}; } */ const attrName = matches[i][2]; if (!validateAttrName(attrName)) { return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is an invalid name.", getPositionFromMatch(matches[i])); } if (!attrNames.hasOwnProperty(attrName)) { //check for duplicate attribute. attrNames[attrName] = 1; } else { return getErrorObject('InvalidAttr', "Attribute '"+attrName+"' is repeated.", getPositionFromMatch(matches[i])); } } return true; } function validateNumberAmpersand(xmlData, i) { let re = /\d/; if (xmlData[i] === 'x') { i++; re = /[\da-fA-F]/; } for (; i < xmlData.length; i++) { if (xmlData[i] === ';') return i; if (!xmlData[i].match(re)) break; } return -1; } function validateAmpersand(xmlData, i) { // https://www.w3.org/TR/xml/#dt-charref i++; if (xmlData[i] === ';') return -1; if (xmlData[i] === '#') { i++; return validateNumberAmpersand(xmlData, i); } let count = 0; for (; i < xmlData.length; i++, count++) { if (xmlData[i].match(/\w/) && count < 20) continue; if (xmlData[i] === ';') break; return -1; } return i; } function getErrorObject(code, message, lineNumber) { return { err: { code: code, msg: message, line: lineNumber.line || lineNumber, col: lineNumber.col, }, }; } function validateAttrName(attrName) { return util.isName(attrName); } // const startsWithXML = /^xml/i; function validateTagName(tagname) { return util.isName(tagname) /* && !tagname.match(startsWithXML) */; } //this function returns the line number for the character at the given index function getLineNumberForPosition(xmlData, index) { const lines = xmlData.substring(0, index).split(/\r?\n/); return { line: lines.length, // column number is last line's length + 1, because column numbering starts at 1: col: lines[lines.length - 1].length + 1 }; } //this function returns the position of the first character of match within attrStr function getPositionFromMatch(match) { return match.startIndex + match[1].length; } /***/ }), /***/ 80659: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; //parse Empty Node as self closing node const buildFromOrderedJs = __nccwpck_require__(43997); const getIgnoreAttributesFn = __nccwpck_require__(30812) const defaultOptions = { attributeNamePrefix: '@_', attributesGroupName: false, textNodeName: '#text', ignoreAttributes: true, 
cdataPropName: false,
  format: false,
  indentBy: '  ',
  suppressEmptyNode: false,
  suppressUnpairedNode: true,
  suppressBooleanAttributes: true,
  tagValueProcessor: function(key, a) {
    return a;
  },
  attributeValueProcessor: function(attrName, a) {
    return a;
  },
  preserveOrder: false,
  commentPropName: false,
  unpairedTags: [],
  entities: [
    { regex: new RegExp("&", "g"), val: "&amp;" },//it must be on top
    { regex: new RegExp(">", "g"), val: "&gt;" },
    { regex: new RegExp("<", "g"), val: "&lt;" },
    { regex: new RegExp("\'", "g"), val: "&apos;" },
    { regex: new RegExp("\"", "g"), val: "&quot;" }
  ],
  processEntities: true,
  stopNodes: [],
  // transformTagName: false,
  // transformAttributeName: false,
  oneListGroup: false
};

function Builder(options) {
  this.options = Object.assign({}, defaultOptions, options);
  if (this.options.ignoreAttributes === true || this.options.attributesGroupName) {
    this.isAttribute = function(/*a*/) {
      return false;
    };
  } else {
    this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes)
    this.attrPrefixLen = this.options.attributeNamePrefix.length;
    this.isAttribute = isAttribute;
  }

  this.processTextOrObjNode = processTextOrObjNode

  if (this.options.format) {
    this.indentate = indentate;
    this.tagEndChar = '>\n';
    this.newLine = '\n';
  } else {
    this.indentate = function() {
      return '';
    };
    this.tagEndChar = '>';
    this.newLine = '';
  }
}

Builder.prototype.build = function(jObj) {
  if(this.options.preserveOrder){
    return buildFromOrderedJs(jObj, this.options);
  }else {
    if(Array.isArray(jObj) && this.options.arrayNodeName && this.options.arrayNodeName.length > 1){
      jObj = {
        [this.options.arrayNodeName] : jObj
      }
    }
    return this.j2x(jObj, 0, []).val;
  }
};

Builder.prototype.j2x = function(jObj, level, ajPath) {
  let attrStr = '';
  let val = '';
  const jPath = ajPath.join('.')
  for (let key in jObj) {
    if(!Object.prototype.hasOwnProperty.call(jObj, key)) continue;
    if (typeof jObj[key] === 'undefined') {
      // supress undefined node only if it is not an attribute
      if (this.isAttribute(key)) {
        val += '';
      }
    } else if (jObj[key] === null) {
      // null attribute should be ignored by the attribute list, but should not cause the tag closing
      if (this.isAttribute(key)) {
        val += '';
      } else if (key[0] === '?') {
        val += this.indentate(level) + '<' + key + '?' + this.tagEndChar;
      } else {
        val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
      }
      // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
    } else if (jObj[key] instanceof Date) {
      val += this.buildTextValNode(jObj[key], key, '', level);
    } else if (typeof jObj[key] !== 'object') {
      //premitive type
      const attr = this.isAttribute(key);
      if (attr && !this.ignoreAttributesFn(attr, jPath)) {
        attrStr += this.buildAttrPairStr(attr, '' + jObj[key]);
      } else if (!attr) {
        //tag value
        if (key === this.options.textNodeName) {
          let newval = this.options.tagValueProcessor(key, '' + jObj[key]);
          val += this.replaceEntitiesValue(newval);
        } else {
          val += this.buildTextValNode(jObj[key], key, '', level);
        }
      }
    } else if (Array.isArray(jObj[key])) {
      //repeated nodes
      const arrLen = jObj[key].length;
      let listTagVal = "";
      let listTagAttr = "";
      for (let j = 0; j < arrLen; j++) {
        const item = jObj[key][j];
        if (typeof item === 'undefined') {
          // supress undefined node
        } else if (item === null) {
          if(key[0] === "?") val += this.indentate(level) + '<' + key + '?' + this.tagEndChar;
          else val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
          // val += this.indentate(level) + '<' + key + '/' + this.tagEndChar;
        } else if (typeof item === 'object') {
          if(this.options.oneListGroup){
            const result = this.j2x(item, level + 1, ajPath.concat(key));
            listTagVal += result.val;
            if (this.options.attributesGroupName && item.hasOwnProperty(this.options.attributesGroupName)) {
              listTagAttr += result.attrStr
            }
          }else{
            listTagVal += this.processTextOrObjNode(item, key, level, ajPath)
          }
        } else {
          if (this.options.oneListGroup) {
            let textValue = this.options.tagValueProcessor(key, item);
            textValue = this.replaceEntitiesValue(textValue);
            listTagVal += textValue;
          } else {
            listTagVal += this.buildTextValNode(item, key, '', level);
          }
        }
      }
      if(this.options.oneListGroup){
        listTagVal = this.buildObjectNode(listTagVal, key, listTagAttr, level);
      }
      val += listTagVal;
    } else {
      //nested node
      if (this.options.attributesGroupName && key === this.options.attributesGroupName) {
        const Ks = Object.keys(jObj[key]);
        const L = Ks.length;
        for (let j = 0; j < L; j++) {
          attrStr += this.buildAttrPairStr(Ks[j], '' + jObj[key][Ks[j]]);
        }
      } else {
        val += this.processTextOrObjNode(jObj[key], key, level, ajPath)
      }
    }
  }
  return {attrStr: attrStr, val: val};
};

Builder.prototype.buildAttrPairStr = function(attrName, val){
  val = this.options.attributeValueProcessor(attrName, '' + val);
  val = this.replaceEntitiesValue(val);
  if (this.options.suppressBooleanAttributes && val === "true") {
    return ' ' + attrName;
  } else return ' ' + attrName + '="' + val + '"';
}

function processTextOrObjNode (object, key, level, ajPath) {
  const result = this.j2x(object, level + 1, ajPath.concat(key));
  if (object[this.options.textNodeName] !== undefined && Object.keys(object).length === 1) {
    return this.buildTextValNode(object[this.options.textNodeName], key, result.attrStr, level);
  } else {
    return this.buildObjectNode(result.val, key, result.attrStr, level);
  }
}

Builder.prototype.buildObjectNode = function(val, key, attrStr, level) {
  if(val === ""){
    if(key[0] === "?") return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
    else {
      return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
    }
  }else{

    let tagEndExp = '</' + key + this.tagEndChar;
    let piClosingChar = "";

    if(key[0] === "?") {
      piClosingChar = "?";
      tagEndExp = "";
    }

    // attrStr is an empty string in case the attribute came as undefined or null
    if ((attrStr || attrStr === '') && val.indexOf('<') === -1) {
      return ( this.indentate(level) + '<' + key + attrStr + piClosingChar + '>' + val + tagEndExp );
    } else if (this.options.commentPropName !== false && key === this.options.commentPropName && piClosingChar.length === 0) {
      return this.indentate(level) + `<!--${val}-->` + this.newLine;
    }else {
      return (
        this.indentate(level) + '<' + key + attrStr + piClosingChar + this.tagEndChar +
        val +
        this.indentate(level) + tagEndExp );
    }
  }
}

Builder.prototype.closeTag = function(key){
  let closeTag = "";
  if(this.options.unpairedTags.indexOf(key) !== -1){ //unpaired
    if(!this.options.suppressUnpairedNode) closeTag = "/"
  }else if(this.options.suppressEmptyNode){ //empty
    closeTag = "/";
  }else{
    closeTag = `></${key}`
  }
  return closeTag;
}

Builder.prototype.buildTextValNode = function(val, key, attrStr, level) {
  if (this.options.cdataPropName !== false && key === this.options.cdataPropName) {
    return this.indentate(level) + `<![CDATA[${val}]]>` + this.newLine;
  }else if (this.options.commentPropName !== false && key === this.options.commentPropName) {
    return this.indentate(level) + `<!--${val}-->` + this.newLine;
  }else if(key[0] === "?") {//PI tag
    return this.indentate(level) + '<' + key + attrStr+ '?' + this.tagEndChar;
  }else{
    let textValue = this.options.tagValueProcessor(key, val);
    textValue = this.replaceEntitiesValue(textValue);

    if( textValue === ''){
      return this.indentate(level) + '<' + key + attrStr + this.closeTag(key) + this.tagEndChar;
    }else{
      return this.indentate(level) + '<' + key + attrStr + '>' +
        textValue +
        '</' + key + this.tagEndChar;
    }
  }
}

Builder.prototype.replaceEntitiesValue = function(textValue){
  if (textValue && textValue.length > 0 && this.options.processEntities){
    for (let i=0; i<this.options.entities.length; i++) {
      const entity = this.options.entities[i];
      textValue = textValue.replace(entity.regex, entity.val);
    }
  }
  return textValue;
}

function indentate(level) {
  return this.options.indentBy.repeat(level);
}

function isAttribute(name /*, options*/) {
  if (name.startsWith(this.options.attributeNamePrefix) && name !== this.options.textNodeName) {
    return name.substr(this.attrPrefixLen);
  } else {
    return false;
  }
}

module.exports = Builder;


/***/ }),

/***/ 43997:
/***/ ((module) => {

const EOL = "\n";

/**
 *
 * @param {array} jArray
 * @param {any} options
 * @returns
 */
function toXml(jArray, options) {
  let indentation = "";
  if (options.format && options.indentBy.length > 0) {
    indentation = EOL;
  }
  return arrToStr(jArray, options, "", indentation);
}

function arrToStr(arr, options, jPath, indentation) {
  let xmlStr = "";
  let isPreviousElementTag = false;

  for (let i = 0; i < arr.length; i++) {
    const tagObj = arr[i];
    const tagName = propName(tagObj);
    if(tagName === undefined) continue;

    let newJPath = "";
    if (jPath.length === 0) newJPath = tagName
    else newJPath = `${jPath}.${tagName}`;

    if (tagName === options.textNodeName) {
      let tagText = tagObj[tagName];
      if (!isStopNode(newJPath, options)) {
        tagText = options.tagValueProcessor(tagName, tagText);
        tagText = replaceEntitiesValue(tagText, options);
      }
      if (isPreviousElementTag) {
        xmlStr += indentation;
      }
      xmlStr += tagText;
      isPreviousElementTag = false;
      continue;
    } else if (tagName === options.cdataPropName) {
      if (isPreviousElementTag) {
        xmlStr += indentation;
      }
      xmlStr += `<![CDATA[${tagObj[tagName][0][options.textNodeName]}]]>`;
      isPreviousElementTag = false;
      continue;
    } else if (tagName === options.commentPropName) {
      xmlStr += indentation + `<!--${tagObj[tagName][0][options.textNodeName]}-->`;
      isPreviousElementTag = true;
      continue;
    } else if (tagName[0] === "?") {
      const attStr = attr_to_str(tagObj[":@"], options);
      const tempInd = tagName === "?xml" ? "" : indentation;
      let piTextNodeName = tagObj[tagName][0][options.textNodeName];
      piTextNodeName = piTextNodeName.length !== 0 ? " " + piTextNodeName : ""; //remove extra spacing
      xmlStr += tempInd + `<${tagName}${piTextNodeName}${attStr}?>`;
      isPreviousElementTag = true;
      continue;
    }
    let newIdentation = indentation;
    if (newIdentation !== "") {
      newIdentation += options.indentBy;
    }
    const attStr = attr_to_str(tagObj[":@"], options);
    const tagStart = indentation + `<${tagName}${attStr}`;
    const tagValue = arrToStr(tagObj[tagName], options, newJPath, newIdentation);
    if (options.unpairedTags.indexOf(tagName) !== -1) {
      if (options.suppressUnpairedNode) xmlStr += tagStart + ">";
      else xmlStr += tagStart + "/>";
    } else if ((!tagValue || tagValue.length === 0) && options.suppressEmptyNode) {
      xmlStr += tagStart + "/>";
    } else if (tagValue && tagValue.endsWith(">")) {
      xmlStr += tagStart + `>${tagValue}${indentation}</${tagName}>`;
    } else {
      xmlStr += tagStart + ">";
      if (tagValue && indentation !== "" && (tagValue.includes("/>") || tagValue.includes("</"))) {
        xmlStr += indentation + options.indentBy + tagValue + indentation;
      } else {
        xmlStr += tagValue;
      }
      xmlStr += `</${tagName}>`;
    }
    isPreviousElementTag = true;
  }

  return xmlStr;
}

function propName(obj) {
  const keys = Object.keys(obj);
  for (let i = 0; i < keys.length; i++) {
    const key = keys[i];
    if(!obj.hasOwnProperty(key)) continue;
    if (key !== ":@") return key;
  }
}

function attr_to_str(attrMap, options) {
  let attrStr = "";
  if (attrMap && !options.ignoreAttributes) {
    for (let attr in attrMap) {
      if(!attrMap.hasOwnProperty(attr)) continue;
      let attrVal = options.attributeValueProcessor(attr, attrMap[attr]);
      attrVal = replaceEntitiesValue(attrVal, options);
      if (attrVal === true && options.suppressBooleanAttributes) {
        attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}`;
      } else {
        attrStr += ` ${attr.substr(options.attributeNamePrefix.length)}="${attrVal}"`;
      }
    }
  }
  return attrStr;
}

function isStopNode(jPath, options) {
  jPath = jPath.substr(0, jPath.length - options.textNodeName.length - 1);
  let tagName = jPath.substr(jPath.lastIndexOf(".") + 1);
  for (let index in options.stopNodes) {
    if (options.stopNodes[index] === jPath || options.stopNodes[index] === "*." + tagName) return true;
  }
  return false;
}

function replaceEntitiesValue(textValue, options) {
  if (textValue && textValue.length > 0 && options.processEntities) {
    for (let i = 0; i < options.entities.length; i++) {
      const entity = options.entities[i];
      textValue = textValue.replace(entity.regex, entity.val);
    }
  }
  return textValue;
}

module.exports = toXml;


/***/ }),

/***/ 50151:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

const util = __nccwpck_require__(47019);

//TODO: handle comments
function readDocType(xmlData, i){

  const entities = {};
  if( xmlData[i + 3] === 'O' &&
       xmlData[i + 4] === 'C' &&
       xmlData[i + 5] === 'T' &&
       xmlData[i + 6] === 'Y' &&
       xmlData[i + 7] === 'P' &&
       xmlData[i + 8] === 'E')
  {
    i = i+9;
    let angleBracketsCount = 1;
    let hasBody = false, comment = false;
    let exp = "";
    for(;i<xmlData.length;i++){
      if (xmlData[i] === '<' && !comment) { //Determine the tag type
        if( hasBody && isEntity(xmlData, i)){
          i += 7;
          let entityName, val;
          [entityName, val, i] = readEntityExp(xmlData,i+1);
          if(val.indexOf("&") === -1) //Parameter entities are not supported
            entities[ validateEntityName(entityName) ] = {
              regx : RegExp( `&${entityName};`,"g"),
              val: val
            };
        }
        else if( hasBody && isElement(xmlData, i)) i += 8;//Not supported
        else if( hasBody && isAttlist(xmlData, i)) i += 8;//Not supported
        else if( hasBody && isNotation(xmlData, i)) i += 9;//Not supported
        else if( isComment) comment = true;
        else throw new Error("Invalid DOCTYPE");

        angleBracketsCount++;
        exp = "";
      } else if (xmlData[i] === '>') { //Read tag content
        if(comment){
          if( xmlData[i - 1] === "-" && xmlData[i - 2] === "-"){
            comment = false;
            angleBracketsCount--;
          }
        }else{
          angleBracketsCount--;
        }
        if (angleBracketsCount === 0) {
          break;
        }
      }else if( xmlData[i] === '['){
        hasBody = true;
      }else{
        exp += xmlData[i];
      }
    }
    if(angleBracketsCount !== 0){
      throw new Error(`Unclosed DOCTYPE`);
    }
  }else{
    throw new Error(`Invalid Tag instead of DOCTYPE`);
  }
  return {entities, i};
}

function readEntityExp(xmlData,i){
  //External entities are not supported
  //
  //Parameter entities are not supported
  //
  //Internal entities are supported
  //
  //read EntityName
  let entityName = "";
  for (; i < xmlData.length && (xmlData[i] !== "'" && xmlData[i] !== '"' ); i++) {
    // if(xmlData[i] === " ") continue;
    // else
    entityName += xmlData[i];
  }
  entityName = entityName.trim();
  if(entityName.indexOf(" ") !== -1) throw new Error("External entites are not supported");

  //read Entity Value
  const startChar = xmlData[i++];
  let val = ""
  for (; i < xmlData.length && xmlData[i] !== startChar ; i++) {
    val += xmlData[i];
  }
  return [entityName, val, i];
}

function isComment(xmlData, i){
  if(xmlData[i+1] === '!' &&
  xmlData[i+2] === '-' &&
  xmlData[i+3] === '-') return true
  return false
}
function isEntity(xmlData, i){
  if(xmlData[i+1] === '!' &&
  xmlData[i+2] === 'E' &&
  xmlData[i+3] === 'N' &&
  xmlData[i+4] === 'T' &&
  xmlData[i+5] === 'I' &&
  xmlData[i+6] === 'T' &&
  xmlData[i+7] === 'Y') return true
  return false
}
function isElement(xmlData, i){
  if(xmlData[i+1] === '!' &&
  xmlData[i+2] === 'E' &&
  xmlData[i+3] === 'L' &&
  xmlData[i+4] === 'E' &&
  xmlData[i+5] === 'M' &&
  xmlData[i+6] === 'E' &&
  xmlData[i+7] === 'N' &&
  xmlData[i+8] === 'T') return true
  return false
}
function isAttlist(xmlData, i){
  if(xmlData[i+1] === '!' &&
  xmlData[i+2] === 'A' &&
  xmlData[i+3] === 'T' &&
  xmlData[i+4] === 'T' &&
  xmlData[i+5] === 'L' &&
  xmlData[i+6] === 'I' &&
  xmlData[i+7] === 'S' &&
  xmlData[i+8] === 'T') return true
  return false
}
function isNotation(xmlData, i){
  if(xmlData[i+1] === '!'
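// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the bundled source): rough usage of the
// Builder (JS object -> XML) module defined above, assuming it is exposed as
// fast-xml-parser's XMLBuilder:
//
//   const builder = new Builder({ format: true, ignoreAttributes: false });
//   builder.build({ note: { '@_id': 1, '#text': 'a < b' } });
//   // roughly: <note id="1">a &lt; b</note>
//
// The entities table in defaultOptions drives the escaping of '&', '<', '>'
// and quotes; suppressBooleanAttributes collapses attributes whose processed
// value is the string "true" to a bare attribute name.
// ---------------------------------------------------------------------------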
&& xmlData[i+2] === 'N' && xmlData[i+3] === 'O' && xmlData[i+4] === 'T' && xmlData[i+5] === 'A' && xmlData[i+6] === 'T' && xmlData[i+7] === 'I' && xmlData[i+8] === 'O' && xmlData[i+9] === 'N') return true return false } function validateEntityName(name){ if (util.isName(name)) return name; else throw new Error(`Invalid entity name ${name}`); } module.exports = readDocType; /***/ }), /***/ 84769: /***/ ((__unused_webpack_module, exports) => { const defaultOptions = { preserveOrder: false, attributeNamePrefix: '@_', attributesGroupName: false, textNodeName: '#text', ignoreAttributes: true, removeNSPrefix: false, // remove NS from tag name or attribute name if true allowBooleanAttributes: false, //a tag can have attributes without any value //ignoreRootElement : false, parseTagValue: true, parseAttributeValue: false, trimValues: true, //Trim string values of tag and attributes cdataPropName: false, numberParseOptions: { hex: true, leadingZeros: true, eNotation: true }, tagValueProcessor: function(tagName, val) { return val; }, attributeValueProcessor: function(attrName, val) { return val; }, stopNodes: [], //nested tags will not be parsed even for errors alwaysCreateTextNode: false, isArray: () => false, commentPropName: false, unpairedTags: [], processEntities: true, htmlEntities: false, ignoreDeclaration: false, ignorePiTags: false, transformTagName: false, transformAttributeName: false, updateTag: function(tagName, jPath, attrs){ return tagName }, // skipEmptyListItem: false }; const buildOptions = function(options) { return Object.assign({}, defaultOptions, options); }; exports.buildOptions = buildOptions; exports.defaultOptions = defaultOptions; /***/ }), /***/ 13017: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; ///@ts-check const util = __nccwpck_require__(47019); const xmlNode = __nccwpck_require__(49307); const readDocType = __nccwpck_require__(50151); const toNumber = __nccwpck_require__(56496); const getIgnoreAttributesFn = __nccwpck_require__(30812) // const regx = // '<((!\\[CDATA\\[([\\s\\S]*?)(]]>))|((NAME:)?(NAME))([^>]*)>|((\\/)(NAME)\\s*>))([^<]*)' // .replace(/NAME/g, util.nameRegexp); //const tagsRegx = new RegExp("<(\\/?[\\w:\\-\._]+)([^>]*)>(\\s*"+cdataRegx+")*([^<]+)?","g"); //const tagsRegx = new RegExp("<(\\/?)((\\w*:)?([\\w:\\-\._]+))([^>]*)>([^<]*)("+cdataRegx+"([^<]*))*([^<]+)?","g"); class OrderedObjParser{ constructor(options){ this.options = options; this.currentNode = null; this.tagsNodeStack = []; this.docTypeEntities = {}; this.lastEntities = { "apos" : { regex: /&(apos|#39|#x27);/g, val : "'"}, "gt" : { regex: /&(gt|#62|#x3E);/g, val : ">"}, "lt" : { regex: /&(lt|#60|#x3C);/g, val : "<"}, "quot" : { regex: /&(quot|#34|#x22);/g, val : "\""}, }; this.ampEntity = { regex: /&(amp|#38|#x26);/g, val : "&"}; this.htmlEntities = { "space": { regex: /&(nbsp|#160);/g, val: " " }, // "lt" : { regex: /&(lt|#60);/g, val: "<" }, // "gt" : { regex: /&(gt|#62);/g, val: ">" }, // "amp" : { regex: /&(amp|#38);/g, val: "&" }, // "quot" : { regex: /&(quot|#34);/g, val: "\"" }, // "apos" : { regex: /&(apos|#39);/g, val: "'" }, "cent" : { regex: /&(cent|#162);/g, val: "¢" }, "pound" : { regex: /&(pound|#163);/g, val: "£" }, "yen" : { regex: /&(yen|#165);/g, val: "¥" }, "euro" : { regex: /&(euro|#8364);/g, val: "€" }, "copyright" : { regex: /&(copy|#169);/g, val: "©" }, "reg" : { regex: /&(reg|#174);/g, val: "®" }, "inr" : { regex: /&(inr|#8377);/g, val: "₹" }, "num_dec": { regex: /&#([0-9]{1,7});/g, val : (_, str) => 
String.fromCharCode(Number.parseInt(str, 10)) }, "num_hex": { regex: /&#x([0-9a-fA-F]{1,6});/g, val : (_, str) => String.fromCharCode(Number.parseInt(str, 16)) }, }; this.addExternalEntities = addExternalEntities; this.parseXml = parseXml; this.parseTextData = parseTextData; this.resolveNameSpace = resolveNameSpace; this.buildAttributesMap = buildAttributesMap; this.isItStopNode = isItStopNode; this.replaceEntitiesValue = replaceEntitiesValue; this.readStopNodeData = readStopNodeData; this.saveTextToParentTag = saveTextToParentTag; this.addChild = addChild; this.ignoreAttributesFn = getIgnoreAttributesFn(this.options.ignoreAttributes) } } function addExternalEntities(externalEntities){ const entKeys = Object.keys(externalEntities); for (let i = 0; i < entKeys.length; i++) { const ent = entKeys[i]; this.lastEntities[ent] = { regex: new RegExp("&"+ent+";","g"), val : externalEntities[ent] } } } /** * @param {string} val * @param {string} tagName * @param {string} jPath * @param {boolean} dontTrim * @param {boolean} hasAttributes * @param {boolean} isLeafNode * @param {boolean} escapeEntities */ function parseTextData(val, tagName, jPath, dontTrim, hasAttributes, isLeafNode, escapeEntities) { if (val !== undefined) { if (this.options.trimValues && !dontTrim) { val = val.trim(); } if(val.length > 0){ if(!escapeEntities) val = this.replaceEntitiesValue(val); const newval = this.options.tagValueProcessor(tagName, val, jPath, hasAttributes, isLeafNode); if(newval === null || newval === undefined){ //don't parse return val; }else if(typeof newval !== typeof val || newval !== val){ //overwrite return newval; }else if(this.options.trimValues){ return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); }else{ const trimmedVal = val.trim(); if(trimmedVal === val){ return parseValue(val, this.options.parseTagValue, this.options.numberParseOptions); }else{ return val; } } } } } function resolveNameSpace(tagname) { if (this.options.removeNSPrefix) { const tags = tagname.split(':'); const prefix = tagname.charAt(0) === '/' ? 
'/' : ''; if (tags[0] === 'xmlns') { return ''; } if (tags.length === 2) { tagname = prefix + tags[1]; } } return tagname; } //TODO: change regex to capture NS //const attrsRegx = new RegExp("([\\w\\-\\.\\:]+)\\s*=\\s*(['\"])((.|\n)*?)\\2","gm"); const attrsRegx = new RegExp('([^\\s=]+)\\s*(=\\s*([\'"])([\\s\\S]*?)\\3)?', 'gm'); function buildAttributesMap(attrStr, jPath, tagName) { if (this.options.ignoreAttributes !== true && typeof attrStr === 'string') { // attrStr = attrStr.replace(/\r?\n/g, ' '); //attrStr = attrStr || attrStr.trim(); const matches = util.getAllMatches(attrStr, attrsRegx); const len = matches.length; //don't make it inline const attrs = {}; for (let i = 0; i < len; i++) { const attrName = this.resolveNameSpace(matches[i][1]); if (this.ignoreAttributesFn(attrName, jPath)) { continue } let oldVal = matches[i][4]; let aName = this.options.attributeNamePrefix + attrName; if (attrName.length) { if (this.options.transformAttributeName) { aName = this.options.transformAttributeName(aName); } if(aName === "__proto__") aName = "#__proto__"; if (oldVal !== undefined) { if (this.options.trimValues) { oldVal = oldVal.trim(); } oldVal = this.replaceEntitiesValue(oldVal); const newVal = this.options.attributeValueProcessor(attrName, oldVal, jPath); if(newVal === null || newVal === undefined){ //don't parse attrs[aName] = oldVal; }else if(typeof newVal !== typeof oldVal || newVal !== oldVal){ //overwrite attrs[aName] = newVal; }else{ //parse attrs[aName] = parseValue( oldVal, this.options.parseAttributeValue, this.options.numberParseOptions ); } } else if (this.options.allowBooleanAttributes) { attrs[aName] = true; } } } if (!Object.keys(attrs).length) { return; } if (this.options.attributesGroupName) { const attrCollection = {}; attrCollection[this.options.attributesGroupName] = attrs; return attrCollection; } return attrs } } const parseXml = function(xmlData) { xmlData = xmlData.replace(/\r\n?/g, "\n"); //TODO: remove this line const xmlObj = new xmlNode('!xml'); let currentNode = xmlObj; let textData = ""; let jPath = ""; for(let i=0; i< xmlData.length; i++){//for each char in XML data const ch = xmlData[i]; if(ch === '<'){ // const nextIndex = i+1; // const _2ndChar = xmlData[nextIndex]; if( xmlData[i+1] === '/') {//Closing Tag const closeIndex = findClosingIndex(xmlData, ">", i, "Closing Tag is not closed.") let tagName = xmlData.substring(i+2,closeIndex).trim(); if(this.options.removeNSPrefix){ const colonIndex = tagName.indexOf(":"); if(colonIndex !== -1){ tagName = tagName.substr(colonIndex+1); } } if(this.options.transformTagName) { tagName = this.options.transformTagName(tagName); } if(currentNode){ textData = this.saveTextToParentTag(textData, currentNode, jPath); } //check if last tag of nested tag was unpaired tag const lastTagName = jPath.substring(jPath.lastIndexOf(".")+1); if(tagName && this.options.unpairedTags.indexOf(tagName) !== -1 ){ throw new Error(`Unpaired tag can not be used as closing tag: `); } let propIndex = 0 if(lastTagName && this.options.unpairedTags.indexOf(lastTagName) !== -1 ){ propIndex = jPath.lastIndexOf('.', jPath.lastIndexOf('.')-1) this.tagsNodeStack.pop(); }else{ propIndex = jPath.lastIndexOf("."); } jPath = jPath.substring(0, propIndex); currentNode = this.tagsNodeStack.pop();//avoid recursion, set the parent tag scope textData = ""; i = closeIndex; } else if( xmlData[i+1] === '?') { let tagData = readTagExp(xmlData,i, false, "?>"); if(!tagData) throw new Error("Pi Tag is not closed."); textData = this.saveTextToParentTag(textData, 
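// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the bundled source): with the default
// attributeNamePrefix '@_', plus parseAttributeValue and allowBooleanAttributes
// enabled, buildAttributesMap() above turns an attribute string such as
// ' id="7" lang="en" draft' (at some jPath like 'root.note') into roughly
//
//   { '@_id': 7, '@_lang': 'en', '@_draft': true }
//
// or groups it under attributesGroupName when that option is set.
// ---------------------------------------------------------------------------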
currentNode, jPath); if( (this.options.ignoreDeclaration && tagData.tagName === "?xml") || this.options.ignorePiTags){ }else{ const childNode = new xmlNode(tagData.tagName); childNode.add(this.options.textNodeName, ""); if(tagData.tagName !== tagData.tagExp && tagData.attrExpPresent){ childNode[":@"] = this.buildAttributesMap(tagData.tagExp, jPath, tagData.tagName); } this.addChild(currentNode, childNode, jPath) } i = tagData.closeIndex + 1; } else if(xmlData.substr(i + 1, 3) === '!--') { const endIndex = findClosingIndex(xmlData, "-->", i+4, "Comment is not closed.") if(this.options.commentPropName){ const comment = xmlData.substring(i + 4, endIndex - 2); textData = this.saveTextToParentTag(textData, currentNode, jPath); currentNode.add(this.options.commentPropName, [ { [this.options.textNodeName] : comment } ]); } i = endIndex; } else if( xmlData.substr(i + 1, 2) === '!D') { const result = readDocType(xmlData, i); this.docTypeEntities = result.entities; i = result.i; }else if(xmlData.substr(i + 1, 2) === '![') { const closeIndex = findClosingIndex(xmlData, "]]>", i, "CDATA is not closed.") - 2; const tagExp = xmlData.substring(i + 9,closeIndex); textData = this.saveTextToParentTag(textData, currentNode, jPath); let val = this.parseTextData(tagExp, currentNode.tagname, jPath, true, false, true, true); if(val == undefined) val = ""; //cdata should be set even if it is 0 length string if(this.options.cdataPropName){ currentNode.add(this.options.cdataPropName, [ { [this.options.textNodeName] : tagExp } ]); }else{ currentNode.add(this.options.textNodeName, val); } i = closeIndex + 2; }else {//Opening tag let result = readTagExp(xmlData,i, this.options.removeNSPrefix); let tagName= result.tagName; const rawTagName = result.rawTagName; let tagExp = result.tagExp; let attrExpPresent = result.attrExpPresent; let closeIndex = result.closeIndex; if (this.options.transformTagName) { tagName = this.options.transformTagName(tagName); } //save text as child node if (currentNode && textData) { if(currentNode.tagname !== '!xml'){ //when nested tag is found textData = this.saveTextToParentTag(textData, currentNode, jPath, false); } } //check if last tag was unpaired tag const lastTag = currentNode; if(lastTag && this.options.unpairedTags.indexOf(lastTag.tagname) !== -1 ){ currentNode = this.tagsNodeStack.pop(); jPath = jPath.substring(0, jPath.lastIndexOf(".")); } if(tagName !== xmlObj.tagname){ jPath += jPath ? "." 
+ tagName : tagName; } if (this.isItStopNode(this.options.stopNodes, jPath, tagName)) { let tagContent = ""; //self-closing tag if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' tagName = tagName.substr(0, tagName.length - 1); jPath = jPath.substr(0, jPath.length - 1); tagExp = tagName; }else{ tagExp = tagExp.substr(0, tagExp.length - 1); } i = result.closeIndex; } //unpaired tag else if(this.options.unpairedTags.indexOf(tagName) !== -1){ i = result.closeIndex; } //normal tag else{ //read until closing tag is found const result = this.readStopNodeData(xmlData, rawTagName, closeIndex + 1); if(!result) throw new Error(`Unexpected end of ${rawTagName}`); i = result.i; tagContent = result.tagContent; } const childNode = new xmlNode(tagName); if(tagName !== tagExp && attrExpPresent){ childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); } if(tagContent) { tagContent = this.parseTextData(tagContent, tagName, jPath, true, attrExpPresent, true, true); } jPath = jPath.substr(0, jPath.lastIndexOf(".")); childNode.add(this.options.textNodeName, tagContent); this.addChild(currentNode, childNode, jPath) }else{ //selfClosing tag if(tagExp.length > 0 && tagExp.lastIndexOf("/") === tagExp.length - 1){ if(tagName[tagName.length - 1] === "/"){ //remove trailing '/' tagName = tagName.substr(0, tagName.length - 1); jPath = jPath.substr(0, jPath.length - 1); tagExp = tagName; }else{ tagExp = tagExp.substr(0, tagExp.length - 1); } if(this.options.transformTagName) { tagName = this.options.transformTagName(tagName); } const childNode = new xmlNode(tagName); if(tagName !== tagExp && attrExpPresent){ childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); } this.addChild(currentNode, childNode, jPath) jPath = jPath.substr(0, jPath.lastIndexOf(".")); } //opening tag else{ const childNode = new xmlNode( tagName); this.tagsNodeStack.push(currentNode); if(tagName !== tagExp && attrExpPresent){ childNode[":@"] = this.buildAttributesMap(tagExp, jPath, tagName); } this.addChild(currentNode, childNode, jPath) currentNode = childNode; } textData = ""; i = closeIndex; } } }else{ textData += xmlData[i]; } } return xmlObj.child; } function addChild(currentNode, childNode, jPath){ const result = this.options.updateTag(childNode.tagname, jPath, childNode[":@"]) if(result === false){ }else if(typeof result === "string"){ childNode.tagname = result currentNode.addChild(childNode); }else{ currentNode.addChild(childNode); } } const replaceEntitiesValue = function(val){ if(this.options.processEntities){ for(let entityName in this.docTypeEntities){ const entity = this.docTypeEntities[entityName]; val = val.replace( entity.regx, entity.val); } for(let entityName in this.lastEntities){ const entity = this.lastEntities[entityName]; val = val.replace( entity.regex, entity.val); } if(this.options.htmlEntities){ for(let entityName in this.htmlEntities){ const entity = this.htmlEntities[entityName]; val = val.replace( entity.regex, entity.val); } } val = val.replace( this.ampEntity.regex, this.ampEntity.val); } return val; } function saveTextToParentTag(textData, currentNode, jPath, isLeafNode) { if (textData) { //store previously collected data as textNode if(isLeafNode === undefined) isLeafNode = Object.keys(currentNode.child).length === 0 textData = this.parseTextData(textData, currentNode.tagname, jPath, false, currentNode[":@"] ? 
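// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the bundled source): the stopNodes branch
// above keeps the inner markup of a matched path as raw text instead of
// parsing it. For example, with stopNodes: ['*.script'], parsing
// '<root><script><b>x</b></script></root>' leaves '<b>x</b>' as the text
// value of the script node rather than producing child nodes for it.
// ---------------------------------------------------------------------------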
Object.keys(currentNode[":@"]).length !== 0 : false, isLeafNode); if (textData !== undefined && textData !== "") currentNode.add(this.options.textNodeName, textData); textData = ""; } return textData; } //TODO: use jPath to simplify the logic /** * * @param {string[]} stopNodes * @param {string} jPath * @param {string} currentTagName */ function isItStopNode(stopNodes, jPath, currentTagName){ const allNodesExp = "*." + currentTagName; for (const stopNodePath in stopNodes) { const stopNodeExp = stopNodes[stopNodePath]; if( allNodesExp === stopNodeExp || jPath === stopNodeExp ) return true; } return false; } /** * Returns the tag Expression and where it is ending handling single-double quotes situation * @param {string} xmlData * @param {number} i starting index * @returns */ function tagExpWithClosingIndex(xmlData, i, closingChar = ">"){ let attrBoundary; let tagExp = ""; for (let index = i; index < xmlData.length; index++) { let ch = xmlData[index]; if (attrBoundary) { if (ch === attrBoundary) attrBoundary = "";//reset } else if (ch === '"' || ch === "'") { attrBoundary = ch; } else if (ch === closingChar[0]) { if(closingChar[1]){ if(xmlData[index + 1] === closingChar[1]){ return { data: tagExp, index: index } } }else{ return { data: tagExp, index: index } } } else if (ch === '\t') { ch = " " } tagExp += ch; } } function findClosingIndex(xmlData, str, i, errMsg){ const closingIndex = xmlData.indexOf(str, i); if(closingIndex === -1){ throw new Error(errMsg) }else{ return closingIndex + str.length - 1; } } function readTagExp(xmlData,i, removeNSPrefix, closingChar = ">"){ const result = tagExpWithClosingIndex(xmlData, i+1, closingChar); if(!result) return; let tagExp = result.data; const closeIndex = result.index; const separatorIndex = tagExp.search(/\s/); let tagName = tagExp; let attrExpPresent = true; if(separatorIndex !== -1){//separate tag name and attributes expression tagName = tagExp.substring(0, separatorIndex); tagExp = tagExp.substring(separatorIndex + 1).trimStart(); } const rawTagName = tagName; if(removeNSPrefix){ const colonIndex = tagName.indexOf(":"); if(colonIndex !== -1){ tagName = tagName.substr(colonIndex+1); attrExpPresent = tagName !== result.data.substr(colonIndex + 1); } } return { tagName: tagName, tagExp: tagExp, closeIndex: closeIndex, attrExpPresent: attrExpPresent, rawTagName: rawTagName, } } /** * find paired tag for a stop node * @param {string} xmlData * @param {string} tagName * @param {number} i */ function readStopNodeData(xmlData, tagName, i){ const startIndex = i; // Starting at 1 since we already have an open tag let openTagCount = 1; for (; i < xmlData.length; i++) { if( xmlData[i] === "<"){ if (xmlData[i+1] === "/") {//close tag const closeIndex = findClosingIndex(xmlData, ">", i, `${tagName} is not closed`); let closeTagName = xmlData.substring(i+2,closeIndex).trim(); if(closeTagName === tagName){ openTagCount--; if (openTagCount === 0) { return { tagContent: xmlData.substring(startIndex, i), i : closeIndex } } } i=closeIndex; } else if(xmlData[i+1] === '?') { const closeIndex = findClosingIndex(xmlData, "?>", i+1, "StopNode is not closed.") i=closeIndex; } else if(xmlData.substr(i + 1, 3) === '!--') { const closeIndex = findClosingIndex(xmlData, "-->", i+3, "StopNode is not closed.") i=closeIndex; } else if(xmlData.substr(i + 1, 2) === '![') { const closeIndex = findClosingIndex(xmlData, "]]>", i, "StopNode is not closed.") - 2; i=closeIndex; } else { const tagData = readTagExp(xmlData, i, '>') if (tagData) { const openTagName = tagData && 
tagData.tagName; if (openTagName === tagName && tagData.tagExp[tagData.tagExp.length-1] !== "/") { openTagCount++; } i=tagData.closeIndex; } } } }//end for loop } function parseValue(val, shouldParse, options) { if (shouldParse && typeof val === 'string') { //console.log(options) const newval = val.trim(); if(newval === 'true' ) return true; else if(newval === 'false' ) return false; else return toNumber(val, options); } else { if (util.isExist(val)) { return val; } else { return ''; } } } module.exports = OrderedObjParser; /***/ }), /***/ 79844: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const { buildOptions} = __nccwpck_require__(84769); const OrderedObjParser = __nccwpck_require__(13017); const { prettify} = __nccwpck_require__(37594); const validator = __nccwpck_require__(39433); class XMLParser{ constructor(options){ this.externalEntities = {}; this.options = buildOptions(options); } /** * Parse XML dats to JS object * @param {string|Buffer} xmlData * @param {boolean|Object} validationOption */ parse(xmlData,validationOption){ if(typeof xmlData === "string"){ }else if( xmlData.toString){ xmlData = xmlData.toString(); }else{ throw new Error("XML data is accepted in String or Bytes[] form.") } if( validationOption){ if(validationOption === true) validationOption = {}; //validate with default options const result = validator.validate(xmlData, validationOption); if (result !== true) { throw Error( `${result.err.msg}:${result.err.line}:${result.err.col}` ) } } const orderedObjParser = new OrderedObjParser(this.options); orderedObjParser.addExternalEntities(this.externalEntities); const orderedResult = orderedObjParser.parseXml(xmlData); if(this.options.preserveOrder || orderedResult === undefined) return orderedResult; else return prettify(orderedResult, this.options); } /** * Add Entity which is not by default supported by this library * @param {string} key * @param {string} value */ addEntity(key, value){ if(value.indexOf("&") !== -1){ throw new Error("Entity value can't have '&'") }else if(key.indexOf("&") !== -1 || key.indexOf(";") !== -1){ throw new Error("An entity must be set without '&' and ';'. Eg. use '#xD' for ' '") }else if(value === "&"){ throw new Error("An entity with value '&' is not permitted"); }else{ this.externalEntities[key] = value; } } } module.exports = XMLParser; /***/ }), /***/ 37594: /***/ ((__unused_webpack_module, exports) => { "use strict"; /** * * @param {array} node * @param {any} options * @returns */ function prettify(node, options){ return compress( node, options); } /** * * @param {array} arr * @param {object} options * @param {string} jPath * @returns object */ function compress(arr, options, jPath){ let text; const compressedObj = {}; for (let i = 0; i < arr.length; i++) { const tagObj = arr[i]; const property = propName(tagObj); let newJpath = ""; if(jPath === undefined) newJpath = property; else newJpath = jPath + "." 
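// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the bundled source): typical use of the
// XMLParser class defined above:
//
//   const parser = new XMLParser({ ignoreAttributes: false });
//   parser.addEntity('copy', '(c)');                    // maps &copy; -> (c)
//   const obj = parser.parse('<a href="x">hi &copy;</a>', true);
//   // the truthy second argument runs the validator first; the result is
//   // roughly { a: { '#text': 'hi (c)', '@_href': 'x' } }
// ---------------------------------------------------------------------------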
+ property; if(property === options.textNodeName){ if(text === undefined) text = tagObj[property]; else text += "" + tagObj[property]; }else if(property === undefined){ continue; }else if(tagObj[property]){ let val = compress(tagObj[property], options, newJpath); const isLeaf = isLeafTag(val, options); if(tagObj[":@"]){ assignAttributes( val, tagObj[":@"], newJpath, options); }else if(Object.keys(val).length === 1 && val[options.textNodeName] !== undefined && !options.alwaysCreateTextNode){ val = val[options.textNodeName]; }else if(Object.keys(val).length === 0){ if(options.alwaysCreateTextNode) val[options.textNodeName] = ""; else val = ""; } if(compressedObj[property] !== undefined && compressedObj.hasOwnProperty(property)) { if(!Array.isArray(compressedObj[property])) { compressedObj[property] = [ compressedObj[property] ]; } compressedObj[property].push(val); }else{ //TODO: if a node is not an array, then check if it should be an array //also determine if it is a leaf node if (options.isArray(property, newJpath, isLeaf )) { compressedObj[property] = [val]; }else{ compressedObj[property] = val; } } } } // if(text && text.length > 0) compressedObj[options.textNodeName] = text; if(typeof text === "string"){ if(text.length > 0) compressedObj[options.textNodeName] = text; }else if(text !== undefined) compressedObj[options.textNodeName] = text; return compressedObj; } function propName(obj){ const keys = Object.keys(obj); for (let i = 0; i < keys.length; i++) { const key = keys[i]; if(key !== ":@") return key; } } function assignAttributes(obj, attrMap, jpath, options){ if (attrMap) { const keys = Object.keys(attrMap); const len = keys.length; //don't make it inline for (let i = 0; i < len; i++) { const atrrName = keys[i]; if (options.isArray(atrrName, jpath + "." 
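// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the bundled source): compress() above
// collapses repeated sibling tags into arrays and a lone text child into a
// plain value (unless alwaysCreateTextNode is set). The
// isArray(name, jPath, isLeafNode, isAttribute) option can force array form:
//
//   isArray: (name, jPath) => jPath === 'root.item'
//
// makes root.item an array even when only a single <item> element is present.
// ---------------------------------------------------------------------------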
+ atrrName, true, true)) { obj[atrrName] = [ attrMap[atrrName] ]; } else { obj[atrrName] = attrMap[atrrName]; } } } } function isLeafTag(obj, options){ const { textNodeName } = options; const propCount = Object.keys(obj).length; if (propCount === 0) { return true; } if ( propCount === 1 && (obj[textNodeName] || typeof obj[textNodeName] === "boolean" || obj[textNodeName] === 0) ) { return true; } return false; } exports.prettify = prettify; /***/ }), /***/ 49307: /***/ ((module) => { "use strict"; class XmlNode{ constructor(tagname) { this.tagname = tagname; this.child = []; //nested tags, text, cdata, comments in order this[":@"] = {}; //attributes map } add(key,val){ // this.child.push( {name : key, val: val, isCdata: isCdata }); if(key === "__proto__") key = "#__proto__"; this.child.push( {[key]: val }); } addChild(node) { if(node.tagname === "__proto__") node.tagname = "#__proto__"; if(node[":@"] && Object.keys(node[":@"]).length > 0){ this.child.push( { [node.tagname]: node.child, [":@"]: node[":@"] }); }else{ this.child.push( { [node.tagname]: node.child }); } }; }; module.exports = XmlNode; /***/ }), /***/ 34778: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var debug; module.exports = function () { if (!debug) { try { /* eslint global-require: off */ debug = __nccwpck_require__(2830)("follow-redirects"); } catch (error) { /* */ } if (typeof debug !== "function") { debug = function () { /* */ }; } } debug.apply(null, arguments); }; /***/ }), /***/ 1573: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var url = __nccwpck_require__(87016); var URL = url.URL; var http = __nccwpck_require__(58611); var https = __nccwpck_require__(65692); var Writable = (__nccwpck_require__(2203).Writable); var assert = __nccwpck_require__(42613); var debug = __nccwpck_require__(34778); // Preventive platform detection // istanbul ignore next (function detectUnsupportedEnvironment() { var looksLikeNode = typeof process !== "undefined"; var looksLikeBrowser = typeof window !== "undefined" && typeof document !== "undefined"; var looksLikeV8 = isFunction(Error.captureStackTrace); if (!looksLikeNode && (looksLikeBrowser || !looksLikeV8)) { console.warn("The follow-redirects package should be excluded from browser builds."); } }()); // Whether to use the native URL object or the legacy url module var useNativeURL = false; try { assert(new URL("")); } catch (error) { useNativeURL = error.code === "ERR_INVALID_URL"; } // URL fields to preserve in copy operations var preservedUrlFields = [ "auth", "host", "hostname", "href", "path", "pathname", "port", "protocol", "query", "search", "hash", ]; // Create handlers that pass events from native requests var events = ["abort", "aborted", "connect", "error", "socket", "timeout"]; var eventHandlers = Object.create(null); events.forEach(function (event) { eventHandlers[event] = function (arg1, arg2, arg3) { this._redirectable.emit(event, arg1, arg2, arg3); }; }); // Error types with codes var InvalidUrlError = createErrorType( "ERR_INVALID_URL", "Invalid URL", TypeError ); var RedirectionError = createErrorType( "ERR_FR_REDIRECTION_FAILURE", "Redirected request failed" ); var TooManyRedirectsError = createErrorType( "ERR_FR_TOO_MANY_REDIRECTS", "Maximum number of redirects exceeded", RedirectionError ); var MaxBodyLengthExceededError = createErrorType( "ERR_FR_MAX_BODY_LENGTH_EXCEEDED", "Request body larger than maxBodyLength limit" ); var WriteAfterEndError = createErrorType( "ERR_STREAM_WRITE_AFTER_END", "write after end" ); // 
istanbul ignore next var destroy = Writable.prototype.destroy || noop; // An HTTP(S) request that can be redirected function RedirectableRequest(options, responseCallback) { // Initialize the request Writable.call(this); this._sanitizeOptions(options); this._options = options; this._ended = false; this._ending = false; this._redirectCount = 0; this._redirects = []; this._requestBodyLength = 0; this._requestBodyBuffers = []; // Attach a callback if passed if (responseCallback) { this.on("response", responseCallback); } // React to responses of native requests var self = this; this._onNativeResponse = function (response) { try { self._processResponse(response); } catch (cause) { self.emit("error", cause instanceof RedirectionError ? cause : new RedirectionError({ cause: cause })); } }; // Perform the first request this._performRequest(); } RedirectableRequest.prototype = Object.create(Writable.prototype); RedirectableRequest.prototype.abort = function () { destroyRequest(this._currentRequest); this._currentRequest.abort(); this.emit("abort"); }; RedirectableRequest.prototype.destroy = function (error) { destroyRequest(this._currentRequest, error); destroy.call(this, error); return this; }; // Writes buffered data to the current native request RedirectableRequest.prototype.write = function (data, encoding, callback) { // Writing is not allowed if end has been called if (this._ending) { throw new WriteAfterEndError(); } // Validate input and shift parameters if necessary if (!isString(data) && !isBuffer(data)) { throw new TypeError("data should be a string, Buffer or Uint8Array"); } if (isFunction(encoding)) { callback = encoding; encoding = null; } // Ignore empty buffers, since writing them doesn't invoke the callback // https://github.com/nodejs/node/issues/22066 if (data.length === 0) { if (callback) { callback(); } return; } // Only write when we don't exceed the maximum body length if (this._requestBodyLength + data.length <= this._options.maxBodyLength) { this._requestBodyLength += data.length; this._requestBodyBuffers.push({ data: data, encoding: encoding }); this._currentRequest.write(data, encoding, callback); } // Error when we exceed the maximum body length else { this.emit("error", new MaxBodyLengthExceededError()); this.abort(); } }; // Ends the current native request RedirectableRequest.prototype.end = function (data, encoding, callback) { // Shift parameters if necessary if (isFunction(data)) { callback = data; data = encoding = null; } else if (isFunction(encoding)) { callback = encoding; encoding = null; } // Write data if needed and end if (!data) { this._ended = this._ending = true; this._currentRequest.end(null, null, callback); } else { var self = this; var currentRequest = this._currentRequest; this.write(data, encoding, function () { self._ended = true; currentRequest.end(null, null, callback); }); this._ending = true; } }; // Sets a header value on the current native request RedirectableRequest.prototype.setHeader = function (name, value) { this._options.headers[name] = value; this._currentRequest.setHeader(name, value); }; // Clears a header value on the current native request RedirectableRequest.prototype.removeHeader = function (name) { delete this._options.headers[name]; this._currentRequest.removeHeader(name); }; // Global timeout for all underlying requests RedirectableRequest.prototype.setTimeout = function (msecs, callback) { var self = this; // Destroys the socket on timeout function destroyOnTimeout(socket) { socket.setTimeout(msecs); 
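// ---------------------------------------------------------------------------
// Illustrative sketch (not part of the bundled source): the follow-redirects
// wrapper bundled here mirrors Node's http/https API but returns a
// RedirectableRequest that re-issues the request on 3xx responses:
//
//   const { https } = require('follow-redirects');
//   https.get('https://example.com/old', { maxRedirects: 5 }, (res) => {
//     console.log(res.responseUrl, res.statusCode); // final URL after redirects
//   });
//
// Defaults of maxRedirects (21) and maxBodyLength (10 MB) come from wrap()
// further down in this module.
// ---------------------------------------------------------------------------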
socket.removeListener("timeout", socket.destroy); socket.addListener("timeout", socket.destroy); } // Sets up a timer to trigger a timeout event function startTimer(socket) { if (self._timeout) { clearTimeout(self._timeout); } self._timeout = setTimeout(function () { self.emit("timeout"); clearTimer(); }, msecs); destroyOnTimeout(socket); } // Stops a timeout from triggering function clearTimer() { // Clear the timeout if (self._timeout) { clearTimeout(self._timeout); self._timeout = null; } // Clean up all attached listeners self.removeListener("abort", clearTimer); self.removeListener("error", clearTimer); self.removeListener("response", clearTimer); self.removeListener("close", clearTimer); if (callback) { self.removeListener("timeout", callback); } if (!self.socket) { self._currentRequest.removeListener("socket", startTimer); } } // Attach callback if passed if (callback) { this.on("timeout", callback); } // Start the timer if or when the socket is opened if (this.socket) { startTimer(this.socket); } else { this._currentRequest.once("socket", startTimer); } // Clean up on events this.on("socket", destroyOnTimeout); this.on("abort", clearTimer); this.on("error", clearTimer); this.on("response", clearTimer); this.on("close", clearTimer); return this; }; // Proxy all other public ClientRequest methods [ "flushHeaders", "getHeader", "setNoDelay", "setSocketKeepAlive", ].forEach(function (method) { RedirectableRequest.prototype[method] = function (a, b) { return this._currentRequest[method](a, b); }; }); // Proxy all public ClientRequest properties ["aborted", "connection", "socket"].forEach(function (property) { Object.defineProperty(RedirectableRequest.prototype, property, { get: function () { return this._currentRequest[property]; }, }); }); RedirectableRequest.prototype._sanitizeOptions = function (options) { // Ensure headers are always present if (!options.headers) { options.headers = {}; } // Since http.request treats host as an alias of hostname, // but the url module interprets host as hostname plus port, // eliminate the host property to avoid confusion. if (options.host) { // Use hostname if set, because it has precedence if (!options.hostname) { options.hostname = options.host; } delete options.host; } // Complete the URL object when necessary if (!options.pathname && options.path) { var searchPos = options.path.indexOf("?"); if (searchPos < 0) { options.pathname = options.path; } else { options.pathname = options.path.substring(0, searchPos); options.search = options.path.substring(searchPos); } } }; // Executes the next native request (initial or redirect) RedirectableRequest.prototype._performRequest = function () { // Load the native protocol var protocol = this._options.protocol; var nativeProtocol = this._options.nativeProtocols[protocol]; if (!nativeProtocol) { throw new TypeError("Unsupported protocol " + protocol); } // If specified, use the agent corresponding to the protocol // (HTTP and HTTPS use different types of agents) if (this._options.agents) { var scheme = protocol.slice(0, -1); this._options.agent = this._options.agents[scheme]; } // Create the native request and set up its event handlers var request = this._currentRequest = nativeProtocol.request(this._options, this._onNativeResponse); request._redirectable = this; for (var event of events) { request.on(event, eventHandlers[event]); } // RFC7230§5.3.1: When making a request directly to an origin server, […] // a client MUST send only the absolute path […] as the request-target. 
this._currentUrl = /^\//.test(this._options.path) ? url.format(this._options) : // When making a request to a proxy, […] // a client MUST send the target URI in absolute-form […]. this._options.path; // End a redirected request // (The first request must be ended explicitly with RedirectableRequest#end) if (this._isRedirect) { // Write the request entity and end var i = 0; var self = this; var buffers = this._requestBodyBuffers; (function writeNext(error) { // Only write if this request has not been redirected yet // istanbul ignore else if (request === self._currentRequest) { // Report any write errors // istanbul ignore if if (error) { self.emit("error", error); } // Write the next buffer if there are still left else if (i < buffers.length) { var buffer = buffers[i++]; // istanbul ignore else if (!request.finished) { request.write(buffer.data, buffer.encoding, writeNext); } } // End the request if `end` has been called on us else if (self._ended) { request.end(); } } }()); } }; // Processes a response from the current native request RedirectableRequest.prototype._processResponse = function (response) { // Store the redirected response var statusCode = response.statusCode; if (this._options.trackRedirects) { this._redirects.push({ url: this._currentUrl, headers: response.headers, statusCode: statusCode, }); } // RFC7231§6.4: The 3xx (Redirection) class of status code indicates // that further action needs to be taken by the user agent in order to // fulfill the request. If a Location header field is provided, // the user agent MAY automatically redirect its request to the URI // referenced by the Location field value, // even if the specific status code is not understood. // If the response is not a redirect; return it as-is var location = response.headers.location; if (!location || this._options.followRedirects === false || statusCode < 300 || statusCode >= 400) { response.responseUrl = this._currentUrl; response.redirects = this._redirects; this.emit("response", response); // Clean up this._requestBodyBuffers = []; return; } // The response is a redirect, so abort the current request destroyRequest(this._currentRequest); // Discard the remainder of the response to avoid waiting for data response.destroy(); // RFC7231§6.4: A client SHOULD detect and intervene // in cyclical redirections (i.e., "infinite" redirection loops). if (++this._redirectCount > this._options.maxRedirects) { throw new TooManyRedirectsError(); } // Store the request headers if applicable var requestHeaders; var beforeRedirect = this._options.beforeRedirect; if (beforeRedirect) { requestHeaders = Object.assign({ // The Host header was set by nativeProtocol.request Host: response.req.getHeader("host"), }, this._options.headers); } // RFC7231§6.4: Automatic redirection needs to done with // care for methods not known to be safe, […] // RFC7231§6.4.2–3: For historical reasons, a user agent MAY change // the request method from POST to GET for the subsequent request. 
var method = this._options.method; if ((statusCode === 301 || statusCode === 302) && this._options.method === "POST" || // RFC7231§6.4.4: The 303 (See Other) status code indicates that // the server is redirecting the user agent to a different resource […] // A user agent can perform a retrieval request targeting that URI // (a GET or HEAD request if using HTTP) […] (statusCode === 303) && !/^(?:GET|HEAD)$/.test(this._options.method)) { this._options.method = "GET"; // Drop a possible entity and headers related to it this._requestBodyBuffers = []; removeMatchingHeaders(/^content-/i, this._options.headers); } // Drop the Host header, as the redirect might lead to a different host var currentHostHeader = removeMatchingHeaders(/^host$/i, this._options.headers); // If the redirect is relative, carry over the host of the last request var currentUrlParts = parseUrl(this._currentUrl); var currentHost = currentHostHeader || currentUrlParts.host; var currentUrl = /^\w+:/.test(location) ? this._currentUrl : url.format(Object.assign(currentUrlParts, { host: currentHost })); // Create the redirected request var redirectUrl = resolveUrl(location, currentUrl); debug("redirecting to", redirectUrl.href); this._isRedirect = true; spreadUrlObject(redirectUrl, this._options); // Drop confidential headers when redirecting to a less secure protocol // or to a different domain that is not a superdomain if (redirectUrl.protocol !== currentUrlParts.protocol && redirectUrl.protocol !== "https:" || redirectUrl.host !== currentHost && !isSubdomain(redirectUrl.host, currentHost)) { removeMatchingHeaders(/^(?:(?:proxy-)?authorization|cookie)$/i, this._options.headers); } // Evaluate the beforeRedirect callback if (isFunction(beforeRedirect)) { var responseDetails = { headers: response.headers, statusCode: statusCode, }; var requestDetails = { url: currentUrl, method: method, headers: requestHeaders, }; beforeRedirect(this._options, responseDetails, requestDetails); this._sanitizeOptions(this._options); } // Perform the redirected request this._performRequest(); }; // Wraps the key/value object of protocols with redirect functionality function wrap(protocols) { // Default settings var exports = { maxRedirects: 21, maxBodyLength: 10 * 1024 * 1024, }; // Wrap each protocol var nativeProtocols = {}; Object.keys(protocols).forEach(function (scheme) { var protocol = scheme + ":"; var nativeProtocol = nativeProtocols[protocol] = protocols[scheme]; var wrappedProtocol = exports[scheme] = Object.create(nativeProtocol); // Executes a request, following redirects function request(input, options, callback) { // Parse parameters, ensuring that input is an object if (isURL(input)) { input = spreadUrlObject(input); } else if (isString(input)) { input = spreadUrlObject(parseUrl(input)); } else { callback = options; options = validateUrl(input); input = { protocol: protocol }; } if (isFunction(options)) { callback = options; options = null; } // Set defaults options = Object.assign({ maxRedirects: exports.maxRedirects, maxBodyLength: exports.maxBodyLength, }, input, options); options.nativeProtocols = nativeProtocols; if (!isString(options.host) && !isString(options.hostname)) { options.hostname = "::1"; } assert.equal(options.protocol, protocol, "protocol mismatch"); debug("options", options); return new RedirectableRequest(options, callback); } // Executes a GET request, following redirects function get(input, options, callback) { var wrappedRequest = wrappedProtocol.request(input, options, callback); wrappedRequest.end(); return 
wrappedRequest; } // Expose the properties on the wrapped protocol Object.defineProperties(wrappedProtocol, { request: { value: request, configurable: true, enumerable: true, writable: true }, get: { value: get, configurable: true, enumerable: true, writable: true }, }); }); return exports; } function noop() { /* empty */ } function parseUrl(input) { var parsed; // istanbul ignore else if (useNativeURL) { parsed = new URL(input); } else { // Ensure the URL is valid and absolute parsed = validateUrl(url.parse(input)); if (!isString(parsed.protocol)) { throw new InvalidUrlError({ input }); } } return parsed; } function resolveUrl(relative, base) { // istanbul ignore next return useNativeURL ? new URL(relative, base) : parseUrl(url.resolve(base, relative)); } function validateUrl(input) { if (/^\[/.test(input.hostname) && !/^\[[:0-9a-f]+\]$/i.test(input.hostname)) { throw new InvalidUrlError({ input: input.href || input }); } if (/^\[/.test(input.host) && !/^\[[:0-9a-f]+\](:\d+)?$/i.test(input.host)) { throw new InvalidUrlError({ input: input.href || input }); } return input; } function spreadUrlObject(urlObject, target) { var spread = target || {}; for (var key of preservedUrlFields) { spread[key] = urlObject[key]; } // Fix IPv6 hostname if (spread.hostname.startsWith("[")) { spread.hostname = spread.hostname.slice(1, -1); } // Ensure port is a number if (spread.port !== "") { spread.port = Number(spread.port); } // Concatenate path spread.path = spread.search ? spread.pathname + spread.search : spread.pathname; return spread; } function removeMatchingHeaders(regex, headers) { var lastValue; for (var header in headers) { if (regex.test(header)) { lastValue = headers[header]; delete headers[header]; } } return (lastValue === null || typeof lastValue === "undefined") ? undefined : String(lastValue).trim(); } function createErrorType(code, message, baseClass) { // Create constructor function CustomError(properties) { // istanbul ignore else if (isFunction(Error.captureStackTrace)) { Error.captureStackTrace(this, this.constructor); } Object.assign(this, properties || {}); this.code = code; this.message = this.cause ? message + ": " + this.cause.message : message; } // Attach constructor and set default properties CustomError.prototype = new (baseClass || Error)(); Object.defineProperties(CustomError.prototype, { constructor: { value: CustomError, enumerable: false, }, name: { value: "Error [" + code + "]", enumerable: false, }, }); return CustomError; } function destroyRequest(request, error) { for (var event of events) { request.removeListener(event, eventHandlers[event]); } request.on("error", noop); request.destroy(error); } function isSubdomain(subdomain, domain) { assert(isString(subdomain) && isString(domain)); var dot = subdomain.length - domain.length - 1; return dot > 0 && subdomain[dot] === "." 
&& subdomain.endsWith(domain); } function isString(value) { return typeof value === "string" || value instanceof String; } function isFunction(value) { return typeof value === "function"; } function isBuffer(value) { return typeof value === "object" && ("length" in value); } function isURL(value) { return URL && value instanceof URL; } // Exports module.exports = wrap({ http: http, https: https }); module.exports.wrap = wrap; /***/ }), /***/ 99808: /***/ ((module) => { "use strict"; /* eslint no-invalid-this: 1 */ var ERROR_MESSAGE = 'Function.prototype.bind called on incompatible '; var toStr = Object.prototype.toString; var max = Math.max; var funcType = '[object Function]'; var concatty = function concatty(a, b) { var arr = []; for (var i = 0; i < a.length; i += 1) { arr[i] = a[i]; } for (var j = 0; j < b.length; j += 1) { arr[j + a.length] = b[j]; } return arr; }; var slicy = function slicy(arrLike, offset) { var arr = []; for (var i = offset || 0, j = 0; i < arrLike.length; i += 1, j += 1) { arr[j] = arrLike[i]; } return arr; }; var joiny = function (arr, joiner) { var str = ''; for (var i = 0; i < arr.length; i += 1) { str += arr[i]; if (i + 1 < arr.length) { str += joiner; } } return str; }; module.exports = function bind(that) { var target = this; if (typeof target !== 'function' || toStr.apply(target) !== funcType) { throw new TypeError(ERROR_MESSAGE + target); } var args = slicy(arguments, 1); var bound; var binder = function () { if (this instanceof bound) { var result = target.apply( this, concatty(args, arguments) ); if (Object(result) === result) { return result; } return this; } return target.apply( that, concatty(args, arguments) ); }; var boundLength = max(0, target.length - args.length); var boundArgs = []; for (var i = 0; i < boundLength; i++) { boundArgs[i] = '$' + i; } bound = Function('binder', 'return function (' + joiny(boundArgs, ',') + '){ return binder.apply(this,arguments); }')(binder); if (target.prototype) { var Empty = function Empty() {}; Empty.prototype = target.prototype; bound.prototype = new Empty(); Empty.prototype = null; } return bound; }; /***/ }), /***/ 37564: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var implementation = __nccwpck_require__(99808); module.exports = Function.prototype.bind || implementation; /***/ }), /***/ 47506: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2018 Google LLC // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.GaxiosError = exports.GAXIOS_ERROR_SYMBOL = void 0; exports.defaultErrorRedactor = defaultErrorRedactor; const url_1 = __nccwpck_require__(87016); const util_1 = __nccwpck_require__(63155); const extend_1 = __importDefault(__nccwpck_require__(23860)); /** * Support `instanceof` operator for `GaxiosError`s in different versions of this library. * * @see {@link GaxiosError[Symbol.hasInstance]} */ exports.GAXIOS_ERROR_SYMBOL = Symbol.for(`${util_1.pkg.name}-gaxios-error`); /* eslint-disable-next-line @typescript-eslint/no-explicit-any */ class GaxiosError extends Error { /** * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. * * @see {@link GAXIOS_ERROR_SYMBOL} * @see {@link GaxiosError[GAXIOS_ERROR_SYMBOL]} */ static [(_a = exports.GAXIOS_ERROR_SYMBOL, Symbol.hasInstance)](instance) { if (instance && typeof instance === 'object' && exports.GAXIOS_ERROR_SYMBOL in instance && instance[exports.GAXIOS_ERROR_SYMBOL] === util_1.pkg.version) { return true; } // fallback to native return Function.prototype[Symbol.hasInstance].call(GaxiosError, instance); } constructor(message, config, response, error) { var _b; super(message); this.config = config; this.response = response; this.error = error; /** * Support `instanceof` operator for `GaxiosError` across builds/duplicated files. * * @see {@link GAXIOS_ERROR_SYMBOL} * @see {@link GaxiosError[Symbol.hasInstance]} * @see {@link https://github.com/microsoft/TypeScript/issues/13965#issuecomment-278570200} * @see {@link https://stackoverflow.com/questions/46618852/require-and-instanceof} * @see {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/@@hasInstance#reverting_to_default_instanceof_behavior} */ this[_a] = util_1.pkg.version; // deep-copy config as we do not want to mutate // the existing config for future retries/use this.config = (0, extend_1.default)(true, {}, config); if (this.response) { this.response.config = (0, extend_1.default)(true, {}, this.response.config); } if (this.response) { try { this.response.data = translateData(this.config.responseType, (_b = this.response) === null || _b === void 0 ? void 0 : _b.data); } catch (_c) { // best effort - don't throw an error within an error // we could set `this.response.config.responseType = 'unknown'`, but // that would mutate future calls with this config object. } this.status = this.response.status; } if (error && 'code' in error && error.code) { this.code = error.code; } if (config.errorRedactor) { config.errorRedactor({ config: this.config, response: this.response, }); } } } exports.GaxiosError = GaxiosError; function translateData(responseType, data) { switch (responseType) { case 'stream': return data; case 'json': return JSON.parse(JSON.stringify(data)); case 'arraybuffer': return JSON.parse(Buffer.from(data).toString('utf8')); case 'blob': return JSON.parse(data.text()); default: return data; } } /** * An experimental error redactor. 
* * @param config Config to potentially redact properties of * @param response Config to potentially redact properties of * * @experimental */ function defaultErrorRedactor(data) { const REDACT = '< - See `errorRedactor` option in `gaxios` for configuration>.'; function redactHeaders(headers) { if (!headers) return; for (const key of Object.keys(headers)) { // any casing of `Authentication` if (/^authentication$/i.test(key)) { headers[key] = REDACT; } // any casing of `Authorization` if (/^authorization$/i.test(key)) { headers[key] = REDACT; } // anything containing secret, such as 'client secret' if (/secret/i.test(key)) { headers[key] = REDACT; } } } function redactString(obj, key) { if (typeof obj === 'object' && obj !== null && typeof obj[key] === 'string') { const text = obj[key]; if (/grant_type=/i.test(text) || /assertion=/i.test(text) || /secret/i.test(text)) { obj[key] = REDACT; } } } function redactObject(obj) { if (typeof obj === 'object' && obj !== null) { if ('grant_type' in obj) { obj['grant_type'] = REDACT; } if ('assertion' in obj) { obj['assertion'] = REDACT; } if ('client_secret' in obj) { obj['client_secret'] = REDACT; } } } if (data.config) { redactHeaders(data.config.headers); redactString(data.config, 'data'); redactObject(data.config.data); redactString(data.config, 'body'); redactObject(data.config.body); try { const url = new url_1.URL('', data.config.url); if (url.searchParams.has('token')) { url.searchParams.set('token', REDACT); } if (url.searchParams.has('client_secret')) { url.searchParams.set('client_secret', REDACT); } data.config.url = url.toString(); } catch (_b) { // ignore error - no need to parse an invalid URL } } if (data.response) { defaultErrorRedactor({ config: data.response.config }); redactHeaders(data.response.headers); redactString(data.response, 'data'); redactObject(data.response.data); } return data; } //# sourceMappingURL=common.js.map /***/ }), /***/ 6010: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2018 Google LLC // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; var _Gaxios_instances, _a, _Gaxios_urlMayUseProxy, _Gaxios_applyRequestInterceptors, _Gaxios_applyResponseInterceptors, _Gaxios_prepareRequest, _Gaxios_proxyAgent, _Gaxios_getProxyAgent; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Gaxios = void 0; const extend_1 = __importDefault(__nccwpck_require__(23860)); const https_1 = __nccwpck_require__(65692); const node_fetch_1 = __importDefault(__nccwpck_require__(26705)); const querystring_1 = __importDefault(__nccwpck_require__(83480)); const is_stream_1 = __importDefault(__nccwpck_require__(96543)); const url_1 = __nccwpck_require__(87016); const common_1 = __nccwpck_require__(47506); const retry_1 = __nccwpck_require__(32789); const stream_1 = __nccwpck_require__(2203); const uuid_1 = __nccwpck_require__(93187); const interceptor_1 = __nccwpck_require__(85608); /* eslint-disable @typescript-eslint/no-explicit-any */ const fetch = hasFetch() ? window.fetch : node_fetch_1.default; function hasWindow() { return typeof window !== 'undefined' && !!window; } function hasFetch() { return hasWindow() && !!window.fetch; } function hasBuffer() { return typeof Buffer !== 'undefined'; } function hasHeader(options, header) { return !!getHeader(options, header); } function getHeader(options, header) { header = header.toLowerCase(); for (const key of Object.keys((options === null || options === void 0 ? void 0 : options.headers) || {})) { if (header === key.toLowerCase()) { return options.headers[key]; } } return undefined; } class Gaxios { /** * The Gaxios class is responsible for making HTTP requests. * @param defaults The default set of options to be used for this instance. 
*/ constructor(defaults) { _Gaxios_instances.add(this); this.agentCache = new Map(); this.defaults = defaults || {}; this.interceptors = { request: new interceptor_1.GaxiosInterceptorManager(), response: new interceptor_1.GaxiosInterceptorManager(), }; } /** * Perform an HTTP request with the given options. * @param opts Set of HTTP options that will be used for this HTTP request. */ async request(opts = {}) { opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_prepareRequest).call(this, opts); opts = await __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyRequestInterceptors).call(this, opts); return __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_applyResponseInterceptors).call(this, this._request(opts)); } async _defaultAdapter(opts) { const fetchImpl = opts.fetchImplementation || fetch; const res = (await fetchImpl(opts.url, opts)); const data = await this.getResponseData(opts, res); return this.translateResponse(opts, res, data); } /** * Internal, retryable version of the `request` method. * @param opts Set of HTTP options that will be used for this HTTP request. */ async _request(opts = {}) { var _b; try { let translatedResponse; if (opts.adapter) { translatedResponse = await opts.adapter(opts, this._defaultAdapter.bind(this)); } else { translatedResponse = await this._defaultAdapter(opts); } if (!opts.validateStatus(translatedResponse.status)) { if (opts.responseType === 'stream') { let response = ''; await new Promise(resolve => { (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('data', chunk => { response += chunk; }); (translatedResponse === null || translatedResponse === void 0 ? void 0 : translatedResponse.data).on('end', resolve); }); translatedResponse.data = response; } throw new common_1.GaxiosError(`Request failed with status code ${translatedResponse.status}`, opts, translatedResponse); } return translatedResponse; } catch (e) { const err = e instanceof common_1.GaxiosError ? e : new common_1.GaxiosError(e.message, opts, undefined, e); const { shouldRetry, config } = await (0, retry_1.getRetryConfig)(err); if (shouldRetry && config) { err.config.retryConfig.currentRetryAttempt = config.retryConfig.currentRetryAttempt; // The error's config could be redacted - therefore we only want to // copy the retry state over to the existing config opts.retryConfig = (_b = err.config) === null || _b === void 0 ? 
void 0 : _b.retryConfig; return this._request(opts); } throw err; } } async getResponseData(opts, res) { switch (opts.responseType) { case 'stream': return res.body; case 'json': { let data = await res.text(); try { data = JSON.parse(data); } catch (_b) { // continue } return data; } case 'arraybuffer': return res.arrayBuffer(); case 'blob': return res.blob(); case 'text': return res.text(); default: return this.getResponseDataFromContentType(res); } } /** * By default, throw for any non-2xx status code * @param status status code from the HTTP response */ validateStatus(status) { return status >= 200 && status < 300; } /** * Encode a set of key/value pars into a querystring format (?foo=bar&baz=boo) * @param params key value pars to encode */ paramsSerializer(params) { return querystring_1.default.stringify(params); } translateResponse(opts, res, data) { // headers need to be converted from a map to an obj const headers = {}; res.headers.forEach((value, key) => { headers[key] = value; }); return { config: opts, data: data, headers, status: res.status, statusText: res.statusText, // XMLHttpRequestLike request: { responseURL: res.url, }, }; } /** * Attempts to parse a response by looking at the Content-Type header. * @param {FetchResponse} response the HTTP response. * @returns {Promise} a promise that resolves to the response data. */ async getResponseDataFromContentType(response) { let contentType = response.headers.get('Content-Type'); if (contentType === null) { // Maintain existing functionality by calling text() return response.text(); } contentType = contentType.toLowerCase(); if (contentType.includes('application/json')) { let data = await response.text(); try { data = JSON.parse(data); } catch (_b) { // continue } return data; } else if (contentType.match(/^text\//)) { return response.text(); } else { // If the content type is something not easily handled, just return the raw data (blob) return response.blob(); } } /** * Creates an async generator that yields the pieces of a multipart/related request body. * This implementation follows the spec: https://www.ietf.org/rfc/rfc2387.txt. However, recursive * multipart/related requests are not currently supported. * * @param {GaxioMultipartOptions[]} multipartOptions the pieces to turn into a multipart/related body. * @param {string} boundary the boundary string to be placed between each part. */ async *getMultipartRequest(multipartOptions, boundary) { const finale = `--${boundary}--`; for (const currentPart of multipartOptions) { const partContentType = currentPart.headers['Content-Type'] || 'application/octet-stream'; const preamble = `--${boundary}\r\nContent-Type: ${partContentType}\r\n\r\n`; yield preamble; if (typeof currentPart.content === 'string') { yield currentPart.content; } else { yield* currentPart.content; } yield '\r\n'; } yield finale; } } exports.Gaxios = Gaxios; _a = Gaxios, _Gaxios_instances = new WeakSet(), _Gaxios_urlMayUseProxy = function _Gaxios_urlMayUseProxy(url, noProxy = []) { var _b, _c; const candidate = new url_1.URL(url); const noProxyList = [...noProxy]; const noProxyEnvList = ((_c = ((_b = process.env.NO_PROXY) !== null && _b !== void 0 ? _b : process.env.no_proxy)) === null || _c === void 0 ? 
void 0 : _c.split(',')) || []; for (const rule of noProxyEnvList) { noProxyList.push(rule.trim()); } for (const rule of noProxyList) { // Match regex if (rule instanceof RegExp) { if (rule.test(candidate.toString())) { return false; } } // Match URL else if (rule instanceof url_1.URL) { if (rule.origin === candidate.origin) { return false; } } // Match string regex else if (rule.startsWith('*.') || rule.startsWith('.')) { const cleanedRule = rule.replace(/^\*\./, '.'); if (candidate.hostname.endsWith(cleanedRule)) { return false; } } // Basic string match else if (rule === candidate.origin || rule === candidate.hostname || rule === candidate.href) { return false; } } return true; }, _Gaxios_applyRequestInterceptors = /** * Applies the request interceptors. The request interceptors are applied after the * call to prepareRequest is completed. * * @param {GaxiosOptions} options The current set of options. * * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. */ async function _Gaxios_applyRequestInterceptors(options) { let promiseChain = Promise.resolve(options); for (const interceptor of this.interceptors.request.values()) { if (interceptor) { promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); } } return promiseChain; }, _Gaxios_applyResponseInterceptors = /** * Applies the response interceptors. The response interceptors are applied after the * call to request is made. * * @param {GaxiosOptions} options The current set of options. * * @returns {Promise} Promise that resolves to the set of options or response after interceptors are applied. */ async function _Gaxios_applyResponseInterceptors(response) { let promiseChain = Promise.resolve(response); for (const interceptor of this.interceptors.response.values()) { if (interceptor) { promiseChain = promiseChain.then(interceptor.resolved, interceptor.rejected); } } return promiseChain; }, _Gaxios_prepareRequest = /** * Validates the options, merges them with defaults, and prepare request. * * @param options The original options passed from the client. * @returns Prepared options, ready to make a request */ async function _Gaxios_prepareRequest(options) { var _b, _c, _d, _e; const opts = (0, extend_1.default)(true, {}, this.defaults, options); if (!opts.url) { throw new Error('URL is required.'); } // baseUrl has been deprecated, remove in 2.0 const baseUrl = opts.baseUrl || opts.baseURL; if (baseUrl) { opts.url = baseUrl.toString() + opts.url; } opts.paramsSerializer = opts.paramsSerializer || this.paramsSerializer; if (opts.params && Object.keys(opts.params).length > 0) { let additionalQueryParams = opts.paramsSerializer(opts.params); if (additionalQueryParams.startsWith('?')) { additionalQueryParams = additionalQueryParams.slice(1); } const prefix = opts.url.toString().includes('?') ? '&' : '?'; opts.url = opts.url + prefix + additionalQueryParams; } if (typeof options.maxContentLength === 'number') { opts.size = options.maxContentLength; } if (typeof options.maxRedirects === 'number') { opts.follow = options.maxRedirects; } opts.headers = opts.headers || {}; if (opts.multipart === undefined && opts.data) { const isFormData = typeof FormData === 'undefined' ? false : (opts === null || opts === void 0 ? 
void 0 : opts.data) instanceof FormData; if (is_stream_1.default.readable(opts.data)) { opts.body = opts.data; } else if (hasBuffer() && Buffer.isBuffer(opts.data)) { // Do not attempt to JSON.stringify() a Buffer: opts.body = opts.data; if (!hasHeader(opts, 'Content-Type')) { opts.headers['Content-Type'] = 'application/json'; } } else if (typeof opts.data === 'object') { // If www-form-urlencoded content type has been set, but data is // provided as an object, serialize the content using querystring: if (!isFormData) { if (getHeader(opts, 'content-type') === 'application/x-www-form-urlencoded') { opts.body = opts.paramsSerializer(opts.data); } else { // } else if (!(opts.data instanceof FormData)) { if (!hasHeader(opts, 'Content-Type')) { opts.headers['Content-Type'] = 'application/json'; } opts.body = JSON.stringify(opts.data); } } } else { opts.body = opts.data; } } else if (opts.multipart && opts.multipart.length > 0) { // note: once the minimum version reaches Node 16, // this can be replaced with randomUUID() function from crypto // and the dependency on UUID removed const boundary = (0, uuid_1.v4)(); opts.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; const bodyStream = new stream_1.PassThrough(); opts.body = bodyStream; (0, stream_1.pipeline)(this.getMultipartRequest(opts.multipart, boundary), bodyStream, () => { }); } opts.validateStatus = opts.validateStatus || this.validateStatus; opts.responseType = opts.responseType || 'unknown'; if (!opts.headers['Accept'] && opts.responseType === 'json') { opts.headers['Accept'] = 'application/json'; } opts.method = opts.method || 'GET'; const proxy = opts.proxy || ((_b = process === null || process === void 0 ? void 0 : process.env) === null || _b === void 0 ? void 0 : _b.HTTPS_PROXY) || ((_c = process === null || process === void 0 ? void 0 : process.env) === null || _c === void 0 ? void 0 : _c.https_proxy) || ((_d = process === null || process === void 0 ? void 0 : process.env) === null || _d === void 0 ? void 0 : _d.HTTP_PROXY) || ((_e = process === null || process === void 0 ? void 0 : process.env) === null || _e === void 0 ? void 0 : _e.http_proxy); const urlMayUseProxy = __classPrivateFieldGet(this, _Gaxios_instances, "m", _Gaxios_urlMayUseProxy).call(this, opts.url, opts.noProxy); if (opts.agent) { // don't do any of the following options - use the user-provided agent. } else if (proxy && urlMayUseProxy) { const HttpsProxyAgent = await __classPrivateFieldGet(_a, _a, "m", _Gaxios_getProxyAgent).call(_a); if (this.agentCache.has(proxy)) { opts.agent = this.agentCache.get(proxy); } else { opts.agent = new HttpsProxyAgent(proxy, { cert: opts.cert, key: opts.key, }); this.agentCache.set(proxy, opts.agent); } } else if (opts.cert && opts.key) { // Configure client for mTLS if (this.agentCache.has(opts.key)) { opts.agent = this.agentCache.get(opts.key); } else { opts.agent = new https_1.Agent({ cert: opts.cert, key: opts.key, }); this.agentCache.set(opts.key, opts.agent); } } if (typeof opts.errorRedactor !== 'function' && opts.errorRedactor !== false) { opts.errorRedactor = common_1.defaultErrorRedactor; } return opts; }, _Gaxios_getProxyAgent = async function _Gaxios_getProxyAgent() { __classPrivateFieldSet(this, _a, __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent) || (await Promise.resolve().then(() => __importStar(__nccwpck_require__(3669)))).HttpsProxyAgent, "f", _Gaxios_proxyAgent); return __classPrivateFieldGet(this, _a, "f", _Gaxios_proxyAgent); }; /** * A cache for the lazily-loaded proxy agent. 
* * Should use {@link Gaxios[#getProxyAgent]} to retrieve. */ // using `import` to dynamically import the types here _Gaxios_proxyAgent = { value: void 0 }; //# sourceMappingURL=gaxios.js.map /***/ }), /***/ 97003: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2018 Google LLC // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.instance = exports.Gaxios = exports.GaxiosError = void 0; exports.request = request; const gaxios_1 = __nccwpck_require__(6010); Object.defineProperty(exports, "Gaxios", ({ enumerable: true, get: function () { return gaxios_1.Gaxios; } })); var common_1 = __nccwpck_require__(47506); Object.defineProperty(exports, "GaxiosError", ({ enumerable: true, get: function () { return common_1.GaxiosError; } })); __exportStar(__nccwpck_require__(85608), exports); /** * The default instance used when the `request` method is directly * invoked. */ exports.instance = new gaxios_1.Gaxios(); /** * Make an HTTP request using the given options. * @param opts Options for the request */ async function request(opts) { return exports.instance.request(opts); } //# sourceMappingURL=index.js.map /***/ }), /***/ 85608: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright 2024 Google LLC // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.GaxiosInterceptorManager = void 0; /** * Class to manage collections of GaxiosInterceptors for both requests and responses. 
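 *
 * (Annotation added for clarity; not part of the upstream gaxios source.)
 * A minimal usage sketch, assuming only the surface shown in this bundle:
 * the manager is a Set of { resolved, rejected } handlers that
 * Gaxios#request runs through before the HTTP call (request interceptors)
 * and after it (response interceptors), e.g.
 *
 *   const client = new Gaxios();
 *   client.interceptors.request.add({
 *     resolved: opts => ({ ...opts, headers: { ...opts.headers, 'x-example': '1' } }),
 *     rejected: err => Promise.reject(err),
 *   });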
*/ class GaxiosInterceptorManager extends Set { } exports.GaxiosInterceptorManager = GaxiosInterceptorManager; //# sourceMappingURL=interceptor.js.map /***/ }), /***/ 32789: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright 2018 Google LLC // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getRetryConfig = getRetryConfig; async function getRetryConfig(err) { let config = getConfig(err); if (!err || !err.config || (!config && !err.config.retry)) { return { shouldRetry: false }; } config = config || {}; config.currentRetryAttempt = config.currentRetryAttempt || 0; config.retry = config.retry === undefined || config.retry === null ? 3 : config.retry; config.httpMethodsToRetry = config.httpMethodsToRetry || [ 'GET', 'HEAD', 'PUT', 'OPTIONS', 'DELETE', ]; config.noResponseRetries = config.noResponseRetries === undefined || config.noResponseRetries === null ? 2 : config.noResponseRetries; config.retryDelayMultiplier = config.retryDelayMultiplier ? config.retryDelayMultiplier : 2; config.timeOfFirstRequest = config.timeOfFirstRequest ? config.timeOfFirstRequest : Date.now(); config.totalTimeout = config.totalTimeout ? config.totalTimeout : Number.MAX_SAFE_INTEGER; config.maxRetryDelay = config.maxRetryDelay ? config.maxRetryDelay : Number.MAX_SAFE_INTEGER; // If this wasn't in the list of status codes where we want // to automatically retry, return. const retryRanges = [ // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes // 1xx - Retry (Informational, request still processing) // 2xx - Do not retry (Success) // 3xx - Do not retry (Redirect) // 4xx - Do not retry (Client errors) // 408 - Retry ("Request Timeout") // 429 - Retry ("Too Many Requests") // 5xx - Retry (Server errors) [100, 199], [408, 408], [429, 429], [500, 599], ]; config.statusCodesToRetry = config.statusCodesToRetry || retryRanges; // Put the config back into the err err.config.retryConfig = config; // Determine if we should retry the request const shouldRetryFn = config.shouldRetry || shouldRetryRequest; if (!(await shouldRetryFn(err))) { return { shouldRetry: false, config: err.config }; } const delay = getNextRetryDelay(config); // We're going to retry! Incremenent the counter. err.config.retryConfig.currentRetryAttempt += 1; // Create a promise that invokes the retry after the backOffDelay const backoff = config.retryBackoff ? config.retryBackoff(err, delay) : new Promise(resolve => { setTimeout(resolve, delay); }); // Notify the user if they added an `onRetryAttempt` handler if (config.onRetryAttempt) { config.onRetryAttempt(err); } // Return the promise in which recalls Gaxios to retry the request await backoff; return { shouldRetry: true, config: err.config }; } /** * Determine based on config if we should retry the request. * @param err The GaxiosError passed to the interceptor. 
*/ function shouldRetryRequest(err) { var _a; const config = getConfig(err); // node-fetch raises an AbortError if signaled: // https://github.com/bitinn/node-fetch#request-cancellation-with-abortsignal if (err.name === 'AbortError' || ((_a = err.error) === null || _a === void 0 ? void 0 : _a.name) === 'AbortError') { return false; } // If there's no config, or retries are disabled, return. if (!config || config.retry === 0) { return false; } // Check if this error has no response (ETIMEDOUT, ENOTFOUND, etc) if (!err.response && (config.currentRetryAttempt || 0) >= config.noResponseRetries) { return false; } // Only retry with configured HttpMethods. if (!err.config.method || config.httpMethodsToRetry.indexOf(err.config.method.toUpperCase()) < 0) { return false; } // If this wasn't in the list of status codes where we want // to automatically retry, return. if (err.response && err.response.status) { let isInRange = false; for (const [min, max] of config.statusCodesToRetry) { const status = err.response.status; if (status >= min && status <= max) { isInRange = true; break; } } if (!isInRange) { return false; } } // If we are out of retry attempts, return config.currentRetryAttempt = config.currentRetryAttempt || 0; if (config.currentRetryAttempt >= config.retry) { return false; } return true; } /** * Acquire the raxConfig object from an GaxiosError if available. * @param err The Gaxios error with a config object. */ function getConfig(err) { if (err && err.config && err.config.retryConfig) { return err.config.retryConfig; } return; } /** * Gets the delay to wait before the next retry. * * @param {RetryConfig} config The current set of retry options * @returns {number} the amount of ms to wait before the next retry attempt. */ function getNextRetryDelay(config) { var _a; // Calculate time to wait with exponential backoff. // If this is the first retry, look for a configured retryDelay. const retryDelay = config.currentRetryAttempt ? 0 : (_a = config.retryDelay) !== null && _a !== void 0 ? _a : 100; // Formula: retryDelay + ((retryDelayMultiplier^currentRetryAttempt - 1 / 2) * 1000) const calculatedDelay = retryDelay + ((Math.pow(config.retryDelayMultiplier, config.currentRetryAttempt) - 1) / 2) * 1000; const maxAllowableDelay = config.totalTimeout - (Date.now() - config.timeOfFirstRequest); return Math.min(calculatedDelay, maxAllowableDelay, config.maxRetryDelay); } //# sourceMappingURL=retry.js.map /***/ }), /***/ 63155: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2023 Google LLC // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
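// (Annotation added for clarity; not part of the upstream gaxios source.)
// Worked example for getNextRetryDelay above, using the defaults shown there
// (retryDelayMultiplier = 2, retryDelay = 100 applied on the first retry
// only, no totalTimeout/maxRetryDelay cap hit):
//
//   retry #1 (currentRetryAttempt = 0): 100 + ((2^0 - 1) / 2) * 1000 =  100 ms
//   retry #2 (currentRetryAttempt = 1):   0 + ((2^1 - 1) / 2) * 1000 =  500 ms
//   retry #3 (currentRetryAttempt = 2):   0 + ((2^2 - 1) / 2) * 1000 = 1500 ms
//
// i.e. the "Formula" comment is to be read as
//   retryDelay + ((retryDelayMultiplier^currentRetryAttempt - 1) / 2) * 1000,
// matching the calculatedDelay expression.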
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.pkg = void 0; exports.pkg = __nccwpck_require__(66495); //# sourceMappingURL=util.js.map /***/ }), /***/ 93187: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "NIL", ({ enumerable: true, get: function () { return _nil.default; } })); Object.defineProperty(exports, "parse", ({ enumerable: true, get: function () { return _parse.default; } })); Object.defineProperty(exports, "stringify", ({ enumerable: true, get: function () { return _stringify.default; } })); Object.defineProperty(exports, "v1", ({ enumerable: true, get: function () { return _v.default; } })); Object.defineProperty(exports, "v3", ({ enumerable: true, get: function () { return _v2.default; } })); Object.defineProperty(exports, "v4", ({ enumerable: true, get: function () { return _v3.default; } })); Object.defineProperty(exports, "v5", ({ enumerable: true, get: function () { return _v4.default; } })); Object.defineProperty(exports, "validate", ({ enumerable: true, get: function () { return _validate.default; } })); Object.defineProperty(exports, "version", ({ enumerable: true, get: function () { return _version.default; } })); var _v = _interopRequireDefault(__nccwpck_require__(96698)); var _v2 = _interopRequireDefault(__nccwpck_require__(76272)); var _v3 = _interopRequireDefault(__nccwpck_require__(48485)); var _v4 = _interopRequireDefault(__nccwpck_require__(80742)); var _nil = _interopRequireDefault(__nccwpck_require__(18212)); var _version = _interopRequireDefault(__nccwpck_require__(66651)); var _validate = _interopRequireDefault(__nccwpck_require__(25913)); var _stringify = _interopRequireDefault(__nccwpck_require__(34386)); var _parse = _interopRequireDefault(__nccwpck_require__(9884)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /***/ }), /***/ 81979: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function md5(bytes) { if (Array.isArray(bytes)) { bytes = Buffer.from(bytes); } else if (typeof bytes === 'string') { bytes = Buffer.from(bytes, 'utf8'); } return _crypto.default.createHash('md5').update(bytes).digest(); } var _default = md5; exports["default"] = _default; /***/ }), /***/ 93968: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } var _default = { randomUUID: _crypto.default.randomUUID }; exports["default"] = _default; /***/ }), /***/ 18212: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _default = '00000000-0000-0000-0000-000000000000'; exports["default"] = _default; /***/ }), /***/ 9884: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _validate = _interopRequireDefault(__nccwpck_require__(25913)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function parse(uuid) { if (!(0, _validate.default)(uuid)) { throw TypeError('Invalid UUID'); } let v; const arr = new Uint8Array(16); // Parse ########-....-....-....-............ arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; arr[1] = v >>> 16 & 0xff; arr[2] = v >>> 8 & 0xff; arr[3] = v & 0xff; // Parse ........-####-....-....-............ arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; arr[5] = v & 0xff; // Parse ........-....-####-....-............ arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; arr[7] = v & 0xff; // Parse ........-....-....-####-............ arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; arr[9] = v & 0xff; // Parse ........-....-....-....-############ // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; arr[11] = v / 0x100000000 & 0xff; arr[12] = v >>> 24 & 0xff; arr[13] = v >>> 16 & 0xff; arr[14] = v >>> 8 & 0xff; arr[15] = v & 0xff; return arr; } var _default = parse; exports["default"] = _default; /***/ }), /***/ 80956: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; exports["default"] = _default; /***/ }), /***/ 54566: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = rng; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate let poolPtr = rnds8Pool.length; function rng() { if (poolPtr > rnds8Pool.length - 16) { _crypto.default.randomFillSync(rnds8Pool); poolPtr = 0; } return rnds8Pool.slice(poolPtr, poolPtr += 16); } /***/ }), /***/ 5398: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } function sha1(bytes) { if (Array.isArray(bytes)) { bytes = Buffer.from(bytes); } else if (typeof bytes === 'string') { bytes = Buffer.from(bytes, 'utf8'); } return _crypto.default.createHash('sha1').update(bytes).digest(); } var _default = sha1; exports["default"] = _default; /***/ }), /***/ 34386: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; exports.unsafeStringify = unsafeStringify; var _validate = _interopRequireDefault(__nccwpck_require__(25913)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /** * Convert array of 16 byte values to UUID string format of the form: * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ const byteToHex = []; for (let i = 0; i < 256; ++i) { byteToHex.push((i + 0x100).toString(16).slice(1)); } function unsafeStringify(arr, offset = 0) { // Note: Be careful editing this code! It's been tuned for performance // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; } function stringify(arr, offset = 0) { const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. If this throws, it's likely due to one // of the following: // - One or more input array values don't map to a hex octet (leading to // "undefined" in the uuid) // - Invalid input values for the RFC `version` or `variant` fields if (!(0, _validate.default)(uuid)) { throw TypeError('Stringified UUID is invalid'); } return uuid; } var _default = stringify; exports["default"] = _default; /***/ }), /***/ 96698: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _rng = _interopRequireDefault(__nccwpck_require__(54566)); var _stringify = __nccwpck_require__(34386); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } // **`v1()` - Generate time-based UUID** // // Inspired by https://github.com/LiosK/UUID.js // and http://docs.python.org/library/uuid.html let _nodeId; let _clockseq; // Previous uuid creation time let _lastMSecs = 0; let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details function v1(options, buf, offset) { let i = buf && offset || 0; const b = buf || new Array(16); options = options || {}; let node = options.node || _nodeId; let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not // specified. We do this lazily to minimize issues related to insufficient // system entropy. 
See #189 if (node == null || clockseq == null) { const seedBytes = options.random || (options.rng || _rng.default)(); if (node == null) { // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; } if (clockseq == null) { // Per 4.2.2, randomize (14 bit) clockseq clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; } } // UUID timestamps are 100 nano-second units since the Gregorian epoch, // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock // cycle to simulate higher resolution clock let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression if (dt < 0 && options.clockseq === undefined) { clockseq = clockseq + 1 & 0x3fff; } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new // time interval if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { nsecs = 0; } // Per 4.2.1.2 Throw error if too many uuids are requested if (nsecs >= 10000) { throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); } _lastMSecs = msecs; _lastNSecs = nsecs; _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch msecs += 12219292800000; // `time_low` const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; b[i++] = tl >>> 24 & 0xff; b[i++] = tl >>> 16 & 0xff; b[i++] = tl >>> 8 & 0xff; b[i++] = tl & 0xff; // `time_mid` const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; b[i++] = tmh >>> 8 & 0xff; b[i++] = tmh & 0xff; // `time_high_and_version` b[i++] = tmh >>> 24 & 0xf | 0x10; // include version b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` b[i++] = clockseq & 0xff; // `node` for (let n = 0; n < 6; ++n) { b[i + n] = node[n]; } return buf || (0, _stringify.unsafeStringify)(b); } var _default = v1; exports["default"] = _default; /***/ }), /***/ 76272: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _v = _interopRequireDefault(__nccwpck_require__(52781)); var _md = _interopRequireDefault(__nccwpck_require__(81979)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } const v3 = (0, _v.default)('v3', 0x30, _md.default); var _default = v3; exports["default"] = _default; /***/ }), /***/ 52781: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.URL = exports.DNS = void 0; exports["default"] = v35; var _stringify = __nccwpck_require__(34386); var _parse = _interopRequireDefault(__nccwpck_require__(9884)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } function stringToBytes(str) { str = unescape(encodeURIComponent(str)); // UTF8 escape const bytes = []; for (let i = 0; i < str.length; ++i) { bytes.push(str.charCodeAt(i)); } return bytes; } const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; exports.DNS = DNS; const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; exports.URL = URL; function v35(name, version, hashfunc) { function generateUUID(value, namespace, buf, offset) { var _namespace; if (typeof value === 'string') { value = stringToBytes(value); } if (typeof namespace === 'string') { namespace = (0, _parse.default)(namespace); } if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); } // Compute hash of namespace and value, Per 4.3 // Future: Use spread syntax when supported on all platforms, e.g. `bytes = // hashfunc([...namespace, ... value])` let bytes = new Uint8Array(16 + value.length); bytes.set(namespace); bytes.set(value, namespace.length); bytes = hashfunc(bytes); bytes[6] = bytes[6] & 0x0f | version; bytes[8] = bytes[8] & 0x3f | 0x80; if (buf) { offset = offset || 0; for (let i = 0; i < 16; ++i) { buf[offset + i] = bytes[i]; } return buf; } return (0, _stringify.unsafeStringify)(bytes); } // Function#name is not settable on some platforms (#270) try { generateUUID.name = name; // eslint-disable-next-line no-empty } catch (err) {} // For CommonJS default export support generateUUID.DNS = DNS; generateUUID.URL = URL; return generateUUID; } /***/ }), /***/ 48485: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _native = _interopRequireDefault(__nccwpck_require__(93968)); var _rng = _interopRequireDefault(__nccwpck_require__(54566)); var _stringify = __nccwpck_require__(34386); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function v4(options, buf, offset) { if (_native.default.randomUUID && !buf && !options) { return _native.default.randomUUID(); } options = options || {}; const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` rnds[6] = rnds[6] & 0x0f | 0x40; rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided if (buf) { offset = offset || 0; for (let i = 0; i < 16; ++i) { buf[offset + i] = rnds[i]; } return buf; } return (0, _stringify.unsafeStringify)(rnds); } var _default = v4; exports["default"] = _default; /***/ }), /***/ 80742: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _v = _interopRequireDefault(__nccwpck_require__(52781)); var _sha = _interopRequireDefault(__nccwpck_require__(5398)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } const v5 = (0, _v.default)('v5', 0x50, _sha.default); var _default = v5; exports["default"] = _default; /***/ }), /***/ 25913: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _regex = _interopRequireDefault(__nccwpck_require__(80956)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } function validate(uuid) { return typeof uuid === 'string' && _regex.default.test(uuid); } var _default = validate; exports["default"] = _default; /***/ }), /***/ 66651: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _validate = _interopRequireDefault(__nccwpck_require__(25913)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function version(uuid) { if (!(0, _validate.default)(uuid)) { throw TypeError('Invalid UUID'); } return parseInt(uuid.slice(14, 15), 16); } var _default = version; exports["default"] = _default; /***/ }), /***/ 70381: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; /** * Copyright 2022 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.GCE_LINUX_BIOS_PATHS = void 0; exports.isGoogleCloudServerless = isGoogleCloudServerless; exports.isGoogleComputeEngineLinux = isGoogleComputeEngineLinux; exports.isGoogleComputeEngineMACAddress = isGoogleComputeEngineMACAddress; exports.isGoogleComputeEngine = isGoogleComputeEngine; exports.detectGCPResidency = detectGCPResidency; const fs_1 = __nccwpck_require__(79896); const os_1 = __nccwpck_require__(70857); /** * Known paths unique to Google Compute Engine Linux instances */ exports.GCE_LINUX_BIOS_PATHS = { BIOS_DATE: '/sys/class/dmi/id/bios_date', BIOS_VENDOR: '/sys/class/dmi/id/bios_vendor', }; const GCE_MAC_ADDRESS_REGEX = /^42:01/; /** * Determines if the process is running on a Google Cloud Serverless environment (Cloud Run or Cloud Functions instance). * * Uses the: * - {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. * - {@link https://cloud.google.com/functions/docs/env-var Cloud Functions environment variables}. * * @returns {boolean} `true` if the process is running on GCP serverless, `false` otherwise. */ function isGoogleCloudServerless() { /** * `CLOUD_RUN_JOB` is used for Cloud Run Jobs * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. * * `FUNCTION_NAME` is used in older Cloud Functions environments: * - See {@link https://cloud.google.com/functions/docs/env-var Python 3.7 and Go 1.11}. * * `K_SERVICE` is used in Cloud Run and newer Cloud Functions environments: * - See {@link https://cloud.google.com/run/docs/container-contract#env-vars Cloud Run environment variables}. * - See {@link https://cloud.google.com/functions/docs/env-var Cloud Functions newer runtimes}. */ const isGFEnvironment = process.env.CLOUD_RUN_JOB || process.env.FUNCTION_NAME || process.env.K_SERVICE; return !!isGFEnvironment; } /** * Determines if the process is running on a Linux Google Compute Engine instance. * * @returns {boolean} `true` if the process is running on Linux GCE, `false` otherwise. 
*/ function isGoogleComputeEngineLinux() { if ((0, os_1.platform)() !== 'linux') return false; try { // ensure this file exists (0, fs_1.statSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_DATE); // ensure this file exists and matches const biosVendor = (0, fs_1.readFileSync)(exports.GCE_LINUX_BIOS_PATHS.BIOS_VENDOR, 'utf8'); return /Google/.test(biosVendor); } catch (_a) { return false; } } /** * Determines if the process is running on a Google Compute Engine instance with a known * MAC address. * * @returns {boolean} `true` if the process is running on GCE (as determined by MAC address), `false` otherwise. */ function isGoogleComputeEngineMACAddress() { const interfaces = (0, os_1.networkInterfaces)(); for (const item of Object.values(interfaces)) { if (!item) continue; for (const { mac } of item) { if (GCE_MAC_ADDRESS_REGEX.test(mac)) { return true; } } } return false; } /** * Determines if the process is running on a Google Compute Engine instance. * * @returns {boolean} `true` if the process is running on GCE, `false` otherwise. */ function isGoogleComputeEngine() { return isGoogleComputeEngineLinux() || isGoogleComputeEngineMACAddress(); } /** * Determines if the process is running on Google Cloud Platform. * * @returns {boolean} `true` if the process is running on GCP, `false` otherwise. */ function detectGCPResidency() { return isGoogleCloudServerless() || isGoogleComputeEngine(); } //# sourceMappingURL=gcp-residency.js.map /***/ }), /***/ 23046: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; /** * Copyright 2018 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ?
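// Taken together, the residency helpers above classify the host as GCP when a serverless environment
// variable (CLOUD_RUN_JOB, FUNCTION_NAME or K_SERVICE) is set, when the Linux BIOS vendor file reports
// "Google", or when a network interface MAC address starts with 42:01. A small usage sketch (assuming
// the `gcp-metadata` package API, which re-exports these functions further below in this bundle):
//
//   const { detectGCPResidency, isGoogleCloudServerless, isGoogleComputeEngine } = require('gcp-metadata');
//
//   if (detectGCPResidency()) {
//     console.log('on GCP', {
//       serverless: isGoogleCloudServerless(), // env-var based check
//       gce: isGoogleComputeEngine(),          // BIOS file or MAC prefix check
//     });
//   }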
!m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.gcpResidencyCache = exports.METADATA_SERVER_DETECTION = exports.HEADERS = exports.HEADER_VALUE = exports.HEADER_NAME = exports.SECONDARY_HOST_ADDRESS = exports.HOST_ADDRESS = exports.BASE_PATH = void 0; exports.instance = instance; exports.project = project; exports.universe = universe; exports.bulk = bulk; exports.isAvailable = isAvailable; exports.resetIsAvailableCache = resetIsAvailableCache; exports.getGCPResidency = getGCPResidency; exports.setGCPResidency = setGCPResidency; exports.requestTimeout = requestTimeout; const gaxios_1 = __nccwpck_require__(97003); const jsonBigint = __nccwpck_require__(14826); const gcp_residency_1 = __nccwpck_require__(70381); const logger = __nccwpck_require__(81577); exports.BASE_PATH = '/computeMetadata/v1'; exports.HOST_ADDRESS = 'http://169.254.169.254'; exports.SECONDARY_HOST_ADDRESS = 'http://metadata.google.internal.'; exports.HEADER_NAME = 'Metadata-Flavor'; exports.HEADER_VALUE = 'Google'; exports.HEADERS = Object.freeze({ [exports.HEADER_NAME]: exports.HEADER_VALUE }); const log = logger.log('gcp metadata'); /** * Metadata server detection override options. * * Available via `process.env.METADATA_SERVER_DETECTION`. */ exports.METADATA_SERVER_DETECTION = Object.freeze({ 'assume-present': "don't try to ping the metadata server, but assume it's present", none: "don't try to ping the metadata server, but don't try to use it either", 'bios-only': "treat the result of a BIOS probe as canonical (don't fall back to pinging)", 'ping-only': 'skip the BIOS probe, and go straight to pinging', }); /** * Returns the base URL while taking into account the GCE_METADATA_HOST * environment variable if it exists. * * @returns The base URL, e.g., http://169.254.169.254/computeMetadata/v1. */ function getBaseUrl(baseUrl) { if (!baseUrl) { baseUrl = process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST || exports.HOST_ADDRESS; } // If no scheme is provided default to HTTP: if (!/^https?:\/\//.test(baseUrl)) { baseUrl = `http://${baseUrl}`; } return new URL(exports.BASE_PATH, baseUrl).href; } // Accepts an options object passed from the user to the API. In previous // versions of the API, it referred to a `Request` or an `Axios` request // options object. Now it refers to an object with very limited property // names. This is here to help ensure users don't pass invalid options when // they upgrade from 0.4 to 0.5 to 0.8. function validate(options) { Object.keys(options).forEach(key => { switch (key) { case 'params': case 'property': case 'headers': break; case 'qs': throw new Error("'qs' is not a valid configuration option. 
Please use 'params' instead."); default: throw new Error(`'${key}' is not a valid configuration option.`); } }); } async function metadataAccessor(type, options = {}, noResponseRetries = 3, fastFail = false) { let metadataKey = ''; let params = {}; let headers = {}; if (typeof type === 'object') { const metadataAccessor = type; metadataKey = metadataAccessor.metadataKey; params = metadataAccessor.params || params; headers = metadataAccessor.headers || headers; noResponseRetries = metadataAccessor.noResponseRetries || noResponseRetries; fastFail = metadataAccessor.fastFail || fastFail; } else { metadataKey = type; } if (typeof options === 'string') { metadataKey += `/${options}`; } else { validate(options); if (options.property) { metadataKey += `/${options.property}`; } headers = options.headers || headers; params = options.params || params; } const requestMethod = fastFail ? fastFailMetadataRequest : gaxios_1.request; const req = { url: `${getBaseUrl()}/${metadataKey}`, headers: { ...exports.HEADERS, ...headers }, retryConfig: { noResponseRetries }, params, responseType: 'text', timeout: requestTimeout(), }; log.info('instance request %j', req); const res = await requestMethod(req); log.info('instance metadata is %s', res.data); // NOTE: node.js converts all incoming headers to lower case. if (res.headers[exports.HEADER_NAME.toLowerCase()] !== exports.HEADER_VALUE) { throw new Error(`Invalid response from metadata service: incorrect ${exports.HEADER_NAME} header. Expected '${exports.HEADER_VALUE}', got ${res.headers[exports.HEADER_NAME.toLowerCase()] ? `'${res.headers[exports.HEADER_NAME.toLowerCase()]}'` : 'no header'}`); } if (typeof res.data === 'string') { try { return jsonBigint.parse(res.data); } catch (_a) { /* ignore */ } } return res.data; } async function fastFailMetadataRequest(options) { var _a; const secondaryOptions = { ...options, url: (_a = options.url) === null || _a === void 0 ? void 0 : _a.toString().replace(getBaseUrl(), getBaseUrl(exports.SECONDARY_HOST_ADDRESS)), }; // We race a connection between DNS/IP to metadata server. There are a couple // reasons for this: // // 1. the DNS is slow in some GCP environments; by checking both, we might // detect the runtime environment signficantly faster. // 2. we can't just check the IP, which is tarpitted and slow to respond // on a user's local machine. // // Additional logic has been added to make sure that we don't create an // unhandled rejection in scenarios where a failure happens sometime // after a success. // // Note, however, if a failure happens prior to a success, a rejection should // occur, this is for folks running locally. // let responded = false; const r1 = (0, gaxios_1.request)(options) .then(res => { responded = true; return res; }) .catch(err => { if (responded) { return r2; } else { responded = true; throw err; } }); const r2 = (0, gaxios_1.request)(secondaryOptions) .then(res => { responded = true; return res; }) .catch(err => { if (responded) { return r1; } else { responded = true; throw err; } }); return Promise.race([r1, r2]); } /** * Obtain metadata for the current GCE instance. 
* * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} * * @example * ``` * const serviceAccount: {} = await instance('service-accounts/'); * const serviceAccountEmail: string = await instance('service-accounts/default/email'); * ``` */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function instance(options) { return metadataAccessor('instance', options); } /** * Obtain metadata for the current GCP project. * * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} * * @example * ``` * const projectId: string = await project('project-id'); * const numericProjectId: number = await project('numeric-project-id'); * ``` */ // eslint-disable-next-line @typescript-eslint/no-explicit-any function project(options) { return metadataAccessor('project', options); } /** * Obtain metadata for the current universe. * * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} * * @example * ``` * const universeDomain: string = await universe('universe-domain'); * ``` */ function universe(options) { return metadataAccessor('universe', options); } /** * Retrieve metadata items in parallel. * * @see {@link https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys} * * @example * ``` * const data = await bulk([ * { * metadataKey: 'instance', * }, * { * metadataKey: 'project/project-id', * }, * ] as const); * * // data.instance; * // data['project/project-id']; * ``` * * @param properties The metadata properties to retrieve * @returns The metadata in `metadatakey:value` format */ async function bulk(properties) { const r = {}; await Promise.all(properties.map(item => { return (async () => { const res = await metadataAccessor(item); const key = item.metadataKey; r[key] = res; })(); })); return r; } /* * How many times should we retry detecting GCP environment. */ function detectGCPAvailableRetries() { return process.env.DETECT_GCP_RETRIES ? Number(process.env.DETECT_GCP_RETRIES) : 0; } let cachedIsAvailableResponse; /** * Determine if the metadata server is currently available. */ async function isAvailable() { if (process.env.METADATA_SERVER_DETECTION) { const value = process.env.METADATA_SERVER_DETECTION.trim().toLocaleLowerCase(); if (!(value in exports.METADATA_SERVER_DETECTION)) { throw new RangeError(`Unknown \`METADATA_SERVER_DETECTION\` env variable. Got \`${value}\`, but it should be \`${Object.keys(exports.METADATA_SERVER_DETECTION).join('`, `')}\`, or unset`); } switch (value) { case 'assume-present': return true; case 'none': return false; case 'bios-only': return getGCPResidency(); case 'ping-only': // continue, we want to ping the server } } try { // If a user is instantiating several GCP libraries at the same time, // this may result in multiple calls to isAvailable(), to detect the // runtime environment. We use the same promise for each of these calls // to reduce the network load. 
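// A usage sketch of the availability probe described above (assuming the `gcp-metadata` exports shown
// in this module): METADATA_SERVER_DETECTION can short-circuit the network probe entirely, and the
// memoized promise means concurrent callers share a single check.
//
//   const gcpMetadata = require('gcp-metadata');
//
//   // Optional override, e.g. in CI: 'assume-present' | 'none' | 'bios-only' | 'ping-only'
//   // process.env.METADATA_SERVER_DETECTION = 'none';
//
//   async function getProjectIdIfOnGcp() {
//     if (!(await gcpMetadata.isAvailable())) return undefined;
//     return gcpMetadata.project('project-id'); // see the @example blocks above
//   }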
if (cachedIsAvailableResponse === undefined) { cachedIsAvailableResponse = metadataAccessor('instance', undefined, detectGCPAvailableRetries(), // If the default HOST_ADDRESS has been overridden, we should not // make an effort to try SECONDARY_HOST_ADDRESS (as we are likely in // a non-GCP environment): !(process.env.GCE_METADATA_IP || process.env.GCE_METADATA_HOST)); } await cachedIsAvailableResponse; return true; } catch (e) { const err = e; if (process.env.DEBUG_AUTH) { console.info(err); } if (err.type === 'request-timeout') { // If running in a GCP environment, metadata endpoint should return // within ms. return false; } if (err.response && err.response.status === 404) { return false; } else { if (!(err.response && err.response.status === 404) && // A warning is emitted if we see an unexpected err.code, or err.code // is not populated: (!err.code || ![ 'EHOSTDOWN', 'EHOSTUNREACH', 'ENETUNREACH', 'ENOENT', 'ENOTFOUND', 'ECONNREFUSED', ].includes(err.code))) { let code = 'UNKNOWN'; if (err.code) code = err.code; process.emitWarning(`received unexpected error = ${err.message} code = ${code}`, 'MetadataLookupWarning'); } // Failure to resolve the metadata service means that it is not available. return false; } } } /** * reset the memoized isAvailable() lookup. */ function resetIsAvailableCache() { cachedIsAvailableResponse = undefined; } /** * A cache for the detected GCP Residency. */ exports.gcpResidencyCache = null; /** * Detects GCP Residency. * Caches results to reduce costs for subsequent calls. * * @see setGCPResidency for setting */ function getGCPResidency() { if (exports.gcpResidencyCache === null) { setGCPResidency(); } return exports.gcpResidencyCache; } /** * Sets the detected GCP Residency. * Useful for forcing metadata server detection behavior. * * Set `null` to autodetect the environment (default behavior). * @see getGCPResidency for getting */ function setGCPResidency(value = null) { exports.gcpResidencyCache = value !== null ? value : (0, gcp_residency_1.detectGCPResidency)(); } /** * Obtain the timeout for requests to the metadata server. * * In certain environments and conditions requests can take longer than * the default timeout to complete. This function will determine the * appropriate timeout based on the environment. * * @returns {number} a request timeout duration in milliseconds. */ function requestTimeout() { return getGCPResidency() ? 
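// For tests or air-gapped environments the residency cache above can be pinned instead of probed;
// requestTimeout() then derives the metadata request timeout from that cached answer (no timeout on
// GCP, a short finite timeout otherwise). A sketch, assuming the exports of this module:
//
//   const { setGCPResidency, requestTimeout, resetIsAvailableCache } = require('gcp-metadata');
//
//   setGCPResidency(true);    // force "on GCP": requestTimeout() returns 0 (no timeout)
//   setGCPResidency(false);   // force "off GCP": a finite timeout is used instead
//   setGCPResidency(null);    // re-run detectGCPResidency() and cache the result
//   resetIsAvailableCache();  // also drop any memoized isAvailable() probe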
0 : 3000; } __exportStar(__nccwpck_require__(70381), exports); //# sourceMappingURL=index.js.map /***/ }), /***/ 60470: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var undefined; var $Object = __nccwpck_require__(95399); var $Error = __nccwpck_require__(31620); var $EvalError = __nccwpck_require__(33056); var $RangeError = __nccwpck_require__(14585); var $ReferenceError = __nccwpck_require__(46905); var $SyntaxError = __nccwpck_require__(80105); var $TypeError = __nccwpck_require__(73314); var $URIError = __nccwpck_require__(32578); var abs = __nccwpck_require__(55641); var floor = __nccwpck_require__(96171); var max = __nccwpck_require__(57147); var min = __nccwpck_require__(41017); var pow = __nccwpck_require__(56947); var round = __nccwpck_require__(42621); var sign = __nccwpck_require__(30156); var $Function = Function; // eslint-disable-next-line consistent-return var getEvalledConstructor = function (expressionSyntax) { try { return $Function('"use strict"; return (' + expressionSyntax + ').constructor;')(); } catch (e) {} }; var $gOPD = __nccwpck_require__(33170); var $defineProperty = __nccwpck_require__(79094); var throwTypeError = function () { throw new $TypeError(); }; var ThrowTypeError = $gOPD ? (function () { try { // eslint-disable-next-line no-unused-expressions, no-caller, no-restricted-properties arguments.callee; // IE 8 does not throw here return throwTypeError; } catch (calleeThrows) { try { // IE 8 throws on Object.getOwnPropertyDescriptor(arguments, '') return $gOPD(arguments, 'callee').get; } catch (gOPDthrows) { return throwTypeError; } } }()) : throwTypeError; var hasSymbols = __nccwpck_require__(23336)(); var getProto = __nccwpck_require__(81967); var $ObjectGPO = __nccwpck_require__(91311); var $ReflectGPO = __nccwpck_require__(48681); var $apply = __nccwpck_require__(33945); var $call = __nccwpck_require__(88093); var needsEval = {}; var TypedArray = typeof Uint8Array === 'undefined' || !getProto ? undefined : getProto(Uint8Array); var INTRINSICS = { __proto__: null, '%AggregateError%': typeof AggregateError === 'undefined' ? undefined : AggregateError, '%Array%': Array, '%ArrayBuffer%': typeof ArrayBuffer === 'undefined' ? undefined : ArrayBuffer, '%ArrayIteratorPrototype%': hasSymbols && getProto ? getProto([][Symbol.iterator]()) : undefined, '%AsyncFromSyncIteratorPrototype%': undefined, '%AsyncFunction%': needsEval, '%AsyncGenerator%': needsEval, '%AsyncGeneratorFunction%': needsEval, '%AsyncIteratorPrototype%': needsEval, '%Atomics%': typeof Atomics === 'undefined' ? undefined : Atomics, '%BigInt%': typeof BigInt === 'undefined' ? undefined : BigInt, '%BigInt64Array%': typeof BigInt64Array === 'undefined' ? undefined : BigInt64Array, '%BigUint64Array%': typeof BigUint64Array === 'undefined' ? undefined : BigUint64Array, '%Boolean%': Boolean, '%DataView%': typeof DataView === 'undefined' ? undefined : DataView, '%Date%': Date, '%decodeURI%': decodeURI, '%decodeURIComponent%': decodeURIComponent, '%encodeURI%': encodeURI, '%encodeURIComponent%': encodeURIComponent, '%Error%': $Error, '%eval%': eval, // eslint-disable-line no-eval '%EvalError%': $EvalError, '%Float16Array%': typeof Float16Array === 'undefined' ? undefined : Float16Array, '%Float32Array%': typeof Float32Array === 'undefined' ? undefined : Float32Array, '%Float64Array%': typeof Float64Array === 'undefined' ? undefined : Float64Array, '%FinalizationRegistry%': typeof FinalizationRegistry === 'undefined' ? 
undefined : FinalizationRegistry, '%Function%': $Function, '%GeneratorFunction%': needsEval, '%Int8Array%': typeof Int8Array === 'undefined' ? undefined : Int8Array, '%Int16Array%': typeof Int16Array === 'undefined' ? undefined : Int16Array, '%Int32Array%': typeof Int32Array === 'undefined' ? undefined : Int32Array, '%isFinite%': isFinite, '%isNaN%': isNaN, '%IteratorPrototype%': hasSymbols && getProto ? getProto(getProto([][Symbol.iterator]())) : undefined, '%JSON%': typeof JSON === 'object' ? JSON : undefined, '%Map%': typeof Map === 'undefined' ? undefined : Map, '%MapIteratorPrototype%': typeof Map === 'undefined' || !hasSymbols || !getProto ? undefined : getProto(new Map()[Symbol.iterator]()), '%Math%': Math, '%Number%': Number, '%Object%': $Object, '%Object.getOwnPropertyDescriptor%': $gOPD, '%parseFloat%': parseFloat, '%parseInt%': parseInt, '%Promise%': typeof Promise === 'undefined' ? undefined : Promise, '%Proxy%': typeof Proxy === 'undefined' ? undefined : Proxy, '%RangeError%': $RangeError, '%ReferenceError%': $ReferenceError, '%Reflect%': typeof Reflect === 'undefined' ? undefined : Reflect, '%RegExp%': RegExp, '%Set%': typeof Set === 'undefined' ? undefined : Set, '%SetIteratorPrototype%': typeof Set === 'undefined' || !hasSymbols || !getProto ? undefined : getProto(new Set()[Symbol.iterator]()), '%SharedArrayBuffer%': typeof SharedArrayBuffer === 'undefined' ? undefined : SharedArrayBuffer, '%String%': String, '%StringIteratorPrototype%': hasSymbols && getProto ? getProto(''[Symbol.iterator]()) : undefined, '%Symbol%': hasSymbols ? Symbol : undefined, '%SyntaxError%': $SyntaxError, '%ThrowTypeError%': ThrowTypeError, '%TypedArray%': TypedArray, '%TypeError%': $TypeError, '%Uint8Array%': typeof Uint8Array === 'undefined' ? undefined : Uint8Array, '%Uint8ClampedArray%': typeof Uint8ClampedArray === 'undefined' ? undefined : Uint8ClampedArray, '%Uint16Array%': typeof Uint16Array === 'undefined' ? undefined : Uint16Array, '%Uint32Array%': typeof Uint32Array === 'undefined' ? undefined : Uint32Array, '%URIError%': $URIError, '%WeakMap%': typeof WeakMap === 'undefined' ? undefined : WeakMap, '%WeakRef%': typeof WeakRef === 'undefined' ? undefined : WeakRef, '%WeakSet%': typeof WeakSet === 'undefined' ? 
undefined : WeakSet, '%Function.prototype.call%': $call, '%Function.prototype.apply%': $apply, '%Object.defineProperty%': $defineProperty, '%Object.getPrototypeOf%': $ObjectGPO, '%Math.abs%': abs, '%Math.floor%': floor, '%Math.max%': max, '%Math.min%': min, '%Math.pow%': pow, '%Math.round%': round, '%Math.sign%': sign, '%Reflect.getPrototypeOf%': $ReflectGPO }; if (getProto) { try { null.error; // eslint-disable-line no-unused-expressions } catch (e) { // https://github.com/tc39/proposal-shadowrealm/pull/384#issuecomment-1364264229 var errorProto = getProto(getProto(e)); INTRINSICS['%Error.prototype%'] = errorProto; } } var doEval = function doEval(name) { var value; if (name === '%AsyncFunction%') { value = getEvalledConstructor('async function () {}'); } else if (name === '%GeneratorFunction%') { value = getEvalledConstructor('function* () {}'); } else if (name === '%AsyncGeneratorFunction%') { value = getEvalledConstructor('async function* () {}'); } else if (name === '%AsyncGenerator%') { var fn = doEval('%AsyncGeneratorFunction%'); if (fn) { value = fn.prototype; } } else if (name === '%AsyncIteratorPrototype%') { var gen = doEval('%AsyncGenerator%'); if (gen && getProto) { value = getProto(gen.prototype); } } INTRINSICS[name] = value; return value; }; var LEGACY_ALIASES = { __proto__: null, '%ArrayBufferPrototype%': ['ArrayBuffer', 'prototype'], '%ArrayPrototype%': ['Array', 'prototype'], '%ArrayProto_entries%': ['Array', 'prototype', 'entries'], '%ArrayProto_forEach%': ['Array', 'prototype', 'forEach'], '%ArrayProto_keys%': ['Array', 'prototype', 'keys'], '%ArrayProto_values%': ['Array', 'prototype', 'values'], '%AsyncFunctionPrototype%': ['AsyncFunction', 'prototype'], '%AsyncGenerator%': ['AsyncGeneratorFunction', 'prototype'], '%AsyncGeneratorPrototype%': ['AsyncGeneratorFunction', 'prototype', 'prototype'], '%BooleanPrototype%': ['Boolean', 'prototype'], '%DataViewPrototype%': ['DataView', 'prototype'], '%DatePrototype%': ['Date', 'prototype'], '%ErrorPrototype%': ['Error', 'prototype'], '%EvalErrorPrototype%': ['EvalError', 'prototype'], '%Float32ArrayPrototype%': ['Float32Array', 'prototype'], '%Float64ArrayPrototype%': ['Float64Array', 'prototype'], '%FunctionPrototype%': ['Function', 'prototype'], '%Generator%': ['GeneratorFunction', 'prototype'], '%GeneratorPrototype%': ['GeneratorFunction', 'prototype', 'prototype'], '%Int8ArrayPrototype%': ['Int8Array', 'prototype'], '%Int16ArrayPrototype%': ['Int16Array', 'prototype'], '%Int32ArrayPrototype%': ['Int32Array', 'prototype'], '%JSONParse%': ['JSON', 'parse'], '%JSONStringify%': ['JSON', 'stringify'], '%MapPrototype%': ['Map', 'prototype'], '%NumberPrototype%': ['Number', 'prototype'], '%ObjectPrototype%': ['Object', 'prototype'], '%ObjProto_toString%': ['Object', 'prototype', 'toString'], '%ObjProto_valueOf%': ['Object', 'prototype', 'valueOf'], '%PromisePrototype%': ['Promise', 'prototype'], '%PromiseProto_then%': ['Promise', 'prototype', 'then'], '%Promise_all%': ['Promise', 'all'], '%Promise_reject%': ['Promise', 'reject'], '%Promise_resolve%': ['Promise', 'resolve'], '%RangeErrorPrototype%': ['RangeError', 'prototype'], '%ReferenceErrorPrototype%': ['ReferenceError', 'prototype'], '%RegExpPrototype%': ['RegExp', 'prototype'], '%SetPrototype%': ['Set', 'prototype'], '%SharedArrayBufferPrototype%': ['SharedArrayBuffer', 'prototype'], '%StringPrototype%': ['String', 'prototype'], '%SymbolPrototype%': ['Symbol', 'prototype'], '%SyntaxErrorPrototype%': ['SyntaxError', 'prototype'], '%TypedArrayPrototype%': ['TypedArray', 
'prototype'], '%TypeErrorPrototype%': ['TypeError', 'prototype'], '%Uint8ArrayPrototype%': ['Uint8Array', 'prototype'], '%Uint8ClampedArrayPrototype%': ['Uint8ClampedArray', 'prototype'], '%Uint16ArrayPrototype%': ['Uint16Array', 'prototype'], '%Uint32ArrayPrototype%': ['Uint32Array', 'prototype'], '%URIErrorPrototype%': ['URIError', 'prototype'], '%WeakMapPrototype%': ['WeakMap', 'prototype'], '%WeakSetPrototype%': ['WeakSet', 'prototype'] }; var bind = __nccwpck_require__(37564); var hasOwn = __nccwpck_require__(54076); var $concat = bind.call($call, Array.prototype.concat); var $spliceApply = bind.call($apply, Array.prototype.splice); var $replace = bind.call($call, String.prototype.replace); var $strSlice = bind.call($call, String.prototype.slice); var $exec = bind.call($call, RegExp.prototype.exec); /* adapted from https://github.com/lodash/lodash/blob/4.17.15/dist/lodash.js#L6735-L6744 */ var rePropName = /[^%.[\]]+|\[(?:(-?\d+(?:\.\d+)?)|(["'])((?:(?!\2)[^\\]|\\.)*?)\2)\]|(?=(?:\.|\[\])(?:\.|\[\]|%$))/g; var reEscapeChar = /\\(\\)?/g; /** Used to match backslashes in property paths. */ var stringToPath = function stringToPath(string) { var first = $strSlice(string, 0, 1); var last = $strSlice(string, -1); if (first === '%' && last !== '%') { throw new $SyntaxError('invalid intrinsic syntax, expected closing `%`'); } else if (last === '%' && first !== '%') { throw new $SyntaxError('invalid intrinsic syntax, expected opening `%`'); } var result = []; $replace(string, rePropName, function (match, number, quote, subString) { result[result.length] = quote ? $replace(subString, reEscapeChar, '$1') : number || match; }); return result; }; /* end adaptation */ var getBaseIntrinsic = function getBaseIntrinsic(name, allowMissing) { var intrinsicName = name; var alias; if (hasOwn(LEGACY_ALIASES, intrinsicName)) { alias = LEGACY_ALIASES[intrinsicName]; intrinsicName = '%' + alias[0] + '%'; } if (hasOwn(INTRINSICS, intrinsicName)) { var value = INTRINSICS[intrinsicName]; if (value === needsEval) { value = doEval(intrinsicName); } if (typeof value === 'undefined' && !allowMissing) { throw new $TypeError('intrinsic ' + name + ' exists, but is not available. Please file an issue!'); } return { alias: alias, name: intrinsicName, value: value }; } throw new $SyntaxError('intrinsic ' + name + ' does not exist!'); }; module.exports = function GetIntrinsic(name, allowMissing) { if (typeof name !== 'string' || name.length === 0) { throw new $TypeError('intrinsic name must be a non-empty string'); } if (arguments.length > 1 && typeof allowMissing !== 'boolean') { throw new $TypeError('"allowMissing" argument must be a boolean'); } if ($exec(/^%?[^%]*%?$/, name) === null) { throw new $SyntaxError('`%` may not be present anywhere but at the beginning and end of the intrinsic name'); } var parts = stringToPath(name); var intrinsicBaseName = parts.length > 0 ? 
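// The GetIntrinsic function exported here is the `get-intrinsic` helper: '%Name%'-style strings are
// resolved against the INTRINSICS table above, LEGACY_ALIASES maps the old names, and dotted paths
// walk properties of the base intrinsic. A usage sketch (assuming the package's public export):
//
//   const GetIntrinsic = require('get-intrinsic');
//
//   const $slice = GetIntrinsic('%Array.prototype.slice%');   // property path under %Array%
//   const $weakRef = GetIntrinsic('%WeakRef%', true);          // allowMissing: undefined on old engines
//   const $toString = GetIntrinsic('%ObjProto_toString%');     // legacy alias for %Object.prototype.toString%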
parts[0] : ''; var intrinsic = getBaseIntrinsic('%' + intrinsicBaseName + '%', allowMissing); var intrinsicRealName = intrinsic.name; var value = intrinsic.value; var skipFurtherCaching = false; var alias = intrinsic.alias; if (alias) { intrinsicBaseName = alias[0]; $spliceApply(parts, $concat([0, 1], alias)); } for (var i = 1, isOwn = true; i < parts.length; i += 1) { var part = parts[i]; var first = $strSlice(part, 0, 1); var last = $strSlice(part, -1); if ( ( (first === '"' || first === "'" || first === '`') || (last === '"' || last === "'" || last === '`') ) && first !== last ) { throw new $SyntaxError('property names with quotes must have matching quotes'); } if (part === 'constructor' || !isOwn) { skipFurtherCaching = true; } intrinsicBaseName += '.' + part; intrinsicRealName = '%' + intrinsicBaseName + '%'; if (hasOwn(INTRINSICS, intrinsicRealName)) { value = INTRINSICS[intrinsicRealName]; } else if (value != null) { if (!(part in value)) { if (!allowMissing) { throw new $TypeError('base intrinsic for ' + name + ' exists, but the property is not available.'); } return void undefined; } if ($gOPD && (i + 1) >= parts.length) { var desc = $gOPD(value, part); isOwn = !!desc; // By convention, when a data property is converted to an accessor // property to emulate a data property that does not suffer from // the override mistake, that accessor's getter is marked with // an `originalValue` property. Here, when we detect this, we // uphold the illusion by pretending to see that original data // property, i.e., returning the value rather than the getter // itself. if (isOwn && 'get' in desc && !('originalValue' in desc.get)) { value = desc.get; } else { value = value[part]; } } else { isOwn = hasOwn(value, part); value = value[part]; } if (isOwn && !skipFurtherCaching) { INTRINSICS[intrinsicRealName] = value; } } } return value; }; /***/ }), /***/ 91311: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var $Object = __nccwpck_require__(95399); /** @type {import('./Object.getPrototypeOf')} */ module.exports = $Object.getPrototypeOf || null; /***/ }), /***/ 48681: /***/ ((module) => { "use strict"; /** @type {import('./Reflect.getPrototypeOf')} */ module.exports = (typeof Reflect !== 'undefined' && Reflect.getPrototypeOf) || null; /***/ }), /***/ 81967: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var reflectGetProto = __nccwpck_require__(48681); var originalGetProto = __nccwpck_require__(91311); var getDunderProto = __nccwpck_require__(26669); /** @type {import('.')} */ module.exports = reflectGetProto ? function getProto(O) { // @ts-expect-error TS can't narrow inside a closure, for some reason return reflectGetProto(O); } : originalGetProto ? function getProto(O) { if (!O || (typeof O !== 'object' && typeof O !== 'function')) { throw new TypeError('getProto: not an object'); } // @ts-expect-error TS can't narrow inside a closure, for some reason return originalGetProto(O); } : getDunderProto ? function getProto(O) { // @ts-expect-error TS can't narrow inside a closure, for some reason return getDunderProto(O); } : null; /***/ }), /***/ 34810: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2012 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AuthClient = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = exports.DEFAULT_UNIVERSE = void 0; const events_1 = __nccwpck_require__(24434); const gaxios_1 = __nccwpck_require__(97003); const transporters_1 = __nccwpck_require__(67633); const util_1 = __nccwpck_require__(37870); /** * The default cloud universe * * @see {@link AuthJSONOptions.universe_domain} */ exports.DEFAULT_UNIVERSE = 'googleapis.com'; /** * The default {@link AuthClientOptions.eagerRefreshThresholdMillis} */ exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS = 5 * 60 * 1000; class AuthClient extends events_1.EventEmitter { constructor(opts = {}) { var _a, _b, _c, _d, _e; super(); this.credentials = {}; this.eagerRefreshThresholdMillis = exports.DEFAULT_EAGER_REFRESH_THRESHOLD_MILLIS; this.forceRefreshOnFailure = false; this.universeDomain = exports.DEFAULT_UNIVERSE; const options = (0, util_1.originalOrCamelOptions)(opts); // Shared auth options this.apiKey = opts.apiKey; this.projectId = (_a = options.get('project_id')) !== null && _a !== void 0 ? _a : null; this.quotaProjectId = options.get('quota_project_id'); this.credentials = (_b = options.get('credentials')) !== null && _b !== void 0 ? _b : {}; this.universeDomain = (_c = options.get('universe_domain')) !== null && _c !== void 0 ? _c : exports.DEFAULT_UNIVERSE; // Shared client options this.transporter = (_d = opts.transporter) !== null && _d !== void 0 ? _d : new transporters_1.DefaultTransporter(); if (opts.transporterOptions) { this.transporter.defaults = opts.transporterOptions; } if (opts.eagerRefreshThresholdMillis) { this.eagerRefreshThresholdMillis = opts.eagerRefreshThresholdMillis; } this.forceRefreshOnFailure = (_e = opts.forceRefreshOnFailure) !== null && _e !== void 0 ? _e : false; } /** * Return the {@link Gaxios `Gaxios`} instance from the {@link AuthClient.transporter}. * * @expiremental */ get gaxios() { if (this.transporter instanceof gaxios_1.Gaxios) { return this.transporter; } else if (this.transporter instanceof transporters_1.DefaultTransporter) { return this.transporter.instance; } else if ('instance' in this.transporter && this.transporter.instance instanceof gaxios_1.Gaxios) { return this.transporter.instance; } return null; } /** * Sets the auth credentials. */ setCredentials(credentials) { this.credentials = credentials; } /** * Append additional headers, e.g., x-goog-user-project, shared across the * classes inheriting AuthClient. This method should be used by any method * that overrides getRequestMetadataAsync(), which is a shared helper for * setting request information in both gRPC and HTTP API calls. * * @param headers object to append additional headers to. */ addSharedMetadataHeaders(headers) { // quota_project_id, stored in application_default_credentials.json, is set in // the x-goog-user-project header, to indicate an alternate account for // billing and quota: if (!headers['x-goog-user-project'] && // don't override a value the user sets. 
this.quotaProjectId) { headers['x-goog-user-project'] = this.quotaProjectId; } return headers; } /** * Retry config for Auth-related requests. * * @remarks * * This is not a part of the default {@link AuthClient.transporter transporter/gaxios} * config as some downstream APIs would prefer if customers explicitly enable retries, * such as GCS. */ static get RETRY_CONFIG() { return { retry: true, retryConfig: { httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'], }, }; } } exports.AuthClient = AuthClient; /***/ }), /***/ 81261: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var _a, _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AwsClient = void 0; const awsrequestsigner_1 = __nccwpck_require__(27647); const baseexternalclient_1 = __nccwpck_require__(142); const defaultawssecuritycredentialssupplier_1 = __nccwpck_require__(69157); const util_1 = __nccwpck_require__(37870); /** * AWS external account client. This is used for AWS workloads, where * AWS STS GetCallerIdentity serialized signed requests are exchanged for * GCP access token. */ class AwsClient extends baseexternalclient_1.BaseExternalAccountClient { /** * Instantiates an AwsClient instance using the provided JSON * object loaded from an external account credentials file. * An error is thrown if the credential is not a valid AWS credential. * @param options The external account options object typically loaded * from the external account JSON credential file. * @param additionalOptions **DEPRECATED, all options are available in the * `options` parameter.** Optional additional behavior customization options. * These currently customize expiration threshold time and whether to retry * on 401/403 API request errors. */ constructor(options, additionalOptions) { super(options, additionalOptions); const opts = (0, util_1.originalOrCamelOptions)(options); const credentialSource = opts.get('credential_source'); const awsSecurityCredentialsSupplier = opts.get('aws_security_credentials_supplier'); // Validate credential sourcing configuration. 
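// Two mutually exclusive ways of sourcing AWS credentials are accepted by this constructor: a
// declarative `credential_source` block from the external account JSON, or a programmatic
// `aws_security_credentials_supplier`. A sketch of the programmatic form (illustrative values;
// the supplier method names mirror the calls made in retrieveSubjectToken() below):
//
//   const { AwsClient } = require('google-auth-library');
//
//   const client = new AwsClient({
//     type: 'external_account',
//     audience: '//iam.googleapis.com/projects/123456/locations/global/workloadIdentityPools/my-pool/providers/my-aws-provider',
//     subject_token_type: 'urn:ietf:params:aws:token-type:aws4_request',
//     aws_security_credentials_supplier: {
//       getAwsRegion: async () => 'us-east-1',
//       getAwsSecurityCredentials: async () => ({ accessKeyId: '...', secretAccessKey: '...' }),
//     },
//   });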
if (!credentialSource && !awsSecurityCredentialsSupplier) { throw new Error('A credential source or AWS security credentials supplier must be specified.'); } if (credentialSource && awsSecurityCredentialsSupplier) { throw new Error('Only one of credential source or AWS security credentials supplier can be specified.'); } if (awsSecurityCredentialsSupplier) { this.awsSecurityCredentialsSupplier = awsSecurityCredentialsSupplier; this.regionalCredVerificationUrl = __classPrivateFieldGet(_a, _a, "f", _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL); this.credentialSourceType = 'programmatic'; } else { const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); this.environmentId = credentialSourceOpts.get('environment_id'); // This is only required if the AWS region is not available in the // AWS_REGION or AWS_DEFAULT_REGION environment variables. const regionUrl = credentialSourceOpts.get('region_url'); // This is only required if AWS security credentials are not available in // environment variables. const securityCredentialsUrl = credentialSourceOpts.get('url'); const imdsV2SessionTokenUrl = credentialSourceOpts.get('imdsv2_session_token_url'); this.awsSecurityCredentialsSupplier = new defaultawssecuritycredentialssupplier_1.DefaultAwsSecurityCredentialsSupplier({ regionUrl: regionUrl, securityCredentialsUrl: securityCredentialsUrl, imdsV2SessionTokenUrl: imdsV2SessionTokenUrl, }); this.regionalCredVerificationUrl = credentialSourceOpts.get('regional_cred_verification_url'); this.credentialSourceType = 'aws'; // Data validators. this.validateEnvironmentId(); } this.awsRequestSigner = null; this.region = ''; } validateEnvironmentId() { var _b; const match = (_b = this.environmentId) === null || _b === void 0 ? void 0 : _b.match(/^(aws)(\d+)$/); if (!match || !this.regionalCredVerificationUrl) { throw new Error('No valid AWS "credential_source" provided'); } else if (parseInt(match[2], 10) !== 1) { throw new Error(`aws version "${match[2]}" is not supported in the current build.`); } } /** * Triggered when an external subject token is needed to be exchanged for a * GCP access token via GCP STS endpoint. This will call the * {@link AwsSecurityCredentialsSupplier} to retrieve an AWS region and AWS * Security Credentials, then use them to create a signed AWS STS request that * can be exchanged for a GCP access token. * @return A promise that resolves with the external subject token. */ async retrieveSubjectToken() { // Initialize AWS request signer if not already initialized. if (!this.awsRequestSigner) { this.region = await this.awsSecurityCredentialsSupplier.getAwsRegion(this.supplierContext); this.awsRequestSigner = new awsrequestsigner_1.AwsRequestSigner(async () => { return this.awsSecurityCredentialsSupplier.getAwsSecurityCredentials(this.supplierContext); }, this.region); } // Generate signed request to AWS STS GetCallerIdentity API. // Use the required regional endpoint. Otherwise, the request will fail. const options = await this.awsRequestSigner.getRequestOptions({ ..._a.RETRY_CONFIG, url: this.regionalCredVerificationUrl.replace('{region}', this.region), method: 'POST', }); // The GCP STS endpoint expects the headers to be formatted as: // [ // {key: 'x-amz-date', value: '...'}, // {key: 'Authorization', value: '...'}, // ... // ] // And then serialized as: // encodeURIComponent(JSON.stringify({ // url: '...', // method: 'POST', // headers: [{key: 'x-amz-date', value: '...'}, ...] 
// })) const reformattedHeader = []; const extendedHeaders = Object.assign({ // The full, canonical resource name of the workload identity pool // provider, with or without the HTTPS prefix. // Including this header as part of the signature is recommended to // ensure data integrity. 'x-goog-cloud-target-resource': this.audience, }, options.headers); // Reformat header to GCP STS expected format. for (const key in extendedHeaders) { reformattedHeader.push({ key, value: extendedHeaders[key], }); } // Serialize the reformatted signed request. return encodeURIComponent(JSON.stringify({ url: options.url, method: options.method, headers: reformattedHeader, })); } } exports.AwsClient = AwsClient; _a = AwsClient; _AwsClient_DEFAULT_AWS_REGIONAL_CREDENTIAL_VERIFICATION_URL = { value: 'https://sts.{region}.amazonaws.com?Action=GetCallerIdentity&Version=2011-06-15' }; /** * @deprecated AWS client no longer validates the EC2 metadata address. **/ AwsClient.AWS_EC2_METADATA_IPV4_ADDRESS = '169.254.169.254'; /** * @deprecated AWS client no longer validates the EC2 metadata address. **/ AwsClient.AWS_EC2_METADATA_IPV6_ADDRESS = 'fd00:ec2::254'; /***/ }), /***/ 27647: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AwsRequestSigner = void 0; const crypto_1 = __nccwpck_require__(88851); /** AWS Signature Version 4 signing algorithm identifier. */ const AWS_ALGORITHM = 'AWS4-HMAC-SHA256'; /** * The termination string for the AWS credential scope value as defined in * https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html */ const AWS_REQUEST_TYPE = 'aws4_request'; /** * Implements an AWS API request signer based on the AWS Signature Version 4 * signing process. * https://docs.aws.amazon.com/general/latest/gr/signature-version-4.html */ class AwsRequestSigner { /** * Instantiates an AWS API request signer used to send authenticated signed * requests to AWS APIs based on the AWS Signature Version 4 signing process. * This also provides a mechanism to generate the signed request without * sending it. * @param getCredentials A mechanism to retrieve AWS security credentials * when needed. * @param region The AWS region to use. */ constructor(getCredentials, region) { this.getCredentials = getCredentials; this.region = region; this.crypto = (0, crypto_1.createCrypto)(); } /** * Generates the signed request for the provided HTTP request for calling * an AWS API. This follows the steps described at: * https://docs.aws.amazon.com/general/latest/gr/sigv4_signing.html * @param amzOptions The AWS request options that need to be signed. * @return A promise that resolves with the GaxiosOptions containing the * signed HTTP request parameters. */ async getRequestOptions(amzOptions) { if (!amzOptions.url) { throw new Error('"url" is required in "amzOptions"'); } // Stringify JSON requests.
This will be set in the request body of the // generated signed request. const requestPayloadData = typeof amzOptions.data === 'object' ? JSON.stringify(amzOptions.data) : amzOptions.data; const url = amzOptions.url; const method = amzOptions.method || 'GET'; const requestPayload = amzOptions.body || requestPayloadData; const additionalAmzHeaders = amzOptions.headers; const awsSecurityCredentials = await this.getCredentials(); const uri = new URL(url); const headerMap = await generateAuthenticationHeaderMap({ crypto: this.crypto, host: uri.host, canonicalUri: uri.pathname, canonicalQuerystring: uri.search.substr(1), method, region: this.region, securityCredentials: awsSecurityCredentials, requestPayload, additionalAmzHeaders, }); // Append additional optional headers, eg. X-Amz-Target, Content-Type, etc. const headers = Object.assign( // Add x-amz-date if available. headerMap.amzDate ? { 'x-amz-date': headerMap.amzDate } : {}, { Authorization: headerMap.authorizationHeader, host: uri.host, }, additionalAmzHeaders || {}); if (awsSecurityCredentials.token) { Object.assign(headers, { 'x-amz-security-token': awsSecurityCredentials.token, }); } const awsSignedReq = { url, method: method, headers, }; if (typeof requestPayload !== 'undefined') { awsSignedReq.body = requestPayload; } return awsSignedReq; } } exports.AwsRequestSigner = AwsRequestSigner; /** * Creates the HMAC-SHA256 hash of the provided message using the * provided key. * * @param crypto The crypto instance used to facilitate cryptographic * operations. * @param key The HMAC-SHA256 key to use. * @param msg The message to hash. * @return The computed hash bytes. */ async function sign(crypto, key, msg) { return await crypto.signWithHmacSha256(key, msg); } /** * Calculates the signing key used to calculate the signature for * AWS Signature Version 4 based on: * https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html * * @param crypto The crypto instance used to facilitate cryptographic * operations. * @param key The AWS secret access key. * @param dateStamp The '%Y%m%d' date format. * @param region The AWS region. * @param serviceName The AWS service name, eg. sts. * @return The signing key bytes. */ async function getSigningKey(crypto, key, dateStamp, region, serviceName) { const kDate = await sign(crypto, `AWS4${key}`, dateStamp); const kRegion = await sign(crypto, kDate, region); const kService = await sign(crypto, kRegion, serviceName); const kSigning = await sign(crypto, kService, 'aws4_request'); return kSigning; } /** * Generates the authentication header map needed for generating the AWS * Signature Version 4 signed request. * * @param option The options needed to compute the authentication header map. * @return The AWS authentication header map which constitutes of the following * components: amz-date, authorization header and canonical query string. */ async function generateAuthenticationHeaderMap(options) { const additionalAmzHeaders = options.additionalAmzHeaders || {}; const requestPayload = options.requestPayload || ''; // iam.amazonaws.com host => iam service. // sts.us-east-2.amazonaws.com => sts service. const serviceName = options.host.split('.')[0]; const now = new Date(); // Format: '%Y%m%dT%H%M%SZ'. const amzDate = now .toISOString() .replace(/[-:]/g, '') .replace(/\.[0-9]+/, ''); // Format: '%Y%m%d'. const dateStamp = now.toISOString().replace(/[-]/g, '').replace(/T.*/, ''); // Change all additional headers to be lower case. 
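// getSigningKey() above is the standard SigV4 key derivation: four chained HMAC-SHA256 operations over
// the secret key, date, region and service, with the final signature being an HMAC of the string-to-sign
// under that derived key. A standalone sketch using node's crypto module (hypothetical inputs):
//
//   const { createHmac } = require('crypto');
//   const hmac = (key, msg) => createHmac('sha256', key).update(msg).digest();
//
//   function signingKey(secretAccessKey, dateStamp, region, service) {
//     const kDate = hmac(`AWS4${secretAccessKey}`, dateStamp); // dateStamp like '20240131'
//     const kRegion = hmac(kDate, region);                     // e.g. 'us-east-1'
//     const kService = hmac(kRegion, service);                 // e.g. 'sts'
//     return hmac(kService, 'aws4_request');
//   }
//
//   // signature = hex(hmac(signingKey(secret, dateStamp, region, 'sts'), stringToSign))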
const reformattedAdditionalAmzHeaders = {}; Object.keys(additionalAmzHeaders).forEach(key => { reformattedAdditionalAmzHeaders[key.toLowerCase()] = additionalAmzHeaders[key]; }); // Add AWS token if available. if (options.securityCredentials.token) { reformattedAdditionalAmzHeaders['x-amz-security-token'] = options.securityCredentials.token; } // Header keys need to be sorted alphabetically. const amzHeaders = Object.assign({ host: options.host, }, // Previously the date was not fixed with x-amz- and could be provided manually. // https://github.com/boto/botocore/blob/879f8440a4e9ace5d3cf145ce8b3d5e5ffb892ef/tests/unit/auth/aws4_testsuite/get-header-value-trim.req reformattedAdditionalAmzHeaders.date ? {} : { 'x-amz-date': amzDate }, reformattedAdditionalAmzHeaders); let canonicalHeaders = ''; const signedHeadersList = Object.keys(amzHeaders).sort(); signedHeadersList.forEach(key => { canonicalHeaders += `${key}:${amzHeaders[key]}\n`; }); const signedHeaders = signedHeadersList.join(';'); const payloadHash = await options.crypto.sha256DigestHex(requestPayload); // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-canonical-request.html const canonicalRequest = `${options.method}\n` + `${options.canonicalUri}\n` + `${options.canonicalQuerystring}\n` + `${canonicalHeaders}\n` + `${signedHeaders}\n` + `${payloadHash}`; const credentialScope = `${dateStamp}/${options.region}/${serviceName}/${AWS_REQUEST_TYPE}`; // https://docs.aws.amazon.com/general/latest/gr/sigv4-create-string-to-sign.html const stringToSign = `${AWS_ALGORITHM}\n` + `${amzDate}\n` + `${credentialScope}\n` + (await options.crypto.sha256DigestHex(canonicalRequest)); // https://docs.aws.amazon.com/general/latest/gr/sigv4-calculate-signature.html const signingKey = await getSigningKey(options.crypto, options.securityCredentials.secretAccessKey, dateStamp, options.region, serviceName); const signature = await sign(options.crypto, signingKey, stringToSign); // https://docs.aws.amazon.com/general/latest/gr/sigv4-add-signature-to-request.html const authorizationHeader = `${AWS_ALGORITHM} Credential=${options.securityCredentials.accessKeyId}/` + `${credentialScope}, SignedHeaders=${signedHeaders}, ` + `Signature=${(0, crypto_1.fromArrayBufferToHex)(signature)}`; return { // Do not return x-amz-date if date is available. amzDate: reformattedAdditionalAmzHeaders.date ? undefined : amzDate, authorizationHeader, canonicalQuerystring: options.canonicalQuerystring, }; } /***/ }), /***/ 142: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; }; var _BaseExternalAccountClient_instances, _BaseExternalAccountClient_pendingAccessToken, _BaseExternalAccountClient_internalRefreshAccessTokenAsync; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.BaseExternalAccountClient = exports.DEFAULT_UNIVERSE = exports.CLOUD_RESOURCE_MANAGER = exports.EXTERNAL_ACCOUNT_TYPE = exports.EXPIRATION_TIME_OFFSET = void 0; const stream = __nccwpck_require__(2203); const authclient_1 = __nccwpck_require__(34810); const sts = __nccwpck_require__(121); const util_1 = __nccwpck_require__(37870); /** * The required token exchange grant_type: rfc8693#section-2.1 */ const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; /** * The requested token exchange requested_token_type: rfc8693#section-2.1 */ const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; /** The default OAuth scope to request when none is provided. */ const DEFAULT_OAUTH_SCOPE = 'https://www.googleapis.com/auth/cloud-platform'; /** Default impersonated token lifespan in seconds.*/ const DEFAULT_TOKEN_LIFESPAN = 3600; /** * Offset to take into account network delays and server clock skews. */ exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; /** * The credentials JSON file type for external account clients. * There are 3 types of JSON configs: * 1. authorized_user => Google end user credential * 2. service_account => Google service account credential * 3. external_Account => non-GCP service (eg. AWS, Azure, K8s) */ exports.EXTERNAL_ACCOUNT_TYPE = 'external_account'; /** * Cloud resource manager URL used to retrieve project information. * * @deprecated use {@link BaseExternalAccountClient.cloudResourceManagerURL} instead **/ exports.CLOUD_RESOURCE_MANAGER = 'https://cloudresourcemanager.googleapis.com/v1/projects/'; /** The workforce audience pattern. */ const WORKFORCE_AUDIENCE_PATTERN = '//iam\\.googleapis\\.com/locations/[^/]+/workforcePools/[^/]+/providers/.+'; const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/token'; // eslint-disable-next-line @typescript-eslint/no-var-requires const pkg = __nccwpck_require__(96066); /** * For backwards compatibility. */ var authclient_2 = __nccwpck_require__(34810); Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_2.DEFAULT_UNIVERSE; } })); /** * Base external account client. This is used to instantiate AuthClients for * exchanging external account credentials for GCP access token and authorizing * requests to GCP APIs. * The base class implements common logic for exchanging various type of * external credentials for GCP access token. 
The logic of determining and * retrieving the external credential based on the environment and * credential_source will be left for the subclasses. */ class BaseExternalAccountClient extends authclient_1.AuthClient { /** * Instantiate a BaseExternalAccountClient instance using the provided JSON * object loaded from an external account credentials file. * @param options The external account options object typically loaded * from the external account JSON credential file. The camelCased options * are aliases for the snake_cased options. * @param additionalOptions **DEPRECATED, all options are available in the * `options` parameter.** Optional additional behavior customization options. * These currently customize expiration threshold time and whether to retry * on 401/403 API request errors. */ constructor(options, additionalOptions) { var _a; super({ ...options, ...additionalOptions }); _BaseExternalAccountClient_instances.add(this); /** * A pending access token request. Used for concurrent calls. */ _BaseExternalAccountClient_pendingAccessToken.set(this, null); const opts = (0, util_1.originalOrCamelOptions)(options); const type = opts.get('type'); if (type && type !== exports.EXTERNAL_ACCOUNT_TYPE) { throw new Error(`Expected "${exports.EXTERNAL_ACCOUNT_TYPE}" type but ` + `received "${options.type}"`); } const clientId = opts.get('client_id'); const clientSecret = opts.get('client_secret'); const tokenUrl = (_a = opts.get('token_url')) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain); const subjectTokenType = opts.get('subject_token_type'); const workforcePoolUserProject = opts.get('workforce_pool_user_project'); const serviceAccountImpersonationUrl = opts.get('service_account_impersonation_url'); const serviceAccountImpersonation = opts.get('service_account_impersonation'); const serviceAccountImpersonationLifetime = (0, util_1.originalOrCamelOptions)(serviceAccountImpersonation).get('token_lifetime_seconds'); this.cloudResourceManagerURL = new URL(opts.get('cloud_resource_manager_url') || `https://cloudresourcemanager.${this.universeDomain}/v1/projects/`); if (clientId) { this.clientAuth = { confidentialClientType: 'basic', clientId, clientSecret, }; } this.stsCredential = new sts.StsCredentials(tokenUrl, this.clientAuth); this.scopes = opts.get('scopes') || [DEFAULT_OAUTH_SCOPE]; this.cachedAccessToken = null; this.audience = opts.get('audience'); this.subjectTokenType = subjectTokenType; this.workforcePoolUserProject = workforcePoolUserProject; const workforceAudiencePattern = new RegExp(WORKFORCE_AUDIENCE_PATTERN); if (this.workforcePoolUserProject && !this.audience.match(workforceAudiencePattern)) { throw new Error('workforcePoolUserProject should not be set for non-workforce pool ' + 'credentials.'); } this.serviceAccountImpersonationUrl = serviceAccountImpersonationUrl; this.serviceAccountImpersonationLifetime = serviceAccountImpersonationLifetime; if (this.serviceAccountImpersonationLifetime) { this.configLifetimeRequested = true; } else { this.configLifetimeRequested = false; this.serviceAccountImpersonationLifetime = DEFAULT_TOKEN_LIFESPAN; } this.projectNumber = this.getProjectNumber(this.audience); this.supplierContext = { audience: this.audience, subjectTokenType: this.subjectTokenType, transporter: this.transporter, }; } /** The service account email to be impersonated, if available. 
*/ getServiceAccountEmail() { var _a; if (this.serviceAccountImpersonationUrl) { if (this.serviceAccountImpersonationUrl.length > 256) { /** * Prevents DOS attacks. * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/84} **/ throw new RangeError(`URL is too long: ${this.serviceAccountImpersonationUrl}`); } // Parse email from URL. The format looks as follows: // https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/name@project-id.iam.gserviceaccount.com:generateAccessToken const re = /serviceAccounts\/(?<email>[^:]+):generateAccessToken$/; const result = re.exec(this.serviceAccountImpersonationUrl); return ((_a = result === null || result === void 0 ? void 0 : result.groups) === null || _a === void 0 ? void 0 : _a.email) || null; } return null; } /** * Provides a mechanism to inject GCP access tokens directly. * When the provided credential expires, a new credential, using the * external account options, is retrieved. * @param credentials The Credentials object to set on the current client. */ setCredentials(credentials) { super.setCredentials(credentials); this.cachedAccessToken = credentials; } /** * @return A promise that resolves with the current GCP access token * response. If the current credential is expired, a new one is retrieved. */ async getAccessToken() { // If cached access token is unavailable or expired, force refresh. if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { await this.refreshAccessTokenAsync(); } // Return GCP access token in GetAccessTokenResponse format. return { token: this.cachedAccessToken.access_token, res: this.cachedAccessToken.res, }; } /** * The main authentication interface. It takes an optional url which when * present is the endpoint being accessed, and returns a Promise which * resolves with authorization header fields. * * The result has the form: * { Authorization: 'Bearer <access_token>' } */ async getRequestHeaders() { const accessTokenResponse = await this.getAccessToken(); const headers = { Authorization: `Bearer ${accessTokenResponse.token}`, }; return this.addSharedMetadataHeaders(headers); } request(opts, callback) { if (callback) { this.requestAsync(opts).then(r => callback(null, r), e => { return callback(e, e.response); }); } else { return this.requestAsync(opts); } } /** * @return A promise that resolves with the project ID corresponding to the * current workload identity pool or current workforce pool if * determinable. For workforce pool credential, it returns the project ID * corresponding to the workforcePoolUserProject. * This is introduced to match the current pattern of using the Auth * library: * const projectId = await auth.getProjectId(); * const url = `https://dns.googleapis.com/dns/v1/projects/${projectId}`; * const res = await client.request({ url }); * The resource may not have permission * (resourcemanager.projects.get) to call this API or the required * scopes may not be selected: * https://cloud.google.com/resource-manager/reference/rest/v1/projects/get#authorization-scopes */ async getProjectId() { const projectNumber = this.projectNumber || this.workforcePoolUserProject; if (this.projectId) { // Return previously determined project ID. return this.projectId; } else if (projectNumber) { // Preferable not to use request() to avoid retrial policies.
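// Illustrative shape of the call below (placeholder project number): with
// projectNumber '123456' and the default resource manager URL this amounts to
//   GET https://cloudresourcemanager.googleapis.com/v1/projects/123456
//   Authorization: Bearer <token from getRequestHeaders()>
// and the returned response.data.projectId is cached on this.projectId.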
const headers = await this.getRequestHeaders(); const response = await this.transporter.request({ ...BaseExternalAccountClient.RETRY_CONFIG, headers, url: `${this.cloudResourceManagerURL.toString()}${projectNumber}`, responseType: 'json', }); this.projectId = response.data.projectId; return this.projectId; } return null; } /** * Authenticates the provided HTTP request, processes it and resolves with the * returned response. * @param opts The HTTP request options. * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. * @return A promise that resolves with the successful response. */ async requestAsync(opts, reAuthRetried = false) { let response; try { const requestHeaders = await this.getRequestHeaders(); opts.headers = opts.headers || {}; if (requestHeaders && requestHeaders['x-goog-user-project']) { opts.headers['x-goog-user-project'] = requestHeaders['x-goog-user-project']; } if (requestHeaders && requestHeaders.Authorization) { opts.headers.Authorization = requestHeaders.Authorization; } response = await this.transporter.request(opts); } catch (e) { const res = e.response; if (res) { const statusCode = res.status; // Retry the request for metadata if the following criteria are true: // - We haven't already retried. It only makes sense to retry once. // - The response was a 401 or a 403 // - The request didn't send a readableStream // - forceRefreshOnFailure is true const isReadableStream = res.config.data instanceof stream.Readable; const isAuthErr = statusCode === 401 || statusCode === 403; if (!reAuthRetried && isAuthErr && !isReadableStream && this.forceRefreshOnFailure) { await this.refreshAccessTokenAsync(); return await this.requestAsync(opts, true); } } throw e; } return response; } /** * Forces token refresh, even if unexpired tokens are currently cached. * External credentials are exchanged for GCP access tokens via the token * exchange endpoint and other settings provided in the client options * object. * If the service_account_impersonation_url is provided, an additional * step to exchange the external account GCP access token for a service * account impersonated token is performed. * @return A promise that resolves with the fresh GCP access tokens. */ async refreshAccessTokenAsync() { // Use an existing access token request, or cache a new one __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f") || __classPrivateFieldGet(this, _BaseExternalAccountClient_instances, "m", _BaseExternalAccountClient_internalRefreshAccessTokenAsync).call(this), "f"); try { return await __classPrivateFieldGet(this, _BaseExternalAccountClient_pendingAccessToken, "f"); } finally { // clear pending access token for future requests __classPrivateFieldSet(this, _BaseExternalAccountClient_pendingAccessToken, null, "f"); } } /** * Returns the workload identity pool project number if it is determinable * from the audience resource name. * @param audience The STS audience used to determine the project number. * @return The project number associated with the workload identity pool, if * this can be determined from the STS audience field. Otherwise, null is * returned. */ getProjectNumber(audience) { // STS audience pattern: // //iam.googleapis.com/projects/$PROJECT_NUMBER/locations/... 
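// For example (placeholder audience):
//   '//iam.googleapis.com/projects/123456/locations/global/workloadIdentityPools/my-pool/providers/my-provider'
// yields '123456'; audiences without a '/projects/<number>/' segment
// (e.g. workforce pool audiences) yield null.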
const match = audience.match(/\/projects\/([^/]+)/); if (!match) { return null; } return match[1]; } /** * Exchanges an external account GCP access token for a service * account impersonated access token using iamcredentials * GenerateAccessToken API. * @param token The access token to exchange for a service account access * token. * @return A promise that resolves with the service account impersonated * credentials response. */ async getImpersonatedAccessToken(token) { const opts = { ...BaseExternalAccountClient.RETRY_CONFIG, url: this.serviceAccountImpersonationUrl, method: 'POST', headers: { 'Content-Type': 'application/json', Authorization: `Bearer ${token}`, }, data: { scope: this.getScopesArray(), lifetime: this.serviceAccountImpersonationLifetime + 's', }, responseType: 'json', }; const response = await this.transporter.request(opts); const successResponse = response.data; return { access_token: successResponse.accessToken, // Convert from ISO format to timestamp. expiry_date: new Date(successResponse.expireTime).getTime(), res: response, }; } /** * Returns whether the provided credentials are expired or not. * If there is no expiry time, assumes the token is not expired or expiring. * @param accessToken The credentials to check for expiration. * @return Whether the credentials are expired or not. */ isExpired(accessToken) { const now = new Date().getTime(); return accessToken.expiry_date ? now >= accessToken.expiry_date - this.eagerRefreshThresholdMillis : false; } /** * @return The list of scopes for the requested GCP access token. */ getScopesArray() { // Since scopes can be provided as string or array, the type should // be normalized. if (typeof this.scopes === 'string') { return [this.scopes]; } return this.scopes || [DEFAULT_OAUTH_SCOPE]; } getMetricsHeaderValue() { const nodeVersion = process.version.replace(/^v/, ''); const saImpersonation = this.serviceAccountImpersonationUrl !== undefined; const credentialSourceType = this.credentialSourceType ? this.credentialSourceType : 'unknown'; return `gl-node/${nodeVersion} auth/${pkg.version} google-byoid-sdk source/${credentialSourceType} sa-impersonation/${saImpersonation} config-lifetime/${this.configLifetimeRequested}`; } } exports.BaseExternalAccountClient = BaseExternalAccountClient; _BaseExternalAccountClient_pendingAccessToken = new WeakMap(), _BaseExternalAccountClient_instances = new WeakSet(), _BaseExternalAccountClient_internalRefreshAccessTokenAsync = async function _BaseExternalAccountClient_internalRefreshAccessTokenAsync() { // Retrieve the external credential. const subjectToken = await this.retrieveSubjectToken(); // Construct the STS credentials options. const stsCredentialsOptions = { grantType: STS_GRANT_TYPE, audience: this.audience, requestedTokenType: STS_REQUEST_TOKEN_TYPE, subjectToken, subjectTokenType: this.subjectTokenType, // generateAccessToken requires the provided access token to have // scopes: // https://www.googleapis.com/auth/iam or // https://www.googleapis.com/auth/cloud-platform // The new service account access token scopes will match the user // provided ones. scope: this.serviceAccountImpersonationUrl ? [DEFAULT_OAUTH_SCOPE] : this.getScopesArray(), }; // Exchange the external credentials for a GCP access token. // Client auth is prioritized over passing the workforcePoolUserProject // parameter for STS token exchange. const additionalOptions = !this.clientAuth && this.workforcePoolUserProject ? 
{ userProject: this.workforcePoolUserProject } : undefined; const additionalHeaders = { 'x-goog-api-client': this.getMetricsHeaderValue(), }; const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, additionalHeaders, additionalOptions); if (this.serviceAccountImpersonationUrl) { this.cachedAccessToken = await this.getImpersonatedAccessToken(stsResponse.access_token); } else if (stsResponse.expires_in) { // Save response in cached access token. this.cachedAccessToken = { access_token: stsResponse.access_token, expiry_date: new Date().getTime() + stsResponse.expires_in * 1000, res: stsResponse.res, }; } else { // Save response in cached access token. this.cachedAccessToken = { access_token: stsResponse.access_token, res: stsResponse.res, }; } // Save credentials. this.credentials = {}; Object.assign(this.credentials, this.cachedAccessToken); delete this.credentials.res; // Trigger tokens event to notify external listeners. this.emit('tokens', { refresh_token: null, expiry_date: this.cachedAccessToken.expiry_date, access_token: this.cachedAccessToken.access_token, token_type: 'Bearer', id_token: null, }); // Return the cached access token. return this.cachedAccessToken; }; /***/ }), /***/ 20977: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2013 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Compute = void 0; const gaxios_1 = __nccwpck_require__(97003); const gcpMetadata = __nccwpck_require__(23046); const oauth2client_1 = __nccwpck_require__(10091); class Compute extends oauth2client_1.OAuth2Client { /** * Google Compute Engine service account credentials. * * Retrieve access token from the metadata server. * See: https://cloud.google.com/compute/docs/access/authenticate-workloads#applications */ constructor(options = {}) { super(options); // Start with an expired refresh token, which will automatically be // refreshed before the first API call is made. this.credentials = { expiry_date: 1, refresh_token: 'compute-placeholder' }; this.serviceAccountEmail = options.serviceAccountEmail || 'default'; this.scopes = Array.isArray(options.scopes) ? options.scopes : options.scopes ? [options.scopes] : []; } /** * Refreshes the access token. 
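 * Roughly, this reads the metadata path
 * `service-accounts/<serviceAccountEmail>/token` (adding a comma-separated
 * `scopes` query parameter when scopes are configured); the metadata server
 * typically answers with `{ access_token, expires_in, token_type }`, and the
 * relative `expires_in` is converted below into an absolute `expiry_date`.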
* @param refreshToken Unused parameter */ async refreshTokenNoCache( // eslint-disable-next-line @typescript-eslint/no-unused-vars refreshToken) { const tokenPath = `service-accounts/${this.serviceAccountEmail}/token`; let data; try { const instanceOptions = { property: tokenPath, }; if (this.scopes.length > 0) { instanceOptions.params = { scopes: this.scopes.join(','), }; } data = await gcpMetadata.instance(instanceOptions); } catch (e) { if (e instanceof gaxios_1.GaxiosError) { e.message = `Could not refresh access token: ${e.message}`; this.wrapError(e); } throw e; } const tokens = data; if (data && data.expires_in) { tokens.expiry_date = new Date().getTime() + data.expires_in * 1000; delete tokens.expires_in; } this.emit('tokens', tokens); return { tokens, res: null }; } /** * Fetches an ID token. * @param targetAudience the audience for the fetched ID token. */ async fetchIdToken(targetAudience) { const idTokenPath = `service-accounts/${this.serviceAccountEmail}/identity` + `?format=full&audience=${targetAudience}`; let idToken; try { const instanceOptions = { property: idTokenPath, }; idToken = await gcpMetadata.instance(instanceOptions); } catch (e) { if (e instanceof Error) { e.message = `Could not fetch ID token: ${e.message}`; } throw e; } return idToken; } wrapError(e) { const res = e.response; if (res && res.status) { e.status = res.status; if (res.status === 403) { e.message = 'A Forbidden error was returned while attempting to retrieve an access ' + 'token for the Compute Engine built-in service account. This may be because the Compute ' + 'Engine instance does not have the correct permission scopes specified: ' + e.message; } else if (res.status === 404) { e.message = 'A Not Found error was returned while attempting to retrieve an access' + 'token for the Compute Engine built-in service account. This may be because the Compute ' + 'Engine instance does not have any permission scopes specified: ' + e.message; } } } } exports.Compute = Compute; /***/ }), /***/ 69157: /***/ (function(__unused_webpack_module, exports) { "use strict"; // Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); }; var _DefaultAwsSecurityCredentialsSupplier_instances, _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DefaultAwsSecurityCredentialsSupplier = void 0; /** * Internal AWS security credentials supplier implementation used by {@link AwsClient} * when a credential source is provided instead of a user defined supplier. * The logic is summarized as: * 1. If imdsv2_session_token_url is provided in the credential source, then * fetch the aws session token and include it in the headers of the * metadata requests. This is a requirement for IDMSv2 but optional * for IDMSv1. * 2. Retrieve AWS region from availability-zone. * 3a. Check AWS credentials in environment variables. If not found, get * from security-credentials endpoint. * 3b. Get AWS credentials from security-credentials endpoint. In order * to retrieve this, the AWS role needs to be determined by calling * security-credentials endpoint without any argument. Then the * credentials can be retrieved via: security-credentials/role_name * 4. Generate the signed request to AWS STS GetCallerIdentity action. * 5. Inject x-goog-cloud-target-resource into header and serialize the * signed request. This will be the subject-token to pass to GCP STS. */ class DefaultAwsSecurityCredentialsSupplier { /** * Instantiates a new DefaultAwsSecurityCredentialsSupplier using information * from the credential_source stored in the ADC file. * @param opts The default aws security credentials supplier options object to * build the supplier with. */ constructor(opts) { _DefaultAwsSecurityCredentialsSupplier_instances.add(this); this.regionUrl = opts.regionUrl; this.securityCredentialsUrl = opts.securityCredentialsUrl; this.imdsV2SessionTokenUrl = opts.imdsV2SessionTokenUrl; this.additionalGaxiosOptions = opts.additionalGaxiosOptions; } /** * Returns the active AWS region. This first checks to see if the region * is available as an environment variable. If it is not, then the supplier * will call the region URL. * @param context {@link ExternalAccountSupplierContext} from the calling * {@link AwsClient}, contains the requested audience and subject token type * for the external account identity. * @return A promise that resolves with the AWS region string. */ async getAwsRegion(context) { // Priority order for region determination: // AWS_REGION > AWS_DEFAULT_REGION > metadata server. 
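// Illustrative outcomes (placeholder values): with AWS_REGION=eu-west-1 set,
// 'eu-west-1' is returned immediately; otherwise the configured region_url is
// queried, an availability zone such as 'us-east-2b' comes back, and the
// trailing zone letter is dropped below to produce the region 'us-east-2'.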
if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get)) { return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get); } const metadataHeaders = {}; if (!__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get) && this.imdsV2SessionTokenUrl) { metadataHeaders['x-aws-ec2-metadata-token'] = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); } if (!this.regionUrl) { throw new Error('Unable to determine AWS region due to missing ' + '"options.credential_source.region_url"'); } const opts = { ...this.additionalGaxiosOptions, url: this.regionUrl, method: 'GET', responseType: 'text', headers: metadataHeaders, }; const response = await context.transporter.request(opts); // Remove last character. For example, if us-east-2b is returned, // the region would be us-east-2. return response.data.substr(0, response.data.length - 1); } /** * Returns AWS security credentials. This first checks to see if the credentials * is available as environment variables. If it is not, then the supplier * will call the security credentials URL. * @param context {@link ExternalAccountSupplierContext} from the calling * {@link AwsClient}, contains the requested audience and subject token type * for the external account identity. * @return A promise that resolves with the AWS security credentials. */ async getAwsSecurityCredentials(context) { // Check environment variables for permanent credentials first. // https://docs.aws.amazon.com/general/latest/gr/aws-sec-cred-types.html if (__classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get)) { return __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "a", _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get); } const metadataHeaders = {}; if (this.imdsV2SessionTokenUrl) { metadataHeaders['x-aws-ec2-metadata-token'] = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken).call(this, context.transporter); } // Since the role on a VM can change, we don't need to cache it. const roleName = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName).call(this, metadataHeaders, context.transporter); // Temporary credentials typically last for several hours. // Expiration is returned in response. // Consider future optimization of this logic to cache AWS tokens // until their natural expiration. 
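// Sketch of the two metadata calls that follow (placeholder role name):
//   GET <security_credentials_url>              -> 'my-ec2-role'
//   GET <security_credentials_url>/my-ec2-role  -> { AccessKeyId, SecretAccessKey, Token, Expiration, ... }
// The result is then normalized to { accessKeyId, secretAccessKey, token }.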
const awsCreds = await __classPrivateFieldGet(this, _DefaultAwsSecurityCredentialsSupplier_instances, "m", _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials).call(this, roleName, metadataHeaders, context.transporter); return { accessKeyId: awsCreds.AccessKeyId, secretAccessKey: awsCreds.SecretAccessKey, token: awsCreds.Token, }; } } exports.DefaultAwsSecurityCredentialsSupplier = DefaultAwsSecurityCredentialsSupplier; _DefaultAwsSecurityCredentialsSupplier_instances = new WeakSet(), _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken = /** * @param transporter The transporter to use for requests. * @return A promise that resolves with the IMDSv2 Session Token. */ async function _DefaultAwsSecurityCredentialsSupplier_getImdsV2SessionToken(transporter) { const opts = { ...this.additionalGaxiosOptions, url: this.imdsV2SessionTokenUrl, method: 'PUT', responseType: 'text', headers: { 'x-aws-ec2-metadata-token-ttl-seconds': '300' }, }; const response = await transporter.request(opts); return response.data; }, _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName = /** * @param headers The headers to be used in the metadata request. * @param transporter The transporter to use for requests. * @return A promise that resolves with the assigned role to the current * AWS VM. This is needed for calling the security-credentials endpoint. */ async function _DefaultAwsSecurityCredentialsSupplier_getAwsRoleName(headers, transporter) { if (!this.securityCredentialsUrl) { throw new Error('Unable to determine AWS role name due to missing ' + '"options.credential_source.url"'); } const opts = { ...this.additionalGaxiosOptions, url: this.securityCredentialsUrl, method: 'GET', responseType: 'text', headers: headers, }; const response = await transporter.request(opts); return response.data; }, _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials = /** * Retrieves the temporary AWS credentials by calling the security-credentials * endpoint as specified in the `credential_source` object. * @param roleName The role attached to the current VM. * @param headers The headers to be used in the metadata request. * @param transporter The transporter to use for requests. * @return A promise that resolves with the temporary AWS credentials * needed for creating the GetCallerIdentity signed request. */ async function _DefaultAwsSecurityCredentialsSupplier_retrieveAwsSecurityCredentials(roleName, headers, transporter) { const response = await transporter.request({ ...this.additionalGaxiosOptions, url: `${this.securityCredentialsUrl}/${roleName}`, responseType: 'json', headers: headers, }); return response.data; }, _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_regionFromEnv_get() { // The AWS region can be provided through AWS_REGION or AWS_DEFAULT_REGION. // Only one is required. return (process.env['AWS_REGION'] || process.env['AWS_DEFAULT_REGION'] || null); }, _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get = function _DefaultAwsSecurityCredentialsSupplier_securityCredentialsFromEnv_get() { // Both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY are required. 
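// AWS_SESSION_TOKEN is optional and only present for temporary credentials;
// when either required variable is missing this getter returns null and the
// metadata-server flow above is used instead.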
if (process.env['AWS_ACCESS_KEY_ID'] && process.env['AWS_SECRET_ACCESS_KEY']) { return { accessKeyId: process.env['AWS_ACCESS_KEY_ID'], secretAccessKey: process.env['AWS_SECRET_ACCESS_KEY'], token: process.env['AWS_SESSION_TOKEN'], }; } return null; }; /***/ }), /***/ 77556: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DownscopedClient = exports.EXPIRATION_TIME_OFFSET = exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = void 0; const stream = __nccwpck_require__(2203); const authclient_1 = __nccwpck_require__(34810); const sts = __nccwpck_require__(121); /** * The required token exchange grant_type: rfc8693#section-2.1 */ const STS_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:token-exchange'; /** * The requested token exchange requested_token_type: rfc8693#section-2.1 */ const STS_REQUEST_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; /** * The requested token exchange subject_token_type: rfc8693#section-2.1 */ const STS_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:access_token'; /** * The maximum number of access boundary rules a Credential Access Boundary * can contain. */ exports.MAX_ACCESS_BOUNDARY_RULES_COUNT = 10; /** * Offset to take into account network delays and server clock skews. */ exports.EXPIRATION_TIME_OFFSET = 5 * 60 * 1000; /** * Defines a set of Google credentials that are downscoped from an existing set * of Google OAuth2 credentials. This is useful to restrict the Identity and * Access Management (IAM) permissions that a short-lived credential can use. * The common pattern of usage is to have a token broker with elevated access * generate these downscoped credentials from higher access source credentials * and pass the downscoped short-lived access tokens to a token consumer via * some secure authenticated channel for limited access to Google Cloud Storage * resources. */ class DownscopedClient extends authclient_1.AuthClient { /** * Instantiates a downscoped client object using the provided source * AuthClient and credential access boundary rules. * To downscope permissions of a source AuthClient, a Credential Access * Boundary that specifies which resources the new credential can access, as * well as an upper bound on the permissions that are available on each * resource, has to be defined. A downscoped client can then be instantiated * using the source AuthClient and the Credential Access Boundary. * @param authClient The source AuthClient to be downscoped based on the * provided Credential Access Boundary rules. * @param credentialAccessBoundary The Credential Access Boundary which * contains a list of access boundary rules. Each rule contains information * on the resource that the rule applies to, the upper bound of the * permissions that are available on that resource and an optional * condition to further restrict permissions. 
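 * As an illustration (the accessBoundaryRules shape is validated by the
 * constructor below; the availableResource field name and all values here are
 * placeholders for this sketch), a minimal Credential Access Boundary might
 * look like:
 *   {
 *     accessBoundary: {
 *       accessBoundaryRules: [{
 *         availableResource: '//storage.googleapis.com/projects/_/buckets/my-bucket',
 *         availablePermissions: ['inRole:roles/storage.objectViewer'],
 *       }],
 *     },
 *   }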
* @param additionalOptions **DEPRECATED, set this in the provided `authClient`.** * Optional additional behavior customization options. * @param quotaProjectId **DEPRECATED, set this in the provided `authClient`.** * Optional quota project id for setting up in the x-goog-user-project header. */ constructor(authClient, credentialAccessBoundary, additionalOptions, quotaProjectId) { super({ ...additionalOptions, quotaProjectId }); this.authClient = authClient; this.credentialAccessBoundary = credentialAccessBoundary; // Check 1-10 Access Boundary Rules are defined within Credential Access // Boundary. if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length === 0) { throw new Error('At least one access boundary rule needs to be defined.'); } else if (credentialAccessBoundary.accessBoundary.accessBoundaryRules.length > exports.MAX_ACCESS_BOUNDARY_RULES_COUNT) { throw new Error('The provided access boundary has more than ' + `${exports.MAX_ACCESS_BOUNDARY_RULES_COUNT} access boundary rules.`); } // Check at least one permission should be defined in each Access Boundary // Rule. for (const rule of credentialAccessBoundary.accessBoundary .accessBoundaryRules) { if (rule.availablePermissions.length === 0) { throw new Error('At least one permission should be defined in access boundary rules.'); } } this.stsCredential = new sts.StsCredentials(`https://sts.${this.universeDomain}/v1/token`); this.cachedDownscopedAccessToken = null; } /** * Provides a mechanism to inject Downscoped access tokens directly. * The expiry_date field is required to facilitate determination of the token * expiration which would make it easier for the token consumer to handle. * @param credentials The Credentials object to set on the current client. */ setCredentials(credentials) { if (!credentials.expiry_date) { throw new Error('The access token expiry_date field is missing in the provided ' + 'credentials.'); } super.setCredentials(credentials); this.cachedDownscopedAccessToken = credentials; } async getAccessToken() { // If the cached access token is unavailable or expired, force refresh. // The Downscoped access token will be returned in // DownscopedAccessTokenResponse format. if (!this.cachedDownscopedAccessToken || this.isExpired(this.cachedDownscopedAccessToken)) { await this.refreshAccessTokenAsync(); } // Return Downscoped access token in DownscopedAccessTokenResponse format. return { token: this.cachedDownscopedAccessToken.access_token, expirationTime: this.cachedDownscopedAccessToken.expiry_date, res: this.cachedDownscopedAccessToken.res, }; } /** * The main authentication interface. It takes an optional url which when * present is the endpoint being accessed, and returns a Promise which * resolves with authorization header fields. * * The result has the form: * { Authorization: 'Bearer ' } */ async getRequestHeaders() { const accessTokenResponse = await this.getAccessToken(); const headers = { Authorization: `Bearer ${accessTokenResponse.token}`, }; return this.addSharedMetadataHeaders(headers); } request(opts, callback) { if (callback) { this.requestAsync(opts).then(r => callback(null, r), e => { return callback(e, e.response); }); } else { return this.requestAsync(opts); } } /** * Authenticates the provided HTTP request, processes it and resolves with the * returned response. * @param opts The HTTP request options. * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure * @return A promise that resolves with the successful response. 
*/ async requestAsync(opts, reAuthRetried = false) { let response; try { const requestHeaders = await this.getRequestHeaders(); opts.headers = opts.headers || {}; if (requestHeaders && requestHeaders['x-goog-user-project']) { opts.headers['x-goog-user-project'] = requestHeaders['x-goog-user-project']; } if (requestHeaders && requestHeaders.Authorization) { opts.headers.Authorization = requestHeaders.Authorization; } response = await this.transporter.request(opts); } catch (e) { const res = e.response; if (res) { const statusCode = res.status; // Retry the request for metadata if the following criteria are true: // - We haven't already retried. It only makes sense to retry once. // - The response was a 401 or a 403 // - The request didn't send a readableStream // - forceRefreshOnFailure is true const isReadableStream = res.config.data instanceof stream.Readable; const isAuthErr = statusCode === 401 || statusCode === 403; if (!reAuthRetried && isAuthErr && !isReadableStream && this.forceRefreshOnFailure) { await this.refreshAccessTokenAsync(); return await this.requestAsync(opts, true); } } throw e; } return response; } /** * Forces token refresh, even if unexpired tokens are currently cached. * GCP access tokens are retrieved from authclient object/source credential. * Then GCP access tokens are exchanged for downscoped access tokens via the * token exchange endpoint. * @return A promise that resolves with the fresh downscoped access token. */ async refreshAccessTokenAsync() { var _a; // Retrieve GCP access token from source credential. const subjectToken = (await this.authClient.getAccessToken()).token; // Construct the STS credentials options. const stsCredentialsOptions = { grantType: STS_GRANT_TYPE, requestedTokenType: STS_REQUEST_TOKEN_TYPE, subjectToken: subjectToken, subjectTokenType: STS_SUBJECT_TOKEN_TYPE, }; // Exchange the source AuthClient access token for a Downscoped access // token. const stsResponse = await this.stsCredential.exchangeToken(stsCredentialsOptions, undefined, this.credentialAccessBoundary); /** * The STS endpoint will only return the expiration time for the downscoped * access token if the original access token represents a service account. * The downscoped token's expiration time will always match the source * credential expiration. When no expires_in is returned, we can copy the * source credential's expiration time. */ const sourceCredExpireDate = ((_a = this.authClient.credentials) === null || _a === void 0 ? void 0 : _a.expiry_date) || null; const expiryDate = stsResponse.expires_in ? new Date().getTime() + stsResponse.expires_in * 1000 : sourceCredExpireDate; // Save response in cached access token. this.cachedDownscopedAccessToken = { access_token: stsResponse.access_token, expiry_date: expiryDate, res: stsResponse.res, }; // Save credentials. this.credentials = {}; Object.assign(this.credentials, this.cachedDownscopedAccessToken); delete this.credentials.res; // Trigger tokens event to notify external listeners. this.emit('tokens', { refresh_token: null, expiry_date: this.cachedDownscopedAccessToken.expiry_date, access_token: this.cachedDownscopedAccessToken.access_token, token_type: 'Bearer', id_token: null, }); // Return the cached access token. return this.cachedDownscopedAccessToken; } /** * Returns whether the provided credentials are expired or not. * If there is no expiry time, assumes the token is not expired or expiring. * @param downscopedAccessToken The credentials to check for expiration. * @return Whether the credentials are expired or not. 
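 * For example, with expiry_date = 1700000000000 and
 * eagerRefreshThresholdMillis = 300000 (5 minutes), the token is reported as
 * expired from 1699999700000 onward, i.e. five minutes before it actually
 * lapses, so callers refresh early.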
*/ isExpired(downscopedAccessToken) { const now = new Date().getTime(); return downscopedAccessToken.expiry_date ? now >= downscopedAccessToken.expiry_date - this.eagerRefreshThresholdMillis : false; } } exports.DownscopedClient = DownscopedClient; /***/ }), /***/ 60963: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2018 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.GCPEnv = void 0; exports.clear = clear; exports.getEnv = getEnv; const gcpMetadata = __nccwpck_require__(23046); var GCPEnv; (function (GCPEnv) { GCPEnv["APP_ENGINE"] = "APP_ENGINE"; GCPEnv["KUBERNETES_ENGINE"] = "KUBERNETES_ENGINE"; GCPEnv["CLOUD_FUNCTIONS"] = "CLOUD_FUNCTIONS"; GCPEnv["COMPUTE_ENGINE"] = "COMPUTE_ENGINE"; GCPEnv["CLOUD_RUN"] = "CLOUD_RUN"; GCPEnv["NONE"] = "NONE"; })(GCPEnv || (exports.GCPEnv = GCPEnv = {})); let envPromise; function clear() { envPromise = undefined; } async function getEnv() { if (envPromise) { return envPromise; } envPromise = getEnvMemoized(); return envPromise; } async function getEnvMemoized() { let env = GCPEnv.NONE; if (isAppEngine()) { env = GCPEnv.APP_ENGINE; } else if (isCloudFunction()) { env = GCPEnv.CLOUD_FUNCTIONS; } else if (await isComputeEngine()) { if (await isKubernetesEngine()) { env = GCPEnv.KUBERNETES_ENGINE; } else if (isCloudRun()) { env = GCPEnv.CLOUD_RUN; } else { env = GCPEnv.COMPUTE_ENGINE; } } else { env = GCPEnv.NONE; } return env; } function isAppEngine() { return !!(process.env.GAE_SERVICE || process.env.GAE_MODULE_NAME); } function isCloudFunction() { return !!(process.env.FUNCTION_NAME || process.env.FUNCTION_TARGET); } /** * This check only verifies that the environment is running knative. * This must be run *after* checking for Kubernetes, otherwise it will * return a false positive. */ function isCloudRun() { return !!process.env.K_CONFIGURATION; } async function isKubernetesEngine() { try { await gcpMetadata.instance('attributes/cluster-name'); return true; } catch (e) { return false; } } async function isComputeEngine() { return gcpMetadata.isAvailable(); } /***/ }), /***/ 43247: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
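// Illustrative executable output (placeholder values) as parsed by the
// ExecutableResponse class below. A successful OIDC run:
//   { "version": 1, "success": true,
//     "token_type": "urn:ietf:params:oauth:token-type:id_token",
//     "id_token": "<subject token>", "expiration_time": 1700000000 }
// and a failed run:
//   { "version": 1, "success": false, "code": "401", "message": "Not authorized." }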
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.InvalidSubjectTokenError = exports.InvalidMessageFieldError = exports.InvalidCodeFieldError = exports.InvalidTokenTypeFieldError = exports.InvalidExpirationTimeFieldError = exports.InvalidSuccessFieldError = exports.InvalidVersionFieldError = exports.ExecutableResponseError = exports.ExecutableResponse = void 0; const SAML_SUBJECT_TOKEN_TYPE = 'urn:ietf:params:oauth:token-type:saml2'; const OIDC_SUBJECT_TOKEN_TYPE1 = 'urn:ietf:params:oauth:token-type:id_token'; const OIDC_SUBJECT_TOKEN_TYPE2 = 'urn:ietf:params:oauth:token-type:jwt'; /** * Defines the response of a 3rd party executable run by the pluggable auth client. */ class ExecutableResponse { /** * Instantiates an ExecutableResponse instance using the provided JSON object * from the output of the executable. * @param responseJson Response from a 3rd party executable, loaded from a * run of the executable or a cached output file. */ constructor(responseJson) { // Check that the required fields exist in the json response. if (!responseJson.version) { throw new InvalidVersionFieldError("Executable response must contain a 'version' field."); } if (responseJson.success === undefined) { throw new InvalidSuccessFieldError("Executable response must contain a 'success' field."); } this.version = responseJson.version; this.success = responseJson.success; // Validate required fields for a successful response. if (this.success) { this.expirationTime = responseJson.expiration_time; this.tokenType = responseJson.token_type; // Validate token type field. if (this.tokenType !== SAML_SUBJECT_TOKEN_TYPE && this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE1 && this.tokenType !== OIDC_SUBJECT_TOKEN_TYPE2) { throw new InvalidTokenTypeFieldError("Executable response must contain a 'token_type' field when successful " + `and it must be one of ${OIDC_SUBJECT_TOKEN_TYPE1}, ${OIDC_SUBJECT_TOKEN_TYPE2}, or ${SAML_SUBJECT_TOKEN_TYPE}.`); } // Validate subject token. if (this.tokenType === SAML_SUBJECT_TOKEN_TYPE) { if (!responseJson.saml_response) { throw new InvalidSubjectTokenError(`Executable response must contain a 'saml_response' field when token_type=${SAML_SUBJECT_TOKEN_TYPE}.`); } this.subjectToken = responseJson.saml_response; } else { if (!responseJson.id_token) { throw new InvalidSubjectTokenError("Executable response must contain a 'id_token' field when " + `token_type=${OIDC_SUBJECT_TOKEN_TYPE1} or ${OIDC_SUBJECT_TOKEN_TYPE2}.`); } this.subjectToken = responseJson.id_token; } } else { // Both code and message must be provided for unsuccessful responses. if (!responseJson.code) { throw new InvalidCodeFieldError("Executable response must contain a 'code' field when unsuccessful."); } if (!responseJson.message) { throw new InvalidMessageFieldError("Executable response must contain a 'message' field when unsuccessful."); } this.errorCode = responseJson.code; this.errorMessage = responseJson.message; } } /** * @return A boolean representing if the response has a valid token. Returns * true when the response was successful and the token is not expired. */ isValid() { return !this.isExpired() && this.success; } /** * @return A boolean representing if the response is expired. Returns true if the * provided timeout has passed. */ isExpired() { return (this.expirationTime !== undefined && this.expirationTime < Math.round(Date.now() / 1000)); } } exports.ExecutableResponse = ExecutableResponse; /** * An error thrown by the ExecutableResponse class. 
*/ class ExecutableResponseError extends Error { constructor(message) { super(message); Object.setPrototypeOf(this, new.target.prototype); } } exports.ExecutableResponseError = ExecutableResponseError; /** * An error thrown when the 'version' field in an executable response is missing or invalid. */ class InvalidVersionFieldError extends ExecutableResponseError { } exports.InvalidVersionFieldError = InvalidVersionFieldError; /** * An error thrown when the 'success' field in an executable response is missing or invalid. */ class InvalidSuccessFieldError extends ExecutableResponseError { } exports.InvalidSuccessFieldError = InvalidSuccessFieldError; /** * An error thrown when the 'expiration_time' field in an executable response is missing or invalid. */ class InvalidExpirationTimeFieldError extends ExecutableResponseError { } exports.InvalidExpirationTimeFieldError = InvalidExpirationTimeFieldError; /** * An error thrown when the 'token_type' field in an executable response is missing or invalid. */ class InvalidTokenTypeFieldError extends ExecutableResponseError { } exports.InvalidTokenTypeFieldError = InvalidTokenTypeFieldError; /** * An error thrown when the 'code' field in an executable response is missing or invalid. */ class InvalidCodeFieldError extends ExecutableResponseError { } exports.InvalidCodeFieldError = InvalidCodeFieldError; /** * An error thrown when the 'message' field in an executable response is missing or invalid. */ class InvalidMessageFieldError extends ExecutableResponseError { } exports.InvalidMessageFieldError = InvalidMessageFieldError; /** * An error thrown when the subject token in an executable response is missing or invalid. */ class InvalidSubjectTokenError extends ExecutableResponseError { } exports.InvalidSubjectTokenError = InvalidSubjectTokenError; /***/ }), /***/ 34240: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ExternalAccountAuthorizedUserClient = exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = void 0; const authclient_1 = __nccwpck_require__(34810); const oauth2common_1 = __nccwpck_require__(6653); const gaxios_1 = __nccwpck_require__(97003); const stream = __nccwpck_require__(2203); const baseexternalclient_1 = __nccwpck_require__(142); /** * The credentials JSON file type for external account authorized user clients. */ exports.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE = 'external_account_authorized_user'; const DEFAULT_TOKEN_URL = 'https://sts.{universeDomain}/v1/oauthtoken'; /** * Handler for token refresh requests sent to the token_url endpoint for external * authorized user credentials. */ class ExternalAccountAuthorizedUserHandler extends oauth2common_1.OAuthClientAuthHandler { /** * Initializes an ExternalAccountAuthorizedUserHandler instance. * @param url The URL of the token refresh endpoint. * @param transporter The transporter to use for the refresh request. 
* @param clientAuthentication The client authentication credentials to use * for the refresh request. */ constructor(url, transporter, clientAuthentication) { super(clientAuthentication); this.url = url; this.transporter = transporter; } /** * Requests a new access token from the token_url endpoint using the provided * refresh token. * @param refreshToken The refresh token to use to generate a new access token. * @param additionalHeaders Optional additional headers to pass along the * request. * @return A promise that resolves with the token refresh response containing * the requested access token and its expiration time. */ async refreshToken(refreshToken, additionalHeaders) { const values = new URLSearchParams({ grant_type: 'refresh_token', refresh_token: refreshToken, }); const headers = { 'Content-Type': 'application/x-www-form-urlencoded', ...additionalHeaders, }; const opts = { ...ExternalAccountAuthorizedUserHandler.RETRY_CONFIG, url: this.url, method: 'POST', headers, data: values.toString(), responseType: 'json', }; // Apply OAuth client authentication. this.applyClientAuthenticationOptions(opts); try { const response = await this.transporter.request(opts); // Successful response. const tokenRefreshResponse = response.data; tokenRefreshResponse.res = response; return tokenRefreshResponse; } catch (error) { // Translate error to OAuthError. if (error instanceof gaxios_1.GaxiosError && error.response) { throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, // Preserve other fields from the original error. error); } // Request could fail before the server responds. throw error; } } } /** * External Account Authorized User Client. This is used for OAuth2 credentials * sourced using external identities through Workforce Identity Federation. * Obtaining the initial access and refresh token can be done through the * Google Cloud CLI. */ class ExternalAccountAuthorizedUserClient extends authclient_1.AuthClient { /** * Instantiates an ExternalAccountAuthorizedUserClient instances using the * provided JSON object loaded from a credentials files. * An error is throws if the credential is not valid. * @param options The external account authorized user option object typically * from the external accoutn authorized user JSON credential file. * @param additionalOptions **DEPRECATED, all options are available in the * `options` parameter.** Optional additional behavior customization options. * These currently customize expiration threshold time and whether to retry * on 401/403 API request errors. */ constructor(options, additionalOptions) { var _a; super({ ...options, ...additionalOptions }); if (options.universe_domain) { this.universeDomain = options.universe_domain; } this.refreshToken = options.refresh_token; const clientAuth = { confidentialClientType: 'basic', clientId: options.client_id, clientSecret: options.client_secret, }; this.externalAccountAuthorizedUserHandler = new ExternalAccountAuthorizedUserHandler((_a = options.token_url) !== null && _a !== void 0 ? _a : DEFAULT_TOKEN_URL.replace('{universeDomain}', this.universeDomain), this.transporter, clientAuth); this.cachedAccessToken = null; this.quotaProjectId = options.quota_project_id; // As threshold could be zero, // eagerRefreshThresholdMillis || EXPIRATION_TIME_OFFSET will override the // zero value. if (typeof (additionalOptions === null || additionalOptions === void 0 ? 
void 0 : additionalOptions.eagerRefreshThresholdMillis) !== 'number') { this.eagerRefreshThresholdMillis = baseexternalclient_1.EXPIRATION_TIME_OFFSET; } else { this.eagerRefreshThresholdMillis = additionalOptions .eagerRefreshThresholdMillis; } this.forceRefreshOnFailure = !!(additionalOptions === null || additionalOptions === void 0 ? void 0 : additionalOptions.forceRefreshOnFailure); } async getAccessToken() { // If cached access token is unavailable or expired, force refresh. if (!this.cachedAccessToken || this.isExpired(this.cachedAccessToken)) { await this.refreshAccessTokenAsync(); } // Return GCP access token in GetAccessTokenResponse format. return { token: this.cachedAccessToken.access_token, res: this.cachedAccessToken.res, }; } async getRequestHeaders() { const accessTokenResponse = await this.getAccessToken(); const headers = { Authorization: `Bearer ${accessTokenResponse.token}`, }; return this.addSharedMetadataHeaders(headers); } request(opts, callback) { if (callback) { this.requestAsync(opts).then(r => callback(null, r), e => { return callback(e, e.response); }); } else { return this.requestAsync(opts); } } /** * Authenticates the provided HTTP request, processes it and resolves with the * returned response. * @param opts The HTTP request options. * @param reAuthRetried Whether the current attempt is a retry after a failed attempt due to an auth failure. * @return A promise that resolves with the successful response. */ async requestAsync(opts, reAuthRetried = false) { let response; try { const requestHeaders = await this.getRequestHeaders(); opts.headers = opts.headers || {}; if (requestHeaders && requestHeaders['x-goog-user-project']) { opts.headers['x-goog-user-project'] = requestHeaders['x-goog-user-project']; } if (requestHeaders && requestHeaders.Authorization) { opts.headers.Authorization = requestHeaders.Authorization; } response = await this.transporter.request(opts); } catch (e) { const res = e.response; if (res) { const statusCode = res.status; // Retry the request for metadata if the following criteria are true: // - We haven't already retried. It only makes sense to retry once. // - The response was a 401 or a 403 // - The request didn't send a readableStream // - forceRefreshOnFailure is true const isReadableStream = res.config.data instanceof stream.Readable; const isAuthErr = statusCode === 401 || statusCode === 403; if (!reAuthRetried && isAuthErr && !isReadableStream && this.forceRefreshOnFailure) { await this.refreshAccessTokenAsync(); return await this.requestAsync(opts, true); } } throw e; } return response; } /** * Forces token refresh, even if unexpired tokens are currently cached. * @return A promise that resolves with the refreshed credential. */ async refreshAccessTokenAsync() { // Refresh the access token using the refresh token. const refreshResponse = await this.externalAccountAuthorizedUserHandler.refreshToken(this.refreshToken); this.cachedAccessToken = { access_token: refreshResponse.access_token, expiry_date: new Date().getTime() + refreshResponse.expires_in * 1000, res: refreshResponse.res, }; if (refreshResponse.refresh_token !== undefined) { this.refreshToken = refreshResponse.refresh_token; } return this.cachedAccessToken; } /** * Returns whether the provided credentials are expired or not. * If there is no expiry time, assumes the token is not expired or expiring. * @param credentials The credentials to check for expiration. * @return Whether the credentials are expired or not. 
*/ isExpired(credentials) { const now = new Date().getTime(); return credentials.expiry_date ? now >= credentials.expiry_date - this.eagerRefreshThresholdMillis : false; } } exports.ExternalAccountAuthorizedUserClient = ExternalAccountAuthorizedUserClient; /***/ }), /***/ 88323: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ExternalAccountClient = void 0; const baseexternalclient_1 = __nccwpck_require__(142); const identitypoolclient_1 = __nccwpck_require__(29960); const awsclient_1 = __nccwpck_require__(81261); const pluggable_auth_client_1 = __nccwpck_require__(46077); /** * Dummy class with no constructor. Developers are expected to use fromJSON. */ class ExternalAccountClient { constructor() { throw new Error('ExternalAccountClients should be initialized via: ' + 'ExternalAccountClient.fromJSON(), ' + 'directly via explicit constructors, eg. ' + 'new AwsClient(options), new IdentityPoolClient(options), new' + 'PluggableAuthClientOptions, or via ' + 'new GoogleAuth(options).getClient()'); } /** * This static method will instantiate the * corresponding type of external account credential depending on the * underlying credential source. * @param options The external account options object typically loaded * from the external account JSON credential file. * @param additionalOptions **DEPRECATED, all options are available in the * `options` parameter.** Optional additional behavior customization options. * These currently customize expiration threshold time and whether to retry * on 401/403 API request errors. * @return A BaseExternalAccountClient instance or null if the options * provided do not correspond to an external account credential. */ static fromJSON(options, additionalOptions) { var _a, _b; if (options && options.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { if ((_a = options.credential_source) === null || _a === void 0 ? void 0 : _a.environment_id) { return new awsclient_1.AwsClient(options, additionalOptions); } else if ((_b = options.credential_source) === null || _b === void 0 ? void 0 : _b.executable) { return new pluggable_auth_client_1.PluggableAuthClient(options, additionalOptions); } else { return new identitypoolclient_1.IdentityPoolClient(options, additionalOptions); } } else { return null; } } } exports.ExternalAccountClient = ExternalAccountClient; /***/ }), /***/ 10932: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var _a, _b, _c; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.FileSubjectTokenSupplier = void 0; const util_1 = __nccwpck_require__(39023); const fs = __nccwpck_require__(79896); // fs.readfile is undefined in browser karma tests causing // `npm run browser-test` to fail as test.oauth2.ts imports this file via // src/index.ts. // Fallback to void function to avoid promisify throwing a TypeError. const readFile = (0, util_1.promisify)((_a = fs.readFile) !== null && _a !== void 0 ? _a : (() => { })); const realpath = (0, util_1.promisify)((_b = fs.realpath) !== null && _b !== void 0 ? _b : (() => { })); const lstat = (0, util_1.promisify)((_c = fs.lstat) !== null && _c !== void 0 ? _c : (() => { })); /** * Internal subject token supplier implementation used when a file location * is configured in the credential configuration used to build an {@link IdentityPoolClient} */ class FileSubjectTokenSupplier { /** * Instantiates a new file based subject token supplier. * @param opts The file subject token supplier options to build the supplier * with. */ constructor(opts) { this.filePath = opts.filePath; this.formatType = opts.formatType; this.subjectTokenFieldName = opts.subjectTokenFieldName; } /** * Returns the subject token stored at the file specified in the constructor. * @param context {@link ExternalAccountSupplierContext} from the calling * {@link IdentityPoolClient}, contains the requested audience and subject * token type for the external account identity. Not used. */ async getSubjectToken(context) { // Make sure there is a file at the path. lstatSync will throw if there is // nothing there. let parsedFilePath = this.filePath; try { // Resolve path to actual file in case of symlink. Expect a thrown error // if not resolvable. parsedFilePath = await realpath(parsedFilePath); if (!(await lstat(parsedFilePath)).isFile()) { throw new Error(); } } catch (err) { if (err instanceof Error) { err.message = `The file at ${parsedFilePath} does not exist, or it is not a file. ${err.message}`; } throw err; } let subjectToken; const rawText = await readFile(parsedFilePath, { encoding: 'utf8' }); if (this.formatType === 'text') { subjectToken = rawText; } else if (this.formatType === 'json' && this.subjectTokenFieldName) { const json = JSON.parse(rawText); subjectToken = json[this.subjectTokenFieldName]; } if (!subjectToken) { throw new Error('Unable to parse the subject_token from the credential_source file'); } return subjectToken; } } exports.FileSubjectTokenSupplier = FileSubjectTokenSupplier; /***/ }), /***/ 95934: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; }; var _GoogleAuth_instances, _GoogleAuth_pendingAuthClient, _GoogleAuth_prepareAndCacheClient, _GoogleAuth_determineClient; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.GoogleAuth = exports.GoogleAuthExceptionMessages = exports.CLOUD_SDK_CLIENT_ID = void 0; const child_process_1 = __nccwpck_require__(35317); const fs = __nccwpck_require__(79896); const gcpMetadata = __nccwpck_require__(23046); const os = __nccwpck_require__(70857); const path = __nccwpck_require__(16928); const crypto_1 = __nccwpck_require__(88851); const transporters_1 = __nccwpck_require__(67633); const computeclient_1 = __nccwpck_require__(20977); const idtokenclient_1 = __nccwpck_require__(12718); const envDetect_1 = __nccwpck_require__(60963); const jwtclient_1 = __nccwpck_require__(75277); const refreshclient_1 = __nccwpck_require__(99807); const impersonated_1 = __nccwpck_require__(39964); const externalclient_1 = __nccwpck_require__(88323); const baseexternalclient_1 = __nccwpck_require__(142); const authclient_1 = __nccwpck_require__(34810); const externalAccountAuthorizedUserClient_1 = __nccwpck_require__(34240); const util_1 = __nccwpck_require__(37870); exports.CLOUD_SDK_CLIENT_ID = '764086051850-6qr4p6gpi6hn506pt8ejuq83di341hur.apps.googleusercontent.com'; exports.GoogleAuthExceptionMessages = { API_KEY_WITH_CREDENTIALS: 'API Keys and Credentials are mutually exclusive authentication methods and cannot be used together.', NO_PROJECT_ID_FOUND: 'Unable to detect a Project Id in the current environment. \n' + 'To learn more about authentication and Google APIs, visit: \n' + 'https://cloud.google.com/docs/authentication/getting-started', NO_CREDENTIALS_FOUND: 'Unable to find credentials in current environment. \n' + 'To learn more about authentication and Google APIs, visit: \n' + 'https://cloud.google.com/docs/authentication/getting-started', NO_ADC_FOUND: 'Could not load the default credentials. 
Browse to https://cloud.google.com/docs/authentication/getting-started for more information.', NO_UNIVERSE_DOMAIN_FOUND: 'Unable to detect a Universe Domain in the current environment.\n' + 'To learn more about Universe Domain retrieval, visit: \n' + 'https://cloud.google.com/compute/docs/metadata/predefined-metadata-keys', }; class GoogleAuth { // Note: this properly is only public to satisfy unit tests. // https://github.com/Microsoft/TypeScript/issues/5228 get isGCE() { return this.checkIsGCE; } /** * Configuration is resolved in the following order of precedence: * - {@link GoogleAuthOptions.credentials `credentials`} * - {@link GoogleAuthOptions.keyFilename `keyFilename`} * - {@link GoogleAuthOptions.keyFile `keyFile`} * * {@link GoogleAuthOptions.clientOptions `clientOptions`} are passed to the * {@link AuthClient `AuthClient`s}. * * @param opts */ constructor(opts = {}) { _GoogleAuth_instances.add(this); /** * Caches a value indicating whether the auth layer is running on Google * Compute Engine. * @private */ this.checkIsGCE = undefined; // To save the contents of the JSON credential file this.jsonContent = null; this.cachedCredential = null; /** * A pending {@link AuthClient}. Used for concurrent {@link GoogleAuth.getClient} calls. */ _GoogleAuth_pendingAuthClient.set(this, null); this.clientOptions = {}; this._cachedProjectId = opts.projectId || null; this.cachedCredential = opts.authClient || null; this.keyFilename = opts.keyFilename || opts.keyFile; this.scopes = opts.scopes; this.clientOptions = opts.clientOptions || {}; this.jsonContent = opts.credentials || null; this.apiKey = opts.apiKey || this.clientOptions.apiKey || null; // Cannot use both API Key + Credentials if (this.apiKey && (this.jsonContent || this.clientOptions.credentials)) { throw new RangeError(exports.GoogleAuthExceptionMessages.API_KEY_WITH_CREDENTIALS); } if (opts.universeDomain) { this.clientOptions.universeDomain = opts.universeDomain; } } // GAPIC client libraries should always use self-signed JWTs. The following // variables are set on the JWT client in order to indicate the type of library, // and sign the JWT with the correct audience and scopes (if not supplied). setGapicJWTValues(client) { client.defaultServicePath = this.defaultServicePath; client.useJWTAccessWithScope = this.useJWTAccessWithScope; client.defaultScopes = this.defaultScopes; } getProjectId(callback) { if (callback) { this.getProjectIdAsync().then(r => callback(null, r), callback); } else { return this.getProjectIdAsync(); } } /** * A temporary method for internal `getProjectId` usages where `null` is * acceptable. In a future major release, `getProjectId` should return `null` * (as the `Promise` base signature describes) and this private * method should be removed. * * @returns Promise that resolves with project id (or `null`) */ async getProjectIdOptional() { try { return await this.getProjectId(); } catch (e) { if (e instanceof Error && e.message === exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND) { return null; } else { throw e; } } } /** * A private method for finding and caching a projectId. 
* * Supports environments in order of precedence: * - GCLOUD_PROJECT or GOOGLE_CLOUD_PROJECT environment variable * - GOOGLE_APPLICATION_CREDENTIALS JSON file * - Cloud SDK: `gcloud config config-helper --format json` * - GCE project ID from metadata server * * @returns projectId */ async findAndCacheProjectId() { let projectId = null; projectId || (projectId = await this.getProductionProjectId()); projectId || (projectId = await this.getFileProjectId()); projectId || (projectId = await this.getDefaultServiceProjectId()); projectId || (projectId = await this.getGCEProjectId()); projectId || (projectId = await this.getExternalAccountClientProjectId()); if (projectId) { this._cachedProjectId = projectId; return projectId; } else { throw new Error(exports.GoogleAuthExceptionMessages.NO_PROJECT_ID_FOUND); } } async getProjectIdAsync() { if (this._cachedProjectId) { return this._cachedProjectId; } if (!this._findProjectIdPromise) { this._findProjectIdPromise = this.findAndCacheProjectId(); } return this._findProjectIdPromise; } /** * Retrieves a universe domain from the metadata server via * {@link gcpMetadata.universe}. * * @returns a universe domain */ async getUniverseDomainFromMetadataServer() { var _a; let universeDomain; try { universeDomain = await gcpMetadata.universe('universe-domain'); universeDomain || (universeDomain = authclient_1.DEFAULT_UNIVERSE); } catch (e) { if (e && ((_a = e === null || e === void 0 ? void 0 : e.response) === null || _a === void 0 ? void 0 : _a.status) === 404) { universeDomain = authclient_1.DEFAULT_UNIVERSE; } else { throw e; } } return universeDomain; } /** * Retrieves, caches, and returns the universe domain in the following order * of precedence: * - The universe domain in {@link GoogleAuth.clientOptions} * - An existing or ADC {@link AuthClient}'s universe domain * - {@link gcpMetadata.universe}, if {@link Compute} client * * @returns The universe domain */ async getUniverseDomain() { let universeDomain = (0, util_1.originalOrCamelOptions)(this.clientOptions).get('universe_domain'); try { universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = (await this.getClient()).universeDomain); } catch (_a) { // client or ADC is not available universeDomain !== null && universeDomain !== void 0 ? universeDomain : (universeDomain = authclient_1.DEFAULT_UNIVERSE); } return universeDomain; } /** * @returns Any scopes (user-specified or default scopes specified by the * client library) that need to be set on the current Auth client. */ getAnyScopes() { return this.scopes || this.defaultScopes; } getApplicationDefault(optionsOrCallback = {}, callback) { let options; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else { options = optionsOrCallback; } if (callback) { this.getApplicationDefaultAsync(options).then(r => callback(null, r.credential, r.projectId), callback); } else { return this.getApplicationDefaultAsync(options); } } async getApplicationDefaultAsync(options = {}) { // If we've already got a cached credential, return it. // This will also preserve one's configured quota project, in case they // set one directly on the credential previously. 
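/*
 * Sketch of the getApplicationDefault wrapper above, assuming Application
 * Default Credentials are available via GOOGLE_APPLICATION_CREDENTIALS, the
 * gcloud well-known file, or the GCE metadata server. The scope is a
 * placeholder and the awaits assume an enclosing async function.
 *
 *   const { GoogleAuth } = require('google-auth-library');
 *   const auth = new GoogleAuth({ scopes: 'https://www.googleapis.com/auth/cloud-platform' });
 *   const { credential, projectId } = await auth.getApplicationDefault();
 *   const headers = await credential.getRequestHeaders();
 *
 * Repeat calls reuse the cached credential, which is what the cached-credential
 * check immediately below implements.
 */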
if (this.cachedCredential) { // cache, while preserving existing quota project preferences return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, this.cachedCredential, null); } let credential; // Check for the existence of a local environment variable pointing to the // location of the credential file. This is typically used in local // developer scenarios. credential = await this._tryGetApplicationCredentialsFromEnvironmentVariable(options); if (credential) { if (credential instanceof jwtclient_1.JWT) { credential.scopes = this.scopes; } else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { credential.scopes = this.getAnyScopes(); } return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); } // Look in the well-known credential file location. credential = await this._tryGetApplicationCredentialsFromWellKnownFile(options); if (credential) { if (credential instanceof jwtclient_1.JWT) { credential.scopes = this.scopes; } else if (credential instanceof baseexternalclient_1.BaseExternalAccountClient) { credential.scopes = this.getAnyScopes(); } return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, credential); } // Determine if we're running on GCE. if (await this._checkIsGCE()) { options.scopes = this.getAnyScopes(); return await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, new computeclient_1.Compute(options)); } throw new Error(exports.GoogleAuthExceptionMessages.NO_ADC_FOUND); } /** * Determines whether the auth layer is running on Google Compute Engine. * Checks for GCP Residency, then fallback to checking if metadata server * is available. * * @returns A promise that resolves with the boolean. * @api private */ async _checkIsGCE() { if (this.checkIsGCE === undefined) { this.checkIsGCE = gcpMetadata.getGCPResidency() || (await gcpMetadata.isAvailable()); } return this.checkIsGCE; } /** * Attempts to load default credentials from the environment variable path.. * @returns Promise that resolves with the OAuth2Client or null. * @api private */ async _tryGetApplicationCredentialsFromEnvironmentVariable(options) { const credentialsPath = process.env['GOOGLE_APPLICATION_CREDENTIALS'] || process.env['google_application_credentials']; if (!credentialsPath || credentialsPath.length === 0) { return null; } try { return this._getApplicationCredentialsFromFilePath(credentialsPath, options); } catch (e) { if (e instanceof Error) { e.message = `Unable to read the credential file specified by the GOOGLE_APPLICATION_CREDENTIALS environment variable: ${e.message}`; } throw e; } } /** * Attempts to load default credentials from a well-known file location * @return Promise that resolves with the OAuth2Client or null. * @api private */ async _tryGetApplicationCredentialsFromWellKnownFile(options) { // First, figure out the location of the file, depending upon the OS type. let location = null; if (this._isWindows()) { // Windows location = process.env['APPDATA']; } else { // Linux or Mac const home = process.env['HOME']; if (home) { location = path.join(home, '.config'); } } // If we found the root path, expand it. if (location) { location = path.join(location, 'gcloud', 'application_default_credentials.json'); if (!fs.existsSync(location)) { location = null; } } // The file does not exist. 
if (!location) { return null; } // The file seems to exist. Try to use it. const client = await this._getApplicationCredentialsFromFilePath(location, options); return client; } /** * Attempts to load default credentials from a file at the given path. * @param filePath The path to the file to read. * @returns Promise that resolves with the OAuth2Client * @api private */ async _getApplicationCredentialsFromFilePath(filePath, options = {}) { // Make sure the path looks like a string. if (!filePath || filePath.length === 0) { throw new Error('The file path is invalid.'); } // Make sure there is a file at the path. lstatSync will throw if there is // nothing there. try { // Resolve path to actual file in case of symlink. Expect a thrown error // if not resolvable. filePath = fs.realpathSync(filePath); if (!fs.lstatSync(filePath).isFile()) { throw new Error(); } } catch (err) { if (err instanceof Error) { err.message = `The file at ${filePath} does not exist, or it is not a file. ${err.message}`; } throw err; } // Now open a read stream on the file, and parse it. const readStream = fs.createReadStream(filePath); return this.fromStream(readStream, options); } /** * Create a credentials instance using a given impersonated input options. * @param json The impersonated input object. * @returns JWT or UserRefresh Client with data */ fromImpersonatedJSON(json) { var _a, _b, _c, _d; if (!json) { throw new Error('Must pass in a JSON object containing an impersonated refresh token'); } if (json.type !== impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { throw new Error(`The incoming JSON object does not have the "${impersonated_1.IMPERSONATED_ACCOUNT_TYPE}" type`); } if (!json.source_credentials) { throw new Error('The incoming JSON object does not contain a source_credentials field'); } if (!json.service_account_impersonation_url) { throw new Error('The incoming JSON object does not contain a service_account_impersonation_url field'); } const sourceClient = this.fromJSON(json.source_credentials); if (((_a = json.service_account_impersonation_url) === null || _a === void 0 ? void 0 : _a.length) > 256) { /** * Prevents DOS attacks. * @see {@link https://github.com/googleapis/google-auth-library-nodejs/security/code-scanning/85} **/ throw new RangeError(`Target principal is too long: ${json.service_account_impersonation_url}`); } // Extract service account from service_account_impersonation_url const targetPrincipal = (_c = (_b = /(?<target>[^/]+):(generateAccessToken|generateIdToken)$/.exec(json.service_account_impersonation_url)) === null || _b === void 0 ? void 0 : _b.groups) === null || _c === void 0 ? void 0 : _c.target; if (!targetPrincipal) { throw new RangeError(`Cannot extract target principal from ${json.service_account_impersonation_url}`); } const targetScopes = (_d = this.getAnyScopes()) !== null && _d !== void 0 ? _d : []; return new impersonated_1.Impersonated({ ...json, sourceClient, targetPrincipal, targetScopes: Array.isArray(targetScopes) ? targetScopes : [targetScopes], }); } /** * Create a credentials instance using the given input options. * This client is not cached. * * **Important**: If you accept a credential configuration (credential JSON/File/Stream) from an external source for authentication to Google Cloud, you must validate it before providing it to any Google API or library. Providing an unvalidated credential configuration to Google APIs can compromise the security of your systems and data.
For more information, refer to {@link https://cloud.google.com/docs/authentication/external/externally-sourced-credentials Validate credential configurations from external sources}. * * @param json The input object. * @param options The JWT or UserRefresh options for the client * @returns JWT or UserRefresh Client with data */ fromJSON(json, options = {}) { let client; // user's preferred universe domain const preferredUniverseDomain = (0, util_1.originalOrCamelOptions)(options).get('universe_domain'); if (json.type === refreshclient_1.USER_REFRESH_ACCOUNT_TYPE) { client = new refreshclient_1.UserRefreshClient(options); client.fromJSON(json); } else if (json.type === impersonated_1.IMPERSONATED_ACCOUNT_TYPE) { client = this.fromImpersonatedJSON(json); } else if (json.type === baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { client = externalclient_1.ExternalAccountClient.fromJSON(json, options); client.scopes = this.getAnyScopes(); } else if (json.type === externalAccountAuthorizedUserClient_1.EXTERNAL_ACCOUNT_AUTHORIZED_USER_TYPE) { client = new externalAccountAuthorizedUserClient_1.ExternalAccountAuthorizedUserClient(json, options); } else { options.scopes = this.scopes; client = new jwtclient_1.JWT(options); this.setGapicJWTValues(client); client.fromJSON(json); } if (preferredUniverseDomain) { client.universeDomain = preferredUniverseDomain; } return client; } /** * Return a JWT or UserRefreshClient from JavaScript object, caching both the * object used to instantiate and the client. * @param json The input object. * @param options The JWT or UserRefresh options for the client * @returns JWT or UserRefresh Client with data */ _cacheClientFromJSON(json, options) { const client = this.fromJSON(json, options); // cache both raw data used to instantiate client and client itself. this.jsonContent = json; this.cachedCredential = client; return client; } fromStream(inputStream, optionsOrCallback = {}, callback) { let options = {}; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else { options = optionsOrCallback; } if (callback) { this.fromStreamAsync(inputStream, options).then(r => callback(null, r), callback); } else { return this.fromStreamAsync(inputStream, options); } } fromStreamAsync(inputStream, options) { return new Promise((resolve, reject) => { if (!inputStream) { throw new Error('Must pass in a stream containing the Google auth settings.'); } const chunks = []; inputStream .setEncoding('utf8') .on('error', reject) .on('data', chunk => chunks.push(chunk)) .on('end', () => { try { try { const data = JSON.parse(chunks.join('')); const r = this._cacheClientFromJSON(data, options); return resolve(r); } catch (err) { // If we failed parsing this.keyFileName, assume that it // is a PEM or p12 certificate: if (!this.keyFilename) throw err; const client = new jwtclient_1.JWT({ ...this.clientOptions, keyFile: this.keyFilename, }); this.cachedCredential = client; this.setGapicJWTValues(client); return resolve(client); } } catch (err) { return reject(err); } }); }); } /** * Create a credentials instance using the given API key string. * The created client is not cached. In order to create and cache it use the {@link GoogleAuth.getClient `getClient`} method after first providing an {@link GoogleAuth.apiKey `apiKey`}. * * @param apiKey The API key string * @param options An optional options object. 
* @returns A JWT loaded from the key */ fromAPIKey(apiKey, options = {}) { return new jwtclient_1.JWT({ ...options, apiKey }); } /** * Determines whether the current operating system is Windows. * @api private */ _isWindows() { const sys = os.platform(); if (sys && sys.length >= 3) { if (sys.substring(0, 3).toLowerCase() === 'win') { return true; } } return false; } /** * Run the Google Cloud SDK command that prints the default project ID */ async getDefaultServiceProjectId() { return new Promise(resolve => { (0, child_process_1.exec)('gcloud config config-helper --format json', (err, stdout) => { if (!err && stdout) { try { const projectId = JSON.parse(stdout).configuration.properties.core.project; resolve(projectId); return; } catch (e) { // ignore errors } } resolve(null); }); }); } /** * Loads the project id from environment variables. * @api private */ getProductionProjectId() { return (process.env['GCLOUD_PROJECT'] || process.env['GOOGLE_CLOUD_PROJECT'] || process.env['gcloud_project'] || process.env['google_cloud_project']); } /** * Loads the project id from the GOOGLE_APPLICATION_CREDENTIALS json file. * @api private */ async getFileProjectId() { if (this.cachedCredential) { // Try to read the project ID from the cached credentials file return this.cachedCredential.projectId; } // Ensure the projectId is loaded from the keyFile if available. if (this.keyFilename) { const creds = await this.getClient(); if (creds && creds.projectId) { return creds.projectId; } } // Try to load a credentials file and read its project ID const r = await this._tryGetApplicationCredentialsFromEnvironmentVariable(); if (r) { return r.projectId; } else { return null; } } /** * Gets the project ID from external account client if available. */ async getExternalAccountClientProjectId() { if (!this.jsonContent || this.jsonContent.type !== baseexternalclient_1.EXTERNAL_ACCOUNT_TYPE) { return null; } const creds = await this.getClient(); // Do not suppress the underlying error, as the error could contain helpful // information for debugging and fixing. This is especially true for // external account creds as in order to get the project ID, the following // operations have to succeed: // 1. Valid credentials file should be supplied. // 2. Ability to retrieve access tokens from STS token exchange API. // 3. Ability to exchange for service account impersonated credentials (if // enabled). // 4. Ability to get project info using the access token from step 2 or 3. // Without surfacing the error, it is harder for developers to determine // which step went wrong. return await creds.getProjectId(); } /** * Gets the Compute Engine project ID if it can be inferred. 
*/ async getGCEProjectId() { try { const r = await gcpMetadata.project('project-id'); return r; } catch (e) { // Ignore any errors return null; } } getCredentials(callback) { if (callback) { this.getCredentialsAsync().then(r => callback(null, r), callback); } else { return this.getCredentialsAsync(); } } async getCredentialsAsync() { const client = await this.getClient(); if (client instanceof impersonated_1.Impersonated) { return { client_email: client.getTargetPrincipal() }; } if (client instanceof baseexternalclient_1.BaseExternalAccountClient) { const serviceAccountEmail = client.getServiceAccountEmail(); if (serviceAccountEmail) { return { client_email: serviceAccountEmail, universe_domain: client.universeDomain, }; } } if (this.jsonContent) { return { client_email: this.jsonContent.client_email, private_key: this.jsonContent.private_key, universe_domain: this.jsonContent.universe_domain, }; } if (await this._checkIsGCE()) { const [client_email, universe_domain] = await Promise.all([ gcpMetadata.instance('service-accounts/default/email'), this.getUniverseDomain(), ]); return { client_email, universe_domain }; } throw new Error(exports.GoogleAuthExceptionMessages.NO_CREDENTIALS_FOUND); } /** * Automatically obtain an {@link AuthClient `AuthClient`} based on the * provided configuration. If no options were passed, use Application * Default Credentials. */ async getClient() { if (this.cachedCredential) { return this.cachedCredential; } // Use an existing auth client request, or cache a new one __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f") || __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_determineClient).call(this), "f"); try { return await __classPrivateFieldGet(this, _GoogleAuth_pendingAuthClient, "f"); } finally { // reset the pending auth client in case it is changed later __classPrivateFieldSet(this, _GoogleAuth_pendingAuthClient, null, "f"); } } /** * Creates a client which will fetch an ID token for authorization. * @param targetAudience the audience for the fetched ID token. * @returns IdTokenClient for making HTTP calls authenticated with ID tokens. */ async getIdTokenClient(targetAudience) { const client = await this.getClient(); if (!('fetchIdToken' in client)) { throw new Error('Cannot fetch ID token in this environment, use GCE or set the GOOGLE_APPLICATION_CREDENTIALS environment variable to a service account credentials JSON file.'); } return new idtokenclient_1.IdTokenClient({ targetAudience, idTokenProvider: client }); } /** * Automatically obtain application default credentials, and return * an access token for making requests. */ async getAccessToken() { const client = await this.getClient(); return (await client.getAccessToken()).token; } /** * Obtain the HTTP headers that will provide authorization for a given * request. */ async getRequestHeaders(url) { const client = await this.getClient(); return client.getRequestHeaders(url); } /** * Obtain credentials for a request, then attach the appropriate headers to * the request options. * @param opts Axios or Request options on which to attach the headers */ async authorizeRequest(opts) { opts = opts || {}; const url = opts.url || opts.uri; const client = await this.getClient(); const headers = await client.getRequestHeaders(url); opts.headers = Object.assign(opts.headers || {}, headers); return opts; } /** * Automatically obtain application default credentials, and make an * HTTP request using the given options. 
* @param opts Axios request options for the HTTP request. */ // eslint-disable-next-line @typescript-eslint/no-explicit-any async request(opts) { const client = await this.getClient(); return client.request(opts); } /** * Determine the compute environment in which the code is running. */ getEnv() { return (0, envDetect_1.getEnv)(); } /** * Sign the given data with the current private key, or go out * to the IAM API to sign it. * @param data The data to be signed. * @param endpoint A custom endpoint to use. * * @example * ``` * sign('data', 'https://iamcredentials.googleapis.com/v1/projects/-/serviceAccounts/'); * ``` */ async sign(data, endpoint) { const client = await this.getClient(); const universe = await this.getUniverseDomain(); endpoint = endpoint || `https://iamcredentials.${universe}/v1/projects/-/serviceAccounts/`; if (client instanceof impersonated_1.Impersonated) { const signed = await client.sign(data); return signed.signedBlob; } const crypto = (0, crypto_1.createCrypto)(); if (client instanceof jwtclient_1.JWT && client.key) { const sign = await crypto.sign(client.key, data); return sign; } const creds = await this.getCredentials(); if (!creds.client_email) { throw new Error('Cannot sign data without `client_email`.'); } return this.signBlob(crypto, creds.client_email, data, endpoint); } async signBlob(crypto, emailOrUniqueId, data, endpoint) { const url = new URL(endpoint + `${emailOrUniqueId}:signBlob`); const res = await this.request({ method: 'POST', url: url.href, data: { payload: crypto.encodeBase64StringUtf8(data), }, retry: true, retryConfig: { httpMethodsToRetry: ['POST'], }, }); return res.data.signedBlob; } } exports.GoogleAuth = GoogleAuth; _GoogleAuth_pendingAuthClient = new WeakMap(), _GoogleAuth_instances = new WeakSet(), _GoogleAuth_prepareAndCacheClient = async function _GoogleAuth_prepareAndCacheClient(credential, quotaProjectIdOverride = process.env['GOOGLE_CLOUD_QUOTA_PROJECT'] || null) { const projectId = await this.getProjectIdOptional(); if (quotaProjectIdOverride) { credential.quotaProjectId = quotaProjectIdOverride; } this.cachedCredential = credential; return { credential, projectId }; }, _GoogleAuth_determineClient = async function _GoogleAuth_determineClient() { if (this.jsonContent) { return this._cacheClientFromJSON(this.jsonContent, this.clientOptions); } else if (this.keyFilename) { const filePath = path.resolve(this.keyFilename); const stream = fs.createReadStream(filePath); return await this.fromStreamAsync(stream, this.clientOptions); } else if (this.apiKey) { const client = await this.fromAPIKey(this.apiKey, this.clientOptions); client.scopes = this.scopes; const { credential } = await __classPrivateFieldGet(this, _GoogleAuth_instances, "m", _GoogleAuth_prepareAndCacheClient).call(this, client); return credential; } else { const { credential } = await this.getApplicationDefaultAsync(this.clientOptions); return credential; } }; /** * Export DefaultTransporter as a static property of the class. */ GoogleAuth.DefaultTransporter = transporters_1.DefaultTransporter; /***/ }), /***/ 89390: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright 2014 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
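/*
 * End-to-end GoogleAuth sketch for the class that just ended. The request URL
 * is an example endpoint, not part of the API, and the awaits assume an
 * enclosing async function.
 *
 *   const { GoogleAuth } = require('google-auth-library');
 *   const auth = new GoogleAuth({
 *     scopes: ['https://www.googleapis.com/auth/cloud-platform'],
 *   });
 *   const client = await auth.getClient();   // resolved once, then cached
 *   const projectId = await auth.getProjectId();
 *   const res = await client.request({
 *     url: `https://cloudresourcemanager.googleapis.com/v1/projects/${projectId}`,
 *   });
 */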
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.IAMAuth = void 0; class IAMAuth { /** * IAM credentials. * * @param selector the iam authority selector * @param token the token * @constructor */ constructor(selector, token) { this.selector = selector; this.token = token; this.selector = selector; this.token = token; } /** * Acquire the HTTP headers required to make an authenticated request. */ getRequestHeaders() { return { 'x-goog-iam-authority-selector': this.selector, 'x-goog-iam-authorization-token': this.token, }; } } exports.IAMAuth = IAMAuth; /***/ }), /***/ 29960: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.IdentityPoolClient = void 0; const baseexternalclient_1 = __nccwpck_require__(142); const util_1 = __nccwpck_require__(37870); const filesubjecttokensupplier_1 = __nccwpck_require__(10932); const urlsubjecttokensupplier_1 = __nccwpck_require__(24627); /** * Defines the Url-sourced and file-sourced external account clients mainly * used for K8s and Azure workloads. */ class IdentityPoolClient extends baseexternalclient_1.BaseExternalAccountClient { /** * Instantiate an IdentityPoolClient instance using the provided JSON * object loaded from an external account credentials file. * An error is thrown if the credential is not a valid file-sourced or * url-sourced credential or a workforce pool user project is provided * with a non workforce audience. * @param options The external account options object typically loaded * from the external account JSON credential file. The camelCased options * are aliases for the snake_cased options. * @param additionalOptions **DEPRECATED, all options are available in the * `options` parameter.** Optional additional behavior customization options. * These currently customize expiration threshold time and whether to retry * on 401/403 API request errors. */ constructor(options, additionalOptions) { super(options, additionalOptions); const opts = (0, util_1.originalOrCamelOptions)(options); const credentialSource = opts.get('credential_source'); const subjectTokenSupplier = opts.get('subject_token_supplier'); // Validate credential sourcing configuration. 
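/*
 * Example of a url-sourced credential_source accepted by this constructor; the
 * host, header, and field names are placeholders.
 *
 *   {
 *     "credential_source": {
 *       "url": "https://metadata.example.com/token",
 *       "headers": { "Metadata": "True" },
 *       "format": { "type": "json", "subject_token_field_name": "access_token" }
 *     }
 *   }
 *
 * Exactly one of `file`, `url`, or a programmatic subject_token_supplier may be
 * supplied, as the checks that follow enforce.
 */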
if (!credentialSource && !subjectTokenSupplier) { throw new Error('A credential source or subject token supplier must be specified.'); } if (credentialSource && subjectTokenSupplier) { throw new Error('Only one of credential source or subject token supplier can be specified.'); } if (subjectTokenSupplier) { this.subjectTokenSupplier = subjectTokenSupplier; this.credentialSourceType = 'programmatic'; } else { const credentialSourceOpts = (0, util_1.originalOrCamelOptions)(credentialSource); const formatOpts = (0, util_1.originalOrCamelOptions)(credentialSourceOpts.get('format')); // Text is the default format type. const formatType = formatOpts.get('type') || 'text'; const formatSubjectTokenFieldName = formatOpts.get('subject_token_field_name'); if (formatType !== 'json' && formatType !== 'text') { throw new Error(`Invalid credential_source format "${formatType}"`); } if (formatType === 'json' && !formatSubjectTokenFieldName) { throw new Error('Missing subject_token_field_name for JSON credential_source format'); } const file = credentialSourceOpts.get('file'); const url = credentialSourceOpts.get('url'); const headers = credentialSourceOpts.get('headers'); if (file && url) { throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); } else if (file && !url) { this.credentialSourceType = 'file'; this.subjectTokenSupplier = new filesubjecttokensupplier_1.FileSubjectTokenSupplier({ filePath: file, formatType: formatType, subjectTokenFieldName: formatSubjectTokenFieldName, }); } else if (!file && url) { this.credentialSourceType = 'url'; this.subjectTokenSupplier = new urlsubjecttokensupplier_1.UrlSubjectTokenSupplier({ url: url, formatType: formatType, subjectTokenFieldName: formatSubjectTokenFieldName, headers: headers, additionalGaxiosOptions: IdentityPoolClient.RETRY_CONFIG, }); } else { throw new Error('No valid Identity Pool "credential_source" provided, must be either file or url.'); } } } /** * Triggered when a external subject token is needed to be exchanged for a GCP * access token via GCP STS endpoint. Gets a subject token by calling * the configured {@link SubjectTokenSupplier} * @return A promise that resolves with the external subject token. */ async retrieveSubjectToken() { return this.subjectTokenSupplier.getSubjectToken(this.supplierContext); } } exports.IdentityPoolClient = IdentityPoolClient; /***/ }), /***/ 12718: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.IdTokenClient = void 0; const oauth2client_1 = __nccwpck_require__(10091); class IdTokenClient extends oauth2client_1.OAuth2Client { /** * Google ID Token client * * Retrieve ID token from the metadata server. 
* See: https://cloud.google.com/docs/authentication/get-id-token#metadata-server */ constructor(options) { super(options); this.targetAudience = options.targetAudience; this.idTokenProvider = options.idTokenProvider; } async getRequestMetadataAsync( // eslint-disable-next-line @typescript-eslint/no-unused-vars url) { if (!this.credentials.id_token || !this.credentials.expiry_date || this.isTokenExpiring()) { const idToken = await this.idTokenProvider.fetchIdToken(this.targetAudience); this.credentials = { id_token: idToken, expiry_date: this.getIdTokenExpiryDate(idToken), }; } const headers = { Authorization: 'Bearer ' + this.credentials.id_token, }; return { headers }; } getIdTokenExpiryDate(idToken) { const payloadB64 = idToken.split('.')[1]; if (payloadB64) { const payload = JSON.parse(Buffer.from(payloadB64, 'base64').toString('ascii')); return payload.exp * 1000; } } } exports.IdTokenClient = IdTokenClient; /***/ }), /***/ 39964: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; /** * Copyright 2021 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Impersonated = exports.IMPERSONATED_ACCOUNT_TYPE = void 0; const oauth2client_1 = __nccwpck_require__(10091); const gaxios_1 = __nccwpck_require__(97003); const util_1 = __nccwpck_require__(37870); exports.IMPERSONATED_ACCOUNT_TYPE = 'impersonated_service_account'; class Impersonated extends oauth2client_1.OAuth2Client { /** * Impersonated service account credentials. * * Create a new access token by impersonating another service account. * * Impersonated Credentials allowing credentials issued to a user or * service account to impersonate another. The source project using * Impersonated Credentials must enable the "IAMCredentials" API. * Also, the target service account must grant the orginating principal * the "Service Account Token Creator" IAM role. * * @param {object} options - The configuration object. * @param {object} [options.sourceClient] the source credential used as to * acquire the impersonated credentials. * @param {string} [options.targetPrincipal] the service account to * impersonate. * @param {string[]} [options.delegates] the chained list of delegates * required to grant the final access_token. If set, the sequence of * identities must have "Service Account Token Creator" capability granted to * the preceding identity. For example, if set to [serviceAccountB, * serviceAccountC], the sourceCredential must have the Token Creator role on * serviceAccountB. serviceAccountB must have the Token Creator on * serviceAccountC. Finally, C must have Token Creator on target_principal. * If left unset, sourceCredential must have that role on targetPrincipal. * @param {string[]} [options.targetScopes] scopes to request during the * authorization grant. 
* @param {number} [options.lifetime] number of seconds the delegated * credential should be valid for up to 3600 seconds by default, or 43,200 * seconds by extending the token's lifetime, see: * https://cloud.google.com/iam/docs/creating-short-lived-service-account-credentials#sa-credentials-oauth * @param {string} [options.endpoint] api endpoint override. */ constructor(options = {}) { var _a, _b, _c, _d, _e, _f; super(options); // Start with an expired refresh token, which will automatically be // refreshed before the first API call is made. this.credentials = { expiry_date: 1, refresh_token: 'impersonated-placeholder', }; this.sourceClient = (_a = options.sourceClient) !== null && _a !== void 0 ? _a : new oauth2client_1.OAuth2Client(); this.targetPrincipal = (_b = options.targetPrincipal) !== null && _b !== void 0 ? _b : ''; this.delegates = (_c = options.delegates) !== null && _c !== void 0 ? _c : []; this.targetScopes = (_d = options.targetScopes) !== null && _d !== void 0 ? _d : []; this.lifetime = (_e = options.lifetime) !== null && _e !== void 0 ? _e : 3600; const usingExplicitUniverseDomain = !!(0, util_1.originalOrCamelOptions)(options).get('universe_domain'); if (!usingExplicitUniverseDomain) { // override the default universe with the source's universe this.universeDomain = this.sourceClient.universeDomain; } else if (this.sourceClient.universeDomain !== this.universeDomain) { // non-default universe and is not matching the source - this could be a credential leak throw new RangeError(`Universe domain ${this.sourceClient.universeDomain} in source credentials does not match ${this.universeDomain} universe domain set for impersonated credentials.`); } this.endpoint = (_f = options.endpoint) !== null && _f !== void 0 ? _f : `https://iamcredentials.${this.universeDomain}`; } /** * Signs some bytes. * * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob Reference Documentation} * @param blobToSign String to sign. * * @returns A {@link SignBlobResponse} denoting the keyID and signedBlob in base64 string */ async sign(blobToSign) { await this.sourceClient.getAccessToken(); const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; const u = `${this.endpoint}/v1/${name}:signBlob`; const body = { delegates: this.delegates, payload: Buffer.from(blobToSign).toString('base64'), }; const res = await this.sourceClient.request({ ...Impersonated.RETRY_CONFIG, url: u, data: body, method: 'POST', }); return res.data; } /** The service account email to be impersonated. */ getTargetPrincipal() { return this.targetPrincipal; } /** * Refreshes the access token. */ async refreshToken() { var _a, _b, _c, _d, _e, _f; try { await this.sourceClient.getAccessToken(); const name = 'projects/-/serviceAccounts/' + this.targetPrincipal; const u = `${this.endpoint}/v1/${name}:generateAccessToken`; const body = { delegates: this.delegates, scope: this.targetScopes, lifetime: this.lifetime + 's', }; const res = await this.sourceClient.request({ ...Impersonated.RETRY_CONFIG, url: u, data: body, method: 'POST', }); const tokenResponse = res.data; this.credentials.access_token = tokenResponse.accessToken; this.credentials.expiry_date = Date.parse(tokenResponse.expireTime); return { tokens: this.credentials, res, }; } catch (error) { if (!(error instanceof Error)) throw error; let status = 0; let message = ''; if (error instanceof gaxios_1.GaxiosError) { status = (_c = (_b = (_a = error === null || error === void 0 ? 
void 0 : error.response) === null || _a === void 0 ? void 0 : _a.data) === null || _b === void 0 ? void 0 : _b.error) === null || _c === void 0 ? void 0 : _c.status; message = (_f = (_e = (_d = error === null || error === void 0 ? void 0 : error.response) === null || _d === void 0 ? void 0 : _d.data) === null || _e === void 0 ? void 0 : _e.error) === null || _f === void 0 ? void 0 : _f.message; } if (status && message) { error.message = `${status}: unable to impersonate: ${message}`; throw error; } else { error.message = `unable to impersonate: ${error}`; throw error; } } } /** * Generates an OpenID Connect ID token for a service account. * * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/generateIdToken Reference Documentation} * * @param targetAudience the audience for the fetched ID token. * @param options the for the request * @return an OpenID Connect ID token */ async fetchIdToken(targetAudience, options) { var _a, _b; await this.sourceClient.getAccessToken(); const name = `projects/-/serviceAccounts/${this.targetPrincipal}`; const u = `${this.endpoint}/v1/${name}:generateIdToken`; const body = { delegates: this.delegates, audience: targetAudience, includeEmail: (_a = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _a !== void 0 ? _a : true, useEmailAzp: (_b = options === null || options === void 0 ? void 0 : options.includeEmail) !== null && _b !== void 0 ? _b : true, }; const res = await this.sourceClient.request({ ...Impersonated.RETRY_CONFIG, url: u, data: body, method: 'POST', }); return res.data.token; } } exports.Impersonated = Impersonated; /***/ }), /***/ 27060: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2015 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.JWTAccess = void 0; const jws = __nccwpck_require__(33324); const util_1 = __nccwpck_require__(37870); const DEFAULT_HEADER = { alg: 'RS256', typ: 'JWT', }; class JWTAccess { /** * JWTAccess service account credentials. * * Create a new access token by using the credential to create a new JWT token * that's recognized as the access token. * * @param email the service account email address. * @param key the private key that will be used to sign the token. * @param keyId the ID of the private key used to sign the token. */ constructor(email, key, keyId, eagerRefreshThresholdMillis) { this.cache = new util_1.LRUCache({ capacity: 500, maxAge: 60 * 60 * 1000, }); this.email = email; this.key = key; this.keyId = keyId; this.eagerRefreshThresholdMillis = eagerRefreshThresholdMillis !== null && eagerRefreshThresholdMillis !== void 0 ? eagerRefreshThresholdMillis : 5 * 60 * 1000; } /** * Ensures that we're caching a key appropriately, giving precedence to scopes vs. url * * @param url The URI being authorized. * @param scopes The scope or scopes being authorized * @returns A string that returns the cached key. 
*/ getCachedKey(url, scopes) { let cacheKey = url; if (scopes && Array.isArray(scopes) && scopes.length) { cacheKey = url ? `${url}_${scopes.join('_')}` : `${scopes.join('_')}`; } else if (typeof scopes === 'string') { cacheKey = url ? `${url}_${scopes}` : scopes; } if (!cacheKey) { throw Error('Scopes or url must be provided'); } return cacheKey; } /** * Get a non-expired access token, after refreshing if necessary. * * @param url The URI being authorized. * @param additionalClaims An object with a set of additional claims to * include in the payload. * @returns An object that includes the authorization header. */ getRequestHeaders(url, additionalClaims, scopes) { // Return cached authorization headers, unless we are within // eagerRefreshThresholdMillis ms of them expiring: const key = this.getCachedKey(url, scopes); const cachedToken = this.cache.get(key); const now = Date.now(); if (cachedToken && cachedToken.expiration - now > this.eagerRefreshThresholdMillis) { return cachedToken.headers; } const iat = Math.floor(Date.now() / 1000); const exp = JWTAccess.getExpirationTime(iat); let defaultClaims; // Turn scopes into space-separated string if (Array.isArray(scopes)) { scopes = scopes.join(' '); } // If scopes are specified, sign with scopes if (scopes) { defaultClaims = { iss: this.email, sub: this.email, scope: scopes, exp, iat, }; } else { defaultClaims = { iss: this.email, sub: this.email, aud: url, exp, iat, }; } // if additionalClaims are provided, ensure they do not collide with // other required claims. if (additionalClaims) { for (const claim in defaultClaims) { if (additionalClaims[claim]) { throw new Error(`The '${claim}' property is not allowed when passing additionalClaims. This claim is included in the JWT by default.`); } } } const header = this.keyId ? { ...DEFAULT_HEADER, kid: this.keyId } : DEFAULT_HEADER; const payload = Object.assign(defaultClaims, additionalClaims); // Sign the jwt and add it to the cache const signedJWT = jws.sign({ header, payload, secret: this.key }); const headers = { Authorization: `Bearer ${signedJWT}` }; this.cache.set(key, { expiration: exp * 1000, headers, }); return headers; } /** * Returns an expiration time for the JWT token. * * @param iat The issued at time for the JWT. * @returns An expiration time for the JWT. */ static getExpirationTime(iat) { const exp = iat + 3600; // 3600 seconds = 1 hour return exp; } /** * Create a JWTAccess credentials instance using the given input options. * @param json The input object. */ fromJSON(json) { if (!json) { throw new Error('Must pass in a JSON object containing the service account auth settings.'); } if (!json.client_email) { throw new Error('The incoming JSON object does not contain a client_email field'); } if (!json.private_key) { throw new Error('The incoming JSON object does not contain a private_key field'); } // Extract the relevant information from the json key file. 
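/*
 * Minimal shape of the service account JSON consumed by this fromJSON; all
 * values are placeholders.
 *
 *   {
 *     "client_email": "svc@my-project.iam.gserviceaccount.com",
 *     "private_key": "-----BEGIN PRIVATE KEY-----\n...\n-----END PRIVATE KEY-----\n",
 *     "private_key_id": "abc123",
 *     "project_id": "my-project"
 *   }
 *
 * The assignments below copy exactly these fields onto the JWTAccess instance.
 */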
this.email = json.client_email; this.key = json.private_key; this.keyId = json.private_key_id; this.projectId = json.project_id; } fromStream(inputStream, callback) { if (callback) { this.fromStreamAsync(inputStream).then(() => callback(), callback); } else { return this.fromStreamAsync(inputStream); } } fromStreamAsync(inputStream) { return new Promise((resolve, reject) => { if (!inputStream) { reject(new Error('Must pass in a stream containing the service account auth settings.')); } let s = ''; inputStream .setEncoding('utf8') .on('data', chunk => (s += chunk)) .on('error', reject) .on('end', () => { try { const data = JSON.parse(s); this.fromJSON(data); resolve(); } catch (err) { reject(err); } }); }); } } exports.JWTAccess = JWTAccess; /***/ }), /***/ 75277: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2013 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.JWT = void 0; const gtoken_1 = __nccwpck_require__(28568); const jwtaccess_1 = __nccwpck_require__(27060); const oauth2client_1 = __nccwpck_require__(10091); const authclient_1 = __nccwpck_require__(34810); class JWT extends oauth2client_1.OAuth2Client { constructor(optionsOrEmail, keyFile, key, scopes, subject, keyId) { const opts = optionsOrEmail && typeof optionsOrEmail === 'object' ? optionsOrEmail : { email: optionsOrEmail, keyFile, key, keyId, scopes, subject }; super(opts); this.email = opts.email; this.keyFile = opts.keyFile; this.key = opts.key; this.keyId = opts.keyId; this.scopes = opts.scopes; this.subject = opts.subject; this.additionalClaims = opts.additionalClaims; // Start with an expired refresh token, which will automatically be // refreshed before the first API call is made. this.credentials = { refresh_token: 'jwt-placeholder', expiry_date: 1 }; } /** * Creates a copy of the credential with the specified scopes. * @param scopes List of requested scopes or a single scope. * @return The cloned instance. */ createScoped(scopes) { const jwt = new JWT(this); jwt.scopes = scopes; return jwt; } /** * Obtains the metadata to be sent with the request. * * @param url the URI being authorized. */ async getRequestMetadataAsync(url) { url = this.defaultServicePath ? `https://${this.defaultServicePath}/` : url; const useSelfSignedJWT = (!this.hasUserScopes() && url) || (this.useJWTAccessWithScope && this.hasAnyScopes()) || this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; if (this.subject && this.universeDomain !== authclient_1.DEFAULT_UNIVERSE) { throw new RangeError(`Service Account user is configured for the credential. 
Domain-wide delegation is not supported in universes other than ${authclient_1.DEFAULT_UNIVERSE}`); } if (!this.apiKey && useSelfSignedJWT) { if (this.additionalClaims && this.additionalClaims.target_audience) { const { tokens } = await this.refreshToken(); return { headers: this.addSharedMetadataHeaders({ Authorization: `Bearer ${tokens.id_token}`, }), }; } else { // no scopes have been set, but a uri has been provided. Use JWTAccess // credentials. if (!this.access) { this.access = new jwtaccess_1.JWTAccess(this.email, this.key, this.keyId, this.eagerRefreshThresholdMillis); } let scopes; if (this.hasUserScopes()) { scopes = this.scopes; } else if (!url) { scopes = this.defaultScopes; } const useScopes = this.useJWTAccessWithScope || this.universeDomain !== authclient_1.DEFAULT_UNIVERSE; const headers = await this.access.getRequestHeaders(url !== null && url !== void 0 ? url : undefined, this.additionalClaims, // Scopes take precedent over audience for signing, // so we only provide them if `useJWTAccessWithScope` is on or // if we are in a non-default universe useScopes ? scopes : undefined); return { headers: this.addSharedMetadataHeaders(headers) }; } } else if (this.hasAnyScopes() || this.apiKey) { return super.getRequestMetadataAsync(url); } else { // If no audience, apiKey, or scopes are provided, we should not attempt // to populate any headers: return { headers: {} }; } } /** * Fetches an ID token. * @param targetAudience the audience for the fetched ID token. */ async fetchIdToken(targetAudience) { // Create a new gToken for fetching an ID token const gtoken = new gtoken_1.GoogleToken({ iss: this.email, sub: this.subject, scope: this.scopes || this.defaultScopes, keyFile: this.keyFile, key: this.key, additionalClaims: { target_audience: targetAudience }, transporter: this.transporter, }); await gtoken.getToken({ forceRefresh: true, }); if (!gtoken.idToken) { throw new Error('Unknown error: Failed to fetch ID token'); } return gtoken.idToken; } /** * Determine if there are currently scopes available. */ hasUserScopes() { if (!this.scopes) { return false; } return this.scopes.length > 0; } /** * Are there any default or user scopes defined. */ hasAnyScopes() { if (this.scopes && this.scopes.length > 0) return true; if (this.defaultScopes && this.defaultScopes.length > 0) return true; return false; } authorize(callback) { if (callback) { this.authorizeAsync().then(r => callback(null, r), callback); } else { return this.authorizeAsync(); } } async authorizeAsync() { const result = await this.refreshToken(); if (!result) { throw new Error('No result returned'); } this.credentials = result.tokens; this.credentials.refresh_token = 'jwt-placeholder'; this.key = this.gtoken.key; this.email = this.gtoken.iss; return result.tokens; } /** * Refreshes the access token. * @param refreshToken ignored * @private */ async refreshTokenNoCache( // eslint-disable-next-line @typescript-eslint/no-unused-vars refreshToken) { const gtoken = this.createGToken(); const token = await gtoken.getToken({ forceRefresh: this.isTokenExpiring(), }); const tokens = { access_token: token.access_token, token_type: 'Bearer', expiry_date: gtoken.expiresAt, id_token: gtoken.idToken, }; this.emit('tokens', tokens); return { res: null, tokens }; } /** * Create a gToken if it doesn't already exist. 
*/ createGToken() { if (!this.gtoken) { this.gtoken = new gtoken_1.GoogleToken({ iss: this.email, sub: this.subject, scope: this.scopes || this.defaultScopes, keyFile: this.keyFile, key: this.key, additionalClaims: this.additionalClaims, transporter: this.transporter, }); } return this.gtoken; } /** * Create a JWT credentials instance using the given input options. * @param json The input object. * * @remarks * * **Important**: If you accept a credential configuration (credential JSON/File/Stream) from an external source for authentication to Google Cloud, you must validate it before providing it to any Google API or library. Providing an unvalidated credential configuration to Google APIs can compromise the security of your systems and data. For more information, refer to {@link https://cloud.google.com/docs/authentication/external/externally-sourced-credentials Validate credential configurations from external sources}. */ fromJSON(json) { if (!json) { throw new Error('Must pass in a JSON object containing the service account auth settings.'); } if (!json.client_email) { throw new Error('The incoming JSON object does not contain a client_email field'); } if (!json.private_key) { throw new Error('The incoming JSON object does not contain a private_key field'); } // Extract the relevant information from the json key file. this.email = json.client_email; this.key = json.private_key; this.keyId = json.private_key_id; this.projectId = json.project_id; this.quotaProjectId = json.quota_project_id; this.universeDomain = json.universe_domain || this.universeDomain; } fromStream(inputStream, callback) { if (callback) { this.fromStreamAsync(inputStream).then(() => callback(), callback); } else { return this.fromStreamAsync(inputStream); } } fromStreamAsync(inputStream) { return new Promise((resolve, reject) => { if (!inputStream) { throw new Error('Must pass in a stream containing the service account auth settings.'); } let s = ''; inputStream .setEncoding('utf8') .on('error', reject) .on('data', chunk => (s += chunk)) .on('end', () => { try { const data = JSON.parse(s); this.fromJSON(data); resolve(); } catch (e) { reject(e); } }); }); } /** * Creates a JWT credentials instance using an API Key for authentication. * @param apiKey The API Key in string form. */ fromAPIKey(apiKey) { if (typeof apiKey !== 'string') { throw new Error('Must provide an API Key string.'); } this.apiKey = apiKey; } /** * Using the key or keyFile on the JWT client, obtain an object that contains * the key and the client email. */ async getCredentials() { if (this.key) { return { private_key: this.key, client_email: this.email }; } else if (this.keyFile) { const gtoken = this.createGToken(); const creds = await gtoken.getCredentials(this.keyFile); return { private_key: creds.privateKey, client_email: creds.clientEmail }; } throw new Error('A key or a keyFile must be provided to getCredentials.'); } } exports.JWT = JWT; /***/ }), /***/ 53882: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright 2014 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
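/*
 * JWT client sketch for server-to-server auth with the class that just ended.
 * The key file path and scope are placeholders; the awaits assume an enclosing
 * async function.
 *
 *   const { JWT } = require('google-auth-library');
 *   const client = new JWT({
 *     keyFile: '/path/to/service-account.json',
 *     scopes: ['https://www.googleapis.com/auth/cloud-platform'],
 *   });
 *   await client.authorize();                 // fetches the first access token
 *   const headers = await client.getRequestHeaders();
 *
 * For domain-wide delegation, pass `subject: 'user@example.com'` as well; note
 * the default-universe restriction enforced in getRequestMetadataAsync above.
 */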
// See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.LoginTicket = void 0; class LoginTicket { /** * Create a simple class to extract user ID from an ID Token * * @param {string} env Envelope of the jwt * @param {TokenPayload} pay Payload of the jwt * @constructor */ constructor(env, pay) { this.envelope = env; this.payload = pay; } getEnvelope() { return this.envelope; } getPayload() { return this.payload; } /** * Create a simple class to extract user ID from an ID Token * * @return The user ID */ getUserId() { const payload = this.getPayload(); if (payload && payload.sub) { return payload.sub; } return null; } /** * Returns attributes from the login ticket. This can contain * various information about the user session. * * @return The envelope and payload */ getAttributes() { return { envelope: this.getEnvelope(), payload: this.getPayload() }; } } exports.LoginTicket = LoginTicket; /***/ }), /***/ 10091: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.OAuth2Client = exports.ClientAuthentication = exports.CertificateFormat = exports.CodeChallengeMethod = void 0; const gaxios_1 = __nccwpck_require__(97003); const querystring = __nccwpck_require__(83480); const stream = __nccwpck_require__(2203); const formatEcdsa = __nccwpck_require__(325); const crypto_1 = __nccwpck_require__(88851); const authclient_1 = __nccwpck_require__(34810); const loginticket_1 = __nccwpck_require__(53882); var CodeChallengeMethod; (function (CodeChallengeMethod) { CodeChallengeMethod["Plain"] = "plain"; CodeChallengeMethod["S256"] = "S256"; })(CodeChallengeMethod || (exports.CodeChallengeMethod = CodeChallengeMethod = {})); var CertificateFormat; (function (CertificateFormat) { CertificateFormat["PEM"] = "PEM"; CertificateFormat["JWK"] = "JWK"; })(CertificateFormat || (exports.CertificateFormat = CertificateFormat = {})); /** * The client authentication type. Supported values are basic, post, and none. * https://datatracker.ietf.org/doc/html/rfc7591#section-2 */ var ClientAuthentication; (function (ClientAuthentication) { ClientAuthentication["ClientSecretPost"] = "ClientSecretPost"; ClientAuthentication["ClientSecretBasic"] = "ClientSecretBasic"; ClientAuthentication["None"] = "None"; })(ClientAuthentication || (exports.ClientAuthentication = ClientAuthentication = {})); class OAuth2Client extends authclient_1.AuthClient { constructor(optionsOrClientId, clientSecret, redirectUri) { const opts = optionsOrClientId && typeof optionsOrClientId === 'object' ? 
optionsOrClientId : { clientId: optionsOrClientId, clientSecret, redirectUri }; super(opts); this.certificateCache = {}; this.certificateExpiry = null; this.certificateCacheFormat = CertificateFormat.PEM; this.refreshTokenPromises = new Map(); this._clientId = opts.clientId; this._clientSecret = opts.clientSecret; this.redirectUri = opts.redirectUri; this.endpoints = { tokenInfoUrl: 'https://oauth2.googleapis.com/tokeninfo', oauth2AuthBaseUrl: 'https://accounts.google.com/o/oauth2/v2/auth', oauth2TokenUrl: 'https://oauth2.googleapis.com/token', oauth2RevokeUrl: 'https://oauth2.googleapis.com/revoke', oauth2FederatedSignonPemCertsUrl: 'https://www.googleapis.com/oauth2/v1/certs', oauth2FederatedSignonJwkCertsUrl: 'https://www.googleapis.com/oauth2/v3/certs', oauth2IapPublicKeyUrl: 'https://www.gstatic.com/iap/verify/public_key', ...opts.endpoints, }; this.clientAuthentication = opts.clientAuthentication || ClientAuthentication.ClientSecretPost; this.issuers = opts.issuers || [ 'accounts.google.com', 'https://accounts.google.com', this.universeDomain, ]; } /** * Generates URL for consent page landing. * @param opts Options. * @return URL to consent page. */ generateAuthUrl(opts = {}) { if (opts.code_challenge_method && !opts.code_challenge) { throw new Error('If a code_challenge_method is provided, code_challenge must be included.'); } opts.response_type = opts.response_type || 'code'; opts.client_id = opts.client_id || this._clientId; opts.redirect_uri = opts.redirect_uri || this.redirectUri; // Allow scopes to be passed either as array or a string if (Array.isArray(opts.scope)) { opts.scope = opts.scope.join(' '); } const rootUrl = this.endpoints.oauth2AuthBaseUrl.toString(); return (rootUrl + '?' + querystring.stringify(opts)); } generateCodeVerifier() { // To make the code compatible with browser SubtleCrypto we need to make // this method async. throw new Error('generateCodeVerifier is removed, please use generateCodeVerifierAsync instead.'); } /** * Convenience method to automatically generate a code_verifier, and its * resulting SHA256. If used, this must be paired with a S256 * code_challenge_method. * * For a full example see: * https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/oauth2-codeVerifier.js */ async generateCodeVerifierAsync() { // base64 encoding uses 6 bits per character, and we want to generate128 // characters. 6*128/8 = 96. const crypto = (0, crypto_1.createCrypto)(); const randomString = crypto.randomBytesBase64(96); // The valid characters in the code_verifier are [A-Z]/[a-z]/[0-9]/ // "-"/"."/"_"/"~". Base64 encoded strings are pretty close, so we're just // swapping out a few chars. const codeVerifier = randomString .replace(/\+/g, '~') .replace(/=/g, '_') .replace(/\//g, '-'); // Generate the base64 encoded SHA256 const unencodedCodeChallenge = await crypto.sha256DigestBase64(codeVerifier); // We need to use base64UrlEncoding instead of standard base64 const codeChallenge = unencodedCodeChallenge .split('=')[0] .replace(/\+/g, '-') .replace(/\//g, '_'); return { codeVerifier, codeChallenge }; } getToken(codeOrOptions, callback) { const options = typeof codeOrOptions === 'string' ? 
{ code: codeOrOptions } : codeOrOptions; if (callback) { this.getTokenAsync(options).then(r => callback(null, r.tokens, r.res), e => callback(e, null, e.response)); } else { return this.getTokenAsync(options); } } async getTokenAsync(options) { const url = this.endpoints.oauth2TokenUrl.toString(); const headers = { 'Content-Type': 'application/x-www-form-urlencoded', }; const values = { client_id: options.client_id || this._clientId, code_verifier: options.codeVerifier, code: options.code, grant_type: 'authorization_code', redirect_uri: options.redirect_uri || this.redirectUri, }; if (this.clientAuthentication === ClientAuthentication.ClientSecretBasic) { const basic = Buffer.from(`${this._clientId}:${this._clientSecret}`); headers['Authorization'] = `Basic ${basic.toString('base64')}`; } if (this.clientAuthentication === ClientAuthentication.ClientSecretPost) { values.client_secret = this._clientSecret; } const res = await this.transporter.request({ ...OAuth2Client.RETRY_CONFIG, method: 'POST', url, data: querystring.stringify(values), headers, }); const tokens = res.data; if (res.data && res.data.expires_in) { tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; delete tokens.expires_in; } this.emit('tokens', tokens); return { tokens, res }; } /** * Refreshes the access token. * @param refresh_token Existing refresh token. * @private */ async refreshToken(refreshToken) { if (!refreshToken) { return this.refreshTokenNoCache(refreshToken); } // If a request to refresh using the same token has started, // return the same promise. if (this.refreshTokenPromises.has(refreshToken)) { return this.refreshTokenPromises.get(refreshToken); } const p = this.refreshTokenNoCache(refreshToken).then(r => { this.refreshTokenPromises.delete(refreshToken); return r; }, e => { this.refreshTokenPromises.delete(refreshToken); throw e; }); this.refreshTokenPromises.set(refreshToken, p); return p; } async refreshTokenNoCache(refreshToken) { var _a; if (!refreshToken) { throw new Error('No refresh token is set.'); } const url = this.endpoints.oauth2TokenUrl.toString(); const data = { refresh_token: refreshToken, client_id: this._clientId, client_secret: this._clientSecret, grant_type: 'refresh_token', }; let res; try { // request for new token res = await this.transporter.request({ ...OAuth2Client.RETRY_CONFIG, method: 'POST', url, data: querystring.stringify(data), headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, }); } catch (e) { if (e instanceof gaxios_1.GaxiosError && e.message === 'invalid_grant' && ((_a = e.response) === null || _a === void 0 ? 
void 0 : _a.data) && /ReAuth/i.test(e.response.data.error_description)) { e.message = JSON.stringify(e.response.data); } throw e; } const tokens = res.data; // TODO: de-duplicate this code from a few spots if (res.data && res.data.expires_in) { tokens.expiry_date = new Date().getTime() + res.data.expires_in * 1000; delete tokens.expires_in; } this.emit('tokens', tokens); return { tokens, res }; } refreshAccessToken(callback) { if (callback) { this.refreshAccessTokenAsync().then(r => callback(null, r.credentials, r.res), callback); } else { return this.refreshAccessTokenAsync(); } } async refreshAccessTokenAsync() { const r = await this.refreshToken(this.credentials.refresh_token); const tokens = r.tokens; tokens.refresh_token = this.credentials.refresh_token; this.credentials = tokens; return { credentials: this.credentials, res: r.res }; } getAccessToken(callback) { if (callback) { this.getAccessTokenAsync().then(r => callback(null, r.token, r.res), callback); } else { return this.getAccessTokenAsync(); } } async getAccessTokenAsync() { const shouldRefresh = !this.credentials.access_token || this.isTokenExpiring(); if (shouldRefresh) { if (!this.credentials.refresh_token) { if (this.refreshHandler) { const refreshedAccessToken = await this.processAndValidateRefreshHandler(); if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { this.setCredentials(refreshedAccessToken); return { token: this.credentials.access_token }; } } else { throw new Error('No refresh token or refresh handler callback is set.'); } } const r = await this.refreshAccessTokenAsync(); if (!r.credentials || (r.credentials && !r.credentials.access_token)) { throw new Error('Could not refresh access token.'); } return { token: r.credentials.access_token, res: r.res }; } else { return { token: this.credentials.access_token }; } } /** * The main authentication interface. It takes an optional url which when * present is the endpoint being accessed, and returns a Promise which * resolves with authorization header fields. * * In OAuth2Client, the result has the form: * { Authorization: 'Bearer ' } * @param url The optional url being authorized */ async getRequestHeaders(url) { const headers = (await this.getRequestMetadataAsync(url)).headers; return headers; } async getRequestMetadataAsync( // eslint-disable-next-line @typescript-eslint/no-unused-vars url) { const thisCreds = this.credentials; if (!thisCreds.access_token && !thisCreds.refresh_token && !this.apiKey && !this.refreshHandler) { throw new Error('No access, refresh token, API key or refresh handler callback is set.'); } if (thisCreds.access_token && !this.isTokenExpiring()) { thisCreds.token_type = thisCreds.token_type || 'Bearer'; const headers = { Authorization: thisCreds.token_type + ' ' + thisCreds.access_token, }; return { headers: this.addSharedMetadataHeaders(headers) }; } // If refreshHandler exists, call processAndValidateRefreshHandler(). if (this.refreshHandler) { const refreshedAccessToken = await this.processAndValidateRefreshHandler(); if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? 
void 0 : refreshedAccessToken.access_token) { this.setCredentials(refreshedAccessToken); const headers = { Authorization: 'Bearer ' + this.credentials.access_token, }; return { headers: this.addSharedMetadataHeaders(headers) }; } } if (this.apiKey) { return { headers: { 'X-Goog-Api-Key': this.apiKey } }; } let r = null; let tokens = null; try { r = await this.refreshToken(thisCreds.refresh_token); tokens = r.tokens; } catch (err) { const e = err; if (e.response && (e.response.status === 403 || e.response.status === 404)) { e.message = `Could not refresh access token: ${e.message}`; } throw e; } const credentials = this.credentials; credentials.token_type = credentials.token_type || 'Bearer'; tokens.refresh_token = credentials.refresh_token; this.credentials = tokens; const headers = { Authorization: credentials.token_type + ' ' + tokens.access_token, }; return { headers: this.addSharedMetadataHeaders(headers), res: r.res }; } /** * Generates an URL to revoke the given token. * @param token The existing token to be revoked. * * @deprecated use instance method {@link OAuth2Client.getRevokeTokenURL} */ static getRevokeTokenUrl(token) { return new OAuth2Client().getRevokeTokenURL(token).toString(); } /** * Generates a URL to revoke the given token. * * @param token The existing token to be revoked. */ getRevokeTokenURL(token) { const url = new URL(this.endpoints.oauth2RevokeUrl); url.searchParams.append('token', token); return url; } revokeToken(token, callback) { const opts = { ...OAuth2Client.RETRY_CONFIG, url: this.getRevokeTokenURL(token).toString(), method: 'POST', }; if (callback) { this.transporter .request(opts) .then(r => callback(null, r), callback); } else { return this.transporter.request(opts); } } revokeCredentials(callback) { if (callback) { this.revokeCredentialsAsync().then(res => callback(null, res), callback); } else { return this.revokeCredentialsAsync(); } } async revokeCredentialsAsync() { const token = this.credentials.access_token; this.credentials = {}; if (token) { return this.revokeToken(token); } else { throw new Error('No access token to revoke.'); } } request(opts, callback) { if (callback) { this.requestAsync(opts).then(r => callback(null, r), e => { return callback(e, e.response); }); } else { return this.requestAsync(opts); } } async requestAsync(opts, reAuthRetried = false) { let r2; try { const r = await this.getRequestMetadataAsync(opts.url); opts.headers = opts.headers || {}; if (r.headers && r.headers['x-goog-user-project']) { opts.headers['x-goog-user-project'] = r.headers['x-goog-user-project']; } if (r.headers && r.headers.Authorization) { opts.headers.Authorization = r.headers.Authorization; } if (this.apiKey) { opts.headers['X-Goog-Api-Key'] = this.apiKey; } r2 = await this.transporter.request(opts); } catch (e) { const res = e.response; if (res) { const statusCode = res.status; // Retry the request for metadata if the following criteria are true: // - We haven't already retried. It only makes sense to retry once. // - The response was a 401 or a 403 // - The request didn't send a readableStream // - An access_token and refresh_token were available, but either no // expiry_date was available or the forceRefreshOnFailure flag is set. // The absent expiry_date case can happen when developers stash the // access_token and refresh_token for later use, but the access_token // fails on the first try because it's expired. Some developers may // choose to enable forceRefreshOnFailure to mitigate time-related // errors. 
// Or the following criteria are true: // - We haven't already retried. It only makes sense to retry once. // - The response was a 401 or a 403 // - The request didn't send a readableStream // - No refresh_token was available // - An access_token and a refreshHandler callback were available, but // either no expiry_date was available or the forceRefreshOnFailure // flag is set. The access_token fails on the first try because it's // expired. Some developers may choose to enable forceRefreshOnFailure // to mitigate time-related errors. const mayRequireRefresh = this.credentials && this.credentials.access_token && this.credentials.refresh_token && (!this.credentials.expiry_date || this.forceRefreshOnFailure); const mayRequireRefreshWithNoRefreshToken = this.credentials && this.credentials.access_token && !this.credentials.refresh_token && (!this.credentials.expiry_date || this.forceRefreshOnFailure) && this.refreshHandler; const isReadableStream = res.config.data instanceof stream.Readable; const isAuthErr = statusCode === 401 || statusCode === 403; if (!reAuthRetried && isAuthErr && !isReadableStream && mayRequireRefresh) { await this.refreshAccessTokenAsync(); return this.requestAsync(opts, true); } else if (!reAuthRetried && isAuthErr && !isReadableStream && mayRequireRefreshWithNoRefreshToken) { const refreshedAccessToken = await this.processAndValidateRefreshHandler(); if (refreshedAccessToken === null || refreshedAccessToken === void 0 ? void 0 : refreshedAccessToken.access_token) { this.setCredentials(refreshedAccessToken); } return this.requestAsync(opts, true); } } throw e; } return r2; } verifyIdToken(options, callback) { // This function used to accept two arguments instead of an options object. // Check the types to help users upgrade with less pain. // This check can be removed after a 2.0 release. if (callback && typeof callback !== 'function') { throw new Error('This method accepts an options object as the first parameter, which includes the idToken, audience, and maxExpiry.'); } if (callback) { this.verifyIdTokenAsync(options).then(r => callback(null, r), callback); } else { return this.verifyIdTokenAsync(options); } } async verifyIdTokenAsync(options) { if (!options.idToken) { throw new Error('The verifyIdToken method requires an ID Token'); } const response = await this.getFederatedSignonCertsAsync(); const login = await this.verifySignedJwtWithCertsAsync(options.idToken, response.certs, options.audience, this.issuers, options.maxExpiry); return login; } /** * Obtains information about the provisioned access token. Especially useful * if you want to check the scopes that were provisioned to a given token. * * @param accessToken Required. The Access Token for which you want to get * user info. 
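     *
     * @example
     * A usage sketch, not taken from the library's documentation; it assumes
     * `client` is an OAuth2Client instance and `accessToken` is a valid access
     * token string:
     *
     *   const info = await client.getTokenInfo(accessToken);
     *   // `scopes` is an array of scope strings; `expiry_date` is a Unix epoch in ms.
     *   console.log(info.scopes, new Date(info.expiry_date));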
*/ async getTokenInfo(accessToken) { const { data } = await this.transporter.request({ ...OAuth2Client.RETRY_CONFIG, method: 'POST', headers: { 'Content-Type': 'application/x-www-form-urlencoded', Authorization: `Bearer ${accessToken}`, }, url: this.endpoints.tokenInfoUrl.toString(), }); const info = Object.assign({ expiry_date: new Date().getTime() + data.expires_in * 1000, scopes: data.scope.split(' '), }, data); delete info.expires_in; delete info.scope; return info; } getFederatedSignonCerts(callback) { if (callback) { this.getFederatedSignonCertsAsync().then(r => callback(null, r.certs, r.res), callback); } else { return this.getFederatedSignonCertsAsync(); } } async getFederatedSignonCertsAsync() { const nowTime = new Date().getTime(); const format = (0, crypto_1.hasBrowserCrypto)() ? CertificateFormat.JWK : CertificateFormat.PEM; if (this.certificateExpiry && nowTime < this.certificateExpiry.getTime() && this.certificateCacheFormat === format) { return { certs: this.certificateCache, format }; } let res; let url; switch (format) { case CertificateFormat.PEM: url = this.endpoints.oauth2FederatedSignonPemCertsUrl.toString(); break; case CertificateFormat.JWK: url = this.endpoints.oauth2FederatedSignonJwkCertsUrl.toString(); break; default: throw new Error(`Unsupported certificate format ${format}`); } try { res = await this.transporter.request({ ...OAuth2Client.RETRY_CONFIG, url, }); } catch (e) { if (e instanceof Error) { e.message = `Failed to retrieve verification certificates: ${e.message}`; } throw e; } const cacheControl = res ? res.headers['cache-control'] : undefined; let cacheAge = -1; if (cacheControl) { const pattern = new RegExp('max-age=([0-9]*)'); const regexResult = pattern.exec(cacheControl); if (regexResult && regexResult.length === 2) { // Cache results with max-age (in seconds) cacheAge = Number(regexResult[1]) * 1000; // milliseconds } } let certificates = {}; switch (format) { case CertificateFormat.PEM: certificates = res.data; break; case CertificateFormat.JWK: for (const key of res.data.keys) { certificates[key.kid] = key; } break; default: throw new Error(`Unsupported certificate format ${format}`); } const now = new Date(); this.certificateExpiry = cacheAge === -1 ? null : new Date(now.getTime() + cacheAge); this.certificateCache = certificates; this.certificateCacheFormat = format; return { certs: certificates, format, res }; } getIapPublicKeys(callback) { if (callback) { this.getIapPublicKeysAsync().then(r => callback(null, r.pubkeys, r.res), callback); } else { return this.getIapPublicKeysAsync(); } } async getIapPublicKeysAsync() { let res; const url = this.endpoints.oauth2IapPublicKeyUrl.toString(); try { res = await this.transporter.request({ ...OAuth2Client.RETRY_CONFIG, url, }); } catch (e) { if (e instanceof Error) { e.message = `Failed to retrieve verification certificates: ${e.message}`; } throw e; } return { pubkeys: res.data, res }; } verifySignedJwtWithCerts() { // To make the code compatible with browser SubtleCrypto we need to make // this method async. throw new Error('verifySignedJwtWithCerts is removed, please use verifySignedJwtWithCertsAsync instead.'); } /** * Verify the id token is signed with the correct certificate * and is from the correct audience. * @param jwt The jwt to verify (The ID Token in this case). * @param certs The array of certs to test the jwt against. * @param requiredAudience The audience to test the jwt against. * @param issuers The allowed issuers of the jwt (Optional). 
* @param maxExpiry The max expiry the certificate can be (Optional). * @return Returns a promise resolving to LoginTicket on verification. */ async verifySignedJwtWithCertsAsync(jwt, certs, requiredAudience, issuers, maxExpiry) { const crypto = (0, crypto_1.createCrypto)(); if (!maxExpiry) { maxExpiry = OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_; } const segments = jwt.split('.'); if (segments.length !== 3) { throw new Error('Wrong number of segments in token: ' + jwt); } const signed = segments[0] + '.' + segments[1]; let signature = segments[2]; let envelope; let payload; try { envelope = JSON.parse(crypto.decodeBase64StringUtf8(segments[0])); } catch (err) { if (err instanceof Error) { err.message = `Can't parse token envelope: ${segments[0]}': ${err.message}`; } throw err; } if (!envelope) { throw new Error("Can't parse token envelope: " + segments[0]); } try { payload = JSON.parse(crypto.decodeBase64StringUtf8(segments[1])); } catch (err) { if (err instanceof Error) { err.message = `Can't parse token payload '${segments[0]}`; } throw err; } if (!payload) { throw new Error("Can't parse token payload: " + segments[1]); } if (!Object.prototype.hasOwnProperty.call(certs, envelope.kid)) { // If this is not present, then there's no reason to attempt verification throw new Error('No pem found for envelope: ' + JSON.stringify(envelope)); } const cert = certs[envelope.kid]; if (envelope.alg === 'ES256') { signature = formatEcdsa.joseToDer(signature, 'ES256').toString('base64'); } const verified = await crypto.verify(cert, signed, signature); if (!verified) { throw new Error('Invalid token signature: ' + jwt); } if (!payload.iat) { throw new Error('No issue time in token: ' + JSON.stringify(payload)); } if (!payload.exp) { throw new Error('No expiration time in token: ' + JSON.stringify(payload)); } const iat = Number(payload.iat); if (isNaN(iat)) throw new Error('iat field using invalid format'); const exp = Number(payload.exp); if (isNaN(exp)) throw new Error('exp field using invalid format'); const now = new Date().getTime() / 1000; if (exp >= now + maxExpiry) { throw new Error('Expiration time too far in future: ' + JSON.stringify(payload)); } const earliest = iat - OAuth2Client.CLOCK_SKEW_SECS_; const latest = exp + OAuth2Client.CLOCK_SKEW_SECS_; if (now < earliest) { throw new Error('Token used too early, ' + now + ' < ' + earliest + ': ' + JSON.stringify(payload)); } if (now > latest) { throw new Error('Token used too late, ' + now + ' > ' + latest + ': ' + JSON.stringify(payload)); } if (issuers && issuers.indexOf(payload.iss) < 0) { throw new Error('Invalid issuer, expected one of [' + issuers + '], but got ' + payload.iss); } // Check the audience matches if we have one if (typeof requiredAudience !== 'undefined' && requiredAudience !== null) { const aud = payload.aud; let audVerified = false; // If the requiredAudience is an array, check if it contains token // audience if (requiredAudience.constructor === Array) { audVerified = requiredAudience.indexOf(aud) > -1; } else { audVerified = aud === requiredAudience; } if (!audVerified) { throw new Error('Wrong recipient, payload audience != requiredAudience'); } } return new loginticket_1.LoginTicket(envelope, payload); } /** * Returns a promise that resolves with AccessTokenResponse type if * refreshHandler is defined. * If not, nothing is returned. 
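     *
     * @example
     * A sketch of the callback shape this method consumes; the handler is
     * supplied by the application and `fetchTokenFromSecretStore` is a
     * hypothetical helper, not part of this library:
     *
     *   client.refreshHandler = async () => ({
     *     access_token: await fetchTokenFromSecretStore(),
     *     expiry_date: Date.now() + 3600 * 1000,
     *   });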
*/ async processAndValidateRefreshHandler() { if (this.refreshHandler) { const accessTokenResponse = await this.refreshHandler(); if (!accessTokenResponse.access_token) { throw new Error('No access token is returned by the refreshHandler callback.'); } return accessTokenResponse; } return; } /** * Returns true if a token is expired or will expire within * eagerRefreshThresholdMillismilliseconds. * If there is no expiry time, assumes the token is not expired or expiring. */ isTokenExpiring() { const expiryDate = this.credentials.expiry_date; return expiryDate ? expiryDate <= new Date().getTime() + this.eagerRefreshThresholdMillis : false; } } exports.OAuth2Client = OAuth2Client; /** * @deprecated use instance's {@link OAuth2Client.endpoints} */ OAuth2Client.GOOGLE_TOKEN_INFO_URL = 'https://oauth2.googleapis.com/tokeninfo'; /** * Clock skew - five minutes in seconds */ OAuth2Client.CLOCK_SKEW_SECS_ = 300; /** * The default max Token Lifetime is one day in seconds */ OAuth2Client.DEFAULT_MAX_TOKEN_LIFETIME_SECS_ = 86400; /***/ }), /***/ 6653: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.OAuthClientAuthHandler = void 0; exports.getErrorFromOAuthErrorResponse = getErrorFromOAuthErrorResponse; const querystring = __nccwpck_require__(83480); const crypto_1 = __nccwpck_require__(88851); /** List of HTTP methods that accept request bodies. */ const METHODS_SUPPORTING_REQUEST_BODY = ['PUT', 'POST', 'PATCH']; /** * Abstract class for handling client authentication in OAuth-based * operations. * When request-body client authentication is used, only application/json and * application/x-www-form-urlencoded content types for HTTP methods that support * request bodies are supported. */ class OAuthClientAuthHandler { /** * Instantiates an OAuth client authentication handler. * @param clientAuthentication The client auth credentials. */ constructor(clientAuthentication) { this.clientAuthentication = clientAuthentication; this.crypto = (0, crypto_1.createCrypto)(); } /** * Applies client authentication on the OAuth request's headers or POST * body but does not process the request. * @param opts The GaxiosOptions whose headers or data are to be modified * depending on the client authentication mechanism to be used. * @param bearerToken The optional bearer token to use for authentication. * When this is used, no client authentication credentials are needed. */ applyClientAuthenticationOptions(opts, bearerToken) { // Inject authenticated header. this.injectAuthenticatedHeaders(opts, bearerToken); // Inject authenticated request body. if (!bearerToken) { this.injectAuthenticatedRequestBody(opts); } } /** * Applies client authentication on the request's header if either * basic authentication or bearer token authentication is selected. 
     *
     * @param opts The GaxiosOptions whose headers or data are to be modified
     * depending on the client authentication mechanism to be used.
     * @param bearerToken The optional bearer token to use for authentication.
     * When this is used, no client authentication credentials are needed.
     */
    injectAuthenticatedHeaders(opts, bearerToken) {
        var _a;
        // Bearer token prioritized higher than basic Auth.
        if (bearerToken) {
            opts.headers = opts.headers || {};
            Object.assign(opts.headers, {
                Authorization: `Bearer ${bearerToken}`,
            });
        }
        else if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'basic') {
            opts.headers = opts.headers || {};
            const clientId = this.clientAuthentication.clientId;
            const clientSecret = this.clientAuthentication.clientSecret || '';
            const base64EncodedCreds = this.crypto.encodeBase64StringUtf8(`${clientId}:${clientSecret}`);
            Object.assign(opts.headers, {
                Authorization: `Basic ${base64EncodedCreds}`,
            });
        }
    }
    /**
     * Applies client authentication on the request's body if request-body
     * client authentication is selected.
     *
     * @param opts The GaxiosOptions whose headers or data are to be modified
     * depending on the client authentication mechanism to be used.
     */
    injectAuthenticatedRequestBody(opts) {
        var _a;
        if (((_a = this.clientAuthentication) === null || _a === void 0 ? void 0 : _a.confidentialClientType) === 'request-body') {
            const method = (opts.method || 'GET').toUpperCase();
            // Inject authenticated request body.
            if (METHODS_SUPPORTING_REQUEST_BODY.indexOf(method) !== -1) {
                // Get content-type.
                let contentType;
                const headers = opts.headers || {};
                for (const key in headers) {
                    if (key.toLowerCase() === 'content-type' && headers[key]) {
                        contentType = headers[key].toLowerCase();
                        break;
                    }
                }
                if (contentType === 'application/x-www-form-urlencoded') {
                    opts.data = opts.data || '';
                    const data = querystring.parse(opts.data);
                    Object.assign(data, {
                        client_id: this.clientAuthentication.clientId,
                        client_secret: this.clientAuthentication.clientSecret || '',
                    });
                    opts.data = querystring.stringify(data);
                }
                else if (contentType === 'application/json') {
                    opts.data = opts.data || {};
                    Object.assign(opts.data, {
                        client_id: this.clientAuthentication.clientId,
                        client_secret: this.clientAuthentication.clientSecret || '',
                    });
                }
                else {
                    throw new Error(`${contentType} content-types are not supported with ` +
                        `${this.clientAuthentication.confidentialClientType} ` +
                        'client authentication');
                }
            }
            else {
                throw new Error(`${method} HTTP method does not support ` +
                    `${this.clientAuthentication.confidentialClientType} ` +
                    'client authentication');
            }
        }
    }
    /**
     * Retry config for Auth-related requests.
     *
     * @remarks
     *
     * This is not a part of the default {@link AuthClient.transporter transporter/gaxios}
     * config as some downstream APIs would prefer if customers explicitly enable retries,
     * such as GCS.
     */
    static get RETRY_CONFIG() {
        return {
            retry: true,
            retryConfig: {
                httpMethodsToRetry: ['GET', 'PUT', 'POST', 'HEAD', 'OPTIONS', 'DELETE'],
            },
        };
    }
}
exports.OAuthClientAuthHandler = OAuthClientAuthHandler;
/**
 * Converts an OAuth error response to a native JavaScript Error.
 * @param resp The OAuth error response to convert to a native Error object.
 * @param err The optional original error. If provided, the error properties
 * will be copied to the new error.
 * @return The converted native Error object.
 */
function getErrorFromOAuthErrorResponse(resp, err) {
    // Error response.
const errorCode = resp.error; const errorDescription = resp.error_description; const errorUri = resp.error_uri; let message = `Error code ${errorCode}`; if (typeof errorDescription !== 'undefined') { message += `: ${errorDescription}`; } if (typeof errorUri !== 'undefined') { message += ` - ${errorUri}`; } const newError = new Error(message); // Copy properties from original error to newly generated error. if (err) { const keys = Object.keys(err); if (err.stack) { // Copy error.stack if available. keys.push('stack'); } keys.forEach(key => { // Do not overwrite the message field. if (key !== 'message') { Object.defineProperty(newError, key, { // eslint-disable-next-line @typescript-eslint/no-explicit-any value: err[key], writable: false, enumerable: true, }); } }); } return newError; } /***/ }), /***/ 62045: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.PassThroughClient = void 0; const authclient_1 = __nccwpck_require__(34810); /** * An AuthClient without any Authentication information. Useful for: * - Anonymous access * - Local Emulators * - Testing Environments * */ class PassThroughClient extends authclient_1.AuthClient { /** * Creates a request without any authentication headers or checks. * * @remarks * * In testing environments it may be useful to change the provided * {@link AuthClient.transporter} for any desired request overrides/handling. * * @param opts * @returns The response of the request. */ async request(opts) { return this.transporter.request(opts); } /** * A required method of the base class. * Always will return an empty object. * * @returns {} */ async getAccessToken() { return {}; } /** * A required method of the base class. * Always will return an empty object. * * @returns {} */ async getRequestHeaders() { return {}; } } exports.PassThroughClient = PassThroughClient; const a = new PassThroughClient(); a.getAccessToken(); /***/ }), /***/ 46077: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
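/*
 * Illustrative sketch, not part of the library: `getErrorFromOAuthErrorResponse`
 * above builds its Error message as
 * "Error code <error>[: <error_description>][ - <error_uri>]".
 * The standalone helper below reproduces only that formatting step so the shape of
 * the resulting message is easy to see; it is not exported or called anywhere.
 */
function sketchOAuthErrorMessage(resp) {
    // `resp` is an RFC 6749 style error body, e.g.
    // { error: 'invalid_grant', error_description: 'Bad credentials' }.
    let message = `Error code ${resp.error}`;
    if (typeof resp.error_description !== 'undefined') {
        message += `: ${resp.error_description}`;
    }
    if (typeof resp.error_uri !== 'undefined') {
        message += ` - ${resp.error_uri}`;
    }
    return message;
}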
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.PluggableAuthClient = exports.ExecutableError = void 0; const baseexternalclient_1 = __nccwpck_require__(142); const executable_response_1 = __nccwpck_require__(43247); const pluggable_auth_handler_1 = __nccwpck_require__(908); /** * Error thrown from the executable run by PluggableAuthClient. */ class ExecutableError extends Error { constructor(message, code) { super(`The executable failed with exit code: ${code} and error message: ${message}.`); this.code = code; Object.setPrototypeOf(this, new.target.prototype); } } exports.ExecutableError = ExecutableError; /** * The default executable timeout when none is provided, in milliseconds. */ const DEFAULT_EXECUTABLE_TIMEOUT_MILLIS = 30 * 1000; /** * The minimum allowed executable timeout in milliseconds. */ const MINIMUM_EXECUTABLE_TIMEOUT_MILLIS = 5 * 1000; /** * The maximum allowed executable timeout in milliseconds. */ const MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS = 120 * 1000; /** * The environment variable to check to see if executable can be run. * Value must be set to '1' for the executable to run. */ const GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES = 'GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES'; /** * The maximum currently supported executable version. */ const MAXIMUM_EXECUTABLE_VERSION = 1; /** * PluggableAuthClient enables the exchange of workload identity pool external credentials for * Google access tokens by retrieving 3rd party tokens through a user supplied executable. These * scripts/executables are completely independent of the Google Cloud Auth libraries. These * credentials plug into ADC and will call the specified executable to retrieve the 3rd party token * to be exchanged for a Google access token. * *

 * To use these credentials, the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment variable
 * must be set to '1'. This is for security reasons.
 *
 * Both OIDC and SAML are supported. The executable must adhere to a specific response format
 * defined below.
 *
 * The executable must print out the 3rd party token to STDOUT in JSON format. When an
 * output_file is specified in the credential configuration, the executable must also handle writing the
 * JSON response to this file.
 *
 * OIDC response sample:
 * {
 *   "version": 1,
 *   "success": true,
 *   "token_type": "urn:ietf:params:oauth:token-type:id_token",
 *   "id_token": "HEADER.PAYLOAD.SIGNATURE",
 *   "expiration_time": 1620433341
 * }
 *
 * SAML2 response sample:
 * {
 *   "version": 1,
 *   "success": true,
 *   "token_type": "urn:ietf:params:oauth:token-type:saml2",
 *   "saml_response": "...",
 *   "expiration_time": 1620433341
 * }
 *
 * Error response sample:
 * {
 *   "version": 1,
 *   "success": false,
 *   "code": "401",
 *   "message": "Error message."
 * }
 *
 * The "expiration_time" field in the JSON response is only required for successful
 * responses when an output file was specified in the credential configuration.
 *
 * The auth libraries will populate certain environment variables that will be accessible by the
 * executable, such as: GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE, GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE,
 * GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE, GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL, and
 * GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE.
 *
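 * A sample credential configuration that selects this client (a sketch; every
 * value below is a placeholder, not taken from this file):
 * {
 *   "type": "external_account",
 *   "audience": "//iam.googleapis.com/projects/PROJECT_NUMBER/locations/global/workloadIdentityPools/POOL_ID/providers/PROVIDER_ID",
 *   "subject_token_type": "urn:ietf:params:oauth:token-type:id_token",
 *   "token_url": "https://sts.googleapis.com/v1/token",
 *   "credential_source": {
 *     "executable": {
 *       "command": "/path/to/executable --arg",
 *       "timeout_millis": 30000,
 *       "output_file": "/path/to/cached/response.json"
 *     }
 *   }
 * }
 *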

Please see this repositories README for a complete executable request/response specification. */ class PluggableAuthClient extends baseexternalclient_1.BaseExternalAccountClient { /** * Instantiates a PluggableAuthClient instance using the provided JSON * object loaded from an external account credentials file. * An error is thrown if the credential is not a valid pluggable auth credential. * @param options The external account options object typically loaded from * the external account JSON credential file. * @param additionalOptions **DEPRECATED, all options are available in the * `options` parameter.** Optional additional behavior customization options. * These currently customize expiration threshold time and whether to retry * on 401/403 API request errors. */ constructor(options, additionalOptions) { super(options, additionalOptions); if (!options.credential_source.executable) { throw new Error('No valid Pluggable Auth "credential_source" provided.'); } this.command = options.credential_source.executable.command; if (!this.command) { throw new Error('No valid Pluggable Auth "credential_source" provided.'); } // Check if the provided timeout exists and if it is valid. if (options.credential_source.executable.timeout_millis === undefined) { this.timeoutMillis = DEFAULT_EXECUTABLE_TIMEOUT_MILLIS; } else { this.timeoutMillis = options.credential_source.executable.timeout_millis; if (this.timeoutMillis < MINIMUM_EXECUTABLE_TIMEOUT_MILLIS || this.timeoutMillis > MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS) { throw new Error(`Timeout must be between ${MINIMUM_EXECUTABLE_TIMEOUT_MILLIS} and ` + `${MAXIMUM_EXECUTABLE_TIMEOUT_MILLIS} milliseconds.`); } } this.outputFile = options.credential_source.executable.output_file; this.handler = new pluggable_auth_handler_1.PluggableAuthHandler({ command: this.command, timeoutMillis: this.timeoutMillis, outputFile: this.outputFile, }); this.credentialSourceType = 'executable'; } /** * Triggered when an external subject token is needed to be exchanged for a * GCP access token via GCP STS endpoint. * This uses the `options.credential_source` object to figure out how * to retrieve the token using the current environment. In this case, * this calls a user provided executable which returns the subject token. * The logic is summarized as: * 1. Validated that the executable is allowed to run. The * GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment must be set to * 1 for security reasons. * 2. If an output file is specified by the user, check the file location * for a response. If the file exists and contains a valid response, * return the subject token from the file. * 3. Call the provided executable and return response. * @return A promise that resolves with the external subject token. */ async retrieveSubjectToken() { // Check if the executable is allowed to run. if (process.env[GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES] !== '1') { throw new Error('Pluggable Auth executables need to be explicitly allowed to run by ' + 'setting the GOOGLE_EXTERNAL_ACCOUNT_ALLOW_EXECUTABLES environment ' + 'Variable to 1.'); } let executableResponse = undefined; // Try to get cached executable response from output file. if (this.outputFile) { executableResponse = await this.handler.retrieveCachedResponse(); } // If no response from output file, call the executable. if (!executableResponse) { // Set up environment map with required values for the executable. 
const envMap = new Map(); envMap.set('GOOGLE_EXTERNAL_ACCOUNT_AUDIENCE', this.audience); envMap.set('GOOGLE_EXTERNAL_ACCOUNT_TOKEN_TYPE', this.subjectTokenType); // Always set to 0 because interactive mode is not supported. envMap.set('GOOGLE_EXTERNAL_ACCOUNT_INTERACTIVE', '0'); if (this.outputFile) { envMap.set('GOOGLE_EXTERNAL_ACCOUNT_OUTPUT_FILE', this.outputFile); } const serviceAccountEmail = this.getServiceAccountEmail(); if (serviceAccountEmail) { envMap.set('GOOGLE_EXTERNAL_ACCOUNT_IMPERSONATED_EMAIL', serviceAccountEmail); } executableResponse = await this.handler.retrieveResponseFromExecutable(envMap); } if (executableResponse.version > MAXIMUM_EXECUTABLE_VERSION) { throw new Error(`Version of executable is not currently supported, maximum supported version is ${MAXIMUM_EXECUTABLE_VERSION}.`); } // Check that response was successful. if (!executableResponse.success) { throw new ExecutableError(executableResponse.errorMessage, executableResponse.errorCode); } // Check that response contains expiration time if output file was specified. if (this.outputFile) { if (!executableResponse.expirationTime) { throw new executable_response_1.InvalidExpirationTimeFieldError('The executable response must contain the `expiration_time` field for successful responses when an output_file has been specified in the configuration.'); } } // Check that response is not expired. if (executableResponse.isExpired()) { throw new Error('Executable response is expired.'); } // Return subject token from response. return executableResponse.subjectToken; } } exports.PluggableAuthClient = PluggableAuthClient; /***/ }), /***/ 908: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.PluggableAuthHandler = void 0; const pluggable_auth_client_1 = __nccwpck_require__(46077); const executable_response_1 = __nccwpck_require__(43247); const childProcess = __nccwpck_require__(35317); const fs = __nccwpck_require__(79896); /** * A handler used to retrieve 3rd party token responses from user defined * executables and cached file output for the PluggableAuthClient class. */ class PluggableAuthHandler { /** * Instantiates a PluggableAuthHandler instance using the provided * PluggableAuthHandlerOptions object. */ constructor(options) { if (!options.command) { throw new Error('No command provided.'); } this.commandComponents = PluggableAuthHandler.parseCommand(options.command); this.timeoutMillis = options.timeoutMillis; if (!this.timeoutMillis) { throw new Error('No timeoutMillis provided.'); } this.outputFile = options.outputFile; } /** * Calls user provided executable to get a 3rd party subject token and * returns the response. * @param envMap a Map of additional Environment Variables required for * the executable. * @return A promise that resolves with the executable response. 
*/ retrieveResponseFromExecutable(envMap) { return new Promise((resolve, reject) => { // Spawn process to run executable using added environment variables. const child = childProcess.spawn(this.commandComponents[0], this.commandComponents.slice(1), { env: { ...process.env, ...Object.fromEntries(envMap) }, }); let output = ''; // Append stdout to output as executable runs. child.stdout.on('data', (data) => { output += data; }); // Append stderr as executable runs. child.stderr.on('data', (err) => { output += err; }); // Set up a timeout to end the child process and throw an error. const timeout = setTimeout(() => { // Kill child process and remove listeners so 'close' event doesn't get // read after child process is killed. child.removeAllListeners(); child.kill(); return reject(new Error('The executable failed to finish within the timeout specified.')); }, this.timeoutMillis); child.on('close', (code) => { // Cancel timeout if executable closes before timeout is reached. clearTimeout(timeout); if (code === 0) { // If the executable completed successfully, try to return the parsed response. try { const responseJson = JSON.parse(output); const response = new executable_response_1.ExecutableResponse(responseJson); return resolve(response); } catch (error) { if (error instanceof executable_response_1.ExecutableResponseError) { return reject(error); } return reject(new executable_response_1.ExecutableResponseError(`The executable returned an invalid response: ${output}`)); } } else { return reject(new pluggable_auth_client_1.ExecutableError(output, code.toString())); } }); }); } /** * Checks user provided output file for response from previous run of * executable and return the response if it exists, is formatted correctly, and is not expired. */ async retrieveCachedResponse() { if (!this.outputFile || this.outputFile.length === 0) { return undefined; } let filePath; try { filePath = await fs.promises.realpath(this.outputFile); } catch (_a) { // If file path cannot be resolved, return undefined. return undefined; } if (!(await fs.promises.lstat(filePath)).isFile()) { // If path does not lead to file, return undefined. return undefined; } const responseString = await fs.promises.readFile(filePath, { encoding: 'utf8', }); if (responseString === '') { return undefined; } try { const responseJson = JSON.parse(responseString); const response = new executable_response_1.ExecutableResponse(responseJson); // Check if response is successful and unexpired. if (response.isValid()) { return new executable_response_1.ExecutableResponse(responseJson); } return undefined; } catch (error) { if (error instanceof executable_response_1.ExecutableResponseError) { throw error; } throw new executable_response_1.ExecutableResponseError(`The output file contained an invalid response: ${responseString}`); } } /** * Parses given command string into component array, splitting on spaces unless * spaces are between quotation marks. */ static parseCommand(command) { // Split the command into components by splitting on spaces, // unless spaces are contained in quotation marks. const components = command.match(/(?:[^\s"]+|"[^"]*")+/g); if (!components) { throw new Error(`Provided command: "${command}" could not be parsed.`); } // Remove quotation marks from the beginning and end of each component if they are present. 
for (let i = 0; i < components.length; i++) { if (components[i][0] === '"' && components[i].slice(-1) === '"') { components[i] = components[i].slice(1, -1); } } return components; } } exports.PluggableAuthHandler = PluggableAuthHandler; /***/ }), /***/ 99807: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2015 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.UserRefreshClient = exports.USER_REFRESH_ACCOUNT_TYPE = void 0; const oauth2client_1 = __nccwpck_require__(10091); const querystring_1 = __nccwpck_require__(83480); exports.USER_REFRESH_ACCOUNT_TYPE = 'authorized_user'; class UserRefreshClient extends oauth2client_1.OAuth2Client { constructor(optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure) { const opts = optionsOrClientId && typeof optionsOrClientId === 'object' ? optionsOrClientId : { clientId: optionsOrClientId, clientSecret, refreshToken, eagerRefreshThresholdMillis, forceRefreshOnFailure, }; super(opts); this._refreshToken = opts.refreshToken; this.credentials.refresh_token = opts.refreshToken; } /** * Refreshes the access token. * @param refreshToken An ignored refreshToken.. * @param callback Optional callback. */ async refreshTokenNoCache( // eslint-disable-next-line @typescript-eslint/no-unused-vars refreshToken) { return super.refreshTokenNoCache(this._refreshToken); } async fetchIdToken(targetAudience) { const res = await this.transporter.request({ ...UserRefreshClient.RETRY_CONFIG, url: this.endpoints.oauth2TokenUrl, headers: { 'Content-Type': 'application/x-www-form-urlencoded', }, method: 'POST', data: (0, querystring_1.stringify)({ client_id: this._clientId, client_secret: this._clientSecret, grant_type: 'refresh_token', refresh_token: this._refreshToken, target_audience: targetAudience, }), }); return res.data.id_token; } /** * Create a UserRefreshClient credentials instance using the given input * options. * @param json The input object. 
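     *
     * @example
     * The expected shape of `json` (a sketch; all values are placeholders):
     *
     *   {
     *     "type": "authorized_user",
     *     "client_id": "CLIENT_ID.apps.googleusercontent.com",
     *     "client_secret": "CLIENT_SECRET",
     *     "refresh_token": "REFRESH_TOKEN"
     *   }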
*/ fromJSON(json) { if (!json) { throw new Error('Must pass in a JSON object containing the user refresh token'); } if (json.type !== 'authorized_user') { throw new Error('The incoming JSON object does not have the "authorized_user" type'); } if (!json.client_id) { throw new Error('The incoming JSON object does not contain a client_id field'); } if (!json.client_secret) { throw new Error('The incoming JSON object does not contain a client_secret field'); } if (!json.refresh_token) { throw new Error('The incoming JSON object does not contain a refresh_token field'); } this._clientId = json.client_id; this._clientSecret = json.client_secret; this._refreshToken = json.refresh_token; this.credentials.refresh_token = json.refresh_token; this.quotaProjectId = json.quota_project_id; this.universeDomain = json.universe_domain || this.universeDomain; } fromStream(inputStream, callback) { if (callback) { this.fromStreamAsync(inputStream).then(() => callback(), callback); } else { return this.fromStreamAsync(inputStream); } } async fromStreamAsync(inputStream) { return new Promise((resolve, reject) => { if (!inputStream) { return reject(new Error('Must pass in a stream containing the user refresh token.')); } let s = ''; inputStream .setEncoding('utf8') .on('error', reject) .on('data', chunk => (s += chunk)) .on('end', () => { try { const data = JSON.parse(s); this.fromJSON(data); return resolve(); } catch (err) { return reject(err); } }); }); } /** * Create a UserRefreshClient credentials instance using the given input * options. * @param json The input object. */ static fromJSON(json) { const client = new UserRefreshClient(); client.fromJSON(json); return client; } } exports.UserRefreshClient = UserRefreshClient; /***/ }), /***/ 121: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2021 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.StsCredentials = void 0; const gaxios_1 = __nccwpck_require__(97003); const querystring = __nccwpck_require__(83480); const transporters_1 = __nccwpck_require__(67633); const oauth2common_1 = __nccwpck_require__(6653); /** * Implements the OAuth 2.0 token exchange based on * https://tools.ietf.org/html/rfc8693 */ class StsCredentials extends oauth2common_1.OAuthClientAuthHandler { /** * Initializes an STS credentials instance. * @param tokenExchangeEndpoint The token exchange endpoint. * @param clientAuthentication The client authentication credentials if * available. */ constructor(tokenExchangeEndpoint, clientAuthentication) { super(clientAuthentication); this.tokenExchangeEndpoint = tokenExchangeEndpoint; this.transporter = new transporters_1.DefaultTransporter(); } /** * Exchanges the provided token for another type of token based on the * rfc8693 spec. * @param stsCredentialsOptions The token exchange options used to populate * the token exchange request. * @param additionalHeaders Optional additional headers to pass along the * request. 
* @param options Optional additional GCP-specific non-spec defined options * to send with the request. * Example: `&options=${encodeUriComponent(JSON.stringified(options))}` * @return A promise that resolves with the token exchange response containing * the requested token and its expiration time. */ async exchangeToken(stsCredentialsOptions, additionalHeaders, // eslint-disable-next-line @typescript-eslint/no-explicit-any options) { var _a, _b, _c; const values = { grant_type: stsCredentialsOptions.grantType, resource: stsCredentialsOptions.resource, audience: stsCredentialsOptions.audience, scope: (_a = stsCredentialsOptions.scope) === null || _a === void 0 ? void 0 : _a.join(' '), requested_token_type: stsCredentialsOptions.requestedTokenType, subject_token: stsCredentialsOptions.subjectToken, subject_token_type: stsCredentialsOptions.subjectTokenType, actor_token: (_b = stsCredentialsOptions.actingParty) === null || _b === void 0 ? void 0 : _b.actorToken, actor_token_type: (_c = stsCredentialsOptions.actingParty) === null || _c === void 0 ? void 0 : _c.actorTokenType, // Non-standard GCP-specific options. options: options && JSON.stringify(options), }; // Remove undefined fields. Object.keys(values).forEach(key => { // eslint-disable-next-line @typescript-eslint/no-explicit-any if (typeof values[key] === 'undefined') { // eslint-disable-next-line @typescript-eslint/no-explicit-any delete values[key]; } }); const headers = { 'Content-Type': 'application/x-www-form-urlencoded', }; // Inject additional STS headers if available. Object.assign(headers, additionalHeaders || {}); const opts = { ...StsCredentials.RETRY_CONFIG, url: this.tokenExchangeEndpoint.toString(), method: 'POST', headers, data: querystring.stringify(values), responseType: 'json', }; // Apply OAuth client authentication. this.applyClientAuthenticationOptions(opts); try { const response = await this.transporter.request(opts); // Successful response. const stsSuccessfulResponse = response.data; stsSuccessfulResponse.res = response; return stsSuccessfulResponse; } catch (error) { // Translate error to OAuthError. if (error instanceof gaxios_1.GaxiosError && error.response) { throw (0, oauth2common_1.getErrorFromOAuthErrorResponse)(error.response.data, // Preserve other fields from the original error. error); } // Request could fail before the server responds. throw error; } } } exports.StsCredentials = StsCredentials; /***/ }), /***/ 24627: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.UrlSubjectTokenSupplier = void 0; /** * Internal subject token supplier implementation used when a URL * is configured in the credential configuration used to build an {@link IdentityPoolClient} */ class UrlSubjectTokenSupplier { /** * Instantiates a URL subject token supplier. * @param opts The URL subject token supplier options to build the supplier with. 
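     *
     * @example
     * A sketch of the options shape read by this constructor (all values are
     * placeholders):
     *
     *   {
     *     url: 'https://example.com/subject-token',
     *     formatType: 'json',
     *     subjectTokenFieldName: 'access_token',
     *     headers: { 'X-Custom-Header': 'value' },
     *   }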
*/ constructor(opts) { this.url = opts.url; this.formatType = opts.formatType; this.subjectTokenFieldName = opts.subjectTokenFieldName; this.headers = opts.headers; this.additionalGaxiosOptions = opts.additionalGaxiosOptions; } /** * Sends a GET request to the URL provided in the constructor and resolves * with the returned external subject token. * @param context {@link ExternalAccountSupplierContext} from the calling * {@link IdentityPoolClient}, contains the requested audience and subject * token type for the external account identity. Not used. */ async getSubjectToken(context) { const opts = { ...this.additionalGaxiosOptions, url: this.url, method: 'GET', headers: this.headers, responseType: this.formatType, }; let subjectToken; if (this.formatType === 'text') { const response = await context.transporter.request(opts); subjectToken = response.data; } else if (this.formatType === 'json' && this.subjectTokenFieldName) { const response = await context.transporter.request(opts); subjectToken = response.data[this.subjectTokenFieldName]; } if (!subjectToken) { throw new Error('Unable to parse the subject_token from the credential_source URL'); } return subjectToken; } } exports.UrlSubjectTokenSupplier = UrlSubjectTokenSupplier; /***/ }), /***/ 93438: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /* global window */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.BrowserCrypto = void 0; // This file implements crypto functions we need using in-browser // SubtleCrypto interface `window.crypto.subtle`. const base64js = __nccwpck_require__(38793); const crypto_1 = __nccwpck_require__(88851); class BrowserCrypto { constructor() { if (typeof window === 'undefined' || window.crypto === undefined || window.crypto.subtle === undefined) { throw new Error("SubtleCrypto not found. Make sure it's an https:// website."); } } async sha256DigestBase64(str) { // SubtleCrypto digest() method is async, so we must make // this method async as well. // To calculate SHA256 digest using SubtleCrypto, we first // need to convert an input string to an ArrayBuffer: const inputBuffer = new TextEncoder().encode(str); // Result is ArrayBuffer as well. 
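// Editor's illustration (assumption, not part of the upstream source): for example,
// sha256DigestBase64('') resolves to '47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=',
// the base64 encoding of the SHA-256 digest of the empty string.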
const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); return base64js.fromByteArray(new Uint8Array(outputBuffer)); } randomBytesBase64(count) { const array = new Uint8Array(count); window.crypto.getRandomValues(array); return base64js.fromByteArray(array); } static padBase64(base64) { // base64js requires padding, so let's add some '=' while (base64.length % 4 !== 0) { base64 += '='; } return base64; } async verify(pubkey, data, signature) { const algo = { name: 'RSASSA-PKCS1-v1_5', hash: { name: 'SHA-256' }, }; const dataArray = new TextEncoder().encode(data); const signatureArray = base64js.toByteArray(BrowserCrypto.padBase64(signature)); const cryptoKey = await window.crypto.subtle.importKey('jwk', pubkey, algo, true, ['verify']); // SubtleCrypto's verify method is async so we must make // this method async as well. const result = await window.crypto.subtle.verify(algo, cryptoKey, signatureArray, dataArray); return result; } async sign(privateKey, data) { const algo = { name: 'RSASSA-PKCS1-v1_5', hash: { name: 'SHA-256' }, }; const dataArray = new TextEncoder().encode(data); const cryptoKey = await window.crypto.subtle.importKey('jwk', privateKey, algo, true, ['sign']); // SubtleCrypto's sign method is async so we must make // this method async as well. const result = await window.crypto.subtle.sign(algo, cryptoKey, dataArray); return base64js.fromByteArray(new Uint8Array(result)); } decodeBase64StringUtf8(base64) { const uint8array = base64js.toByteArray(BrowserCrypto.padBase64(base64)); const result = new TextDecoder().decode(uint8array); return result; } encodeBase64StringUtf8(text) { const uint8array = new TextEncoder().encode(text); const result = base64js.fromByteArray(uint8array); return result; } /** * Computes the SHA-256 hash of the provided string. * @param str The plain text string to hash. * @return A promise that resolves with the SHA-256 hash of the provided * string in hexadecimal encoding. */ async sha256DigestHex(str) { // SubtleCrypto digest() method is async, so we must make // this method async as well. // To calculate SHA256 digest using SubtleCrypto, we first // need to convert an input string to an ArrayBuffer: const inputBuffer = new TextEncoder().encode(str); // Result is ArrayBuffer as well. const outputBuffer = await window.crypto.subtle.digest('SHA-256', inputBuffer); return (0, crypto_1.fromArrayBufferToHex)(outputBuffer); } /** * Computes the HMAC hash of a message using the provided crypto key and the * SHA-256 algorithm. * @param key The secret crypto key in utf-8 or ArrayBuffer format. * @param msg The plain text message. * @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer * format. */ async signWithHmacSha256(key, msg) { // Convert key, if provided in ArrayBuffer format, to string. const rawKey = typeof key === 'string' ? key : String.fromCharCode(...new Uint16Array(key)); const enc = new TextEncoder(); const cryptoKey = await window.crypto.subtle.importKey('raw', enc.encode(rawKey), { name: 'HMAC', hash: { name: 'SHA-256', }, }, false, ['sign']); return window.crypto.subtle.sign('HMAC', cryptoKey, enc.encode(msg)); } } exports.BrowserCrypto = BrowserCrypto; /***/ }), /***/ 88851: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. 
// You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /* global window */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createCrypto = createCrypto; exports.hasBrowserCrypto = hasBrowserCrypto; exports.fromArrayBufferToHex = fromArrayBufferToHex; const crypto_1 = __nccwpck_require__(93438); const crypto_2 = __nccwpck_require__(27388); function createCrypto() { if (hasBrowserCrypto()) { return new crypto_1.BrowserCrypto(); } return new crypto_2.NodeCrypto(); } function hasBrowserCrypto() { return (typeof window !== 'undefined' && typeof window.crypto !== 'undefined' && typeof window.crypto.subtle !== 'undefined'); } /** * Converts an ArrayBuffer to a hexadecimal string. * @param arrayBuffer The ArrayBuffer to convert to hexadecimal string. * @return The hexadecimal encoding of the ArrayBuffer. */ function fromArrayBufferToHex(arrayBuffer) { // Convert buffer to byte array. const byteArray = Array.from(new Uint8Array(arrayBuffer)); // Convert bytes to hex string. return byteArray .map(byte => { return byte.toString(16).padStart(2, '0'); }) .join(''); } /***/ }), /***/ 27388: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.NodeCrypto = void 0; const crypto = __nccwpck_require__(76982); class NodeCrypto { async sha256DigestBase64(str) { return crypto.createHash('sha256').update(str).digest('base64'); } randomBytesBase64(count) { return crypto.randomBytes(count).toString('base64'); } async verify(pubkey, data, signature) { const verifier = crypto.createVerify('RSA-SHA256'); verifier.update(data); verifier.end(); return verifier.verify(pubkey, signature, 'base64'); } async sign(privateKey, data) { const signer = crypto.createSign('RSA-SHA256'); signer.update(data); signer.end(); return signer.sign(privateKey, 'base64'); } decodeBase64StringUtf8(base64) { return Buffer.from(base64, 'base64').toString('utf-8'); } encodeBase64StringUtf8(text) { return Buffer.from(text, 'utf-8').toString('base64'); } /** * Computes the SHA-256 hash of the provided string. * @param str The plain text string to hash. * @return A promise that resolves with the SHA-256 hash of the provided * string in hexadecimal encoding. */ async sha256DigestHex(str) { return crypto.createHash('sha256').update(str).digest('hex'); } /** * Computes the HMAC hash of a message using the provided crypto key and the * SHA-256 algorithm. * @param key The secret crypto key in utf-8 or ArrayBuffer format. * @param msg The plain text message. 
* @return A promise that resolves with the HMAC-SHA256 hash in ArrayBuffer * format. */ async signWithHmacSha256(key, msg) { const cryptoKey = typeof key === 'string' ? key : toBuffer(key); return toArrayBuffer(crypto.createHmac('sha256', cryptoKey).update(msg).digest()); } } exports.NodeCrypto = NodeCrypto; /** * Converts a Node.js Buffer to an ArrayBuffer. * https://stackoverflow.com/questions/8609289/convert-a-binary-nodejs-buffer-to-javascript-arraybuffer * @param buffer The Buffer input to covert. * @return The ArrayBuffer representation of the input. */ function toArrayBuffer(buffer) { return buffer.buffer.slice(buffer.byteOffset, buffer.byteOffset + buffer.byteLength); } /** * Converts an ArrayBuffer to a Node.js Buffer. * @param arrayBuffer The ArrayBuffer input to covert. * @return The Buffer representation of the input. */ function toBuffer(arrayBuffer) { return Buffer.from(arrayBuffer); } /***/ }), /***/ 492: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.GoogleAuth = exports.auth = exports.DefaultTransporter = exports.PassThroughClient = exports.ExecutableError = exports.PluggableAuthClient = exports.DownscopedClient = exports.BaseExternalAccountClient = exports.ExternalAccountClient = exports.IdentityPoolClient = exports.AwsRequestSigner = exports.AwsClient = exports.UserRefreshClient = exports.LoginTicket = exports.ClientAuthentication = exports.OAuth2Client = exports.CodeChallengeMethod = exports.Impersonated = exports.JWT = exports.JWTAccess = exports.IdTokenClient = exports.IAMAuth = exports.GCPEnv = exports.Compute = exports.DEFAULT_UNIVERSE = exports.AuthClient = exports.gaxios = exports.gcpMetadata = void 0; // Copyright 2017 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. const googleauth_1 = __nccwpck_require__(95934); Object.defineProperty(exports, "GoogleAuth", ({ enumerable: true, get: function () { return googleauth_1.GoogleAuth; } })); // Export common deps to ensure types/instances are the exact match. Useful // for consistently configuring the library across versions. 
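// Editor's illustration (assumption; refers to the published google-auth-library package
// rather than this bundled copy): a typical consumer of these exports looks roughly like
//
//   const {GoogleAuth} = require('google-auth-library');
//   async function main() {
//     const auth = new GoogleAuth({scopes: 'https://www.googleapis.com/auth/cloud-platform'});
//     const client = await auth.getClient();        // resolves a client via Application Default Credentials
//     const projectId = await auth.getProjectId();
//     const res = await client.request({url: `https://dns.googleapis.com/dns/v1/projects/${projectId}`});
//     console.log(res.data);
//   }
//   main().catch(console.error);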
exports.gcpMetadata = __nccwpck_require__(23046); exports.gaxios = __nccwpck_require__(97003); var authclient_1 = __nccwpck_require__(34810); Object.defineProperty(exports, "AuthClient", ({ enumerable: true, get: function () { return authclient_1.AuthClient; } })); Object.defineProperty(exports, "DEFAULT_UNIVERSE", ({ enumerable: true, get: function () { return authclient_1.DEFAULT_UNIVERSE; } })); var computeclient_1 = __nccwpck_require__(20977); Object.defineProperty(exports, "Compute", ({ enumerable: true, get: function () { return computeclient_1.Compute; } })); var envDetect_1 = __nccwpck_require__(60963); Object.defineProperty(exports, "GCPEnv", ({ enumerable: true, get: function () { return envDetect_1.GCPEnv; } })); var iam_1 = __nccwpck_require__(89390); Object.defineProperty(exports, "IAMAuth", ({ enumerable: true, get: function () { return iam_1.IAMAuth; } })); var idtokenclient_1 = __nccwpck_require__(12718); Object.defineProperty(exports, "IdTokenClient", ({ enumerable: true, get: function () { return idtokenclient_1.IdTokenClient; } })); var jwtaccess_1 = __nccwpck_require__(27060); Object.defineProperty(exports, "JWTAccess", ({ enumerable: true, get: function () { return jwtaccess_1.JWTAccess; } })); var jwtclient_1 = __nccwpck_require__(75277); Object.defineProperty(exports, "JWT", ({ enumerable: true, get: function () { return jwtclient_1.JWT; } })); var impersonated_1 = __nccwpck_require__(39964); Object.defineProperty(exports, "Impersonated", ({ enumerable: true, get: function () { return impersonated_1.Impersonated; } })); var oauth2client_1 = __nccwpck_require__(10091); Object.defineProperty(exports, "CodeChallengeMethod", ({ enumerable: true, get: function () { return oauth2client_1.CodeChallengeMethod; } })); Object.defineProperty(exports, "OAuth2Client", ({ enumerable: true, get: function () { return oauth2client_1.OAuth2Client; } })); Object.defineProperty(exports, "ClientAuthentication", ({ enumerable: true, get: function () { return oauth2client_1.ClientAuthentication; } })); var loginticket_1 = __nccwpck_require__(53882); Object.defineProperty(exports, "LoginTicket", ({ enumerable: true, get: function () { return loginticket_1.LoginTicket; } })); var refreshclient_1 = __nccwpck_require__(99807); Object.defineProperty(exports, "UserRefreshClient", ({ enumerable: true, get: function () { return refreshclient_1.UserRefreshClient; } })); var awsclient_1 = __nccwpck_require__(81261); Object.defineProperty(exports, "AwsClient", ({ enumerable: true, get: function () { return awsclient_1.AwsClient; } })); var awsrequestsigner_1 = __nccwpck_require__(27647); Object.defineProperty(exports, "AwsRequestSigner", ({ enumerable: true, get: function () { return awsrequestsigner_1.AwsRequestSigner; } })); var identitypoolclient_1 = __nccwpck_require__(29960); Object.defineProperty(exports, "IdentityPoolClient", ({ enumerable: true, get: function () { return identitypoolclient_1.IdentityPoolClient; } })); var externalclient_1 = __nccwpck_require__(88323); Object.defineProperty(exports, "ExternalAccountClient", ({ enumerable: true, get: function () { return externalclient_1.ExternalAccountClient; } })); var baseexternalclient_1 = __nccwpck_require__(142); Object.defineProperty(exports, "BaseExternalAccountClient", ({ enumerable: true, get: function () { return baseexternalclient_1.BaseExternalAccountClient; } })); var downscopedclient_1 = __nccwpck_require__(77556); Object.defineProperty(exports, "DownscopedClient", ({ enumerable: true, get: function () { return 
downscopedclient_1.DownscopedClient; } })); var pluggable_auth_client_1 = __nccwpck_require__(46077); Object.defineProperty(exports, "PluggableAuthClient", ({ enumerable: true, get: function () { return pluggable_auth_client_1.PluggableAuthClient; } })); Object.defineProperty(exports, "ExecutableError", ({ enumerable: true, get: function () { return pluggable_auth_client_1.ExecutableError; } })); var passthrough_1 = __nccwpck_require__(62045); Object.defineProperty(exports, "PassThroughClient", ({ enumerable: true, get: function () { return passthrough_1.PassThroughClient; } })); var transporters_1 = __nccwpck_require__(67633); Object.defineProperty(exports, "DefaultTransporter", ({ enumerable: true, get: function () { return transporters_1.DefaultTransporter; } })); const auth = new googleauth_1.GoogleAuth(); exports.auth = auth; /***/ }), /***/ 78290: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright 2017 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.validate = validate; // Accepts an options object passed from the user to the API. In the // previous version of the API, it referred to a `Request` options object. // Now it refers to an Axiox Request Config object. This is here to help // ensure users don't pass invalid options when they upgrade from 0.x to 1.x. // eslint-disable-next-line @typescript-eslint/no-explicit-any function validate(options) { const vpairs = [ { invalid: 'uri', expected: 'url' }, { invalid: 'json', expected: 'data' }, { invalid: 'qs', expected: 'params' }, ]; for (const pair of vpairs) { if (options[pair.invalid]) { const e = `'${pair.invalid}' is not a valid configuration option. Please use '${pair.expected}' instead. This library is using Axios for requests. Please see https://github.com/axios/axios to learn more about the valid request options.`; throw new Error(e); } } } /***/ }), /***/ 67633: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
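// Editor's illustration (assumption, based on the configure() implementation below): outside
// a browser, DefaultTransporter#configure() stamps identifying headers onto the request, e.g.
//   new DefaultTransporter().configure({url: 'https://example.com'}).headers
//   // => roughly { 'User-Agent': 'google-api-nodejs-client/<version>',
//   //              'x-goog-api-client': 'gl-node/<node version>' }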
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DefaultTransporter = void 0; const gaxios_1 = __nccwpck_require__(97003); const options_1 = __nccwpck_require__(78290); // eslint-disable-next-line @typescript-eslint/no-var-requires const pkg = __nccwpck_require__(96066); const PRODUCT_NAME = 'google-api-nodejs-client'; class DefaultTransporter { constructor() { /** * A configurable, replacable `Gaxios` instance. */ this.instance = new gaxios_1.Gaxios(); } /** * Configures request options before making a request. * @param opts GaxiosOptions options. * @return Configured options. */ configure(opts = {}) { opts.headers = opts.headers || {}; if (typeof window === 'undefined') { // set transporter user agent if not in browser const uaValue = opts.headers['User-Agent']; if (!uaValue) { opts.headers['User-Agent'] = DefaultTransporter.USER_AGENT; } else if (!uaValue.includes(`${PRODUCT_NAME}/`)) { opts.headers['User-Agent'] = `${uaValue} ${DefaultTransporter.USER_AGENT}`; } // track google-auth-library-nodejs version: if (!opts.headers['x-goog-api-client']) { const nodeVersion = process.version.replace(/^v/, ''); opts.headers['x-goog-api-client'] = `gl-node/${nodeVersion}`; } } return opts; } /** * Makes a request using Gaxios with given options. * @param opts GaxiosOptions options. * @param callback optional callback that contains GaxiosResponse object. * @return GaxiosPromise, assuming no callback is passed. */ request(opts) { // ensure the user isn't passing in request-style options opts = this.configure(opts); (0, options_1.validate)(opts); return this.instance.request(opts).catch(e => { throw this.processError(e); }); } get defaults() { return this.instance.defaults; } set defaults(opts) { this.instance.defaults = opts; } /** * Changes the error to include details from the body. */ processError(e) { const res = e.response; const err = e; const body = res ? res.data : null; if (res && body && body.error && res.status !== 200) { if (typeof body.error === 'string') { err.message = body.error; err.status = res.status; } else if (Array.isArray(body.error.errors)) { err.message = body.error.errors .map((err2) => err2.message) .join('\n'); err.code = body.error.code; err.errors = body.error.errors; } else { err.message = body.error.message; err.code = body.error.code; } } else if (res && res.status >= 400) { // Consider all 4xx and 5xx responses errors. err.message = body; err.status = res.status; } return err; } } exports.DefaultTransporter = DefaultTransporter; /** * Default user agent. */ DefaultTransporter.USER_AGENT = `${PRODUCT_NAME}/${pkg.version}`; /***/ }), /***/ 37870: /***/ (function(__unused_webpack_module, exports) { "use strict"; // Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? 
receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var _LRUCache_instances, _LRUCache_cache, _LRUCache_moveToEnd, _LRUCache_evict; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.LRUCache = void 0; exports.snakeToCamel = snakeToCamel; exports.originalOrCamelOptions = originalOrCamelOptions; /** * Returns the camel case of a provided string. * * @remarks * * Match any `_` and not `_` pair, then return the uppercase of the not `_` * character. * * @internal * * @param str the string to convert * @returns the camelCase'd string */ function snakeToCamel(str) { return str.replace(/([_][^_])/g, match => match.slice(1).toUpperCase()); } /** * Get the value of `obj[key]` or `obj[camelCaseKey]`, with a preference * for original, non-camelCase key. * * @param obj object to lookup a value in * @returns a `get` function for getting `obj[key || snakeKey]`, if available */ function originalOrCamelOptions(obj) { /** * * @param key an index of object, preferably snake_case * @returns the value `obj[key || snakeKey]`, if available */ function get(key) { var _a; const o = (obj || {}); return (_a = o[key]) !== null && _a !== void 0 ? _a : o[snakeToCamel(key)]; } return { get }; } /** * A simple LRU cache utility. * Not meant for external usage. * * @experimental * @internal */ class LRUCache { constructor(options) { _LRUCache_instances.add(this); /** * Maps are in order. Thus, the older item is the first item. * * {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map} */ _LRUCache_cache.set(this, new Map()); this.capacity = options.capacity; this.maxAge = options.maxAge; } /** * Add an item to the cache. * * @param key the key to upsert * @param value the value of the key */ set(key, value) { __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, value); __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); } /** * Get an item from the cache. * * @param key the key to retrieve */ get(key) { const item = __classPrivateFieldGet(this, _LRUCache_cache, "f").get(key); if (!item) return; __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_moveToEnd).call(this, key, item.value); __classPrivateFieldGet(this, _LRUCache_instances, "m", _LRUCache_evict).call(this); return item.value; } } exports.LRUCache = LRUCache; _LRUCache_cache = new WeakMap(), _LRUCache_instances = new WeakSet(), _LRUCache_moveToEnd = function _LRUCache_moveToEnd(key, value) { __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(key); __classPrivateFieldGet(this, _LRUCache_cache, "f").set(key, { value, lastAccessed: Date.now(), }); }, _LRUCache_evict = function _LRUCache_evict() { const cutoffDate = this.maxAge ? Date.now() - this.maxAge : 0; /** * Because we know Maps are in order, this item is both the * last item in the list (capacity) and oldest (maxAge). 
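 *
 * Editor's illustration (assumption: capacity = 2 and no maxAge configured):
 *   set('a', 1); set('b', 2); get('a'); set('c', 3);
 *   // after get('a'), the Map order is [b, a]; set('c') makes it [b, a, c],
 *   // so 'b' is the first (oldest) entry and is the one evicted here.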
*/ let oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); while (!oldestItem.done && (__classPrivateFieldGet(this, _LRUCache_cache, "f").size > this.capacity || // too many oldestItem.value[1].lastAccessed < cutoffDate) // too old ) { __classPrivateFieldGet(this, _LRUCache_cache, "f").delete(oldestItem.value[0]); oldestItem = __classPrivateFieldGet(this, _LRUCache_cache, "f").entries().next(); } }; /***/ }), /***/ 71628: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Colours = void 0; /** * Handles figuring out if we can use ANSI colours and handing out the escape codes. * * This is for package-internal use only, and may change at any time. * * @private * @internal */ class Colours { /** * @param stream The stream (e.g. process.stderr) * @returns true if the stream should have colourization enabled */ static isEnabled(stream) { return (stream.isTTY && (typeof stream.getColorDepth === 'function' ? stream.getColorDepth() > 2 : true)); } static refresh() { Colours.enabled = Colours.isEnabled(process.stderr); if (!this.enabled) { Colours.reset = ''; Colours.bright = ''; Colours.dim = ''; Colours.red = ''; Colours.green = ''; Colours.yellow = ''; Colours.blue = ''; Colours.magenta = ''; Colours.cyan = ''; Colours.white = ''; Colours.grey = ''; } else { Colours.reset = '\u001b[0m'; Colours.bright = '\u001b[1m'; Colours.dim = '\u001b[2m'; Colours.red = '\u001b[31m'; Colours.green = '\u001b[32m'; Colours.yellow = '\u001b[33m'; Colours.blue = '\u001b[34m'; Colours.magenta = '\u001b[35m'; Colours.cyan = '\u001b[36m'; Colours.white = '\u001b[37m'; Colours.grey = '\u001b[90m'; } } } exports.Colours = Colours; Colours.enabled = false; Colours.reset = ''; Colours.bright = ''; Colours.dim = ''; Colours.red = ''; Colours.green = ''; Colours.yellow = ''; Colours.blue = ''; Colours.magenta = ''; Colours.cyan = ''; Colours.white = ''; Colours.grey = ''; Colours.refresh(); //# sourceMappingURL=colours.js.map /***/ }), /***/ 81577: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", ({ value: true })); __exportStar(__nccwpck_require__(74788), exports); //# sourceMappingURL=index.js.map /***/ }), /***/ 74788: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2021-2024 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.env = exports.DebugLogBackendBase = exports.placeholder = exports.AdhocDebugLogger = exports.LogSeverity = void 0; exports.getNodeBackend = getNodeBackend; exports.getDebugBackend = getDebugBackend; exports.getStructuredBackend = getStructuredBackend; exports.setBackend = setBackend; exports.log = log; const node_events_1 = __nccwpck_require__(78474); const process = __importStar(__nccwpck_require__(1708)); const util = __importStar(__nccwpck_require__(57975)); const colours_1 = __nccwpck_require__(71628); // Some functions (as noted) are based on the Node standard library, from // the following file: // // https://github.com/nodejs/node/blob/main/lib/internal/util/debuglog.js /** * This module defines an ad-hoc debug logger for Google Cloud Platform * client libraries in Node. An ad-hoc debug logger is a tool which lets * users use an external, unified interface (in this case, environment * variables) to determine what logging they want to see at runtime. This * isn't necessarily fed into the console, but is meant to be under the * control of the user. The kind of logging that will be produced by this * is more like "call retry happened", not "event you'd want to record * in Cloud Logger". 
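 *
 * Editor's illustration (assumption, using the `env.nodeEnables` variable defined further
 * below): a user opts in at runtime with something like
 *   GOOGLE_SDK_NODE_LOGGING=all node app.js         // enable every namespace
 *   GOOGLE_SDK_NODE_LOGGING=system:* node app.js    // enable one system and its subsystems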
* * More for Googlers implementing libraries with it: * go/cloud-client-logging-design */ /** * Possible log levels. These are a subset of Cloud Observability levels. * https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry#LogSeverity */ var LogSeverity; (function (LogSeverity) { LogSeverity["DEFAULT"] = "DEFAULT"; LogSeverity["DEBUG"] = "DEBUG"; LogSeverity["INFO"] = "INFO"; LogSeverity["WARNING"] = "WARNING"; LogSeverity["ERROR"] = "ERROR"; })(LogSeverity || (exports.LogSeverity = LogSeverity = {})); /** * Our logger instance. This actually contains the meat of dealing * with log lines, including EventEmitter. This contains the function * that will be passed back to users of the package. */ class AdhocDebugLogger extends node_events_1.EventEmitter { /** * @param upstream The backend will pass a function that will be * called whenever our logger function is invoked. */ constructor(namespace, upstream) { super(); this.namespace = namespace; this.upstream = upstream; this.func = Object.assign(this.invoke.bind(this), { // Also add an instance pointer back to us. instance: this, // And pull over the EventEmitter functionality. on: (event, listener) => this.on(event, listener), }); // Convenience methods for log levels. this.func.debug = (...args) => this.invokeSeverity(LogSeverity.DEBUG, ...args); this.func.info = (...args) => this.invokeSeverity(LogSeverity.INFO, ...args); this.func.warn = (...args) => this.invokeSeverity(LogSeverity.WARNING, ...args); this.func.error = (...args) => this.invokeSeverity(LogSeverity.ERROR, ...args); this.func.sublog = (namespace) => log(namespace, this.func); } invoke(fields, ...args) { // Push out any upstream logger first. if (this.upstream) { this.upstream(fields, ...args); } // Emit sink events. this.emit('log', fields, args); } invokeSeverity(severity, ...args) { this.invoke({ severity }, ...args); } } exports.AdhocDebugLogger = AdhocDebugLogger; /** * This can be used in place of a real logger while waiting for Promises or disabling logging. */ exports.placeholder = new AdhocDebugLogger('', () => { }).func; /** * The base class for debug logging backends. It's possible to use this, but the * same non-guarantees above still apply (unstable interface, etc). * * @private * @internal */ class DebugLogBackendBase { constructor() { var _a; this.cached = new Map(); this.filters = []; this.filtersSet = false; // Look for the Node config variable for what systems to enable. We'll store // these for the log method below, which will call setFilters() once. let nodeFlag = (_a = process.env[exports.env.nodeEnables]) !== null && _a !== void 0 ? _a : '*'; if (nodeFlag === 'all') { nodeFlag = '*'; } this.filters = nodeFlag.split(','); } log(namespace, fields, ...args) { try { if (!this.filtersSet) { this.setFilters(); this.filtersSet = true; } let logger = this.cached.get(namespace); if (!logger) { logger = this.makeLogger(namespace); this.cached.set(namespace, logger); } logger(fields, ...args); } catch (e) { // Silently ignore all errors; we don't want them to interfere with // the user's running app. // e; console.error(e); } } } exports.DebugLogBackendBase = DebugLogBackendBase; // The basic backend. This one definitely works, but it's less feature-filled. // // Rather than using util.debuglog, this implements the same basic logic directly. 
// The reason for this decision is that debuglog checks the value of the // NODE_DEBUG environment variable before any user code runs; we therefore // can't pipe our own enables into it (and util.debuglog will never print unless // the user duplicates it into NODE_DEBUG, which isn't reasonable). // class NodeBackend extends DebugLogBackendBase { constructor() { super(...arguments); // Default to allowing all systems, since we gate earlier based on whether the // variable is empty. this.enabledRegexp = /.*/g; } isEnabled(namespace) { return this.enabledRegexp.test(namespace); } makeLogger(namespace) { if (!this.enabledRegexp.test(namespace)) { return () => { }; } return (fields, ...args) => { var _a; // TODO: `fields` needs to be turned into a string here, one way or another. const nscolour = `${colours_1.Colours.green}${namespace}${colours_1.Colours.reset}`; const pid = `${colours_1.Colours.yellow}${process.pid}${colours_1.Colours.reset}`; let level; switch (fields.severity) { case LogSeverity.ERROR: level = `${colours_1.Colours.red}${fields.severity}${colours_1.Colours.reset}`; break; case LogSeverity.INFO: level = `${colours_1.Colours.magenta}${fields.severity}${colours_1.Colours.reset}`; break; case LogSeverity.WARNING: level = `${colours_1.Colours.yellow}${fields.severity}${colours_1.Colours.reset}`; break; default: level = (_a = fields.severity) !== null && _a !== void 0 ? _a : LogSeverity.DEFAULT; break; } const msg = util.formatWithOptions({ colors: colours_1.Colours.enabled }, ...args); const filteredFields = Object.assign({}, fields); delete filteredFields.severity; const fieldsJson = Object.getOwnPropertyNames(filteredFields).length ? JSON.stringify(filteredFields) : ''; const fieldsColour = fieldsJson ? `${colours_1.Colours.grey}${fieldsJson}${colours_1.Colours.reset}` : ''; console.error('%s [%s|%s] %s%s', pid, nscolour, level, msg, fieldsJson ? ` ${fieldsColour}` : ''); }; } // Regexp patterns below are from here: // https://github.com/nodejs/node/blob/c0aebed4b3395bd65d54b18d1fd00f071002ac20/lib/internal/util/debuglog.js#L36 setFilters() { const totalFilters = this.filters.join(','); const regexp = totalFilters .replace(/[|\\{}()[\]^$+?.]/g, '\\$&') .replace(/\*/g, '.*') .replace(/,/g, '$|^'); this.enabledRegexp = new RegExp(`^${regexp}$`, 'i'); } } /** * @returns A backend based on Node util.debuglog; this is the default. */ function getNodeBackend() { return new NodeBackend(); } class DebugBackend extends DebugLogBackendBase { constructor(pkg) { super(); this.debugPkg = pkg; } makeLogger(namespace) { const debugLogger = this.debugPkg(namespace); return (fields, ...args) => { // TODO: `fields` needs to be turned into a string here. debugLogger(args[0], ...args.slice(1)); }; } setFilters() { var _a; const existingFilters = (_a = process.env['NODE_DEBUG']) !== null && _a !== void 0 ? _a : ''; process.env['NODE_DEBUG'] = `${existingFilters}${existingFilters ? ',' : ''}${this.filters.join(',')}`; } } /** * Creates a "debug" package backend. The user must call require('debug') and pass * the resulting object to this function. * * ``` * setBackend(getDebugBackend(require('debug'))) * ``` * * https://www.npmjs.com/package/debug * * Note: Google does not explicitly endorse or recommend this package; it's just * being provided as an option. * * @returns A backend based on the npm "debug" package. 
*/ function getDebugBackend(debugPkg) { return new DebugBackend(debugPkg); } /** * This pretty much works like the Node logger, but it outputs structured * logging JSON matching Google Cloud's ingestion specs. Rather than handling * its own output, it wraps another backend. The passed backend must be a subclass * of `DebugLogBackendBase` (any of the backends exposed by this package will work). */ class StructuredBackend extends DebugLogBackendBase { constructor(upstream) { var _a; super(); this.upstream = (_a = upstream) !== null && _a !== void 0 ? _a : new NodeBackend(); } makeLogger(namespace) { const debugLogger = this.upstream.makeLogger(namespace); return (fields, ...args) => { var _a; const severity = (_a = fields.severity) !== null && _a !== void 0 ? _a : LogSeverity.INFO; const json = Object.assign({ severity, message: util.format(...args), }, fields); const jsonString = JSON.stringify(json); debugLogger(fields, jsonString); }; } setFilters() { this.upstream.setFilters(); } } /** * Creates a "structured logging" backend. This pretty much works like the * Node logger, but it outputs structured logging JSON matching Google * Cloud's ingestion specs instead of plain text. * * ``` * setBackend(getStructuredBackend()) * ``` * * @param upstream If you want to use something besides the Node backend to * write the actual log lines into, pass that here. * @returns A backend based on Google Cloud structured logging. */ function getStructuredBackend(upstream) { return new StructuredBackend(upstream); } /** * The environment variables that we standardized on, for all ad-hoc logging. */ exports.env = { /** * Filter wildcards specific to the Node syntax, and similar to the built-in * utils.debuglog() environment variable. If missing, disables logging. */ nodeEnables: 'GOOGLE_SDK_NODE_LOGGING', }; // Keep a copy of all namespaced loggers so users can reliably .on() them. // Note that these cached functions will need to deal with changes in the backend. const loggerCache = new Map(); // Our current global backend. This might be: let cachedBackend = undefined; /** * Set the backend to use for our log output. * - A backend object * - null to disable logging * - undefined for "nothing yet", defaults to the Node backend * * @param backend Results from one of the get*Backend() functions. */ function setBackend(backend) { cachedBackend = backend; loggerCache.clear(); } /** * Creates a logging function. Multiple calls to this with the same namespace * will produce the same logger, with the same event emitter hooks. * * Namespaces can be a simple string ("system" name), or a qualified string * (system:subsystem), which can be used for filtering, or for "system:*". * * @param namespace The namespace, a descriptive text string. * @returns A function you can call that works similar to console.log(). */ function log(namespace, parent) { // If the enable flag isn't set, do nothing. const enablesFlag = process.env[exports.env.nodeEnables]; if (!enablesFlag) { return exports.placeholder; } // This might happen mostly if the typings are dropped in a user's code, // or if they're calling from JavaScript. if (!namespace) { return exports.placeholder; } // Handle sub-loggers. if (parent) { namespace = `${parent.instance.namespace}:${namespace}`; } // Reuse loggers so things like event sinks are persistent. const existing = loggerCache.get(namespace); if (existing) { return existing.func; } // Do we have a backend yet? if (cachedBackend === null) { // Explicitly disabled. 
return exports.placeholder; } else if (cachedBackend === undefined) { // One hasn't been made yet, so default to Node. cachedBackend = getNodeBackend(); } // The logger is further wrapped so we can handle the backend changing out. const logger = (() => { let previousBackend = undefined; const newLogger = new AdhocDebugLogger(namespace, (fields, ...args) => { if (previousBackend !== cachedBackend) { // Did the user pass a custom backend? if (cachedBackend === null) { // Explicitly disabled. return; } else if (cachedBackend === undefined) { // One hasn't been made yet, so default to Node. cachedBackend = getNodeBackend(); } previousBackend = cachedBackend; } cachedBackend === null || cachedBackend === void 0 ? void 0 : cachedBackend.log(namespace, fields, ...args); }); return newLogger; })(); loggerCache.set(namespace, logger); return logger.func; } //# sourceMappingURL=logging-utils.js.map /***/ }), /***/ 1174: /***/ ((module) => { "use strict"; /** @type {import('./gOPD')} */ module.exports = Object.getOwnPropertyDescriptor; /***/ }), /***/ 33170: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /** @type {import('.')} */ var $gOPD = __nccwpck_require__(1174); if ($gOPD) { try { $gOPD([], 'length'); } catch (e) { // IE 8 has a broken gOPD $gOPD = null; } } module.exports = $gOPD; /***/ }), /***/ 28568: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; /** * Copyright 2018 Google LLC * * Distributed under MIT license. * See file LICENSE for detail or copy at https://opensource.org/licenses/MIT */ var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; }; var _GoogleToken_instances, _GoogleToken_inFlightRequest, _GoogleToken_getTokenAsync, _GoogleToken_getTokenAsyncInner, _GoogleToken_ensureEmail, _GoogleToken_revokeTokenAsync, _GoogleToken_configure, _GoogleToken_requestToken; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.GoogleToken = void 0; const fs = __nccwpck_require__(79896); const gaxios_1 = __nccwpck_require__(97003); const jws = __nccwpck_require__(33324); const path = __nccwpck_require__(16928); const util_1 = __nccwpck_require__(39023); const readFile = fs.readFile ? (0, util_1.promisify)(fs.readFile) : async () => { // if running in the web-browser, fs.readFile may not have been shimmed. 
throw new ErrorWithCode('use key rather than keyFile.', 'MISSING_CREDENTIALS'); }; const GOOGLE_TOKEN_URL = 'https://www.googleapis.com/oauth2/v4/token'; const GOOGLE_REVOKE_TOKEN_URL = 'https://accounts.google.com/o/oauth2/revoke?token='; class ErrorWithCode extends Error { constructor(message, code) { super(message); this.code = code; } } class GoogleToken { get accessToken() { return this.rawToken ? this.rawToken.access_token : undefined; } get idToken() { return this.rawToken ? this.rawToken.id_token : undefined; } get tokenType() { return this.rawToken ? this.rawToken.token_type : undefined; } get refreshToken() { return this.rawToken ? this.rawToken.refresh_token : undefined; } /** * Create a GoogleToken. * * @param options Configuration object. */ constructor(options) { _GoogleToken_instances.add(this); this.transporter = { request: opts => (0, gaxios_1.request)(opts), }; _GoogleToken_inFlightRequest.set(this, void 0); __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, options); } /** * Returns whether the token has expired. * * @return true if the token has expired, false otherwise. */ hasExpired() { const now = new Date().getTime(); if (this.rawToken && this.expiresAt) { return now >= this.expiresAt; } else { return true; } } /** * Returns whether the token will expire within eagerRefreshThresholdMillis * * @return true if the token will be expired within eagerRefreshThresholdMillis, false otherwise. */ isTokenExpiring() { var _a; const now = new Date().getTime(); const eagerRefreshThresholdMillis = (_a = this.eagerRefreshThresholdMillis) !== null && _a !== void 0 ? _a : 0; if (this.rawToken && this.expiresAt) { return this.expiresAt <= now + eagerRefreshThresholdMillis; } else { return true; } } getToken(callback, opts = {}) { if (typeof callback === 'object') { opts = callback; callback = undefined; } opts = Object.assign({ forceRefresh: false, }, opts); if (callback) { const cb = callback; __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts).then(t => cb(null, t), callback); return; } return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsync).call(this, opts); } /** * Given a keyFile, extract the key and client email if available * @param keyFile Path to a json, pem, or p12 file that contains the key. * @returns an object with privateKey and clientEmail properties */ async getCredentials(keyFile) { const ext = path.extname(keyFile); switch (ext) { case '.json': { const key = await readFile(keyFile, 'utf8'); const body = JSON.parse(key); const privateKey = body.private_key; const clientEmail = body.client_email; if (!privateKey || !clientEmail) { throw new ErrorWithCode('private_key and client_email are required.', 'MISSING_CREDENTIALS'); } return { privateKey, clientEmail }; } case '.der': case '.crt': case '.pem': { const privateKey = await readFile(keyFile, 'utf8'); return { privateKey }; } case '.p12': case '.pfx': { throw new ErrorWithCode('*.p12 certificates are not supported after v6.1.2. ' + 'Consider utilizing *.json format or converting *.p12 to *.pem using the OpenSSL CLI.', 'UNKNOWN_CERTIFICATE_TYPE'); } default: throw new ErrorWithCode('Unknown certificate type. Type is determined based on file extension. 
' + 'Current supported extensions are *.json, and *.pem.', 'UNKNOWN_CERTIFICATE_TYPE'); } } revokeToken(callback) { if (callback) { __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this).then(() => callback(), callback); return; } return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_revokeTokenAsync).call(this); } } exports.GoogleToken = GoogleToken; _GoogleToken_inFlightRequest = new WeakMap(), _GoogleToken_instances = new WeakSet(), _GoogleToken_getTokenAsync = async function _GoogleToken_getTokenAsync(opts) { if (__classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f") && !opts.forceRefresh) { return __classPrivateFieldGet(this, _GoogleToken_inFlightRequest, "f"); } try { return await (__classPrivateFieldSet(this, _GoogleToken_inFlightRequest, __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_getTokenAsyncInner).call(this, opts), "f")); } finally { __classPrivateFieldSet(this, _GoogleToken_inFlightRequest, undefined, "f"); } }, _GoogleToken_getTokenAsyncInner = async function _GoogleToken_getTokenAsyncInner(opts) { if (this.isTokenExpiring() === false && opts.forceRefresh === false) { return Promise.resolve(this.rawToken); } if (!this.key && !this.keyFile) { throw new Error('No key or keyFile set.'); } if (!this.key && this.keyFile) { const creds = await this.getCredentials(this.keyFile); this.key = creds.privateKey; this.iss = creds.clientEmail || this.iss; if (!creds.clientEmail) { __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_ensureEmail).call(this); } } return __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_requestToken).call(this); }, _GoogleToken_ensureEmail = function _GoogleToken_ensureEmail() { if (!this.iss) { throw new ErrorWithCode('email is required.', 'MISSING_CREDENTIALS'); } }, _GoogleToken_revokeTokenAsync = async function _GoogleToken_revokeTokenAsync() { if (!this.accessToken) { throw new Error('No token to revoke.'); } const url = GOOGLE_REVOKE_TOKEN_URL + this.accessToken; await this.transporter.request({ url, retry: true, }); __classPrivateFieldGet(this, _GoogleToken_instances, "m", _GoogleToken_configure).call(this, { email: this.iss, sub: this.sub, key: this.key, keyFile: this.keyFile, scope: this.scope, additionalClaims: this.additionalClaims, }); }, _GoogleToken_configure = function _GoogleToken_configure(options = {}) { this.keyFile = options.keyFile; this.key = options.key; this.rawToken = undefined; this.iss = options.email || options.iss; this.sub = options.sub; this.additionalClaims = options.additionalClaims; if (typeof options.scope === 'object') { this.scope = options.scope.join(' '); } else { this.scope = options.scope; } this.eagerRefreshThresholdMillis = options.eagerRefreshThresholdMillis; if (options.transporter) { this.transporter = options.transporter; } }, _GoogleToken_requestToken = /** * Request the token from Google. 
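 *
 * Editor's illustration (assumption, mirroring the payload built just below): the signed
 * JWT assertion sent to GOOGLE_TOKEN_URL carries roughly
 *   { iss: <service account email>, scope: <space-separated scopes>,
 *     aud: 'https://www.googleapis.com/oauth2/v4/token', iat, exp: iat + 3600, sub: <optional subject> }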
*/ async function _GoogleToken_requestToken() { var _a, _b; const iat = Math.floor(new Date().getTime() / 1000); const additionalClaims = this.additionalClaims || {}; const payload = Object.assign({ iss: this.iss, scope: this.scope, aud: GOOGLE_TOKEN_URL, exp: iat + 3600, iat, sub: this.sub, }, additionalClaims); const signedJWT = jws.sign({ header: { alg: 'RS256' }, payload, secret: this.key, }); try { const r = await this.transporter.request({ method: 'POST', url: GOOGLE_TOKEN_URL, data: { grant_type: 'urn:ietf:params:oauth:grant-type:jwt-bearer', assertion: signedJWT, }, headers: { 'Content-Type': 'application/x-www-form-urlencoded' }, responseType: 'json', retryConfig: { httpMethodsToRetry: ['POST'], }, }); this.rawToken = r.data; this.expiresAt = r.data.expires_in === null || r.data.expires_in === undefined ? undefined : (iat + r.data.expires_in) * 1000; return this.rawToken; } catch (e) { this.rawToken = undefined; this.tokenExpires = undefined; const body = e.response && ((_a = e.response) === null || _a === void 0 ? void 0 : _a.data) ? (_b = e.response) === null || _b === void 0 ? void 0 : _b.data : {}; if (body.error) { const desc = body.error_description ? `: ${body.error_description}` : ''; e.message = `${body.error}${desc}`; } throw e; } }; //# sourceMappingURL=index.js.map /***/ }), /***/ 23336: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var origSymbol = typeof Symbol !== 'undefined' && Symbol; var hasSymbolSham = __nccwpck_require__(61114); /** @type {import('.')} */ module.exports = function hasNativeSymbols() { if (typeof origSymbol !== 'function') { return false; } if (typeof Symbol !== 'function') { return false; } if (typeof origSymbol('foo') !== 'symbol') { return false; } if (typeof Symbol('bar') !== 'symbol') { return false; } return hasSymbolSham(); }; /***/ }), /***/ 61114: /***/ ((module) => { "use strict"; /** @type {import('./shams')} */ /* eslint complexity: [2, 18], max-statements: [2, 33] */ module.exports = function hasSymbols() { if (typeof Symbol !== 'function' || typeof Object.getOwnPropertySymbols !== 'function') { return false; } if (typeof Symbol.iterator === 'symbol') { return true; } /** @type {{ [k in symbol]?: unknown }} */ var obj = {}; var sym = Symbol('test'); var symObj = Object(sym); if (typeof sym === 'string') { return false; } if (Object.prototype.toString.call(sym) !== '[object Symbol]') { return false; } if (Object.prototype.toString.call(symObj) !== '[object Symbol]') { return false; } // temp disabled per https://github.com/ljharb/object.assign/issues/17 // if (sym instanceof Symbol) { return false; } // temp disabled per https://github.com/WebReflection/get-own-property-symbols/issues/4 // if (!(symObj instanceof Symbol)) { return false; } // if (typeof Symbol.prototype.toString !== 'function') { return false; } // if (String(sym) !== Symbol.prototype.toString.call(sym)) { return false; } var symVal = 42; obj[sym] = symVal; for (var _ in obj) { return false; } // eslint-disable-line no-restricted-syntax, no-unreachable-loop if (typeof Object.keys === 'function' && Object.keys(obj).length !== 0) { return false; } if (typeof Object.getOwnPropertyNames === 'function' && Object.getOwnPropertyNames(obj).length !== 0) { return false; } var syms = Object.getOwnPropertySymbols(obj); if (syms.length !== 1 || syms[0] !== sym) { return false; } if (!Object.prototype.propertyIsEnumerable.call(obj, sym)) { return false; } if (typeof Object.getOwnPropertyDescriptor === 'function') { // eslint-disable-next-line 
no-extra-parens var descriptor = /** @type {PropertyDescriptor} */ (Object.getOwnPropertyDescriptor(obj, sym)); if (descriptor.value !== symVal || descriptor.enumerable !== true) { return false; } } return true; }; /***/ }), /***/ 85479: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var hasSymbols = __nccwpck_require__(61114); /** @type {import('.')} */ module.exports = function hasToStringTagShams() { return hasSymbols() && !!Symbol.toStringTag; }; /***/ }), /***/ 54076: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var call = Function.prototype.call; var $hasOwn = Object.prototype.hasOwnProperty; var bind = __nccwpck_require__(37564); /** @type {import('.')} */ module.exports = bind.call(call, $hasOwn); /***/ }), /***/ 81970: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HttpProxyAgent = void 0; const net = __importStar(__nccwpck_require__(69278)); const tls = __importStar(__nccwpck_require__(64756)); const debug_1 = __importDefault(__nccwpck_require__(2830)); const events_1 = __nccwpck_require__(24434); const agent_base_1 = __nccwpck_require__(98894); const url_1 = __nccwpck_require__(87016); const debug = (0, debug_1.default)('http-proxy-agent'); /** * The `HttpProxyAgent` implements an HTTP Agent subclass that connects * to the specified "HTTP proxy server" in order to proxy HTTP requests. */ class HttpProxyAgent extends agent_base_1.Agent { constructor(proxy, opts) { super(opts); this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; this.proxyHeaders = opts?.headers ?? {}; debug('Creating new HttpProxyAgent instance: %o', this.proxy.href); // Trim off the brackets from IPv6 addresses const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.proxy.protocol === 'https:' ? 443 : 80; this.connectOpts = { ...(opts ? omit(opts, 'headers') : null), host, port, }; } addRequest(req, opts) { req._header = null; this.setRequestProps(req, opts); // @ts-expect-error `addRequest()` isn't defined in `@types/node` super.addRequest(req, opts); } setRequestProps(req, opts) { const { proxy } = this; const protocol = opts.secureEndpoint ? 
'https:' : 'http:'; const hostname = req.getHeader('host') || 'localhost'; const base = `${protocol}//${hostname}`; const url = new url_1.URL(req.path, base); if (opts.port !== 80) { url.port = String(opts.port); } // Change the `http.ClientRequest` instance's "path" field // to the absolute path of the URL that will be requested. req.path = String(url); // Inject the `Proxy-Authorization` header if necessary. const headers = typeof this.proxyHeaders === 'function' ? this.proxyHeaders() : { ...this.proxyHeaders }; if (proxy.username || proxy.password) { const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; } if (!headers['Proxy-Connection']) { headers['Proxy-Connection'] = this.keepAlive ? 'Keep-Alive' : 'close'; } for (const name of Object.keys(headers)) { const value = headers[name]; if (value) { req.setHeader(name, value); } } } async connect(req, opts) { req._header = null; if (!req.path.includes('://')) { this.setRequestProps(req, opts); } // At this point, the http ClientRequest's internal `_header` field // might have already been set. If this is the case then we'll need // to re-generate the string since we just changed the `req.path`. let first; let endOfHeaders; debug('Regenerating stored HTTP header string for request'); req._implicitHeader(); if (req.outputData && req.outputData.length > 0) { debug('Patching connection write() output buffer with updated header'); first = req.outputData[0].data; endOfHeaders = first.indexOf('\r\n\r\n') + 4; req.outputData[0].data = req._header + first.substring(endOfHeaders); debug('Output buffer: %o', req.outputData[0].data); } // Create a socket connection to the proxy server. let socket; if (this.proxy.protocol === 'https:') { debug('Creating `tls.Socket`: %o', this.connectOpts); socket = tls.connect(this.connectOpts); } else { debug('Creating `net.Socket`: %o', this.connectOpts); socket = net.connect(this.connectOpts); } // Wait for the socket's `connect` event, so that this `callback()` // function throws instead of the `http` request machinery. This is // important for i.e. `PacProxyAgent` which determines a failed proxy // connection via the `callback()` function throwing. await (0, events_1.once)(socket, 'connect'); return socket; } } HttpProxyAgent.protocols = ['http', 'https']; exports.HttpProxyAgent = HttpProxyAgent; function omit(obj, ...keys) { const ret = {}; let key; for (key in obj) { if (!keys.includes(key)) { ret[key] = obj[key]; } } return ret; } //# sourceMappingURL=index.js.map /***/ }), /***/ 3669: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HttpsProxyAgent = void 0; const net = __importStar(__nccwpck_require__(69278)); const tls = __importStar(__nccwpck_require__(64756)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const debug_1 = __importDefault(__nccwpck_require__(2830)); const agent_base_1 = __nccwpck_require__(98894); const url_1 = __nccwpck_require__(87016); const parse_proxy_response_1 = __nccwpck_require__(37943); const debug = (0, debug_1.default)('https-proxy-agent'); const setServernameFromNonIpHost = (options) => { if (options.servername === undefined && options.host && !net.isIP(options.host)) { return { ...options, servername: options.host, }; } return options; }; /** * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. * * Outgoing HTTP requests are first tunneled through the proxy server using the * `CONNECT` HTTP request method to establish a connection to the proxy server, * and then the proxy server connects to the destination target and issues the * HTTP request from the proxy server. * * `https:` requests have their socket connection upgraded to TLS once * the connection to the proxy server has been established. */ class HttpsProxyAgent extends agent_base_1.Agent { constructor(proxy, opts) { super(opts); this.options = { path: undefined }; this.proxy = typeof proxy === 'string' ? new url_1.URL(proxy) : proxy; this.proxyHeaders = opts?.headers ?? {}; debug('Creating new HttpsProxyAgent instance: %o', this.proxy.href); // Trim off the brackets from IPv6 addresses const host = (this.proxy.hostname || this.proxy.host).replace(/^\[|\]$/g, ''); const port = this.proxy.port ? parseInt(this.proxy.port, 10) : this.proxy.protocol === 'https:' ? 443 : 80; this.connectOpts = { // Attempt to negotiate http/1.1 for proxy servers that support http/2 ALPNProtocols: ['http/1.1'], ...(opts ? omit(opts, 'headers') : null), host, port, }; } /** * Called when the node-core HTTP client library is creating a * new HTTP request. */ async connect(req, opts) { const { proxy } = this; if (!opts.host) { throw new TypeError('No "host" provided'); } // Create a socket connection to the proxy server. let socket; if (proxy.protocol === 'https:') { debug('Creating `tls.Socket`: %o', this.connectOpts); socket = tls.connect(setServernameFromNonIpHost(this.connectOpts)); } else { debug('Creating `net.Socket`: %o', this.connectOpts); socket = net.connect(this.connectOpts); } const headers = typeof this.proxyHeaders === 'function' ? this.proxyHeaders() : { ...this.proxyHeaders }; const host = net.isIPv6(opts.host) ? `[${opts.host}]` : opts.host; let payload = `CONNECT ${host}:${opts.port} HTTP/1.1\r\n`; // Inject the `Proxy-Authorization` header if necessary. 
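// A minimal usage sketch for the class documented above (the proxy URL and
// target host are placeholders; inside this bundle the module is resolved via
// __nccwpck_require__, while the published package exposes it as shown):
//
//   const https = require('https');
//   const { HttpsProxyAgent } = require('https-proxy-agent');
//   const agent = new HttpsProxyAgent('http://user:pass@proxy.example:3128');
//   https.get('https://example.com/', { agent }, (res) => res.resume());
//
// The plain-HTTP counterpart earlier in this bundle (HttpProxyAgent, module
// 81970) accepts the same constructor arguments but rewrites req.path to an
// absolute URL instead of opening a CONNECT tunnel.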
if (proxy.username || proxy.password) { const auth = `${decodeURIComponent(proxy.username)}:${decodeURIComponent(proxy.password)}`; headers['Proxy-Authorization'] = `Basic ${Buffer.from(auth).toString('base64')}`; } headers.Host = `${host}:${opts.port}`; if (!headers['Proxy-Connection']) { headers['Proxy-Connection'] = this.keepAlive ? 'Keep-Alive' : 'close'; } for (const name of Object.keys(headers)) { payload += `${name}: ${headers[name]}\r\n`; } const proxyResponsePromise = (0, parse_proxy_response_1.parseProxyResponse)(socket); socket.write(`${payload}\r\n`); const { connect, buffered } = await proxyResponsePromise; req.emit('proxyConnect', connect); this.emit('proxyConnect', connect, req); if (connect.statusCode === 200) { req.once('socket', resume); if (opts.secureEndpoint) { // The proxy is connecting to a TLS server, so upgrade // this socket connection to a TLS connection. debug('Upgrading socket connection to TLS'); return tls.connect({ ...omit(setServernameFromNonIpHost(opts), 'host', 'path', 'port'), socket, }); } return socket; } // Some other status code that's not 200... need to re-play the HTTP // header "data" events onto the socket once the HTTP machinery is // attached so that the node core `http` can parse and handle the // error status code. // Close the original socket, and a new "fake" socket is returned // instead, so that the proxy doesn't get the HTTP request // written to it (which may contain `Authorization` headers or other // sensitive data). // // See: https://hackerone.com/reports/541502 socket.destroy(); const fakeSocket = new net.Socket({ writable: false }); fakeSocket.readable = true; // Need to wait for the "socket" event to re-play the "data" events. req.once('socket', (s) => { debug('Replaying proxy buffer for failed request'); (0, assert_1.default)(s.listenerCount('data') > 0); // Replay the "buffered" Buffer onto the fake `socket`, since at // this point the HTTP module machinery has been hooked up for // the user. s.push(buffered); s.push(null); }); return fakeSocket; } } HttpsProxyAgent.protocols = ['http', 'https']; exports.HttpsProxyAgent = HttpsProxyAgent; function resume(socket) { socket.resume(); } function omit(obj, ...keys) { const ret = {}; let key; for (key in obj) { if (!keys.includes(key)) { ret[key] = obj[key]; } } return ret; } //# sourceMappingURL=index.js.map /***/ }), /***/ 37943: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.parseProxyResponse = void 0; const debug_1 = __importDefault(__nccwpck_require__(2830)); const debug = (0, debug_1.default)('https-proxy-agent:parse-proxy-response'); function parseProxyResponse(socket) { return new Promise((resolve, reject) => { // we need to buffer any HTTP traffic that happens with the proxy before we get // the CONNECT response, so that if the response is anything other than an "200" // response code, then we can re-play the "data" events on the socket once the // HTTP parser is hooked up... 
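// For orientation, a successful handshake on the wire usually looks like this
// (the status text varies by proxy; this exchange is only illustrative):
//
//   client -> proxy:  CONNECT example.com:443 HTTP/1.1\r\nHost: example.com:443\r\n\r\n
//   proxy  -> client: HTTP/1.1 200 Connection established\r\n\r\n
//
// in which case the promise below resolves with
//   { connect: { statusCode: 200, statusText: 'Connection established', headers: {} }, buffered }
// where `buffered` holds every byte read from the socket so far, so a non-200
// response can be replayed to Node's HTTP parser by the caller.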
let buffersLength = 0; const buffers = []; function read() { const b = socket.read(); if (b) ondata(b); else socket.once('readable', read); } function cleanup() { socket.removeListener('end', onend); socket.removeListener('error', onerror); socket.removeListener('readable', read); } function onend() { cleanup(); debug('onend'); reject(new Error('Proxy connection ended before receiving CONNECT response')); } function onerror(err) { cleanup(); debug('onerror %o', err); reject(err); } function ondata(b) { buffers.push(b); buffersLength += b.length; const buffered = Buffer.concat(buffers, buffersLength); const endOfHeaders = buffered.indexOf('\r\n\r\n'); if (endOfHeaders === -1) { // keep buffering debug('have not received end of HTTP headers yet...'); read(); return; } const headerParts = buffered .slice(0, endOfHeaders) .toString('ascii') .split('\r\n'); const firstLine = headerParts.shift(); if (!firstLine) { socket.destroy(); return reject(new Error('No header received from proxy CONNECT response')); } const firstLineParts = firstLine.split(' '); const statusCode = +firstLineParts[1]; const statusText = firstLineParts.slice(2).join(' '); const headers = {}; for (const header of headerParts) { if (!header) continue; const firstColon = header.indexOf(':'); if (firstColon === -1) { socket.destroy(); return reject(new Error(`Invalid header from proxy CONNECT response: "${header}"`)); } const key = header.slice(0, firstColon).toLowerCase(); const value = header.slice(firstColon + 1).trimStart(); const current = headers[key]; if (typeof current === 'string') { headers[key] = [current, value]; } else if (Array.isArray(current)) { current.push(value); } else { headers[key] = value; } } debug('got proxy server response: %o %o', firstLine, headers); cleanup(); resolve({ connect: { statusCode, statusText, headers, }, buffered, }); } socket.on('error', onerror); socket.on('end', onend); read(); }); } exports.parseProxyResponse = parseProxyResponse; //# sourceMappingURL=parse-proxy-response.js.map /***/ }), /***/ 39598: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { try { var util = __nccwpck_require__(39023); /* istanbul ignore next */ if (typeof util.inherits !== 'function') throw ''; module.exports = util.inherits; } catch (e) { /* istanbul ignore next */ module.exports = __nccwpck_require__(26589); } /***/ }), /***/ 26589: /***/ ((module) => { if (typeof Object.create === 'function') { // implementation from standard node.js 'util' module module.exports = function inherits(ctor, superCtor) { if (superCtor) { ctor.super_ = superCtor ctor.prototype = Object.create(superCtor.prototype, { constructor: { value: ctor, enumerable: false, writable: true, configurable: true } }) } }; } else { // old school shim for old browsers module.exports = function inherits(ctor, superCtor) { if (superCtor) { ctor.super_ = superCtor var TempCtor = function () {} TempCtor.prototype = superCtor.prototype ctor.prototype = new TempCtor() ctor.prototype.constructor = ctor } } } /***/ }), /***/ 96543: /***/ ((module) => { "use strict"; const isStream = stream => stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; isStream.writable = stream => isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; isStream.readable = stream => isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; isStream.duplex = stream => 
isStream.writable(stream) && isStream.readable(stream); isStream.transform = stream => isStream.duplex(stream) && typeof stream._transform === 'function'; module.exports = isStream; /***/ }), /***/ 14826: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var json_stringify = (__nccwpck_require__(93651).stringify); var json_parse = __nccwpck_require__(3197); module.exports = function(options) { return { parse: json_parse(options), stringify: json_stringify } }; //create the default method members with no options applied for backwards compatibility module.exports.parse = json_parse(); module.exports.stringify = json_stringify; /***/ }), /***/ 3197: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var BigNumber = null; // regexpxs extracted from // (c) BSD-3-Clause // https://github.com/fastify/secure-json-parse/graphs/contributors and https://github.com/hapijs/bourne/graphs/contributors const suspectProtoRx = /(?:_|\\u005[Ff])(?:_|\\u005[Ff])(?:p|\\u0070)(?:r|\\u0072)(?:o|\\u006[Ff])(?:t|\\u0074)(?:o|\\u006[Ff])(?:_|\\u005[Ff])(?:_|\\u005[Ff])/; const suspectConstructorRx = /(?:c|\\u0063)(?:o|\\u006[Ff])(?:n|\\u006[Ee])(?:s|\\u0073)(?:t|\\u0074)(?:r|\\u0072)(?:u|\\u0075)(?:c|\\u0063)(?:t|\\u0074)(?:o|\\u006[Ff])(?:r|\\u0072)/; /* json_parse.js 2012-06-20 Public Domain. NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. This file creates a json_parse function. During create you can (optionally) specify some behavioural switches require('json-bigint')(options) The optional options parameter holds switches that drive certain aspects of the parsing process: * options.strict = true will warn about duplicate-key usage in the json. The default (strict = false) will silently ignore those and overwrite values for keys that are in duplicate use. The resulting function follows this signature: json_parse(text, reviver) This method parses a JSON text to produce an object or array. It can throw a SyntaxError exception. The optional reviver parameter is a function that can filter and transform the results. It receives each of the keys and values, and its return value is used instead of the original value. If it returns what it received, then the structure is not modified. If it returns undefined then the member is deleted. Example: // Parse the text. Values that look like ISO date strings will // be converted to Date objects. myData = json_parse(text, function (key, value) { var a; if (typeof value === 'string') { a = /^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); if (a) { return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], +a[5], +a[6])); } } return value; }); This is a reference implementation. You are free to copy, modify, or redistribute. This code should be minified before deployment. See http://javascript.crockford.com/jsmin.html USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO NOT CONTROL. */ /*members "", "\"", "\/", "\\", at, b, call, charAt, f, fromCharCode, hasOwnProperty, message, n, name, prototype, push, r, t, text */ var json_parse = function (options) { 'use strict'; // This is a function that can parse a JSON text, producing a JavaScript // data structure. It is a simple, recursive descent parser. It does not use // eval or regular expressions, so it can be used as a model for implementing // a JSON parser in other languages. // We are defining the function inside of another function to avoid creating // global variables. 
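// A short sketch of how these switches behave with this parser (the package
// entry point, module 14826 above, is assumed; the literals are illustrative):
//
//   const JSONbig = require('json-bigint')({ useNativeBigInt: true, strict: true });
//   JSONbig.parse('{"id": 9007199254740993123}').id    // 19 digits -> returned as a native BigInt
//   JSONbig.parse('{"a": 1, "a": 2}')                   // strict mode: throws on the duplicate key
//   require('json-bigint')().parse('{"__proto__": {}}') // default protoAction 'error': throws
//
// Anything longer than 15 digits is treated as "big"; shorter numbers come
// back as ordinary JavaScript numbers unless alwaysParseAsBig is set.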
// Default options one can override by passing options to the parse() var _options = { strict: false, // not being strict means do not generate syntax errors for "duplicate key" storeAsString: false, // toggles whether the values should be stored as BigNumber (default) or a string alwaysParseAsBig: false, // toggles whether all numbers should be Big useNativeBigInt: false, // toggles whether to use native BigInt instead of bignumber.js protoAction: 'error', constructorAction: 'error', }; // If there are options, then use them to override the default _options if (options !== undefined && options !== null) { if (options.strict === true) { _options.strict = true; } if (options.storeAsString === true) { _options.storeAsString = true; } _options.alwaysParseAsBig = options.alwaysParseAsBig === true ? options.alwaysParseAsBig : false; _options.useNativeBigInt = options.useNativeBigInt === true ? options.useNativeBigInt : false; if (typeof options.constructorAction !== 'undefined') { if ( options.constructorAction === 'error' || options.constructorAction === 'ignore' || options.constructorAction === 'preserve' ) { _options.constructorAction = options.constructorAction; } else { throw new Error( `Incorrect value for constructorAction option, must be "error", "ignore" or undefined but passed ${options.constructorAction}` ); } } if (typeof options.protoAction !== 'undefined') { if ( options.protoAction === 'error' || options.protoAction === 'ignore' || options.protoAction === 'preserve' ) { _options.protoAction = options.protoAction; } else { throw new Error( `Incorrect value for protoAction option, must be "error", "ignore" or undefined but passed ${options.protoAction}` ); } } } var at, // The index of the current character ch, // The current character escapee = { '"': '"', '\\': '\\', '/': '/', b: '\b', f: '\f', n: '\n', r: '\r', t: '\t', }, text, error = function (m) { // Call error when something is wrong. throw { name: 'SyntaxError', message: m, at: at, text: text, }; }, next = function (c) { // If a c parameter is provided, verify that it matches the current character. if (c && c !== ch) { error("Expected '" + c + "' instead of '" + ch + "'"); } // Get the next character. When there are no more characters, // return the empty string. ch = text.charAt(at); at += 1; return ch; }, number = function () { // Parse a number value. var number, string = ''; if (ch === '-') { string = '-'; next('-'); } while (ch >= '0' && ch <= '9') { string += ch; next(); } if (ch === '.') { string += '.'; while (next() && ch >= '0' && ch <= '9') { string += ch; } } if (ch === 'e' || ch === 'E') { string += ch; next(); if (ch === '-' || ch === '+') { string += ch; next(); } while (ch >= '0' && ch <= '9') { string += ch; next(); } } number = +string; if (!isFinite(number)) { error('Bad number'); } else { if (BigNumber == null) BigNumber = __nccwpck_require__(51259); //if (number > 9007199254740992 || number < -9007199254740992) // Bignumber has stricter check: everything with length > 15 digits disallowed if (string.length > 15) return _options.storeAsString ? string : _options.useNativeBigInt ? BigInt(string) : new BigNumber(string); else return !_options.alwaysParseAsBig ? number : _options.useNativeBigInt ? BigInt(number) : new BigNumber(number); } }, string = function () { // Parse a string value. var hex, i, string = '', uffff; // When parsing for string values, we must look for " and \ characters. 
if (ch === '"') { var startAt = at; while (next()) { if (ch === '"') { if (at - 1 > startAt) string += text.substring(startAt, at - 1); next(); return string; } if (ch === '\\') { if (at - 1 > startAt) string += text.substring(startAt, at - 1); next(); if (ch === 'u') { uffff = 0; for (i = 0; i < 4; i += 1) { hex = parseInt(next(), 16); if (!isFinite(hex)) { break; } uffff = uffff * 16 + hex; } string += String.fromCharCode(uffff); } else if (typeof escapee[ch] === 'string') { string += escapee[ch]; } else { break; } startAt = at; } } } error('Bad string'); }, white = function () { // Skip whitespace. while (ch && ch <= ' ') { next(); } }, word = function () { // true, false, or null. switch (ch) { case 't': next('t'); next('r'); next('u'); next('e'); return true; case 'f': next('f'); next('a'); next('l'); next('s'); next('e'); return false; case 'n': next('n'); next('u'); next('l'); next('l'); return null; } error("Unexpected '" + ch + "'"); }, value, // Place holder for the value function. array = function () { // Parse an array value. var array = []; if (ch === '[') { next('['); white(); if (ch === ']') { next(']'); return array; // empty array } while (ch) { array.push(value()); white(); if (ch === ']') { next(']'); return array; } next(','); white(); } } error('Bad array'); }, object = function () { // Parse an object value. var key, object = Object.create(null); if (ch === '{') { next('{'); white(); if (ch === '}') { next('}'); return object; // empty object } while (ch) { key = string(); white(); next(':'); if ( _options.strict === true && Object.hasOwnProperty.call(object, key) ) { error('Duplicate key "' + key + '"'); } if (suspectProtoRx.test(key) === true) { if (_options.protoAction === 'error') { error('Object contains forbidden prototype property'); } else if (_options.protoAction === 'ignore') { value(); } else { object[key] = value(); } } else if (suspectConstructorRx.test(key) === true) { if (_options.constructorAction === 'error') { error('Object contains forbidden constructor property'); } else if (_options.constructorAction === 'ignore') { value(); } else { object[key] = value(); } } else { object[key] = value(); } white(); if (ch === '}') { next('}'); return object; } next(','); white(); } } error('Bad object'); }; value = function () { // Parse a JSON value. It could be an object, an array, a string, a number, // or a word. white(); switch (ch) { case '{': return object(); case '[': return array(); case '"': return string(); case '-': return number(); default: return ch >= '0' && ch <= '9' ? number() : word(); } }; // Return the json_parse function. It will have access to all of the above // functions and variables. return function (source, reviver) { var result; text = source + ''; at = 0; ch = ' '; result = value(); white(); if (ch) { error('Syntax error'); } // If there is a reviver function, we recursively walk the new structure, // passing each name/value pair to the reviver function for possible // transformation, starting with a temporary root object that holds the result // in an empty key. If there is not a reviver function, we simply return the // result. return typeof reviver === 'function' ? 
(function walk(holder, key) { var k, v, value = holder[key]; if (value && typeof value === 'object') { Object.keys(value).forEach(function (k) { v = walk(value, k); if (v !== undefined) { value[k] = v; } else { delete value[k]; } }); } return reviver.call(holder, key, value); })({ '': result }, '') : result; }; }; module.exports = json_parse; /***/ }), /***/ 93651: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var BigNumber = __nccwpck_require__(51259); /* json2.js 2013-05-26 Public Domain. NO WARRANTY EXPRESSED OR IMPLIED. USE AT YOUR OWN RISK. See http://www.JSON.org/js.html This code should be minified before deployment. See http://javascript.crockford.com/jsmin.html USE YOUR OWN COPY. IT IS EXTREMELY UNWISE TO LOAD CODE FROM SERVERS YOU DO NOT CONTROL. This file creates a global JSON object containing two methods: stringify and parse. JSON.stringify(value, replacer, space) value any JavaScript value, usually an object or array. replacer an optional parameter that determines how object values are stringified for objects. It can be a function or an array of strings. space an optional parameter that specifies the indentation of nested structures. If it is omitted, the text will be packed without extra whitespace. If it is a number, it will specify the number of spaces to indent at each level. If it is a string (such as '\t' or ' '), it contains the characters used to indent at each level. This method produces a JSON text from a JavaScript value. When an object value is found, if the object contains a toJSON method, its toJSON method will be called and the result will be stringified. A toJSON method does not serialize: it returns the value represented by the name/value pair that should be serialized, or undefined if nothing should be serialized. The toJSON method will be passed the key associated with the value, and this will be bound to the value For example, this would serialize Dates as ISO strings. Date.prototype.toJSON = function (key) { function f(n) { // Format integers to have at least two digits. return n < 10 ? '0' + n : n; } return this.getUTCFullYear() + '-' + f(this.getUTCMonth() + 1) + '-' + f(this.getUTCDate()) + 'T' + f(this.getUTCHours()) + ':' + f(this.getUTCMinutes()) + ':' + f(this.getUTCSeconds()) + 'Z'; }; You can provide an optional replacer method. It will be passed the key and value of each member, with this bound to the containing object. The value that is returned from your method will be serialized. If your method returns undefined, then the member will be excluded from the serialization. If the replacer parameter is an array of strings, then it will be used to select the members to be serialized. It filters the results such that only members with keys listed in the replacer array are stringified. Values that do not have JSON representations, such as undefined or functions, will not be serialized. Such values in objects will be dropped; in arrays they will be replaced with null. You can use a replacer function to replace those with JSON values. JSON.stringify(undefined) returns undefined. The optional space parameter produces a stringification of the value that is filled with line breaks and indentation to make it easier to read. If the space parameter is a non-empty string, then that string will be used for indentation. If the space parameter is a number, then the indentation will be that many spaces. 
Example: text = JSON.stringify(['e', {pluribus: 'unum'}]); // text is '["e",{"pluribus":"unum"}]' text = JSON.stringify(['e', {pluribus: 'unum'}], null, '\t'); // text is '[\n\t"e",\n\t{\n\t\t"pluribus": "unum"\n\t}\n]' text = JSON.stringify([new Date()], function (key, value) { return this[key] instanceof Date ? 'Date(' + this[key] + ')' : value; }); // text is '["Date(---current time---)"]' JSON.parse(text, reviver) This method parses a JSON text to produce an object or array. It can throw a SyntaxError exception. The optional reviver parameter is a function that can filter and transform the results. It receives each of the keys and values, and its return value is used instead of the original value. If it returns what it received, then the structure is not modified. If it returns undefined then the member is deleted. Example: // Parse the text. Values that look like ISO date strings will // be converted to Date objects. myData = JSON.parse(text, function (key, value) { var a; if (typeof value === 'string') { a = /^(\d{4})-(\d{2})-(\d{2})T(\d{2}):(\d{2}):(\d{2}(?:\.\d*)?)Z$/.exec(value); if (a) { return new Date(Date.UTC(+a[1], +a[2] - 1, +a[3], +a[4], +a[5], +a[6])); } } return value; }); myData = JSON.parse('["Date(09/09/2001)"]', function (key, value) { var d; if (typeof value === 'string' && value.slice(0, 5) === 'Date(' && value.slice(-1) === ')') { d = new Date(value.slice(5, -1)); if (d) { return d; } } return value; }); This is a reference implementation. You are free to copy, modify, or redistribute. */ /*jslint evil: true, regexp: true */ /*members "", "\b", "\t", "\n", "\f", "\r", "\"", JSON, "\\", apply, call, charCodeAt, getUTCDate, getUTCFullYear, getUTCHours, getUTCMinutes, getUTCMonth, getUTCSeconds, hasOwnProperty, join, lastIndex, length, parse, prototype, push, replace, slice, stringify, test, toJSON, toString, valueOf */ // Create a JSON object only if one does not already exist. We create the // methods in a closure to avoid creating global variables. var JSON = module.exports; (function () { 'use strict'; function f(n) { // Format integers to have at least two digits. return n < 10 ? '0' + n : n; } var cx = /[\u0000\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, escapable = /[\\\"\x00-\x1f\x7f-\x9f\u00ad\u0600-\u0604\u070f\u17b4\u17b5\u200c-\u200f\u2028-\u202f\u2060-\u206f\ufeff\ufff0-\uffff]/g, gap, indent, meta = { // table of character substitutions '\b': '\\b', '\t': '\\t', '\n': '\\n', '\f': '\\f', '\r': '\\r', '"' : '\\"', '\\': '\\\\' }, rep; function quote(string) { // If the string contains no control characters, no quote characters, and no // backslash characters, then we can safely slap some quotes around it. // Otherwise we must also replace the offending characters with safe escape // sequences. escapable.lastIndex = 0; return escapable.test(string) ? '"' + string.replace(escapable, function (a) { var c = meta[a]; return typeof c === 'string' ? c : '\\u' + ('0000' + a.charCodeAt(0).toString(16)).slice(-4); }) + '"' : '"' + string + '"'; } function str(key, holder) { // Produce a string from holder[key]. var i, // The loop counter. k, // The member key. v, // The member value. length, mind = gap, partial, value = holder[key], isBigNumber = value != null && (value instanceof BigNumber || BigNumber.isBigNumber(value)); // If the value has a toJSON method, call it to obtain a replacement value. 
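// How big values survive stringification here: a BigNumber's toJSON (called
// just below) reduces it to its decimal string, the isBigNumber flag computed
// above then keeps that string from being quoted, and native BigInts take the
// 'bigint' case, so the digits are written back as a bare numeric literal.
// Sketch, assuming the json-bigint entry point (module 14826):
//
//   const JSONbig = require('json-bigint')();
//   JSONbig.stringify(JSONbig.parse('{"id":9007199254740993123}'))
//   // => '{"id":9007199254740993123}'  (digits preserved, no quotes added)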
if (value && typeof value === 'object' && typeof value.toJSON === 'function') { value = value.toJSON(key); } // If we were called with a replacer function, then call the replacer to // obtain a replacement value. if (typeof rep === 'function') { value = rep.call(holder, key, value); } // What happens next depends on the value's type. switch (typeof value) { case 'string': if (isBigNumber) { return value; } else { return quote(value); } case 'number': // JSON numbers must be finite. Encode non-finite numbers as null. return isFinite(value) ? String(value) : 'null'; case 'boolean': case 'null': case 'bigint': // If the value is a boolean or null, convert it to a string. Note: // typeof null does not produce 'null'. The case is included here in // the remote chance that this gets fixed someday. return String(value); // If the type is 'object', we might be dealing with an object or an array or // null. case 'object': // Due to a specification blunder in ECMAScript, typeof null is 'object', // so watch out for that case. if (!value) { return 'null'; } // Make an array to hold the partial results of stringifying this object value. gap += indent; partial = []; // Is the value an array? if (Object.prototype.toString.apply(value) === '[object Array]') { // The value is an array. Stringify every element. Use null as a placeholder // for non-JSON values. length = value.length; for (i = 0; i < length; i += 1) { partial[i] = str(i, value) || 'null'; } // Join all of the elements together, separated with commas, and wrap them in // brackets. v = partial.length === 0 ? '[]' : gap ? '[\n' + gap + partial.join(',\n' + gap) + '\n' + mind + ']' : '[' + partial.join(',') + ']'; gap = mind; return v; } // If the replacer is an array, use it to select the members to be stringified. if (rep && typeof rep === 'object') { length = rep.length; for (i = 0; i < length; i += 1) { if (typeof rep[i] === 'string') { k = rep[i]; v = str(k, value); if (v) { partial.push(quote(k) + (gap ? ': ' : ':') + v); } } } } else { // Otherwise, iterate through all of the keys in the object. Object.keys(value).forEach(function(k) { var v = str(k, value); if (v) { partial.push(quote(k) + (gap ? ': ' : ':') + v); } }); } // Join all of the member texts together, separated with commas, // and wrap them in braces. v = partial.length === 0 ? '{}' : gap ? '{\n' + gap + partial.join(',\n' + gap) + '\n' + mind + '}' : '{' + partial.join(',') + '}'; gap = mind; return v; } } // If the JSON object does not yet have a stringify method, give it one. if (typeof JSON.stringify !== 'function') { JSON.stringify = function (value, replacer, space) { // The stringify method takes a value and an optional replacer, and an optional // space parameter, and returns a JSON text. The replacer can be a function // that can replace values, or an array of strings that will select the keys. // A default replacer method can be provided. Use of the space parameter can // produce text that is more easily readable. var i; gap = ''; indent = ''; // If the space parameter is a number, make an indent string containing that // many spaces. if (typeof space === 'number') { for (i = 0; i < space; i += 1) { indent += ' '; } // If the space parameter is a string, it will be used as the indent string. } else if (typeof space === 'string') { indent = space; } // If there is a replacer, it must be a function or an array. // Otherwise, throw an error. 
rep = replacer; if (replacer && typeof replacer !== 'function' && (typeof replacer !== 'object' || typeof replacer.length !== 'number')) { throw new Error('JSON.stringify'); } // Make a fake root object containing our value under the key of ''. // Return the result of stringifying the value. return str('', {'': value}); }; } }()); /***/ }), /***/ 38622: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var Buffer = (__nccwpck_require__(93058).Buffer); var crypto = __nccwpck_require__(76982); var formatEcdsa = __nccwpck_require__(325); var util = __nccwpck_require__(39023); var MSG_INVALID_ALGORITHM = '"%s" is not a valid algorithm.\n Supported algorithms are:\n "HS256", "HS384", "HS512", "RS256", "RS384", "RS512", "PS256", "PS384", "PS512", "ES256", "ES384", "ES512" and "none".' var MSG_INVALID_SECRET = 'secret must be a string or buffer'; var MSG_INVALID_VERIFIER_KEY = 'key must be a string or a buffer'; var MSG_INVALID_SIGNER_KEY = 'key must be a string, a buffer or an object'; var supportsKeyObjects = typeof crypto.createPublicKey === 'function'; if (supportsKeyObjects) { MSG_INVALID_VERIFIER_KEY += ' or a KeyObject'; MSG_INVALID_SECRET += 'or a KeyObject'; } function checkIsPublicKey(key) { if (Buffer.isBuffer(key)) { return; } if (typeof key === 'string') { return; } if (!supportsKeyObjects) { throw typeError(MSG_INVALID_VERIFIER_KEY); } if (typeof key !== 'object') { throw typeError(MSG_INVALID_VERIFIER_KEY); } if (typeof key.type !== 'string') { throw typeError(MSG_INVALID_VERIFIER_KEY); } if (typeof key.asymmetricKeyType !== 'string') { throw typeError(MSG_INVALID_VERIFIER_KEY); } if (typeof key.export !== 'function') { throw typeError(MSG_INVALID_VERIFIER_KEY); } }; function checkIsPrivateKey(key) { if (Buffer.isBuffer(key)) { return; } if (typeof key === 'string') { return; } if (typeof key === 'object') { return; } throw typeError(MSG_INVALID_SIGNER_KEY); }; function checkIsSecretKey(key) { if (Buffer.isBuffer(key)) { return; } if (typeof key === 'string') { return key; } if (!supportsKeyObjects) { throw typeError(MSG_INVALID_SECRET); } if (typeof key !== 'object') { throw typeError(MSG_INVALID_SECRET); } if (key.type !== 'secret') { throw typeError(MSG_INVALID_SECRET); } if (typeof key.export !== 'function') { throw typeError(MSG_INVALID_SECRET); } } function fromBase64(base64) { return base64 .replace(/=/g, '') .replace(/\+/g, '-') .replace(/\//g, '_'); } function toBase64(base64url) { base64url = base64url.toString(); var padding = 4 - base64url.length % 4; if (padding !== 4) { for (var i = 0; i < padding; ++i) { base64url += '='; } } return base64url .replace(/\-/g, '+') .replace(/_/g, '/'); } function typeError(template) { var args = [].slice.call(arguments, 1); var errMsg = util.format.bind(util, template).apply(null, args); return new TypeError(errMsg); } function bufferOrString(obj) { return Buffer.isBuffer(obj) || typeof obj === 'string'; } function normalizeInput(thing) { if (!bufferOrString(thing)) thing = JSON.stringify(thing); return thing; } function createHmacSigner(bits) { return function sign(thing, secret) { checkIsSecretKey(secret); thing = normalizeInput(thing); var hmac = crypto.createHmac('sha' + bits, secret); var sig = (hmac.update(thing), hmac.digest('base64')) return fromBase64(sig); } } var bufferEqual; var timingSafeEqual = 'timingSafeEqual' in crypto ? 
function timingSafeEqual(a, b) { if (a.byteLength !== b.byteLength) { return false; } return crypto.timingSafeEqual(a, b) } : function timingSafeEqual(a, b) { if (!bufferEqual) { bufferEqual = __nccwpck_require__(39732); } return bufferEqual(a, b) } function createHmacVerifier(bits) { return function verify(thing, signature, secret) { var computedSig = createHmacSigner(bits)(thing, secret); return timingSafeEqual(Buffer.from(signature), Buffer.from(computedSig)); } } function createKeySigner(bits) { return function sign(thing, privateKey) { checkIsPrivateKey(privateKey); thing = normalizeInput(thing); // Even though we are specifying "RSA" here, this works with ECDSA // keys as well. var signer = crypto.createSign('RSA-SHA' + bits); var sig = (signer.update(thing), signer.sign(privateKey, 'base64')); return fromBase64(sig); } } function createKeyVerifier(bits) { return function verify(thing, signature, publicKey) { checkIsPublicKey(publicKey); thing = normalizeInput(thing); signature = toBase64(signature); var verifier = crypto.createVerify('RSA-SHA' + bits); verifier.update(thing); return verifier.verify(publicKey, signature, 'base64'); } } function createPSSKeySigner(bits) { return function sign(thing, privateKey) { checkIsPrivateKey(privateKey); thing = normalizeInput(thing); var signer = crypto.createSign('RSA-SHA' + bits); var sig = (signer.update(thing), signer.sign({ key: privateKey, padding: crypto.constants.RSA_PKCS1_PSS_PADDING, saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST }, 'base64')); return fromBase64(sig); } } function createPSSKeyVerifier(bits) { return function verify(thing, signature, publicKey) { checkIsPublicKey(publicKey); thing = normalizeInput(thing); signature = toBase64(signature); var verifier = crypto.createVerify('RSA-SHA' + bits); verifier.update(thing); return verifier.verify({ key: publicKey, padding: crypto.constants.RSA_PKCS1_PSS_PADDING, saltLength: crypto.constants.RSA_PSS_SALTLEN_DIGEST }, signature, 'base64'); } } function createECDSASigner(bits) { var inner = createKeySigner(bits); return function sign() { var signature = inner.apply(null, arguments); signature = formatEcdsa.derToJose(signature, 'ES' + bits); return signature; }; } function createECDSAVerifer(bits) { var inner = createKeyVerifier(bits); return function verify(thing, signature, publicKey) { signature = formatEcdsa.joseToDer(signature, 'ES' + bits).toString('base64'); var result = inner(thing, signature, publicKey); return result; }; } function createNoneSigner() { return function sign() { return ''; } } function createNoneVerifier() { return function verify(thing, signature) { return signature === ''; } } module.exports = function jwa(algorithm) { var signerFactories = { hs: createHmacSigner, rs: createKeySigner, ps: createPSSKeySigner, es: createECDSASigner, none: createNoneSigner, } var verifierFactories = { hs: createHmacVerifier, rs: createKeyVerifier, ps: createPSSKeyVerifier, es: createECDSAVerifer, none: createNoneVerifier, } var match = algorithm.match(/^(RS|PS|ES|HS)(256|384|512)$|^(none)$/); if (!match) throw typeError(MSG_INVALID_ALGORITHM, algorithm); var algo = (match[1] || match[3]).toLowerCase(); var bits = match[2]; return { sign: signerFactories[algo](bits), verify: verifierFactories[algo](bits), } }; /***/ }), /***/ 33324: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { /*global exports*/ var SignStream = __nccwpck_require__(78600); var VerifyStream = __nccwpck_require__(4368); var ALGORITHMS = [ 'HS256', 'HS384', 'HS512', 'RS256', 'RS384', 
'RS512', 'PS256', 'PS384', 'PS512', 'ES256', 'ES384', 'ES512' ]; exports.ALGORITHMS = ALGORITHMS; exports.sign = SignStream.sign; exports.verify = VerifyStream.verify; exports.decode = VerifyStream.decode; exports.isValid = VerifyStream.isValid; exports.createSign = function createSign(opts) { return new SignStream(opts); }; exports.createVerify = function createVerify(opts) { return new VerifyStream(opts); }; /***/ }), /***/ 41831: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /*global module, process*/ var Buffer = (__nccwpck_require__(93058).Buffer); var Stream = __nccwpck_require__(2203); var util = __nccwpck_require__(39023); function DataStream(data) { this.buffer = null; this.writable = true; this.readable = true; // No input if (!data) { this.buffer = Buffer.alloc(0); return this; } // Stream if (typeof data.pipe === 'function') { this.buffer = Buffer.alloc(0); data.pipe(this); return this; } // Buffer or String // or Object (assumedly a passworded key) if (data.length || typeof data === 'object') { this.buffer = data; this.writable = false; process.nextTick(function () { this.emit('end', data); this.readable = false; this.emit('close'); }.bind(this)); return this; } throw new TypeError('Unexpected data type ('+ typeof data + ')'); } util.inherits(DataStream, Stream); DataStream.prototype.write = function write(data) { this.buffer = Buffer.concat([this.buffer, Buffer.from(data)]); this.emit('data', data); }; DataStream.prototype.end = function end(data) { if (data) this.write(data); this.emit('end', data); this.emit('close'); this.writable = false; this.readable = false; }; module.exports = DataStream; /***/ }), /***/ 78600: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /*global module*/ var Buffer = (__nccwpck_require__(93058).Buffer); var DataStream = __nccwpck_require__(41831); var jwa = __nccwpck_require__(38622); var Stream = __nccwpck_require__(2203); var toString = __nccwpck_require__(95126); var util = __nccwpck_require__(39023); function base64url(string, encoding) { return Buffer .from(string, encoding) .toString('base64') .replace(/=/g, '') .replace(/\+/g, '-') .replace(/\//g, '_'); } function jwsSecuredInput(header, payload, encoding) { encoding = encoding || 'utf8'; var encodedHeader = base64url(toString(header), 'binary'); var encodedPayload = base64url(toString(payload), encoding); return util.format('%s.%s', encodedHeader, encodedPayload); } function jwsSign(opts) { var header = opts.header; var payload = opts.payload; var secretOrKey = opts.secret || opts.privateKey; var encoding = opts.encoding; var algo = jwa(header.alg); var securedInput = jwsSecuredInput(header, payload, encoding); var signature = algo.sign(securedInput, secretOrKey); return util.format('%s.%s', securedInput, signature); } function SignStream(opts) { var secret = opts.secret||opts.privateKey||opts.key; var secretStream = new DataStream(secret); this.readable = true; this.header = opts.header; this.encoding = opts.encoding; this.secret = this.privateKey = this.key = secretStream; this.payload = new DataStream(opts.payload); this.secret.once('close', function () { if (!this.payload.writable && this.readable) this.sign(); }.bind(this)); this.payload.once('close', function () { if (!this.secret.writable && this.readable) this.sign(); }.bind(this)); } util.inherits(SignStream, Stream); SignStream.prototype.sign = function sign() { try { var signature = jwsSign({ header: this.header, payload: this.payload.buffer, secret: this.secret.buffer, encoding: 
this.encoding }); this.emit('done', signature); this.emit('data', signature); this.emit('end'); this.readable = false; return signature; } catch (e) { this.readable = false; this.emit('error', e); this.emit('close'); } }; SignStream.sign = jwsSign; module.exports = SignStream; /***/ }), /***/ 95126: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /*global module*/ var Buffer = (__nccwpck_require__(20181).Buffer); module.exports = function toString(obj) { if (typeof obj === 'string') return obj; if (typeof obj === 'number' || Buffer.isBuffer(obj)) return obj.toString(); return JSON.stringify(obj); }; /***/ }), /***/ 4368: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /*global module*/ var Buffer = (__nccwpck_require__(93058).Buffer); var DataStream = __nccwpck_require__(41831); var jwa = __nccwpck_require__(38622); var Stream = __nccwpck_require__(2203); var toString = __nccwpck_require__(95126); var util = __nccwpck_require__(39023); var JWS_REGEX = /^[a-zA-Z0-9\-_]+?\.[a-zA-Z0-9\-_]+?\.([a-zA-Z0-9\-_]+)?$/; function isObject(thing) { return Object.prototype.toString.call(thing) === '[object Object]'; } function safeJsonParse(thing) { if (isObject(thing)) return thing; try { return JSON.parse(thing); } catch (e) { return undefined; } } function headerFromJWS(jwsSig) { var encodedHeader = jwsSig.split('.', 1)[0]; return safeJsonParse(Buffer.from(encodedHeader, 'base64').toString('binary')); } function securedInputFromJWS(jwsSig) { return jwsSig.split('.', 2).join('.'); } function signatureFromJWS(jwsSig) { return jwsSig.split('.')[2]; } function payloadFromJWS(jwsSig, encoding) { encoding = encoding || 'utf8'; var payload = jwsSig.split('.')[1]; return Buffer.from(payload, 'base64').toString(encoding); } function isValidJws(string) { return JWS_REGEX.test(string) && !!headerFromJWS(string); } function jwsVerify(jwsSig, algorithm, secretOrKey) { if (!algorithm) { var err = new Error("Missing algorithm parameter for jws.verify"); err.code = "MISSING_ALGORITHM"; throw err; } jwsSig = toString(jwsSig); var signature = signatureFromJWS(jwsSig); var securedInput = securedInputFromJWS(jwsSig); var algo = jwa(algorithm); return algo.verify(securedInput, signature, secretOrKey); } function jwsDecode(jwsSig, opts) { opts = opts || {}; jwsSig = toString(jwsSig); if (!isValidJws(jwsSig)) return null; var header = headerFromJWS(jwsSig); if (!header) return null; var payload = payloadFromJWS(jwsSig); if (header.typ === 'JWT' || opts.json) payload = JSON.parse(payload, opts.encoding); return { header: header, payload: payload, signature: signatureFromJWS(jwsSig) }; } function VerifyStream(opts) { opts = opts || {}; var secretOrKey = opts.secret||opts.publicKey||opts.key; var secretStream = new DataStream(secretOrKey); this.readable = true; this.algorithm = opts.algorithm; this.encoding = opts.encoding; this.secret = this.publicKey = this.key = secretStream; this.signature = new DataStream(opts.signature); this.secret.once('close', function () { if (!this.signature.writable && this.readable) this.verify(); }.bind(this)); this.signature.once('close', function () { if (!this.secret.writable && this.readable) this.verify(); }.bind(this)); } util.inherits(VerifyStream, Stream); VerifyStream.prototype.verify = function verify() { try { var valid = jwsVerify(this.signature.buffer, this.algorithm, this.key.buffer); var obj = jwsDecode(this.signature.buffer, this.encoding); this.emit('done', valid, obj); this.emit('data', valid); this.emit('end'); this.readable = false; 
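// The flat entry points on the public jws module (id 33324 above) wrap the
// jwsSign/jwsVerify/jwsDecode helpers defined in these stream modules; a
// minimal HMAC sketch (the secret value is a placeholder):
//
//   const sig = jws.sign({ header: { alg: 'HS256' }, payload: { uid: 42 }, secret: 'shhh' });
//   jws.verify(sig, 'HS256', 'shhh');  // => true
//   jws.decode(sig);                   // => { header, payload, signature }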
return valid; } catch (e) { this.readable = false; this.emit('error', e); this.emit('close'); } }; VerifyStream.decode = jwsDecode; VerifyStream.isValid = isValidJws; VerifyStream.verify = jwsVerify; module.exports = VerifyStream; /***/ }), /***/ 55641: /***/ ((module) => { "use strict"; /** @type {import('./abs')} */ module.exports = Math.abs; /***/ }), /***/ 96171: /***/ ((module) => { "use strict"; /** @type {import('./floor')} */ module.exports = Math.floor; /***/ }), /***/ 77044: /***/ ((module) => { "use strict"; /** @type {import('./isNaN')} */ module.exports = Number.isNaN || function isNaN(a) { return a !== a; }; /***/ }), /***/ 57147: /***/ ((module) => { "use strict"; /** @type {import('./max')} */ module.exports = Math.max; /***/ }), /***/ 41017: /***/ ((module) => { "use strict"; /** @type {import('./min')} */ module.exports = Math.min; /***/ }), /***/ 56947: /***/ ((module) => { "use strict"; /** @type {import('./pow')} */ module.exports = Math.pow; /***/ }), /***/ 42621: /***/ ((module) => { "use strict"; /** @type {import('./round')} */ module.exports = Math.round; /***/ }), /***/ 30156: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var $isNaN = __nccwpck_require__(77044); /** @type {import('./sign')} */ module.exports = function sign(number) { if ($isNaN(number) || number === 0) { return number; } return number < 0 ? -1 : +1; }; /***/ }), /***/ 99829: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /*! * mime-db * Copyright(c) 2014 Jonathan Ong * Copyright(c) 2015-2022 Douglas Christopher Wilson * MIT Licensed */ /** * Module exports. */ module.exports = __nccwpck_require__(81813) /***/ }), /***/ 14096: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; /*! * mime-types * Copyright(c) 2014 Jonathan Ong * Copyright(c) 2015 Douglas Christopher Wilson * MIT Licensed */ /** * Module dependencies. * @private */ var db = __nccwpck_require__(99829) var extname = (__nccwpck_require__(16928).extname) /** * Module variables. * @private */ var EXTRACT_TYPE_REGEXP = /^\s*([^;\s]*)(?:;|\s|$)/ var TEXT_TYPE_REGEXP = /^text\//i /** * Module exports. * @public */ exports.charset = charset exports.charsets = { lookup: charset } exports.contentType = contentType exports.extension = extension exports.extensions = Object.create(null) exports.lookup = lookup exports.types = Object.create(null) // Populate the extensions/types maps populateMaps(exports.extensions, exports.types) /** * Get the default charset for a MIME type. * * @param {string} type * @return {boolean|string} */ function charset (type) { if (!type || typeof type !== 'string') { return false } // TODO: use media-typer var match = EXTRACT_TYPE_REGEXP.exec(type) var mime = match && db[match[1].toLowerCase()] if (mime && mime.charset) { return mime.charset } // default text/* to utf-8 if (match && TEXT_TYPE_REGEXP.test(match[1])) { return 'UTF-8' } return false } /** * Create a full Content-Type header given a MIME type or extension. * * @param {string} str * @return {boolean|string} */ function contentType (str) { // TODO: should this even be in this module? if (!str || typeof str !== 'string') { return false } var mime = str.indexOf('/') === -1 ? exports.lookup(str) : str if (!mime) { return false } // TODO: use content-type or other module if (mime.indexOf('charset') === -1) { var charset = exports.charset(mime) if (charset) mime += '; charset=' + charset.toLowerCase() } return mime } /** * Get the default extension for a MIME type. 
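 *
 * A short illustrative sketch (return values assume the stock mime-db data
 * bundled above as modules 99829/81813):
 *
 *   exports.extension('application/json')  // => 'json'
 *   exports.lookup('file.json')            // => 'application/json'
 *   exports.contentType('text/html')       // => 'text/html; charset=utf-8'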
* * @param {string} type * @return {boolean|string} */ function extension (type) { if (!type || typeof type !== 'string') { return false } // TODO: use media-typer var match = EXTRACT_TYPE_REGEXP.exec(type) // get extensions var exts = match && exports.extensions[match[1].toLowerCase()] if (!exts || !exts.length) { return false } return exts[0] } /** * Lookup the MIME type for a file path/extension. * * @param {string} path * @return {boolean|string} */ function lookup (path) { if (!path || typeof path !== 'string') { return false } // get the extension ("ext" or ".ext" or full path) var extension = extname('x.' + path) .toLowerCase() .substr(1) if (!extension) { return false } return exports.types[extension] || false } /** * Populate the extensions and types maps. * @private */ function populateMaps (extensions, types) { // source preference (least -> most) var preference = ['nginx', 'apache', undefined, 'iana'] Object.keys(db).forEach(function forEachMimeType (type) { var mime = db[type] var exts = mime.extensions if (!exts || !exts.length) { return } // mime -> extensions extensions[type] = exts // extension -> mime for (var i = 0; i < exts.length; i++) { var extension = exts[i] if (types[extension]) { var from = preference.indexOf(db[types[extension]].source) var to = preference.indexOf(mime.source) if (types[extension] !== 'application/octet-stream' && (from > to || (from === to && types[extension].substr(0, 12) === 'application/'))) { // skip the remapping continue } } // set the extension -> mime types[extension] = type } }) } /***/ }), /***/ 14402: /***/ ((module) => { "use strict"; /** * @param typeMap [Object] Map of MIME type -> Array[extensions] * @param ... */ function Mime() { this._types = Object.create(null); this._extensions = Object.create(null); for (let i = 0; i < arguments.length; i++) { this.define(arguments[i]); } this.define = this.define.bind(this); this.getType = this.getType.bind(this); this.getExtension = this.getExtension.bind(this); } /** * Define mimetype -> extension mappings. Each key is a mime-type that maps * to an array of extensions associated with the type. The first extension is * used as the default extension for the type. * * e.g. mime.define({'audio/ogg', ['oga', 'ogg', 'spx']}); * * If a type declares an extension that has already been defined, an error will * be thrown. To suppress this error and force the extension to be associated * with the new type, pass `force`=true. Alternatively, you may prefix the * extension with "*" to map the type to extension, without mapping the * extension to the type. * * e.g. mime.define({'audio/wav', ['wav']}, {'audio/x-wav', ['*wav']}); * * * @param map (Object) type definitions * @param force (Boolean) if true, force overriding of existing definitions */ Mime.prototype.define = function(typeMap, force) { for (let type in typeMap) { let extensions = typeMap[type].map(function(t) { return t.toLowerCase(); }); type = type.toLowerCase(); for (let i = 0; i < extensions.length; i++) { const ext = extensions[i]; // '*' prefix = not the preferred type for this extension. So fixup the // extension, and skip it. if (ext[0] === '*') { continue; } if (!force && (ext in this._types)) { throw new Error( 'Attempt to change mapping for "' + ext + '" extension from "' + this._types[ext] + '" to "' + type + '". Pass `force=true` to allow this, otherwise remove "' + ext + '" from the list of extensions for "' + type + '".' 
); } this._types[ext] = type; } // Use first extension as default if (force || !this._extensions[type]) { const ext = extensions[0]; this._extensions[type] = (ext[0] !== '*') ? ext : ext.substr(1); } } }; /** * Lookup a mime type based on extension */ Mime.prototype.getType = function(path) { path = String(path); let last = path.replace(/^.*[/\\]/, '').toLowerCase(); let ext = last.replace(/^.*\./, '').toLowerCase(); let hasPath = last.length < path.length; let hasDot = ext.length < last.length - 1; return (hasDot || !hasPath) && this._types[ext] || null; }; /** * Return file extension associated with a mime type */ Mime.prototype.getExtension = function(type) { type = /^\s*([^;\s]*)/.test(type) && RegExp.$1; return type && this._extensions[type.toLowerCase()] || null; }; module.exports = Mime; /***/ }), /***/ 94900: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; let Mime = __nccwpck_require__(14402); module.exports = new Mime(__nccwpck_require__(83725), __nccwpck_require__(68548)); /***/ }), /***/ 68548: /***/ ((module) => { module.exports = {"application/prs.cww":["cww"],"application/vnd.1000minds.decision-model+xml":["1km"],"application/vnd.3gpp.pic-bw-large":["plb"],"application/vnd.3gpp.pic-bw-small":["psb"],"application/vnd.3gpp.pic-bw-var":["pvb"],"application/vnd.3gpp2.tcap":["tcap"],"application/vnd.3m.post-it-notes":["pwn"],"application/vnd.accpac.simply.aso":["aso"],"application/vnd.accpac.simply.imp":["imp"],"application/vnd.acucobol":["acu"],"application/vnd.acucorp":["atc","acutc"],"application/vnd.adobe.air-application-installer-package+zip":["air"],"application/vnd.adobe.formscentral.fcdt":["fcdt"],"application/vnd.adobe.fxp":["fxp","fxpl"],"application/vnd.adobe.xdp+xml":["xdp"],"application/vnd.adobe.xfdf":["xfdf"],"application/vnd.ahead.space":["ahead"],"application/vnd.airzip.filesecure.azf":["azf"],"application/vnd.airzip.filesecure.azs":["azs"],"application/vnd.amazon.ebook":["azw"],"application/vnd.americandynamics.acc":["acc"],"application/vnd.amiga.ami":["ami"],"application/vnd.android.package-archive":["apk"],"application/vnd.anser-web-certificate-issue-initiation":["cii"],"application/vnd.anser-web-funds-transfer-initiation":["fti"],"application/vnd.antix.game-component":["atx"],"application/vnd.apple.installer+xml":["mpkg"],"application/vnd.apple.keynote":["key"],"application/vnd.apple.mpegurl":["m3u8"],"application/vnd.apple.numbers":["numbers"],"application/vnd.apple.pages":["pages"],"application/vnd.apple.pkpass":["pkpass"],"application/vnd.aristanetworks.swi":["swi"],"application/vnd.astraea-software.iota":["iota"],"application/vnd.audiograph":["aep"],"application/vnd.balsamiq.bmml+xml":["bmml"],"application/vnd.blueice.multipass":["mpm"],"application/vnd.bmi":["bmi"],"application/vnd.businessobjects":["rep"],"application/vnd.chemdraw+xml":["cdxml"],"application/vnd.chipnuts.karaoke-mmd":["mmd"],"application/vnd.cinderella":["cdy"],"application/vnd.citationstyles.style+xml":["csl"],"application/vnd.claymore":["cla"],"application/vnd.cloanto.rp9":["rp9"],"application/vnd.clonk.c4group":["c4g","c4d","c4f","c4p","c4u"],"application/vnd.cluetrust.cartomobile-config":["c11amc"],"application/vnd.cluetrust.cartomobile-config-pkg":["c11amz"],"application/vnd.commonspace":["csp"],"application/vnd.contact.cmsg":["cdbcmsg"],"application/vnd.cosmocaller":["cmc"],"application/vnd.crick.clicker":["clkx"],"application/vnd.crick.clicker.keyboard":["clkk"],"application/vnd.crick.clicker.palette":["clkp"],"application/vnd.crick.clicker.templa
te":["clkt"],"application/vnd.crick.clicker.wordbank":["clkw"],"application/vnd.criticaltools.wbs+xml":["wbs"],"application/vnd.ctc-posml":["pml"],"application/vnd.cups-ppd":["ppd"],"application/vnd.curl.car":["car"],"application/vnd.curl.pcurl":["pcurl"],"application/vnd.dart":["dart"],"application/vnd.data-vision.rdz":["rdz"],"application/vnd.dbf":["dbf"],"application/vnd.dece.data":["uvf","uvvf","uvd","uvvd"],"application/vnd.dece.ttml+xml":["uvt","uvvt"],"application/vnd.dece.unspecified":["uvx","uvvx"],"application/vnd.dece.zip":["uvz","uvvz"],"application/vnd.denovo.fcselayout-link":["fe_launch"],"application/vnd.dna":["dna"],"application/vnd.dolby.mlp":["mlp"],"application/vnd.dpgraph":["dpg"],"application/vnd.dreamfactory":["dfac"],"application/vnd.ds-keypoint":["kpxx"],"application/vnd.dvb.ait":["ait"],"application/vnd.dvb.service":["svc"],"application/vnd.dynageo":["geo"],"application/vnd.ecowin.chart":["mag"],"application/vnd.enliven":["nml"],"application/vnd.epson.esf":["esf"],"application/vnd.epson.msf":["msf"],"application/vnd.epson.quickanime":["qam"],"application/vnd.epson.salt":["slt"],"application/vnd.epson.ssf":["ssf"],"application/vnd.eszigno3+xml":["es3","et3"],"application/vnd.ezpix-album":["ez2"],"application/vnd.ezpix-package":["ez3"],"application/vnd.fdf":["fdf"],"application/vnd.fdsn.mseed":["mseed"],"application/vnd.fdsn.seed":["seed","dataless"],"application/vnd.flographit":["gph"],"application/vnd.fluxtime.clip":["ftc"],"application/vnd.framemaker":["fm","frame","maker","book"],"application/vnd.frogans.fnc":["fnc"],"application/vnd.frogans.ltf":["ltf"],"application/vnd.fsc.weblaunch":["fsc"],"application/vnd.fujitsu.oasys":["oas"],"application/vnd.fujitsu.oasys2":["oa2"],"application/vnd.fujitsu.oasys3":["oa3"],"application/vnd.fujitsu.oasysgp":["fg5"],"application/vnd.fujitsu.oasysprs":["bh2"],"application/vnd.fujixerox.ddd":["ddd"],"application/vnd.fujixerox.docuworks":["xdw"],"application/vnd.fujixerox.docuworks.binder":["xbd"],"application/vnd.fuzzysheet":["fzs"],"application/vnd.genomatix.tuxedo":["txd"],"application/vnd.geogebra.file":["ggb"],"application/vnd.geogebra.tool":["ggt"],"application/vnd.geometry-explorer":["gex","gre"],"application/vnd.geonext":["gxt"],"application/vnd.geoplan":["g2w"],"application/vnd.geospace":["g3w"],"application/vnd.gmx":["gmx"],"application/vnd.google-apps.document":["gdoc"],"application/vnd.google-apps.presentation":["gslides"],"application/vnd.google-apps.spreadsheet":["gsheet"],"application/vnd.google-earth.kml+xml":["kml"],"application/vnd.google-earth.kmz":["kmz"],"application/vnd.grafeq":["gqf","gqs"],"application/vnd.groove-account":["gac"],"application/vnd.groove-help":["ghf"],"application/vnd.groove-identity-message":["gim"],"application/vnd.groove-injector":["grv"],"application/vnd.groove-tool-message":["gtm"],"application/vnd.groove-tool-template":["tpl"],"application/vnd.groove-vcard":["vcg"],"application/vnd.hal+xml":["hal"],"application/vnd.handheld-entertainment+xml":["zmm"],"application/vnd.hbci":["hbci"],"application/vnd.hhe.lesson-player":["les"],"application/vnd.hp-hpgl":["hpgl"],"application/vnd.hp-hpid":["hpid"],"application/vnd.hp-hps":["hps"],"application/vnd.hp-jlyt":["jlt"],"application/vnd.hp-pcl":["pcl"],"application/vnd.hp-pclxl":["pclxl"],"application/vnd.hydrostatix.sof-data":["sfd-hdstx"],"application/vnd.ibm.minipay":["mpy"],"application/vnd.ibm.modcap":["afp","listafp","list3820"],"application/vnd.ibm.rights-management":["irm"],"application/vnd.ibm.secure-container":["sc"],"application/vnd.
iccprofile":["icc","icm"],"application/vnd.igloader":["igl"],"application/vnd.immervision-ivp":["ivp"],"application/vnd.immervision-ivu":["ivu"],"application/vnd.insors.igm":["igm"],"application/vnd.intercon.formnet":["xpw","xpx"],"application/vnd.intergeo":["i2g"],"application/vnd.intu.qbo":["qbo"],"application/vnd.intu.qfx":["qfx"],"application/vnd.ipunplugged.rcprofile":["rcprofile"],"application/vnd.irepository.package+xml":["irp"],"application/vnd.is-xpr":["xpr"],"application/vnd.isac.fcs":["fcs"],"application/vnd.jam":["jam"],"application/vnd.jcp.javame.midlet-rms":["rms"],"application/vnd.jisp":["jisp"],"application/vnd.joost.joda-archive":["joda"],"application/vnd.kahootz":["ktz","ktr"],"application/vnd.kde.karbon":["karbon"],"application/vnd.kde.kchart":["chrt"],"application/vnd.kde.kformula":["kfo"],"application/vnd.kde.kivio":["flw"],"application/vnd.kde.kontour":["kon"],"application/vnd.kde.kpresenter":["kpr","kpt"],"application/vnd.kde.kspread":["ksp"],"application/vnd.kde.kword":["kwd","kwt"],"application/vnd.kenameaapp":["htke"],"application/vnd.kidspiration":["kia"],"application/vnd.kinar":["kne","knp"],"application/vnd.koan":["skp","skd","skt","skm"],"application/vnd.kodak-descriptor":["sse"],"application/vnd.las.las+xml":["lasxml"],"application/vnd.llamagraphics.life-balance.desktop":["lbd"],"application/vnd.llamagraphics.life-balance.exchange+xml":["lbe"],"application/vnd.lotus-1-2-3":["123"],"application/vnd.lotus-approach":["apr"],"application/vnd.lotus-freelance":["pre"],"application/vnd.lotus-notes":["nsf"],"application/vnd.lotus-organizer":["org"],"application/vnd.lotus-screencam":["scm"],"application/vnd.lotus-wordpro":["lwp"],"application/vnd.macports.portpkg":["portpkg"],"application/vnd.mapbox-vector-tile":["mvt"],"application/vnd.mcd":["mcd"],"application/vnd.medcalcdata":["mc1"],"application/vnd.mediastation.cdkey":["cdkey"],"application/vnd.mfer":["mwf"],"application/vnd.mfmp":["mfm"],"application/vnd.micrografx.flo":["flo"],"application/vnd.micrografx.igx":["igx"],"application/vnd.mif":["mif"],"application/vnd.mobius.daf":["daf"],"application/vnd.mobius.dis":["dis"],"application/vnd.mobius.mbk":["mbk"],"application/vnd.mobius.mqy":["mqy"],"application/vnd.mobius.msl":["msl"],"application/vnd.mobius.plc":["plc"],"application/vnd.mobius.txf":["txf"],"application/vnd.mophun.application":["mpn"],"application/vnd.mophun.certificate":["mpc"],"application/vnd.mozilla.xul+xml":["xul"],"application/vnd.ms-artgalry":["cil"],"application/vnd.ms-cab-compressed":["cab"],"application/vnd.ms-excel":["xls","xlm","xla","xlc","xlt","xlw"],"application/vnd.ms-excel.addin.macroenabled.12":["xlam"],"application/vnd.ms-excel.sheet.binary.macroenabled.12":["xlsb"],"application/vnd.ms-excel.sheet.macroenabled.12":["xlsm"],"application/vnd.ms-excel.template.macroenabled.12":["xltm"],"application/vnd.ms-fontobject":["eot"],"application/vnd.ms-htmlhelp":["chm"],"application/vnd.ms-ims":["ims"],"application/vnd.ms-lrm":["lrm"],"application/vnd.ms-officetheme":["thmx"],"application/vnd.ms-outlook":["msg"],"application/vnd.ms-pki.seccat":["cat"],"application/vnd.ms-pki.stl":["*stl"],"application/vnd.ms-powerpoint":["ppt","pps","pot"],"application/vnd.ms-powerpoint.addin.macroenabled.12":["ppam"],"application/vnd.ms-powerpoint.presentation.macroenabled.12":["pptm"],"application/vnd.ms-powerpoint.slide.macroenabled.12":["sldm"],"application/vnd.ms-powerpoint.slideshow.macroenabled.12":["ppsm"],"application/vnd.ms-powerpoint.template.macroenabled.12":["potm"],"application/vnd.ms-project":["m
pp","mpt"],"application/vnd.ms-word.document.macroenabled.12":["docm"],"application/vnd.ms-word.template.macroenabled.12":["dotm"],"application/vnd.ms-works":["wps","wks","wcm","wdb"],"application/vnd.ms-wpl":["wpl"],"application/vnd.ms-xpsdocument":["xps"],"application/vnd.mseq":["mseq"],"application/vnd.musician":["mus"],"application/vnd.muvee.style":["msty"],"application/vnd.mynfc":["taglet"],"application/vnd.neurolanguage.nlu":["nlu"],"application/vnd.nitf":["ntf","nitf"],"application/vnd.noblenet-directory":["nnd"],"application/vnd.noblenet-sealer":["nns"],"application/vnd.noblenet-web":["nnw"],"application/vnd.nokia.n-gage.ac+xml":["*ac"],"application/vnd.nokia.n-gage.data":["ngdat"],"application/vnd.nokia.n-gage.symbian.install":["n-gage"],"application/vnd.nokia.radio-preset":["rpst"],"application/vnd.nokia.radio-presets":["rpss"],"application/vnd.novadigm.edm":["edm"],"application/vnd.novadigm.edx":["edx"],"application/vnd.novadigm.ext":["ext"],"application/vnd.oasis.opendocument.chart":["odc"],"application/vnd.oasis.opendocument.chart-template":["otc"],"application/vnd.oasis.opendocument.database":["odb"],"application/vnd.oasis.opendocument.formula":["odf"],"application/vnd.oasis.opendocument.formula-template":["odft"],"application/vnd.oasis.opendocument.graphics":["odg"],"application/vnd.oasis.opendocument.graphics-template":["otg"],"application/vnd.oasis.opendocument.image":["odi"],"application/vnd.oasis.opendocument.image-template":["oti"],"application/vnd.oasis.opendocument.presentation":["odp"],"application/vnd.oasis.opendocument.presentation-template":["otp"],"application/vnd.oasis.opendocument.spreadsheet":["ods"],"application/vnd.oasis.opendocument.spreadsheet-template":["ots"],"application/vnd.oasis.opendocument.text":["odt"],"application/vnd.oasis.opendocument.text-master":["odm"],"application/vnd.oasis.opendocument.text-template":["ott"],"application/vnd.oasis.opendocument.text-web":["oth"],"application/vnd.olpc-sugar":["xo"],"application/vnd.oma.dd2+xml":["dd2"],"application/vnd.openblox.game+xml":["obgx"],"application/vnd.openofficeorg.extension":["oxt"],"application/vnd.openstreetmap.data+xml":["osm"],"application/vnd.openxmlformats-officedocument.presentationml.presentation":["pptx"],"application/vnd.openxmlformats-officedocument.presentationml.slide":["sldx"],"application/vnd.openxmlformats-officedocument.presentationml.slideshow":["ppsx"],"application/vnd.openxmlformats-officedocument.presentationml.template":["potx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":["xlsx"],"application/vnd.openxmlformats-officedocument.spreadsheetml.template":["xltx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.document":["docx"],"application/vnd.openxmlformats-officedocument.wordprocessingml.template":["dotx"],"application/vnd.osgeo.mapguide.package":["mgp"],"application/vnd.osgi.dp":["dp"],"application/vnd.osgi.subsystem":["esa"],"application/vnd.palm":["pdb","pqa","oprc"],"application/vnd.pawaafile":["paw"],"application/vnd.pg.format":["str"],"application/vnd.pg.osasli":["ei6"],"application/vnd.picsel":["efif"],"application/vnd.pmi.widget":["wg"],"application/vnd.pocketlearn":["plf"],"application/vnd.powerbuilder6":["pbd"],"application/vnd.previewsystems.box":["box"],"application/vnd.proteus.magazine":["mgz"],"application/vnd.publishare-delta-tree":["qps"],"application/vnd.pvi.ptid1":["ptid"],"application/vnd.quark.quarkxpress":["qxd","qxt","qwd","qwt","qxl","qxb"],"application/vnd.rar":["rar"],"application/vnd.realvnc.bed":["bed"],"appli
cation/vnd.recordare.musicxml":["mxl"],"application/vnd.recordare.musicxml+xml":["musicxml"],"application/vnd.rig.cryptonote":["cryptonote"],"application/vnd.rim.cod":["cod"],"application/vnd.rn-realmedia":["rm"],"application/vnd.rn-realmedia-vbr":["rmvb"],"application/vnd.route66.link66+xml":["link66"],"application/vnd.sailingtracker.track":["st"],"application/vnd.seemail":["see"],"application/vnd.sema":["sema"],"application/vnd.semd":["semd"],"application/vnd.semf":["semf"],"application/vnd.shana.informed.formdata":["ifm"],"application/vnd.shana.informed.formtemplate":["itp"],"application/vnd.shana.informed.interchange":["iif"],"application/vnd.shana.informed.package":["ipk"],"application/vnd.simtech-mindmapper":["twd","twds"],"application/vnd.smaf":["mmf"],"application/vnd.smart.teacher":["teacher"],"application/vnd.software602.filler.form+xml":["fo"],"application/vnd.solent.sdkm+xml":["sdkm","sdkd"],"application/vnd.spotfire.dxp":["dxp"],"application/vnd.spotfire.sfs":["sfs"],"application/vnd.stardivision.calc":["sdc"],"application/vnd.stardivision.draw":["sda"],"application/vnd.stardivision.impress":["sdd"],"application/vnd.stardivision.math":["smf"],"application/vnd.stardivision.writer":["sdw","vor"],"application/vnd.stardivision.writer-global":["sgl"],"application/vnd.stepmania.package":["smzip"],"application/vnd.stepmania.stepchart":["sm"],"application/vnd.sun.wadl+xml":["wadl"],"application/vnd.sun.xml.calc":["sxc"],"application/vnd.sun.xml.calc.template":["stc"],"application/vnd.sun.xml.draw":["sxd"],"application/vnd.sun.xml.draw.template":["std"],"application/vnd.sun.xml.impress":["sxi"],"application/vnd.sun.xml.impress.template":["sti"],"application/vnd.sun.xml.math":["sxm"],"application/vnd.sun.xml.writer":["sxw"],"application/vnd.sun.xml.writer.global":["sxg"],"application/vnd.sun.xml.writer.template":["stw"],"application/vnd.sus-calendar":["sus","susp"],"application/vnd.svd":["svd"],"application/vnd.symbian.install":["sis","sisx"],"application/vnd.syncml+xml":["xsm"],"application/vnd.syncml.dm+wbxml":["bdm"],"application/vnd.syncml.dm+xml":["xdm"],"application/vnd.syncml.dmddf+xml":["ddf"],"application/vnd.tao.intent-module-archive":["tao"],"application/vnd.tcpdump.pcap":["pcap","cap","dmp"],"application/vnd.tmobile-livetv":["tmo"],"application/vnd.trid.tpt":["tpt"],"application/vnd.triscape.mxs":["mxs"],"application/vnd.trueapp":["tra"],"application/vnd.ufdl":["ufd","ufdl"],"application/vnd.uiq.theme":["utz"],"application/vnd.umajin":["umj"],"application/vnd.unity":["unityweb"],"application/vnd.uoml+xml":["uoml"],"application/vnd.vcx":["vcx"],"application/vnd.visio":["vsd","vst","vss","vsw"],"application/vnd.visionary":["vis"],"application/vnd.vsf":["vsf"],"application/vnd.wap.wbxml":["wbxml"],"application/vnd.wap.wmlc":["wmlc"],"application/vnd.wap.wmlscriptc":["wmlsc"],"application/vnd.webturbo":["wtb"],"application/vnd.wolfram.player":["nbp"],"application/vnd.wordperfect":["wpd"],"application/vnd.wqd":["wqd"],"application/vnd.wt.stf":["stf"],"application/vnd.xara":["xar"],"application/vnd.xfdl":["xfdl"],"application/vnd.yamaha.hv-dic":["hvd"],"application/vnd.yamaha.hv-script":["hvs"],"application/vnd.yamaha.hv-voice":["hvp"],"application/vnd.yamaha.openscoreformat":["osf"],"application/vnd.yamaha.openscoreformat.osfpvg+xml":["osfpvg"],"application/vnd.yamaha.smaf-audio":["saf"],"application/vnd.yamaha.smaf-phrase":["spf"],"application/vnd.yellowriver-custom-menu":["cmp"],"application/vnd.zul":["zir","zirz"],"application/vnd.zzazz.deck+xml":["zaz"],"application/x-7z-compr
essed":["7z"],"application/x-abiword":["abw"],"application/x-ace-compressed":["ace"],"application/x-apple-diskimage":["*dmg"],"application/x-arj":["arj"],"application/x-authorware-bin":["aab","x32","u32","vox"],"application/x-authorware-map":["aam"],"application/x-authorware-seg":["aas"],"application/x-bcpio":["bcpio"],"application/x-bdoc":["*bdoc"],"application/x-bittorrent":["torrent"],"application/x-blorb":["blb","blorb"],"application/x-bzip":["bz"],"application/x-bzip2":["bz2","boz"],"application/x-cbr":["cbr","cba","cbt","cbz","cb7"],"application/x-cdlink":["vcd"],"application/x-cfs-compressed":["cfs"],"application/x-chat":["chat"],"application/x-chess-pgn":["pgn"],"application/x-chrome-extension":["crx"],"application/x-cocoa":["cco"],"application/x-conference":["nsc"],"application/x-cpio":["cpio"],"application/x-csh":["csh"],"application/x-debian-package":["*deb","udeb"],"application/x-dgc-compressed":["dgc"],"application/x-director":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"],"application/x-doom":["wad"],"application/x-dtbncx+xml":["ncx"],"application/x-dtbook+xml":["dtb"],"application/x-dtbresource+xml":["res"],"application/x-dvi":["dvi"],"application/x-envoy":["evy"],"application/x-eva":["eva"],"application/x-font-bdf":["bdf"],"application/x-font-ghostscript":["gsf"],"application/x-font-linux-psf":["psf"],"application/x-font-pcf":["pcf"],"application/x-font-snf":["snf"],"application/x-font-type1":["pfa","pfb","pfm","afm"],"application/x-freearc":["arc"],"application/x-futuresplash":["spl"],"application/x-gca-compressed":["gca"],"application/x-glulx":["ulx"],"application/x-gnumeric":["gnumeric"],"application/x-gramps-xml":["gramps"],"application/x-gtar":["gtar"],"application/x-hdf":["hdf"],"application/x-httpd-php":["php"],"application/x-install-instructions":["install"],"application/x-iso9660-image":["*iso"],"application/x-iwork-keynote-sffkey":["*key"],"application/x-iwork-numbers-sffnumbers":["*numbers"],"application/x-iwork-pages-sffpages":["*pages"],"application/x-java-archive-diff":["jardiff"],"application/x-java-jnlp-file":["jnlp"],"application/x-keepass2":["kdbx"],"application/x-latex":["latex"],"application/x-lua-bytecode":["luac"],"application/x-lzh-compressed":["lzh","lha"],"application/x-makeself":["run"],"application/x-mie":["mie"],"application/x-mobipocket-ebook":["prc","mobi"],"application/x-ms-application":["application"],"application/x-ms-shortcut":["lnk"],"application/x-ms-wmd":["wmd"],"application/x-ms-wmz":["wmz"],"application/x-ms-xbap":["xbap"],"application/x-msaccess":["mdb"],"application/x-msbinder":["obd"],"application/x-mscardfile":["crd"],"application/x-msclip":["clp"],"application/x-msdos-program":["*exe"],"application/x-msdownload":["*exe","*dll","com","bat","*msi"],"application/x-msmediaview":["mvb","m13","m14"],"application/x-msmetafile":["*wmf","*wmz","*emf","emz"],"application/x-msmoney":["mny"],"application/x-mspublisher":["pub"],"application/x-msschedule":["scd"],"application/x-msterminal":["trm"],"application/x-mswrite":["wri"],"application/x-netcdf":["nc","cdf"],"application/x-ns-proxy-autoconfig":["pac"],"application/x-nzb":["nzb"],"application/x-perl":["pl","pm"],"application/x-pilot":["*prc","*pdb"],"application/x-pkcs12":["p12","pfx"],"application/x-pkcs7-certificates":["p7b","spc"],"application/x-pkcs7-certreqresp":["p7r"],"application/x-rar-compressed":["*rar"],"application/x-redhat-package-manager":["rpm"],"application/x-research-info-systems":["ris"],"application/x-sea":["sea"],"application/x-sh":["sh"],"application/x-shar":["
shar"],"application/x-shockwave-flash":["swf"],"application/x-silverlight-app":["xap"],"application/x-sql":["sql"],"application/x-stuffit":["sit"],"application/x-stuffitx":["sitx"],"application/x-subrip":["srt"],"application/x-sv4cpio":["sv4cpio"],"application/x-sv4crc":["sv4crc"],"application/x-t3vm-image":["t3"],"application/x-tads":["gam"],"application/x-tar":["tar"],"application/x-tcl":["tcl","tk"],"application/x-tex":["tex"],"application/x-tex-tfm":["tfm"],"application/x-texinfo":["texinfo","texi"],"application/x-tgif":["*obj"],"application/x-ustar":["ustar"],"application/x-virtualbox-hdd":["hdd"],"application/x-virtualbox-ova":["ova"],"application/x-virtualbox-ovf":["ovf"],"application/x-virtualbox-vbox":["vbox"],"application/x-virtualbox-vbox-extpack":["vbox-extpack"],"application/x-virtualbox-vdi":["vdi"],"application/x-virtualbox-vhd":["vhd"],"application/x-virtualbox-vmdk":["vmdk"],"application/x-wais-source":["src"],"application/x-web-app-manifest+json":["webapp"],"application/x-x509-ca-cert":["der","crt","pem"],"application/x-xfig":["fig"],"application/x-xliff+xml":["*xlf"],"application/x-xpinstall":["xpi"],"application/x-xz":["xz"],"application/x-zmachine":["z1","z2","z3","z4","z5","z6","z7","z8"],"audio/vnd.dece.audio":["uva","uvva"],"audio/vnd.digital-winds":["eol"],"audio/vnd.dra":["dra"],"audio/vnd.dts":["dts"],"audio/vnd.dts.hd":["dtshd"],"audio/vnd.lucent.voice":["lvp"],"audio/vnd.ms-playready.media.pya":["pya"],"audio/vnd.nuera.ecelp4800":["ecelp4800"],"audio/vnd.nuera.ecelp7470":["ecelp7470"],"audio/vnd.nuera.ecelp9600":["ecelp9600"],"audio/vnd.rip":["rip"],"audio/x-aac":["aac"],"audio/x-aiff":["aif","aiff","aifc"],"audio/x-caf":["caf"],"audio/x-flac":["flac"],"audio/x-m4a":["*m4a"],"audio/x-matroska":["mka"],"audio/x-mpegurl":["m3u"],"audio/x-ms-wax":["wax"],"audio/x-ms-wma":["wma"],"audio/x-pn-realaudio":["ram","ra"],"audio/x-pn-realaudio-plugin":["rmp"],"audio/x-realaudio":["*ra"],"audio/x-wav":["*wav"],"chemical/x-cdx":["cdx"],"chemical/x-cif":["cif"],"chemical/x-cmdf":["cmdf"],"chemical/x-cml":["cml"],"chemical/x-csml":["csml"],"chemical/x-xyz":["xyz"],"image/prs.btif":["btif"],"image/prs.pti":["pti"],"image/vnd.adobe.photoshop":["psd"],"image/vnd.airzip.accelerator.azv":["azv"],"image/vnd.dece.graphic":["uvi","uvvi","uvg","uvvg"],"image/vnd.djvu":["djvu","djv"],"image/vnd.dvb.subtitle":["*sub"],"image/vnd.dwg":["dwg"],"image/vnd.dxf":["dxf"],"image/vnd.fastbidsheet":["fbs"],"image/vnd.fpx":["fpx"],"image/vnd.fst":["fst"],"image/vnd.fujixerox.edmics-mmr":["mmr"],"image/vnd.fujixerox.edmics-rlc":["rlc"],"image/vnd.microsoft.icon":["ico"],"image/vnd.ms-dds":["dds"],"image/vnd.ms-modi":["mdi"],"image/vnd.ms-photo":["wdp"],"image/vnd.net-fpx":["npx"],"image/vnd.pco.b16":["b16"],"image/vnd.tencent.tap":["tap"],"image/vnd.valve.source.texture":["vtf"],"image/vnd.wap.wbmp":["wbmp"],"image/vnd.xiff":["xif"],"image/vnd.zbrush.pcx":["pcx"],"image/x-3ds":["3ds"],"image/x-cmu-raster":["ras"],"image/x-cmx":["cmx"],"image/x-freehand":["fh","fhc","fh4","fh5","fh7"],"image/x-icon":["*ico"],"image/x-jng":["jng"],"image/x-mrsid-image":["sid"],"image/x-ms-bmp":["*bmp"],"image/x-pcx":["*pcx"],"image/x-pict":["pic","pct"],"image/x-portable-anymap":["pnm"],"image/x-portable-bitmap":["pbm"],"image/x-portable-graymap":["pgm"],"image/x-portable-pixmap":["ppm"],"image/x-rgb":["rgb"],"image/x-tga":["tga"],"image/x-xbitmap":["xbm"],"image/x-xpixmap":["xpm"],"image/x-xwindowdump":["xwd"],"message/vnd.wfa.wsc":["wsc"],"model/vnd.collada+xml":["dae"],"model/vnd.dwf":["dwf"],"model/vnd.gdl":["gdl
"],"model/vnd.gtw":["gtw"],"model/vnd.mts":["mts"],"model/vnd.opengex":["ogex"],"model/vnd.parasolid.transmit.binary":["x_b"],"model/vnd.parasolid.transmit.text":["x_t"],"model/vnd.sap.vds":["vds"],"model/vnd.usdz+zip":["usdz"],"model/vnd.valve.source.compiled-map":["bsp"],"model/vnd.vtu":["vtu"],"text/prs.lines.tag":["dsc"],"text/vnd.curl":["curl"],"text/vnd.curl.dcurl":["dcurl"],"text/vnd.curl.mcurl":["mcurl"],"text/vnd.curl.scurl":["scurl"],"text/vnd.dvb.subtitle":["sub"],"text/vnd.fly":["fly"],"text/vnd.fmi.flexstor":["flx"],"text/vnd.graphviz":["gv"],"text/vnd.in3d.3dml":["3dml"],"text/vnd.in3d.spot":["spot"],"text/vnd.sun.j2me.app-descriptor":["jad"],"text/vnd.wap.wml":["wml"],"text/vnd.wap.wmlscript":["wmls"],"text/x-asm":["s","asm"],"text/x-c":["c","cc","cxx","cpp","h","hh","dic"],"text/x-component":["htc"],"text/x-fortran":["f","for","f77","f90"],"text/x-handlebars-template":["hbs"],"text/x-java-source":["java"],"text/x-lua":["lua"],"text/x-markdown":["mkd"],"text/x-nfo":["nfo"],"text/x-opml":["opml"],"text/x-org":["*org"],"text/x-pascal":["p","pas"],"text/x-processing":["pde"],"text/x-sass":["sass"],"text/x-scss":["scss"],"text/x-setext":["etx"],"text/x-sfv":["sfv"],"text/x-suse-ymp":["ymp"],"text/x-uuencode":["uu"],"text/x-vcalendar":["vcs"],"text/x-vcard":["vcf"],"video/vnd.dece.hd":["uvh","uvvh"],"video/vnd.dece.mobile":["uvm","uvvm"],"video/vnd.dece.pd":["uvp","uvvp"],"video/vnd.dece.sd":["uvs","uvvs"],"video/vnd.dece.video":["uvv","uvvv"],"video/vnd.dvb.file":["dvb"],"video/vnd.fvt":["fvt"],"video/vnd.mpegurl":["mxu","m4u"],"video/vnd.ms-playready.media.pyv":["pyv"],"video/vnd.uvvu.mp4":["uvu","uvvu"],"video/vnd.vivo":["viv"],"video/x-f4v":["f4v"],"video/x-fli":["fli"],"video/x-flv":["flv"],"video/x-m4v":["m4v"],"video/x-matroska":["mkv","mk3d","mks"],"video/x-mng":["mng"],"video/x-ms-asf":["asf","asx"],"video/x-ms-vob":["vob"],"video/x-ms-wm":["wm"],"video/x-ms-wmv":["wmv"],"video/x-ms-wmx":["wmx"],"video/x-ms-wvx":["wvx"],"video/x-msvideo":["avi"],"video/x-sgi-movie":["movie"],"video/x-smv":["smv"],"x-conference/x-cooltalk":["ice"]}; /***/ }), /***/ 83725: /***/ ((module) => { module.exports = 
{"application/andrew-inset":["ez"],"application/applixware":["aw"],"application/atom+xml":["atom"],"application/atomcat+xml":["atomcat"],"application/atomdeleted+xml":["atomdeleted"],"application/atomsvc+xml":["atomsvc"],"application/atsc-dwd+xml":["dwd"],"application/atsc-held+xml":["held"],"application/atsc-rsat+xml":["rsat"],"application/bdoc":["bdoc"],"application/calendar+xml":["xcs"],"application/ccxml+xml":["ccxml"],"application/cdfx+xml":["cdfx"],"application/cdmi-capability":["cdmia"],"application/cdmi-container":["cdmic"],"application/cdmi-domain":["cdmid"],"application/cdmi-object":["cdmio"],"application/cdmi-queue":["cdmiq"],"application/cu-seeme":["cu"],"application/dash+xml":["mpd"],"application/davmount+xml":["davmount"],"application/docbook+xml":["dbk"],"application/dssc+der":["dssc"],"application/dssc+xml":["xdssc"],"application/ecmascript":["es","ecma"],"application/emma+xml":["emma"],"application/emotionml+xml":["emotionml"],"application/epub+zip":["epub"],"application/exi":["exi"],"application/express":["exp"],"application/fdt+xml":["fdt"],"application/font-tdpfr":["pfr"],"application/geo+json":["geojson"],"application/gml+xml":["gml"],"application/gpx+xml":["gpx"],"application/gxf":["gxf"],"application/gzip":["gz"],"application/hjson":["hjson"],"application/hyperstudio":["stk"],"application/inkml+xml":["ink","inkml"],"application/ipfix":["ipfix"],"application/its+xml":["its"],"application/java-archive":["jar","war","ear"],"application/java-serialized-object":["ser"],"application/java-vm":["class"],"application/javascript":["js","mjs"],"application/json":["json","map"],"application/json5":["json5"],"application/jsonml+json":["jsonml"],"application/ld+json":["jsonld"],"application/lgr+xml":["lgr"],"application/lost+xml":["lostxml"],"application/mac-binhex40":["hqx"],"application/mac-compactpro":["cpt"],"application/mads+xml":["mads"],"application/manifest+json":["webmanifest"],"application/marc":["mrc"],"application/marcxml+xml":["mrcx"],"application/mathematica":["ma","nb","mb"],"application/mathml+xml":["mathml"],"application/mbox":["mbox"],"application/mediaservercontrol+xml":["mscml"],"application/metalink+xml":["metalink"],"application/metalink4+xml":["meta4"],"application/mets+xml":["mets"],"application/mmt-aei+xml":["maei"],"application/mmt-usd+xml":["musd"],"application/mods+xml":["mods"],"application/mp21":["m21","mp21"],"application/mp4":["mp4s","m4p"],"application/msword":["doc","dot"],"application/mxf":["mxf"],"application/n-quads":["nq"],"application/n-triples":["nt"],"application/node":["cjs"],"application/octet-stream":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"],"application/oda":["oda"],"application/oebps-package+xml":["opf"],"application/ogg":["ogx"],"application/omdoc+xml":["omdoc"],"application/onenote":["onetoc","onetoc2","onetmp","onepkg"],"application/oxps":["oxps"],"application/p2p-overlay+xml":["relo"],"application/patch-ops-error+xml":["xer"],"application/pdf":["pdf"],"application/pgp-encrypted":["pgp"],"application/pgp-signature":["asc","sig"],"application/pics-rules":["prf"],"application/pkcs10":["p10"],"application/pkcs7-mime":["p7m","p7c"],"application/pkcs7-signature":["p7s"],"application/pkcs8":["p8"],"application/pkix-attr-cert":["ac"],"application/pkix-cert":["cer"],"application/pkix-crl":["crl"],"application/pkix-pkipath":["pkipath"],"application/pkixcmp":["pki"],"application/pls+xml":["pls"],"application/postscript":["ai","eps","ps"],"appli
cation/provenance+xml":["provx"],"application/pskc+xml":["pskcxml"],"application/raml+yaml":["raml"],"application/rdf+xml":["rdf","owl"],"application/reginfo+xml":["rif"],"application/relax-ng-compact-syntax":["rnc"],"application/resource-lists+xml":["rl"],"application/resource-lists-diff+xml":["rld"],"application/rls-services+xml":["rs"],"application/route-apd+xml":["rapd"],"application/route-s-tsid+xml":["sls"],"application/route-usd+xml":["rusd"],"application/rpki-ghostbusters":["gbr"],"application/rpki-manifest":["mft"],"application/rpki-roa":["roa"],"application/rsd+xml":["rsd"],"application/rss+xml":["rss"],"application/rtf":["rtf"],"application/sbml+xml":["sbml"],"application/scvp-cv-request":["scq"],"application/scvp-cv-response":["scs"],"application/scvp-vp-request":["spq"],"application/scvp-vp-response":["spp"],"application/sdp":["sdp"],"application/senml+xml":["senmlx"],"application/sensml+xml":["sensmlx"],"application/set-payment-initiation":["setpay"],"application/set-registration-initiation":["setreg"],"application/shf+xml":["shf"],"application/sieve":["siv","sieve"],"application/smil+xml":["smi","smil"],"application/sparql-query":["rq"],"application/sparql-results+xml":["srx"],"application/srgs":["gram"],"application/srgs+xml":["grxml"],"application/sru+xml":["sru"],"application/ssdl+xml":["ssdl"],"application/ssml+xml":["ssml"],"application/swid+xml":["swidtag"],"application/tei+xml":["tei","teicorpus"],"application/thraud+xml":["tfi"],"application/timestamped-data":["tsd"],"application/toml":["toml"],"application/trig":["trig"],"application/ttml+xml":["ttml"],"application/ubjson":["ubj"],"application/urc-ressheet+xml":["rsheet"],"application/urc-targetdesc+xml":["td"],"application/voicexml+xml":["vxml"],"application/wasm":["wasm"],"application/widget":["wgt"],"application/winhlp":["hlp"],"application/wsdl+xml":["wsdl"],"application/wspolicy+xml":["wspolicy"],"application/xaml+xml":["xaml"],"application/xcap-att+xml":["xav"],"application/xcap-caps+xml":["xca"],"application/xcap-diff+xml":["xdf"],"application/xcap-el+xml":["xel"],"application/xcap-ns+xml":["xns"],"application/xenc+xml":["xenc"],"application/xhtml+xml":["xhtml","xht"],"application/xliff+xml":["xlf"],"application/xml":["xml","xsl","xsd","rng"],"application/xml-dtd":["dtd"],"application/xop+xml":["xop"],"application/xproc+xml":["xpl"],"application/xslt+xml":["*xsl","xslt"],"application/xspf+xml":["xspf"],"application/xv+xml":["mxml","xhvml","xvml","xvm"],"application/yang":["yang"],"application/yin+xml":["yin"],"application/zip":["zip"],"audio/3gpp":["*3gpp"],"audio/adpcm":["adp"],"audio/amr":["amr"],"audio/basic":["au","snd"],"audio/midi":["mid","midi","kar","rmi"],"audio/mobile-xmf":["mxmf"],"audio/mp3":["*mp3"],"audio/mp4":["m4a","mp4a"],"audio/mpeg":["mpga","mp2","mp2a","mp3","m2a","m3a"],"audio/ogg":["oga","ogg","spx","opus"],"audio/s3m":["s3m"],"audio/silk":["sil"],"audio/wav":["wav"],"audio/wave":["*wav"],"audio/webm":["weba"],"audio/xm":["xm"],"font/collection":["ttc"],"font/otf":["otf"],"font/ttf":["ttf"],"font/woff":["woff"],"font/woff2":["woff2"],"image/aces":["exr"],"image/apng":["apng"],"image/avif":["avif"],"image/bmp":["bmp"],"image/cgm":["cgm"],"image/dicom-rle":["drle"],"image/emf":["emf"],"image/fits":["fits"],"image/g3fax":["g3"],"image/gif":["gif"],"image/heic":["heic"],"image/heic-sequence":["heics"],"image/heif":["heif"],"image/heif-sequence":["heifs"],"image/hej2k":["hej2"],"image/hsj2":["hsj2"],"image/ief":["ief"],"image/jls":["jls"],"image/jp2":["jp2","jpg2"],"image/jpeg":["jpeg","jpg","
jpe"],"image/jph":["jph"],"image/jphc":["jhc"],"image/jpm":["jpm"],"image/jpx":["jpx","jpf"],"image/jxr":["jxr"],"image/jxra":["jxra"],"image/jxrs":["jxrs"],"image/jxs":["jxs"],"image/jxsc":["jxsc"],"image/jxsi":["jxsi"],"image/jxss":["jxss"],"image/ktx":["ktx"],"image/ktx2":["ktx2"],"image/png":["png"],"image/sgi":["sgi"],"image/svg+xml":["svg","svgz"],"image/t38":["t38"],"image/tiff":["tif","tiff"],"image/tiff-fx":["tfx"],"image/webp":["webp"],"image/wmf":["wmf"],"message/disposition-notification":["disposition-notification"],"message/global":["u8msg"],"message/global-delivery-status":["u8dsn"],"message/global-disposition-notification":["u8mdn"],"message/global-headers":["u8hdr"],"message/rfc822":["eml","mime"],"model/3mf":["3mf"],"model/gltf+json":["gltf"],"model/gltf-binary":["glb"],"model/iges":["igs","iges"],"model/mesh":["msh","mesh","silo"],"model/mtl":["mtl"],"model/obj":["obj"],"model/step+xml":["stpx"],"model/step+zip":["stpz"],"model/step-xml+zip":["stpxz"],"model/stl":["stl"],"model/vrml":["wrl","vrml"],"model/x3d+binary":["*x3db","x3dbz"],"model/x3d+fastinfoset":["x3db"],"model/x3d+vrml":["*x3dv","x3dvz"],"model/x3d+xml":["x3d","x3dz"],"model/x3d-vrml":["x3dv"],"text/cache-manifest":["appcache","manifest"],"text/calendar":["ics","ifb"],"text/coffeescript":["coffee","litcoffee"],"text/css":["css"],"text/csv":["csv"],"text/html":["html","htm","shtml"],"text/jade":["jade"],"text/jsx":["jsx"],"text/less":["less"],"text/markdown":["markdown","md"],"text/mathml":["mml"],"text/mdx":["mdx"],"text/n3":["n3"],"text/plain":["txt","text","conf","def","list","log","in","ini"],"text/richtext":["rtx"],"text/rtf":["*rtf"],"text/sgml":["sgml","sgm"],"text/shex":["shex"],"text/slim":["slim","slm"],"text/spdx":["spdx"],"text/stylus":["stylus","styl"],"text/tab-separated-values":["tsv"],"text/troff":["t","tr","roff","man","me","ms"],"text/turtle":["ttl"],"text/uri-list":["uri","uris","urls"],"text/vcard":["vcard"],"text/vtt":["vtt"],"text/xml":["*xml"],"text/yaml":["yaml","yml"],"video/3gpp":["3gp","3gpp"],"video/3gpp2":["3g2"],"video/h261":["h261"],"video/h263":["h263"],"video/h264":["h264"],"video/iso.segment":["m4s"],"video/jpeg":["jpgv"],"video/jpm":["*jpm","jpgm"],"video/mj2":["mj2","mjp2"],"video/mp2t":["ts"],"video/mp4":["mp4","mp4v","mpg4"],"video/mpeg":["mpeg","mpg","mpe","m1v","m2v"],"video/ogg":["ogv"],"video/quicktime":["qt","mov"],"video/webm":["webm"]}; /***/ }), /***/ 43772: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = minimatch minimatch.Minimatch = Minimatch var path = (function () { try { return __nccwpck_require__(16928) } catch (e) {}}()) || { sep: '/' } minimatch.sep = path.sep var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} var expand = __nccwpck_require__(94691) var plTypes = { '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, '?': { open: '(?:', close: ')?' }, '+': { open: '(?:', close: ')+' }, '*': { open: '(?:', close: ')*' }, '@': { open: '(?:', close: ')' } } // any single thing other than / // don't need to escape / when using new RegExp() var qmark = '[^/]' // * => any number of characters var star = qmark + '*?' // ** when dots are allowed. Anything goes, except .. and . // not (^ or / followed by one or two dots followed by $ or /), // followed by anything, any number of times. var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' // not a ^ or / followed by a dot, // followed by anything, any number of times. var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' // characters that need to be escaped in RegExp. 
var reSpecials = charSet('().*{}+?[]^$\\!') // "abc" -> { a:true, b:true, c:true } function charSet (s) { return s.split('').reduce(function (set, c) { set[c] = true return set }, {}) } // normalizes slashes. var slashSplit = /\/+/ minimatch.filter = filter function filter (pattern, options) { options = options || {} return function (p, i, list) { return minimatch(p, pattern, options) } } function ext (a, b) { b = b || {} var t = {} Object.keys(a).forEach(function (k) { t[k] = a[k] }) Object.keys(b).forEach(function (k) { t[k] = b[k] }) return t } minimatch.defaults = function (def) { if (!def || typeof def !== 'object' || !Object.keys(def).length) { return minimatch } var orig = minimatch var m = function minimatch (p, pattern, options) { return orig(p, pattern, ext(def, options)) } m.Minimatch = function Minimatch (pattern, options) { return new orig.Minimatch(pattern, ext(def, options)) } m.Minimatch.defaults = function defaults (options) { return orig.defaults(ext(def, options)).Minimatch } m.filter = function filter (pattern, options) { return orig.filter(pattern, ext(def, options)) } m.defaults = function defaults (options) { return orig.defaults(ext(def, options)) } m.makeRe = function makeRe (pattern, options) { return orig.makeRe(pattern, ext(def, options)) } m.braceExpand = function braceExpand (pattern, options) { return orig.braceExpand(pattern, ext(def, options)) } m.match = function (list, pattern, options) { return orig.match(list, pattern, ext(def, options)) } return m } Minimatch.defaults = function (def) { return minimatch.defaults(def).Minimatch } function minimatch (p, pattern, options) { assertValidPattern(pattern) if (!options) options = {} // shortcut: comments match nothing. if (!options.nocomment && pattern.charAt(0) === '#') { return false } return new Minimatch(pattern, options).match(p) } function Minimatch (pattern, options) { if (!(this instanceof Minimatch)) { return new Minimatch(pattern, options) } assertValidPattern(pattern) if (!options) options = {} pattern = pattern.trim() // windows support: need to use /, not \ if (!options.allowWindowsEscape && path.sep !== '/') { pattern = pattern.split(path.sep).join('/') } this.options = options this.set = [] this.pattern = pattern this.regexp = null this.negate = false this.comment = false this.empty = false this.partial = !!options.partial // make the set of regexps etc. this.make() } Minimatch.prototype.debug = function () {} Minimatch.prototype.make = make function make () { var pattern = this.pattern var options = this.options // empty patterns and comments match nothing. if (!options.nocomment && pattern.charAt(0) === '#') { this.comment = true return } if (!pattern) { this.empty = true return } // step 1: figure out negation, etc. this.parseNegate() // step 2: expand braces var set = this.globSet = this.braceExpand() if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } this.debug(this.pattern, set) // step 3: now we have a set, so turn each one into a series of path-portion // matching patterns. // These will be regexps, except in the case of "**", which is // set to the GLOBSTAR object for globstar behavior, // and will not contain any / characters set = this.globParts = set.map(function (s) { return s.split(slashSplit) }) this.debug(this.pattern, set) // glob --> regexps set = set.map(function (s, si, set) { return s.map(this.parse, this) }, this) this.debug(this.pattern, set) // filter out everything that didn't compile properly. 
set = set.filter(function (s) { return s.indexOf(false) === -1 }) this.debug(this.pattern, set) this.set = set } Minimatch.prototype.parseNegate = parseNegate function parseNegate () { var pattern = this.pattern var negate = false var options = this.options var negateOffset = 0 if (options.nonegate) return for (var i = 0, l = pattern.length ; i < l && pattern.charAt(i) === '!' ; i++) { negate = !negate negateOffset++ } if (negateOffset) this.pattern = pattern.substr(negateOffset) this.negate = negate } // Brace expansion: // a{b,c}d -> abd acd // a{b,}c -> abc ac // a{0..3}d -> a0d a1d a2d a3d // a{b,c{d,e}f}g -> abg acdfg acefg // a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg // // Invalid sets are not expanded. // a{2..}b -> a{2..}b // a{b}c -> a{b}c minimatch.braceExpand = function (pattern, options) { return braceExpand(pattern, options) } Minimatch.prototype.braceExpand = braceExpand function braceExpand (pattern, options) { if (!options) { if (this instanceof Minimatch) { options = this.options } else { options = {} } } pattern = typeof pattern === 'undefined' ? this.pattern : pattern assertValidPattern(pattern) // Thanks to Yeting Li for // improving this regexp to avoid a ReDOS vulnerability. if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { // shortcut. no need to expand. return [pattern] } return expand(pattern) } var MAX_PATTERN_LENGTH = 1024 * 64 var assertValidPattern = function (pattern) { if (typeof pattern !== 'string') { throw new TypeError('invalid pattern') } if (pattern.length > MAX_PATTERN_LENGTH) { throw new TypeError('pattern is too long') } } // parse a component of the expanded set. // At this point, no pattern may contain "/" in it // so we're going to return a 2d array, where each entry is the full // pattern, split on '/', and then turned into a regular expression. // A regexp is made at the end which joins each array with an // escaped /, and another full one which joins each regexp with |. // // Following the lead of Bash 4.1, note that "**" only has special meaning // when it is the *only* thing in a path portion. Otherwise, any series // of * is equivalent to a single *. Globstar behavior is enabled by // default, and can be disabled by setting options.noglobstar. Minimatch.prototype.parse = parse var SUBPARSE = {} function parse (pattern, isSub) { assertValidPattern(pattern) var options = this.options // shortcuts if (pattern === '**') { if (!options.noglobstar) return GLOBSTAR else pattern = '*' } if (pattern === '') return '' var re = '' var hasMagic = !!options.nocase var escaping = false // ? => one single character var patternListStack = [] var negativeLists = [] var stateChar var inClass = false var reClassStart = -1 var classStart = -1 // . and .. never match anything that doesn't start with ., // even when options.dot is set. var patternStart = pattern.charAt(0) === '.' ? '' // anything // not (start or / followed by . or .. followed by / or end) : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' : '(?!\\.)' var self = this function clearStateChar () { if (stateChar) { // we had some state-tracking character // that wasn't consumed by this pass. switch (stateChar) { case '*': re += star hasMagic = true break case '?': re += qmark hasMagic = true break default: re += '\\' + stateChar break } self.debug('clearStateChar %j %j', stateChar, re) stateChar = false } } for (var i = 0, len = pattern.length, c ; (i < len) && (c = pattern.charAt(i)) ; i++) { this.debug('%s\t%s %s %j', pattern, i, re, c) // skip over any that are escaped. 
if (escaping && reSpecials[c]) { re += '\\' + c escaping = false continue } switch (c) { /* istanbul ignore next */ case '/': { // completely not allowed, even escaped. // Should already be path-split by now. return false } case '\\': clearStateChar() escaping = true continue // the various stateChar values // for the "extglob" stuff. case '?': case '*': case '+': case '@': case '!': this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) // all of those are literals inside a class, except that // the glob [!a] means [^a] in regexp if (inClass) { this.debug(' in class') if (c === '!' && i === classStart + 1) c = '^' re += c continue } // if we already have a stateChar, then it means // that there was something like ** or +? in there. // Handle the stateChar, then proceed with this one. self.debug('call clearStateChar %j', stateChar) clearStateChar() stateChar = c // if extglob is disabled, then +(asdf|foo) isn't a thing. // just clear the statechar *now*, rather than even diving into // the patternList stuff. if (options.noext) clearStateChar() continue case '(': if (inClass) { re += '(' continue } if (!stateChar) { re += '\\(' continue } patternListStack.push({ type: stateChar, start: i - 1, reStart: re.length, open: plTypes[stateChar].open, close: plTypes[stateChar].close }) // negation is (?:(?!js)[^/]*) re += stateChar === '!' ? '(?:(?!(?:' : '(?:' this.debug('plType %j %j', stateChar, re) stateChar = false continue case ')': if (inClass || !patternListStack.length) { re += '\\)' continue } clearStateChar() hasMagic = true var pl = patternListStack.pop() // negation is (?:(?!js)[^/]*) // The others are (?:) re += pl.close if (pl.type === '!') { negativeLists.push(pl) } pl.reEnd = re.length continue case '|': if (inClass || !patternListStack.length || escaping) { re += '\\|' escaping = false continue } clearStateChar() re += '|' continue // these are mostly the same in regexp and glob case '[': // swallow any state-tracking char before the [ clearStateChar() if (inClass) { re += '\\' + c continue } inClass = true classStart = i reClassStart = re.length re += c continue case ']': // a right bracket shall lose its special // meaning and represent itself in // a bracket expression if it occurs // first in the list. -- POSIX.2 2.8.3.2 if (i === classStart + 1 || !inClass) { re += '\\' + c escaping = false continue } // handle the case where we left a class open. // "[z-a]" is valid, equivalent to "\[z-a\]" // split where the last [ was, make sure we don't have // an invalid re. if so, re-walk the contents of the // would-be class to re-translate any characters that // were passed through as-is // TODO: It would probably be faster to determine this // without a try/catch and a new RegExp, but it's tricky // to do safely. For now, this is safe and works. var cs = pattern.substring(classStart + 1, i) try { RegExp('[' + cs + ']') } catch (er) { // not a valid class! var sp = this.parse(cs, SUBPARSE) re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' hasMagic = hasMagic || sp[1] inClass = false continue } // finish up the class. hasMagic = true inClass = false re += c continue default: // swallow any state char that wasn't consumed clearStateChar() if (escaping) { // no need escaping = false } else if (reSpecials[c] && !(c === '^' && inClass)) { re += '\\' } re += c } // switch } // for // handle the case where we left a class open. // "[abc" is valid, equivalent to "\[abc" if (inClass) { // split where the last [ was, and escape it // this is a huge pita. 
We now have to re-walk // the contents of the would-be class to re-translate // any characters that were passed through as-is cs = pattern.substr(classStart + 1) sp = this.parse(cs, SUBPARSE) re = re.substr(0, reClassStart) + '\\[' + sp[0] hasMagic = hasMagic || sp[1] } // handle the case where we had a +( thing at the *end* // of the pattern. // each pattern list stack adds 3 chars, and we need to go through // and escape any | chars that were passed through as-is for the regexp. // Go through and escape them, taking care not to double-escape any // | chars that were already escaped. for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { var tail = re.slice(pl.reStart + pl.open.length) this.debug('setting tail', re, pl) // maybe some even number of \, then maybe 1 \, followed by a | tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { if (!$2) { // the | isn't already escaped, so escape it. $2 = '\\' } // need to escape all those slashes *again*, without escaping the // one that we need for escaping the | character. As it works out, // escaping an even number of slashes can be done by simply repeating // it exactly after itself. That's why this trick works. // // I am sorry that you have to see this. return $1 + $1 + $2 + '|' }) this.debug('tail=%j\n %s', tail, tail, pl, re) var t = pl.type === '*' ? star : pl.type === '?' ? qmark : '\\' + pl.type hasMagic = true re = re.slice(0, pl.reStart) + t + '\\(' + tail } // handle trailing things that only matter at the very end. clearStateChar() if (escaping) { // trailing \\ re += '\\\\' } // only need to apply the nodot start if the re starts with // something that could conceivably capture a dot var addPatternStart = false switch (re.charAt(0)) { case '[': case '.': case '(': addPatternStart = true } // Hack to work around lack of negative lookbehind in JS // A pattern like: *.!(x).!(y|z) needs to ensure that a name // like 'a.xyz.yz' doesn't match. So, the first negative // lookahead, has to look ALL the way ahead, to the end of // the pattern. for (var n = negativeLists.length - 1; n > -1; n--) { var nl = negativeLists[n] var nlBefore = re.slice(0, nl.reStart) var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) var nlAfter = re.slice(nl.reEnd) nlLast += nlAfter // Handle nested stuff like *(*.js|!(*.json)), where open parens // mean that we should *not* include the ) in the bit that is considered // "after" the negated section. var openParensBefore = nlBefore.split('(').length - 1 var cleanAfter = nlAfter for (i = 0; i < openParensBefore; i++) { cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') } nlAfter = cleanAfter var dollar = '' if (nlAfter === '' && isSub !== SUBPARSE) { dollar = '$' } var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast re = newRe } // if the re is not "" at this point, then we need to make sure // it doesn't match against an empty path part. // Otherwise a/* will match a/, which it should not. if (re !== '' && hasMagic) { re = '(?=.)' + re } if (addPatternStart) { re = patternStart + re } // parsing just a piece of a larger pattern. if (isSub === SUBPARSE) { return [re, hasMagic] } // skip the regexp for non-magical patterns // unescape anything in it, though, so that it'll be // an exact match against a file etc. if (!hasMagic) { return globUnescape(pattern) } var flags = options.nocase ? 
'i' : '' try { var regExp = new RegExp('^' + re + '$', flags) } catch (er) /* istanbul ignore next - should be impossible */ { // If it was an invalid regular expression, then it can't match // anything. This trick looks for a character after the end of // the string, which is of course impossible, except in multi-line // mode, but it's not a /m regex. return new RegExp('$.') } regExp._glob = pattern regExp._src = re return regExp } minimatch.makeRe = function (pattern, options) { return new Minimatch(pattern, options || {}).makeRe() } Minimatch.prototype.makeRe = makeRe function makeRe () { if (this.regexp || this.regexp === false) return this.regexp // at this point, this.set is a 2d array of partial // pattern strings, or "**". // // It's better to use .match(). This function shouldn't // be used, really, but it's pretty convenient sometimes, // when you just want to work with a regex. var set = this.set if (!set.length) { this.regexp = false return this.regexp } var options = this.options var twoStar = options.noglobstar ? star : options.dot ? twoStarDot : twoStarNoDot var flags = options.nocase ? 'i' : '' var re = set.map(function (pattern) { return pattern.map(function (p) { return (p === GLOBSTAR) ? twoStar : (typeof p === 'string') ? regExpEscape(p) : p._src }).join('\\\/') }).join('|') // must match entire pattern // ending in a * or ** will make it less strict. re = '^(?:' + re + ')$' // can match anything, as long as it's not this. if (this.negate) re = '^(?!' + re + ').*$' try { this.regexp = new RegExp(re, flags) } catch (ex) /* istanbul ignore next - should be impossible */ { this.regexp = false } return this.regexp } minimatch.match = function (list, pattern, options) { options = options || {} var mm = new Minimatch(pattern, options) list = list.filter(function (f) { return mm.match(f) }) if (mm.options.nonull && !list.length) { list.push(pattern) } return list } Minimatch.prototype.match = function match (f, partial) { if (typeof partial === 'undefined') partial = this.partial this.debug('match', f, this.pattern) // short-circuit in the case of busted things. // comments, etc. if (this.comment) return false if (this.empty) return f === '' if (f === '/' && partial) return true var options = this.options // windows: need to use /, not \ if (path.sep !== '/') { f = f.split(path.sep).join('/') } // treat the test path as a set of pathparts. f = f.split(slashSplit) this.debug(this.pattern, 'split', f) // just ONE of the pattern sets in this.set needs to match // in order for it to be valid. If negating, then just one // match means that we have failed. // Either way, return on the first hit. var set = this.set this.debug(this.pattern, 'set', set) // Find the basename of the path by looking for the last non-empty segment var filename var i for (i = f.length - 1; i >= 0; i--) { filename = f[i] if (filename) break } for (i = 0; i < set.length; i++) { var pattern = set[i] var file = f if (options.matchBase && pattern.length === 1) { file = [filename] } var hit = this.matchOne(file, pattern, partial) if (hit) { if (options.flipNegate) return true return !this.negate } } // didn't get any hits. this is success if it's a negative // pattern, failure otherwise. if (options.flipNegate) return false return this.negate } // set partial to true to test if, for example, // "/a/b" matches the start of "/*/b/*/d" // Partial means, if you run out of file before you run // out of pattern, then that's fine, as long as all // the parts match. 
Minimatch.prototype.matchOne = function (file, pattern, partial) { var options = this.options this.debug('matchOne', { 'this': this, file: file, pattern: pattern }) this.debug('matchOne', file.length, pattern.length) for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length ; (fi < fl) && (pi < pl) ; fi++, pi++) { this.debug('matchOne loop') var p = pattern[pi] var f = file[fi] this.debug(pattern, p, f) // should be impossible. // some invalid regexp stuff in the set. /* istanbul ignore if */ if (p === false) return false if (p === GLOBSTAR) { this.debug('GLOBSTAR', [pattern, p, f]) // "**" // a/**/b/**/c would match the following: // a/b/x/y/z/c // a/x/y/z/b/c // a/b/x/b/x/c // a/b/c // To do this, take the rest of the pattern after // the **, and see if it would match the file remainder. // If so, return success. // If not, the ** "swallows" a segment, and try again. // This is recursively awful. // // a/**/b/**/c matching a/b/x/y/z/c // - a matches a // - doublestar // - matchOne(b/x/y/z/c, b/**/c) // - b matches b // - doublestar // - matchOne(x/y/z/c, c) -> no // - matchOne(y/z/c, c) -> no // - matchOne(z/c, c) -> no // - matchOne(c, c) yes, hit var fr = fi var pr = pi + 1 if (pr === pl) { this.debug('** at the end') // a ** at the end will just swallow the rest. // We have found a match. // however, it will not swallow /.x, unless // options.dot is set. // . and .. are *never* matched by **, for explosively // exponential reasons. for (; fi < fl; fi++) { if (file[fi] === '.' || file[fi] === '..' || (!options.dot && file[fi].charAt(0) === '.')) return false } return true } // ok, let's see if we can swallow whatever we can. while (fr < fl) { var swallowee = file[fr] this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) // XXX remove this slice. Just pass the start index. if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { this.debug('globstar found match!', fr, fl, swallowee) // found a match. return true } else { // can't swallow "." or ".." ever. // can only swallow ".foo" when explicitly asked. if (swallowee === '.' || swallowee === '..' || (!options.dot && swallowee.charAt(0) === '.')) { this.debug('dot detected!', file, fr, pattern, pr) break } // ** swallows a segment, and continue. this.debug('globstar swallow a segment, and continue') fr++ } } // no match was found. // However, in partial mode, we can't say this is necessarily over. // If there's more *pattern* left, then /* istanbul ignore if */ if (partial) { // ran out of file this.debug('\n>>> no match, partial?', file, fr, pattern, pr) if (fr === fl) return true } return false } // something other than ** // non-magic patterns just have to match exactly // patterns with magic have been turned into regexps. var hit if (typeof p === 'string') { hit = f === p this.debug('string match', p, f, hit) } else { hit = f.match(p) this.debug('pattern match', p, f, hit) } if (!hit) return false } // Note: ending in / means that we'll get a final "" // at the end of the pattern. This can only match a // corresponding "" at the end of the file. // If the file ends in /, then it can only match a // a pattern that ends in /, unless the pattern just // doesn't have any more for it. But, a/b/ should *not* // match "a/b/*", even though "" matches against the // [^/]*? pattern, except in partial mode, where it might // simply not be reached yet. // However, a/b/ should still satisfy a/* // now either we fell off the end of the pattern, or we're done. 
if (fi === fl && pi === pl) { // ran out of pattern and filename at the same time. // an exact hit! return true } else if (fi === fl) { // ran out of file, but still had pattern left. // this is ok if we're doing the match as part of // a glob fs traversal. return partial } else /* istanbul ignore else */ if (pi === pl) { // ran out of pattern, still have file left. // this is only acceptable if we're on the very last // empty segment of a file with a trailing slash. // a/* should match a/b/ return (fi === fl - 1) && (file[fi] === '') } // should be unreachable. /* istanbul ignore next */ throw new Error('wtf?') } // replace stuff like \* with * function globUnescape (s) { return s.replace(/\\(.)/g, '$1') } function regExpEscape (s) { return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') } /***/ }), /***/ 70744: /***/ ((module) => { /** * Helpers. */ var s = 1000; var m = s * 60; var h = m * 60; var d = h * 24; var w = d * 7; var y = d * 365.25; /** * Parse or format the given `val`. * * Options: * * - `long` verbose formatting [false] * * @param {String|Number} val * @param {Object} [options] * @throws {Error} throw an error if val is not a non-empty string or a number * @return {String|Number} * @api public */ module.exports = function (val, options) { options = options || {}; var type = typeof val; if (type === 'string' && val.length > 0) { return parse(val); } else if (type === 'number' && isFinite(val)) { return options.long ? fmtLong(val) : fmtShort(val); } throw new Error( 'val is not a non-empty string or a valid number. val=' + JSON.stringify(val) ); }; /** * Parse the given `str` and return milliseconds. * * @param {String} str * @return {Number} * @api private */ function parse(str) { str = String(str); if (str.length > 100) { return; } var match = /^(-?(?:\d+)?\.?\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec( str ); if (!match) { return; } var n = parseFloat(match[1]); var type = (match[2] || 'ms').toLowerCase(); switch (type) { case 'years': case 'year': case 'yrs': case 'yr': case 'y': return n * y; case 'weeks': case 'week': case 'w': return n * w; case 'days': case 'day': case 'd': return n * d; case 'hours': case 'hour': case 'hrs': case 'hr': case 'h': return n * h; case 'minutes': case 'minute': case 'mins': case 'min': case 'm': return n * m; case 'seconds': case 'second': case 'secs': case 'sec': case 's': return n * s; case 'milliseconds': case 'millisecond': case 'msecs': case 'msec': case 'ms': return n; default: return undefined; } } /** * Short format for `ms`. * * @param {Number} ms * @return {String} * @api private */ function fmtShort(ms) { var msAbs = Math.abs(ms); if (msAbs >= d) { return Math.round(ms / d) + 'd'; } if (msAbs >= h) { return Math.round(ms / h) + 'h'; } if (msAbs >= m) { return Math.round(ms / m) + 'm'; } if (msAbs >= s) { return Math.round(ms / s) + 's'; } return ms + 'ms'; } /** * Long format for `ms`. * * @param {Number} ms * @return {String} * @api private */ function fmtLong(ms) { var msAbs = Math.abs(ms); if (msAbs >= d) { return plural(ms, msAbs, d, 'day'); } if (msAbs >= h) { return plural(ms, msAbs, h, 'hour'); } if (msAbs >= m) { return plural(ms, msAbs, m, 'minute'); } if (msAbs >= s) { return plural(ms, msAbs, s, 'second'); } return ms + ' ms'; } /** * Pluralization helper. */ function plural(ms, msAbs, n, name) { var isPlural = msAbs >= n * 1.5; return Math.round(ms / n) + ' ' + name + (isPlural ? 
's' : ''); } /***/ }), /***/ 26705: /***/ ((module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } var Stream = _interopDefault(__nccwpck_require__(2203)); var http = _interopDefault(__nccwpck_require__(58611)); var Url = _interopDefault(__nccwpck_require__(87016)); var whatwgUrl = _interopDefault(__nccwpck_require__(62686)); var https = _interopDefault(__nccwpck_require__(65692)); var zlib = _interopDefault(__nccwpck_require__(43106)); // Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js // fix for "Readable" isn't a named export issue const Readable = Stream.Readable; const BUFFER = Symbol('buffer'); const TYPE = Symbol('type'); class Blob { constructor() { this[TYPE] = ''; const blobParts = arguments[0]; const options = arguments[1]; const buffers = []; let size = 0; if (blobParts) { const a = blobParts; const length = Number(a.length); for (let i = 0; i < length; i++) { const element = a[i]; let buffer; if (element instanceof Buffer) { buffer = element; } else if (ArrayBuffer.isView(element)) { buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); } else if (element instanceof ArrayBuffer) { buffer = Buffer.from(element); } else if (element instanceof Blob) { buffer = element[BUFFER]; } else { buffer = Buffer.from(typeof element === 'string' ? element : String(element)); } size += buffer.length; buffers.push(buffer); } } this[BUFFER] = Buffer.concat(buffers); let type = options && options.type !== undefined && String(options.type).toLowerCase(); if (type && !/[^\u0020-\u007E]/.test(type)) { this[TYPE] = type; } } get size() { return this[BUFFER].length; } get type() { return this[TYPE]; } text() { return Promise.resolve(this[BUFFER].toString()); } arrayBuffer() { const buf = this[BUFFER]; const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); return Promise.resolve(ab); } stream() { const readable = new Readable(); readable._read = function () {}; readable.push(this[BUFFER]); readable.push(null); return readable; } toString() { return '[object Blob]'; } slice() { const size = this.size; const start = arguments[0]; const end = arguments[1]; let relativeStart, relativeEnd; if (start === undefined) { relativeStart = 0; } else if (start < 0) { relativeStart = Math.max(size + start, 0); } else { relativeStart = Math.min(start, size); } if (end === undefined) { relativeEnd = size; } else if (end < 0) { relativeEnd = Math.max(size + end, 0); } else { relativeEnd = Math.min(end, size); } const span = Math.max(relativeEnd - relativeStart, 0); const buffer = this[BUFFER]; const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); const blob = new Blob([], { type: arguments[2] }); blob[BUFFER] = slicedBuffer; return blob; } } Object.defineProperties(Blob.prototype, { size: { enumerable: true }, type: { enumerable: true }, slice: { enumerable: true } }); Object.defineProperty(Blob.prototype, Symbol.toStringTag, { value: 'Blob', writable: false, enumerable: false, configurable: true }); /** * fetch-error.js * * FetchError interface for operational errors */ /** * Create FetchError instance * * @param String message Error message for human * @param String type Error type for machine * @param String systemError For Node.js system error * @return FetchError */ function FetchError(message, type, 
systemError) { Error.call(this, message); this.message = message; this.type = type; // when err.type is `system`, err.code contains system error code if (systemError) { this.code = this.errno = systemError.code; } // hide custom error implementation details from end-users Error.captureStackTrace(this, this.constructor); } FetchError.prototype = Object.create(Error.prototype); FetchError.prototype.constructor = FetchError; FetchError.prototype.name = 'FetchError'; let convert; try { convert = (__nccwpck_require__(42078).convert); } catch (e) {} const INTERNALS = Symbol('Body internals'); // fix an issue where "PassThrough" isn't a named export for node <10 const PassThrough = Stream.PassThrough; /** * Body mixin * * Ref: https://fetch.spec.whatwg.org/#body * * @param Stream body Readable stream * @param Object opts Response options * @return Void */ function Body(body) { var _this = this; var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, _ref$size = _ref.size; let size = _ref$size === undefined ? 0 : _ref$size; var _ref$timeout = _ref.timeout; let timeout = _ref$timeout === undefined ? 0 : _ref$timeout; if (body == null) { // body is undefined or null body = null; } else if (isURLSearchParams(body)) { // body is a URLSearchParams body = Buffer.from(body.toString()); } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { // body is ArrayBuffer body = Buffer.from(body); } else if (ArrayBuffer.isView(body)) { // body is ArrayBufferView body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); } else if (body instanceof Stream) ; else { // none of the above // coerce to string then buffer body = Buffer.from(String(body)); } this[INTERNALS] = { body, disturbed: false, error: null }; this.size = size; this.timeout = timeout; if (body instanceof Stream) { body.on('error', function (err) { const error = err.name === 'AbortError' ? 
err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); _this[INTERNALS].error = error; }); } } Body.prototype = { get body() { return this[INTERNALS].body; }, get bodyUsed() { return this[INTERNALS].disturbed; }, /** * Decode response as ArrayBuffer * * @return Promise */ arrayBuffer() { return consumeBody.call(this).then(function (buf) { return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); }); }, /** * Return raw response as Blob * * @return Promise */ blob() { let ct = this.headers && this.headers.get('content-type') || ''; return consumeBody.call(this).then(function (buf) { return Object.assign( // Prevent copying new Blob([], { type: ct.toLowerCase() }), { [BUFFER]: buf }); }); }, /** * Decode response as json * * @return Promise */ json() { var _this2 = this; return consumeBody.call(this).then(function (buffer) { try { return JSON.parse(buffer.toString()); } catch (err) { return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); } }); }, /** * Decode response as text * * @return Promise */ text() { return consumeBody.call(this).then(function (buffer) { return buffer.toString(); }); }, /** * Decode response as buffer (non-spec api) * * @return Promise */ buffer() { return consumeBody.call(this); }, /** * Decode response as text, while automatically detecting the encoding and * trying to decode to UTF-8 (non-spec api) * * @return Promise */ textConverted() { var _this3 = this; return consumeBody.call(this).then(function (buffer) { return convertBody(buffer, _this3.headers); }); } }; // In browsers, all properties are enumerable. Object.defineProperties(Body.prototype, { body: { enumerable: true }, bodyUsed: { enumerable: true }, arrayBuffer: { enumerable: true }, blob: { enumerable: true }, json: { enumerable: true }, text: { enumerable: true } }); Body.mixIn = function (proto) { for (const name of Object.getOwnPropertyNames(Body.prototype)) { // istanbul ignore else: future proof if (!(name in proto)) { const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); Object.defineProperty(proto, name, desc); } } }; /** * Consume and convert an entire Body to a Buffer. 
* * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body * * @return Promise */ function consumeBody() { var _this4 = this; if (this[INTERNALS].disturbed) { return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); } this[INTERNALS].disturbed = true; if (this[INTERNALS].error) { return Body.Promise.reject(this[INTERNALS].error); } let body = this.body; // body is null if (body === null) { return Body.Promise.resolve(Buffer.alloc(0)); } // body is blob if (isBlob(body)) { body = body.stream(); } // body is buffer if (Buffer.isBuffer(body)) { return Body.Promise.resolve(body); } // istanbul ignore if: should never happen if (!(body instanceof Stream)) { return Body.Promise.resolve(Buffer.alloc(0)); } // body is stream // get ready to actually consume the body let accum = []; let accumBytes = 0; let abort = false; return new Body.Promise(function (resolve, reject) { let resTimeout; // allow timeout on slow response body if (_this4.timeout) { resTimeout = setTimeout(function () { abort = true; reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); }, _this4.timeout); } // handle stream errors body.on('error', function (err) { if (err.name === 'AbortError') { // if the request was aborted, reject with this Error abort = true; reject(err); } else { // other errors, such as incorrect content-encoding reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); } }); body.on('data', function (chunk) { if (abort || chunk === null) { return; } if (_this4.size && accumBytes + chunk.length > _this4.size) { abort = true; reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); return; } accumBytes += chunk.length; accum.push(chunk); }); body.on('end', function () { if (abort) { return; } clearTimeout(resTimeout); try { resolve(Buffer.concat(accum, accumBytes)); } catch (err) { // handle streams that have accumulated too much data (issue #414) reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); } }); }); } /** * Detect buffer encoding and convert to target encoding * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding * * @param Buffer buffer Incoming buffer * @param String encoding Target encoding * @return String */ function convertBody(buffer, headers) { if (typeof convert !== 'function') { throw new Error('The package `encoding` must be installed to use the textConverted() function'); } const ct = headers.get('content-type'); let charset = 'utf-8'; let res, str; // header if (ct) { res = /charset=([^;]*)/i.exec(ct); } // no charset in content type, peek at response body for at most 1024 bytes str = buffer.slice(0, 1024).toString(); // html5 if (!res && str) { res = / 0 && arguments[0] !== undefined ? arguments[0] : undefined; this[MAP] = Object.create(null); if (init instanceof Headers) { const rawHeaders = init.raw(); const headerNames = Object.keys(rawHeaders); for (const headerName of headerNames) { for (const value of rawHeaders[headerName]) { this.append(headerName, value); } } return; } // We don't worry about converting prop to ByteString here as append() // will handle it. 
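// Illustrative sketch (never called): the init shapes this constructor accepts
// — another Headers, an iterable of name/value pairs, or a plain record — and
// how append()/get() behave for repeated names. Header names and values here
// are arbitrary examples.
function __headersUsageSketch() {
	const fromPairs = new Headers([['Accept', 'application/json']]); // iterable of pairs
	fromPairs.append('Accept', 'text/plain');
	// get() is case-insensitive and joins multiple values with ', ':
	// fromPairs.get('accept') === 'application/json, text/plain'
	const fromRecord = new Headers({ 'X-Example': '1' }); // plain object record
	const copied = new Headers(fromPairs);                // another Headers instance
	return [fromPairs, fromRecord, copied];
}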
if (init == null) ; else if (typeof init === 'object') { const method = init[Symbol.iterator]; if (method != null) { if (typeof method !== 'function') { throw new TypeError('Header pairs must be iterable'); } // sequence> // Note: per spec we have to first exhaust the lists then process them const pairs = []; for (const pair of init) { if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { throw new TypeError('Each header pair must be iterable'); } pairs.push(Array.from(pair)); } for (const pair of pairs) { if (pair.length !== 2) { throw new TypeError('Each header pair must be a name/value tuple'); } this.append(pair[0], pair[1]); } } else { // record for (const key of Object.keys(init)) { const value = init[key]; this.append(key, value); } } } else { throw new TypeError('Provided initializer must be an object'); } } /** * Return combined header value given name * * @param String name Header name * @return Mixed */ get(name) { name = `${name}`; validateName(name); const key = find(this[MAP], name); if (key === undefined) { return null; } return this[MAP][key].join(', '); } /** * Iterate over all headers * * @param Function callback Executed for each item with parameters (value, name, thisArg) * @param Boolean thisArg `this` context for callback function * @return Void */ forEach(callback) { let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; let pairs = getHeaders(this); let i = 0; while (i < pairs.length) { var _pairs$i = pairs[i]; const name = _pairs$i[0], value = _pairs$i[1]; callback.call(thisArg, value, name, this); pairs = getHeaders(this); i++; } } /** * Overwrite header values given name * * @param String name Header name * @param String value Header value * @return Void */ set(name, value) { name = `${name}`; value = `${value}`; validateName(name); validateValue(value); const key = find(this[MAP], name); this[MAP][key !== undefined ? key : name] = [value]; } /** * Append a value onto existing header * * @param String name Header name * @param String value Header value * @return Void */ append(name, value) { name = `${name}`; value = `${value}`; validateName(name); validateValue(value); const key = find(this[MAP], name); if (key !== undefined) { this[MAP][key].push(value); } else { this[MAP][name] = [value]; } } /** * Check for header name existence * * @param String name Header name * @return Boolean */ has(name) { name = `${name}`; validateName(name); return find(this[MAP], name) !== undefined; } /** * Delete all header values given name * * @param String name Header name * @return Void */ delete(name) { name = `${name}`; validateName(name); const key = find(this[MAP], name); if (key !== undefined) { delete this[MAP][key]; } } /** * Return raw headers (non-spec api) * * @return Object */ raw() { return this[MAP]; } /** * Get an iterator on keys. * * @return Iterator */ keys() { return createHeadersIterator(this, 'key'); } /** * Get an iterator on values. * * @return Iterator */ values() { return createHeadersIterator(this, 'value'); } /** * Get an iterator on entries. * * This is the default iterator of the Headers object. 
* * @return Iterator */ [Symbol.iterator]() { return createHeadersIterator(this, 'key+value'); } } Headers.prototype.entries = Headers.prototype[Symbol.iterator]; Object.defineProperty(Headers.prototype, Symbol.toStringTag, { value: 'Headers', writable: false, enumerable: false, configurable: true }); Object.defineProperties(Headers.prototype, { get: { enumerable: true }, forEach: { enumerable: true }, set: { enumerable: true }, append: { enumerable: true }, has: { enumerable: true }, delete: { enumerable: true }, keys: { enumerable: true }, values: { enumerable: true }, entries: { enumerable: true } }); function getHeaders(headers) { let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; const keys = Object.keys(headers[MAP]).sort(); return keys.map(kind === 'key' ? function (k) { return k.toLowerCase(); } : kind === 'value' ? function (k) { return headers[MAP][k].join(', '); } : function (k) { return [k.toLowerCase(), headers[MAP][k].join(', ')]; }); } const INTERNAL = Symbol('internal'); function createHeadersIterator(target, kind) { const iterator = Object.create(HeadersIteratorPrototype); iterator[INTERNAL] = { target, kind, index: 0 }; return iterator; } const HeadersIteratorPrototype = Object.setPrototypeOf({ next() { // istanbul ignore if if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { throw new TypeError('Value of `this` is not a HeadersIterator'); } var _INTERNAL = this[INTERNAL]; const target = _INTERNAL.target, kind = _INTERNAL.kind, index = _INTERNAL.index; const values = getHeaders(target, kind); const len = values.length; if (index >= len) { return { value: undefined, done: true }; } this[INTERNAL].index = index + 1; return { value: values[index], done: false }; } }, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { value: 'HeadersIterator', writable: false, enumerable: false, configurable: true }); /** * Export the Headers object in a form that Node.js can consume. * * @param Headers headers * @return Object */ function exportNodeCompatibleHeaders(headers) { const obj = Object.assign({ __proto__: null }, headers[MAP]); // http.request() only supports string as Host header. This hack makes // specifying custom Host header possible. const hostHeaderKey = find(headers[MAP], 'Host'); if (hostHeaderKey !== undefined) { obj[hostHeaderKey] = obj[hostHeaderKey][0]; } return obj; } /** * Create a Headers object from an object of headers, ignoring those that do * not conform to HTTP grammar productions. * * @param Object obj Object of headers * @return Headers */ function createHeadersLenient(obj) { const headers = new Headers(); for (const name of Object.keys(obj)) { if (invalidTokenRegex.test(name)) { continue; } if (Array.isArray(obj[name])) { for (const val of obj[name]) { if (invalidHeaderCharRegex.test(val)) { continue; } if (headers[MAP][name] === undefined) { headers[MAP][name] = [val]; } else { headers[MAP][name].push(val); } } } else if (!invalidHeaderCharRegex.test(obj[name])) { headers[MAP][name] = [obj[name]]; } } return headers; } const INTERNALS$1 = Symbol('Response internals'); // fix an issue where "STATUS_CODES" aren't a named export for node <10 const STATUS_CODES = http.STATUS_CODES; /** * Response class * * @param Stream body Readable stream * @param Object opts Response options * @return Void */ class Response { constructor() { let body = arguments.length > 0 && arguments[0] !== undefined ? 
arguments[0] : null; let opts = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; Body.call(this, body, opts); const status = opts.status || 200; const headers = new Headers(opts.headers); if (body != null && !headers.has('Content-Type')) { const contentType = extractContentType(body); if (contentType) { headers.append('Content-Type', contentType); } } this[INTERNALS$1] = { url: opts.url, status, statusText: opts.statusText || STATUS_CODES[status], headers, counter: opts.counter }; } get url() { return this[INTERNALS$1].url || ''; } get status() { return this[INTERNALS$1].status; } /** * Convenience property representing if the request ended normally */ get ok() { return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; } get redirected() { return this[INTERNALS$1].counter > 0; } get statusText() { return this[INTERNALS$1].statusText; } get headers() { return this[INTERNALS$1].headers; } /** * Clone this response * * @return Response */ clone() { return new Response(clone(this), { url: this.url, status: this.status, statusText: this.statusText, headers: this.headers, ok: this.ok, redirected: this.redirected }); } } Body.mixIn(Response.prototype); Object.defineProperties(Response.prototype, { url: { enumerable: true }, status: { enumerable: true }, ok: { enumerable: true }, redirected: { enumerable: true }, statusText: { enumerable: true }, headers: { enumerable: true }, clone: { enumerable: true } }); Object.defineProperty(Response.prototype, Symbol.toStringTag, { value: 'Response', writable: false, enumerable: false, configurable: true }); const INTERNALS$2 = Symbol('Request internals'); const URL = Url.URL || whatwgUrl.URL; // fix an issue where "format", "parse" aren't a named export for node <10 const parse_url = Url.parse; const format_url = Url.format; /** * Wrapper around `new URL` to handle arbitrary URLs * * @param {string} urlStr * @return {void} */ function parseURL(urlStr) { /* Check whether the URL is absolute or not Scheme: https://tools.ietf.org/html/rfc3986#section-3.1 Absolute URL: https://tools.ietf.org/html/rfc3986#section-4.3 */ if (/^[a-zA-Z][a-zA-Z\d+\-.]*:/.exec(urlStr)) { urlStr = new URL(urlStr).toString(); } // Fallback to old implementation for arbitrary URLs return parse_url(urlStr); } const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; /** * Check if a value is an instance of Request. * * @param Mixed input * @return Boolean */ function isRequest(input) { return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; } function isAbortSignal(signal) { const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); return !!(proto && proto.constructor.name === 'AbortSignal'); } /** * Request class * * @param Mixed input Url or Request instance * @param Object init Custom options * @return Void */ class Request { constructor(input) { let init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; let parsedURL; // normalize input if (!isRequest(input)) { if (input && input.href) { // in order to support Node.js' Url objects; though WHATWG's URL objects // will fall into this branch also (since their `toString()` will return // `href` property anyway) parsedURL = parseURL(input.href); } else { // coerce input to a string before attempting to parse parsedURL = parseURL(`${input}`); } input = {}; } else { parsedURL = parseURL(input.url); } let method = init.method || input.method || 'GET'; method = method.toUpperCase(); if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { throw new TypeError('Request with GET/HEAD method cannot have body'); } let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null; Body.call(this, inputBody, { timeout: init.timeout || input.timeout || 0, size: init.size || input.size || 0 }); const headers = new Headers(init.headers || input.headers || {}); if (inputBody != null && !headers.has('Content-Type')) { const contentType = extractContentType(inputBody); if (contentType) { headers.append('Content-Type', contentType); } } let signal = isRequest(input) ? input.signal : null; if ('signal' in init) signal = init.signal; if (signal != null && !isAbortSignal(signal)) { throw new TypeError('Expected signal to be an instanceof AbortSignal'); } this[INTERNALS$2] = { method, redirect: init.redirect || input.redirect || 'follow', headers, parsedURL, signal }; // node-fetch-only options this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; this.counter = init.counter || input.counter || 0; this.agent = init.agent || input.agent; } get method() { return this[INTERNALS$2].method; } get url() { return format_url(this[INTERNALS$2].parsedURL); } get headers() { return this[INTERNALS$2].headers; } get redirect() { return this[INTERNALS$2].redirect; } get signal() { return this[INTERNALS$2].signal; } /** * Clone this request * * @return Request */ clone() { return new Request(this); } } Body.mixIn(Request.prototype); Object.defineProperty(Request.prototype, Symbol.toStringTag, { value: 'Request', writable: false, enumerable: false, configurable: true }); Object.defineProperties(Request.prototype, { method: { enumerable: true }, url: { enumerable: true }, headers: { enumerable: true }, redirect: { enumerable: true }, clone: { enumerable: true }, signal: { enumerable: true } }); /** * Convert a Request to Node.js http request options. 
* * @param Request A Request instance * @return Object The options object to be passed to http.request */ function getNodeRequestOptions(request) { const parsedURL = request[INTERNALS$2].parsedURL; const headers = new Headers(request[INTERNALS$2].headers); // fetch step 1.3 if (!headers.has('Accept')) { headers.set('Accept', '*/*'); } // Basic fetch if (!parsedURL.protocol || !parsedURL.hostname) { throw new TypeError('Only absolute URLs are supported'); } if (!/^https?:$/.test(parsedURL.protocol)) { throw new TypeError('Only HTTP(S) protocols are supported'); } if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); } // HTTP-network-or-cache fetch steps 2.4-2.7 let contentLengthValue = null; if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { contentLengthValue = '0'; } if (request.body != null) { const totalBytes = getTotalBytes(request); if (typeof totalBytes === 'number') { contentLengthValue = String(totalBytes); } } if (contentLengthValue) { headers.set('Content-Length', contentLengthValue); } // HTTP-network-or-cache fetch step 2.11 if (!headers.has('User-Agent')) { headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); } // HTTP-network-or-cache fetch step 2.15 if (request.compress && !headers.has('Accept-Encoding')) { headers.set('Accept-Encoding', 'gzip,deflate'); } let agent = request.agent; if (typeof agent === 'function') { agent = agent(parsedURL); } // HTTP-network fetch step 4.2 // chunked encoding is handled by Node.js return Object.assign({}, parsedURL, { method: request.method, headers: exportNodeCompatibleHeaders(headers), agent }); } /** * abort-error.js * * AbortError interface for cancelled requests */ /** * Create AbortError instance * * @param String message Error message for human * @return AbortError */ function AbortError(message) { Error.call(this, message); this.type = 'aborted'; this.message = message; // hide custom error implementation details from end-users Error.captureStackTrace(this, this.constructor); } AbortError.prototype = Object.create(Error.prototype); AbortError.prototype.constructor = AbortError; AbortError.prototype.name = 'AbortError'; const URL$1 = Url.URL || whatwgUrl.URL; // fix an issue where "PassThrough", "resolve" aren't a named export for node <10 const PassThrough$1 = Stream.PassThrough; const isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) { const orig = new URL$1(original).hostname; const dest = new URL$1(destination).hostname; return orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest); }; /** * isSameProtocol reports whether the two provided URLs use the same protocol. * * Both domains must already be in canonical form. 
* @param {string|URL} original * @param {string|URL} destination */ const isSameProtocol = function isSameProtocol(destination, original) { const orig = new URL$1(original).protocol; const dest = new URL$1(destination).protocol; return orig === dest; }; /** * Fetch function * * @param Mixed url Absolute url or Request instance * @param Object opts Fetch options * @return Promise */ function fetch(url, opts) { // allow custom promise if (!fetch.Promise) { throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); } Body.Promise = fetch.Promise; // wrap http.request into fetch return new fetch.Promise(function (resolve, reject) { // build request object const request = new Request(url, opts); const options = getNodeRequestOptions(request); const send = (options.protocol === 'https:' ? https : http).request; const signal = request.signal; let response = null; const abort = function abort() { let error = new AbortError('The user aborted a request.'); reject(error); if (request.body && request.body instanceof Stream.Readable) { destroyStream(request.body, error); } if (!response || !response.body) return; response.body.emit('error', error); }; if (signal && signal.aborted) { abort(); return; } const abortAndFinalize = function abortAndFinalize() { abort(); finalize(); }; // send request const req = send(options); let reqTimeout; if (signal) { signal.addEventListener('abort', abortAndFinalize); } function finalize() { req.abort(); if (signal) signal.removeEventListener('abort', abortAndFinalize); clearTimeout(reqTimeout); } if (request.timeout) { req.once('socket', function (socket) { reqTimeout = setTimeout(function () { reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); finalize(); }, request.timeout); }); } req.on('error', function (err) { reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); if (response && response.body) { destroyStream(response.body, err); } finalize(); }); fixResponseChunkedTransferBadEnding(req, function (err) { if (signal && signal.aborted) { return; } if (response && response.body) { destroyStream(response.body, err); } }); /* c8 ignore next 18 */ if (parseInt(process.version.substring(1)) < 14) { // Before Node.js 14, pipeline() does not fully support async iterators and does not always // properly handle when the socket close/end events are out of order. req.on('socket', function (s) { s.addListener('close', function (hadError) { // if a data listener is still present we didn't end cleanly const hasDataListener = s.listenerCount('data') > 0; // if end happened before close but the socket didn't emit an error, do it now if (response && hasDataListener && !hadError && !(signal && signal.aborted)) { const err = new Error('Premature close'); err.code = 'ERR_STREAM_PREMATURE_CLOSE'; response.body.emit('error', err); } }); }); } req.on('response', function (res) { clearTimeout(reqTimeout); const headers = createHeadersLenient(res.headers); // HTTP fetch step 5 if (fetch.isRedirect(res.statusCode)) { // HTTP fetch step 5.2 const location = headers.get('Location'); // HTTP fetch step 5.3 let locationURL = null; try { locationURL = location === null ? 
null : new URL$1(location, request.url).toString(); } catch (err) { // error here can only be invalid URL in Location: header // do not throw when options.redirect == manual // let the user extract the errorneous redirect URL if (request.redirect !== 'manual') { reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')); finalize(); return; } } // HTTP fetch step 5.5 switch (request.redirect) { case 'error': reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); finalize(); return; case 'manual': // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. if (locationURL !== null) { // handle corrupted header try { headers.set('Location', locationURL); } catch (err) { // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request reject(err); } } break; case 'follow': // HTTP-redirect fetch step 2 if (locationURL === null) { break; } // HTTP-redirect fetch step 5 if (request.counter >= request.follow) { reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); finalize(); return; } // HTTP-redirect fetch step 6 (counter increment) // Create a new Request object. const requestOpts = { headers: new Headers(request.headers), follow: request.follow, counter: request.counter + 1, agent: request.agent, compress: request.compress, method: request.method, body: request.body, signal: request.signal, timeout: request.timeout, size: request.size }; if (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) { for (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) { requestOpts.headers.delete(name); } } // HTTP-redirect fetch step 9 if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); finalize(); return; } // HTTP-redirect fetch step 11 if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { requestOpts.method = 'GET'; requestOpts.body = undefined; requestOpts.headers.delete('content-length'); } // HTTP-redirect fetch step 15 resolve(fetch(new Request(locationURL, requestOpts))); finalize(); return; } } // prepare response res.once('end', function () { if (signal) signal.removeEventListener('abort', abortAndFinalize); }); let body = res.pipe(new PassThrough$1()); const response_options = { url: request.url, status: res.statusCode, statusText: res.statusMessage, headers: headers, size: request.size, timeout: request.timeout, counter: request.counter }; // HTTP-network fetch step 12.1.1.3 const codings = headers.get('Content-Encoding'); // HTTP-network fetch step 12.1.1.4: handle content codings // in following scenarios we ignore compression support // 1. compression support is disabled // 2. HEAD request // 3. no Content-Encoding header // 4. no content response (204) // 5. content not modified response (304) if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { response = new Response(body, response_options); resolve(response); return; } // For Node v6+ // Be less strict when decoding compressed responses, since sometimes // servers send slightly invalid responses that are still accepted // by common browsers. 
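// Illustrative sketch (never called): what the decompression handling below
// means for a caller — with `compress: true` (the default) the request sends
// Accept-Encoding and the response body is transparently decoded for
// gzip/deflate (and br on newer Node) before the body methods see it. The URL
// is a hypothetical example.
function __fetchCompressSketch() {
	return fetch('https://example.com/data.json', { compress: true })
		.then(function (res) { return res.json(); }); // body is already decoded here
}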
// Always using Z_SYNC_FLUSH is what cURL does. const zlibOptions = { flush: zlib.Z_SYNC_FLUSH, finishFlush: zlib.Z_SYNC_FLUSH }; // for gzip if (codings == 'gzip' || codings == 'x-gzip') { body = body.pipe(zlib.createGunzip(zlibOptions)); response = new Response(body, response_options); resolve(response); return; } // for deflate if (codings == 'deflate' || codings == 'x-deflate') { // handle the infamous raw deflate response from old servers // a hack for old IIS and Apache servers const raw = res.pipe(new PassThrough$1()); raw.once('data', function (chunk) { // see http://stackoverflow.com/questions/37519828 if ((chunk[0] & 0x0F) === 0x08) { body = body.pipe(zlib.createInflate()); } else { body = body.pipe(zlib.createInflateRaw()); } response = new Response(body, response_options); resolve(response); }); raw.on('end', function () { // some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted. if (!response) { response = new Response(body, response_options); resolve(response); } }); return; } // for br if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { body = body.pipe(zlib.createBrotliDecompress()); response = new Response(body, response_options); resolve(response); return; } // otherwise, use response as-is response = new Response(body, response_options); resolve(response); }); writeToStream(req, request); }); } function fixResponseChunkedTransferBadEnding(request, errorCallback) { let socket; request.on('socket', function (s) { socket = s; }); request.on('response', function (response) { const headers = response.headers; if (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) { response.once('close', function (hadError) { // tests for socket presence, as in some situations the // the 'socket' event is not triggered for the request // (happens in deno), avoids `TypeError` // if a data listener is still present we didn't end cleanly const hasDataListener = socket && socket.listenerCount('data') > 0; if (hasDataListener && !hadError) { const err = new Error('Premature close'); err.code = 'ERR_STREAM_PREMATURE_CLOSE'; errorCallback(err); } }); } }); } function destroyStream(stream, err) { if (stream.destroy) { stream.destroy(err); } else { // node < 8 stream.emit('error', err); stream.end(); } } /** * Redirect code matching * * @param Number code Status code * @return Boolean */ fetch.isRedirect = function (code) { return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; }; // expose Promise fetch.Promise = global.Promise; module.exports = exports = fetch; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = exports; exports.Headers = Headers; exports.Request = Request; exports.Response = Response; exports.FetchError = FetchError; exports.AbortError = AbortError; /***/ }), /***/ 55560: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var wrappy = __nccwpck_require__(58264) module.exports = wrappy(once) module.exports.strict = wrappy(onceStrict) once.proto = once(function () { Object.defineProperty(Function.prototype, 'once', { value: function () { return once(this) }, configurable: true }) Object.defineProperty(Function.prototype, 'onceStrict', { value: function () { return onceStrict(this) }, configurable: true }) }) function once (fn) { var f = function () { if (f.called) return f.value f.called = true return f.value = fn.apply(this, arguments) } f.called = false return f } function onceStrict (fn) { var f = function () { if 
(f.called) throw new Error(f.onceError) f.called = true return f.value = fn.apply(this, arguments) } var name = fn.name || 'Function wrapped with `once`' f.onceError = name + " shouldn't be called more than once" f.called = false return f } /***/ }), /***/ 58890: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const Queue = __nccwpck_require__(30538); const pLimit = concurrency => { if (!((Number.isInteger(concurrency) || concurrency === Infinity) && concurrency > 0)) { throw new TypeError('Expected `concurrency` to be a number from 1 and up'); } const queue = new Queue(); let activeCount = 0; const next = () => { activeCount--; if (queue.size > 0) { queue.dequeue()(); } }; const run = async (fn, resolve, ...args) => { activeCount++; const result = (async () => fn(...args))(); resolve(result); try { await result; } catch {} next(); }; const enqueue = (fn, resolve, ...args) => { queue.enqueue(run.bind(null, fn, resolve, ...args)); (async () => { // This function needs to wait until the next microtask before comparing // `activeCount` to `concurrency`, because `activeCount` is updated asynchronously // when the run function is dequeued and called. The comparison in the if-statement // needs to happen asynchronously as well to get an up-to-date value for `activeCount`. await Promise.resolve(); if (activeCount < concurrency && queue.size > 0) { queue.dequeue()(); } })(); }; const generator = (fn, ...args) => new Promise(resolve => { enqueue(fn, resolve, ...args); }); Object.defineProperties(generator, { activeCount: { get: () => activeCount }, pendingCount: { get: () => queue.size }, clearQueue: { value: () => { queue.clear(); } } }); return generator; }; module.exports = pLimit; /***/ }), /***/ 77777: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; var parseUrl = (__nccwpck_require__(87016).parse); var DEFAULT_PORTS = { ftp: 21, gopher: 70, http: 80, https: 443, ws: 80, wss: 443, }; var stringEndsWith = String.prototype.endsWith || function(s) { return s.length <= this.length && this.indexOf(s, this.length - s.length) !== -1; }; /** * @param {string|object} url - The URL, or the result from url.parse. * @return {string} The URL of the proxy that should handle the request to the * given URL. If no proxy is set, this will be an empty string. */ function getProxyForUrl(url) { var parsedUrl = typeof url === 'string' ? parseUrl(url) : url || {}; var proto = parsedUrl.protocol; var hostname = parsedUrl.host; var port = parsedUrl.port; if (typeof hostname !== 'string' || !hostname || typeof proto !== 'string') { return ''; // Don't proxy URLs without a valid scheme or host. } proto = proto.split(':', 1)[0]; // Stripping ports in this way instead of using parsedUrl.hostname to make // sure that the brackets around IPv6 addresses are kept. hostname = hostname.replace(/:\d*$/, ''); port = parseInt(port) || DEFAULT_PORTS[proto] || 0; if (!shouldProxy(hostname, port)) { return ''; // Don't proxy URLs that match NO_PROXY. } var proxy = getEnv('npm_config_' + proto + '_proxy') || getEnv(proto + '_proxy') || getEnv('npm_config_proxy') || getEnv('all_proxy'); if (proxy && proxy.indexOf('://') === -1) { // Missing scheme in proxy, default to the requested URL's scheme. proxy = proto + '://' + proxy; } return proxy; } /** * Determines whether a given URL should be proxied. * * @param {string} hostname - The host name of the URL. * @param {number} port - The effective port of the URL. 
* @returns {boolean} Whether the given URL should be proxied. * @private */ function shouldProxy(hostname, port) { var NO_PROXY = (getEnv('npm_config_no_proxy') || getEnv('no_proxy')).toLowerCase(); if (!NO_PROXY) { return true; // Always proxy if NO_PROXY is not set. } if (NO_PROXY === '*') { return false; // Never proxy if wildcard is set. } return NO_PROXY.split(/[,\s]/).every(function(proxy) { if (!proxy) { return true; // Skip zero-length hosts. } var parsedProxy = proxy.match(/^(.+):(\d+)$/); var parsedProxyHostname = parsedProxy ? parsedProxy[1] : proxy; var parsedProxyPort = parsedProxy ? parseInt(parsedProxy[2]) : 0; if (parsedProxyPort && parsedProxyPort !== port) { return true; // Skip if ports don't match. } if (!/^[.*]/.test(parsedProxyHostname)) { // No wildcards, so stop proxying if there is an exact match. return hostname !== parsedProxyHostname; } if (parsedProxyHostname.charAt(0) === '*') { // Remove leading wildcard. parsedProxyHostname = parsedProxyHostname.slice(1); } // Stop proxying if the hostname ends with the no_proxy host. return !stringEndsWith.call(hostname, parsedProxyHostname); }); } /** * Get the value for an environment variable. * * @param {string} key - The name of the environment variable. * @return {string} The value of the environment variable. * @private */ function getEnv(key) { return process.env[key.toLowerCase()] || process.env[key.toUpperCase()] || ''; } exports.getProxyForUrl = getProxyForUrl; /***/ }), /***/ 25500: /***/ ((module) => { "use strict"; const codes = {}; function createErrorType(code, message, Base) { if (!Base) { Base = Error } function getMessage (arg1, arg2, arg3) { if (typeof message === 'string') { return message } else { return message(arg1, arg2, arg3) } } class NodeError extends Base { constructor (arg1, arg2, arg3) { super(getMessage(arg1, arg2, arg3)); } } NodeError.prototype.name = Base.name; NodeError.prototype.code = code; codes[code] = NodeError; } // https://github.com/nodejs/node/blob/v10.8.0/lib/internal/errors.js function oneOf(expected, thing) { if (Array.isArray(expected)) { const len = expected.length; expected = expected.map((i) => String(i)); if (len > 2) { return `one of ${thing} ${expected.slice(0, len - 1).join(', ')}, or ` + expected[len - 1]; } else if (len === 2) { return `one of ${thing} ${expected[0]} or ${expected[1]}`; } else { return `of ${thing} ${expected[0]}`; } } else { return `of ${thing} ${String(expected)}`; } } // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/startsWith function startsWith(str, search, pos) { return str.substr(!pos || pos < 0 ? 
0 : +pos, search.length) === search; } // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/endsWith function endsWith(str, search, this_len) { if (this_len === undefined || this_len > str.length) { this_len = str.length; } return str.substring(this_len - search.length, this_len) === search; } // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/String/includes function includes(str, search, start) { if (typeof start !== 'number') { start = 0; } if (start + search.length > str.length) { return false; } else { return str.indexOf(search, start) !== -1; } } createErrorType('ERR_INVALID_OPT_VALUE', function (name, value) { return 'The value "' + value + '" is invalid for option "' + name + '"' }, TypeError); createErrorType('ERR_INVALID_ARG_TYPE', function (name, expected, actual) { // determiner: 'must be' or 'must not be' let determiner; if (typeof expected === 'string' && startsWith(expected, 'not ')) { determiner = 'must not be'; expected = expected.replace(/^not /, ''); } else { determiner = 'must be'; } let msg; if (endsWith(name, ' argument')) { // For cases like 'first argument' msg = `The ${name} ${determiner} ${oneOf(expected, 'type')}`; } else { const type = includes(name, '.') ? 'property' : 'argument'; msg = `The "${name}" ${type} ${determiner} ${oneOf(expected, 'type')}`; } msg += `. Received type ${typeof actual}`; return msg; }, TypeError); createErrorType('ERR_STREAM_PUSH_AFTER_EOF', 'stream.push() after EOF'); createErrorType('ERR_METHOD_NOT_IMPLEMENTED', function (name) { return 'The ' + name + ' method is not implemented' }); createErrorType('ERR_STREAM_PREMATURE_CLOSE', 'Premature close'); createErrorType('ERR_STREAM_DESTROYED', function (name) { return 'Cannot call ' + name + ' after a stream was destroyed'; }); createErrorType('ERR_MULTIPLE_CALLBACK', 'Callback called multiple times'); createErrorType('ERR_STREAM_CANNOT_PIPE', 'Cannot pipe, not readable'); createErrorType('ERR_STREAM_WRITE_AFTER_END', 'write after end'); createErrorType('ERR_STREAM_NULL_VALUES', 'May not write null values to stream', TypeError); createErrorType('ERR_UNKNOWN_ENCODING', function (arg) { return 'Unknown encoding: ' + arg }, TypeError); createErrorType('ERR_STREAM_UNSHIFT_AFTER_END_EVENT', 'stream.unshift() after end event'); module.exports.F = codes; /***/ }), /***/ 82063: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // a duplex stream is just a stream that is both readable and writable. // Since JS doesn't have multiple prototypal inheritance, this class // prototypally inherits from Readable, and then parasitically from // Writable. /**/ var objectKeys = Object.keys || function (obj) { var keys = []; for (var key in obj) keys.push(key); return keys; }; /**/ module.exports = Duplex; var Readable = __nccwpck_require__(86893); var Writable = __nccwpck_require__(38797); __nccwpck_require__(39598)(Duplex, Readable); { // Allow the keys array to be GC'ed. var keys = objectKeys(Writable.prototype); for (var v = 0; v < keys.length; v++) { var method = keys[v]; if (!Duplex.prototype[method]) Duplex.prototype[method] = Writable.prototype[method]; } } function Duplex(options) { if (!(this instanceof Duplex)) return new Duplex(options); Readable.call(this, options); Writable.call(this, options); this.allowHalfOpen = true; if (options) { if (options.readable === false) this.readable = false; if (options.writable === false) this.writable = false; if (options.allowHalfOpen === false) { this.allowHalfOpen = false; this.once('end', onend); } } } Object.defineProperty(Duplex.prototype, 'writableHighWaterMark', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { return this._writableState.highWaterMark; } }); Object.defineProperty(Duplex.prototype, 'writableBuffer', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { return this._writableState && this._writableState.getBuffer(); } }); Object.defineProperty(Duplex.prototype, 'writableLength', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { return this._writableState.length; } }); // the no-half-open enforcer function onend() { // If the writable side ended, then we're ok. if (this._writableState.ended) return; // no more data can be written. // But allow more writes to happen in this tick. process.nextTick(onEndNT, this); } function onEndNT(self) { self.end(); } Object.defineProperty(Duplex.prototype, 'destroyed', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { if (this._readableState === undefined || this._writableState === undefined) { return false; } return this._readableState.destroyed && this._writableState.destroyed; }, set: function set(value) { // we ignore the value if the stream // has not been initialized yet if (this._readableState === undefined || this._writableState === undefined) { return; } // backward compatibility, the user is explicitly // managing destroyed this._readableState.destroyed = value; this._writableState.destroyed = value; } }); /***/ }), /***/ 35283: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Copyright Joyent, Inc. and other Node contributors. 
// // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // a passthrough stream. // basically just the most minimal sort of Transform stream. // Every written chunk gets output as-is. module.exports = PassThrough; var Transform = __nccwpck_require__(12337); __nccwpck_require__(39598)(PassThrough, Transform); function PassThrough(options) { if (!(this instanceof PassThrough)) return new PassThrough(options); Transform.call(this, options); } PassThrough.prototype._transform = function (chunk, encoding, cb) { cb(null, chunk); }; /***/ }), /***/ 86893: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. module.exports = Readable; /**/ var Duplex; /**/ Readable.ReadableState = ReadableState; /**/ var EE = (__nccwpck_require__(24434).EventEmitter); var EElistenerCount = function EElistenerCount(emitter, type) { return emitter.listeners(type).length; }; /**/ /**/ var Stream = __nccwpck_require__(63283); /**/ var Buffer = (__nccwpck_require__(20181).Buffer); var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {}; function _uint8ArrayToBuffer(chunk) { return Buffer.from(chunk); } function _isUint8Array(obj) { return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } /**/ var debugUtil = __nccwpck_require__(39023); var debug; if (debugUtil && debugUtil.debuglog) { debug = debugUtil.debuglog('stream'); } else { debug = function debug() {}; } /**/ var BufferList = __nccwpck_require__(77336); var destroyImpl = __nccwpck_require__(65089); var _require = __nccwpck_require__(54874), getHighWaterMark = _require.getHighWaterMark; var _require$codes = (__nccwpck_require__(25500)/* .codes */ .F), ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, ERR_STREAM_PUSH_AFTER_EOF = _require$codes.ERR_STREAM_PUSH_AFTER_EOF, ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, ERR_STREAM_UNSHIFT_AFTER_END_EVENT = _require$codes.ERR_STREAM_UNSHIFT_AFTER_END_EVENT; // Lazy loaded to improve the startup performance. var StringDecoder; var createReadableStreamAsyncIterator; var from; __nccwpck_require__(39598)(Readable, Stream); var errorOrDestroy = destroyImpl.errorOrDestroy; var kProxyEvents = ['error', 'close', 'destroy', 'pause', 'resume']; function prependListener(emitter, event, fn) { // Sadly this is not cacheable as some libraries bundle their own // event emitter implementation with them. if (typeof emitter.prependListener === 'function') return emitter.prependListener(event, fn); // This is a hack to make sure that our error handler is attached before any // userland ones. NEVER DO THIS. This is here only because this code needs // to continue to work with older versions of Node.js that do not include // the prependListener() method. The goal is to eventually remove this hack. if (!emitter._events || !emitter._events[event]) emitter.on(event, fn);else if (Array.isArray(emitter._events[event])) emitter._events[event].unshift(fn);else emitter._events[event] = [fn, emitter._events[event]]; } function ReadableState(options, stream, isDuplex) { Duplex = Duplex || __nccwpck_require__(82063); options = options || {}; // Duplex streams are both readable and writable, but share // the same options object. // However, some cases require setting options to different // values for the readable and the writable sides of the duplex stream. // These options can be provided separately as readableXXX and writableXXX. if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag. Used to make read(n) ignore n and to // make all the buffer merging and length checks go away this.objectMode = !!options.objectMode; if (isDuplex) this.objectMode = this.objectMode || !!options.readableObjectMode; // the point at which it stops calling _read() to fill the buffer // Note: 0 is a valid value, means "don't call _read preemptively ever" this.highWaterMark = getHighWaterMark(this, options, 'readableHighWaterMark', isDuplex); // A linked list is used to store data chunks instead of an array because the // linked list can remove elements from the beginning faster than // array.shift() this.buffer = new BufferList(); this.length = 0; this.pipes = null; this.pipesCount = 0; this.flowing = null; this.ended = false; this.endEmitted = false; this.reading = false; // a flag to be able to tell if the event 'readable'/'data' is emitted // immediately, or on a later tick. We set this to true at first, because // any actions that shouldn't happen until "later" should generally also // not happen before the first read call. 
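// Illustrative sketch (never called): the readable-side options this state
// object tracks, as a consumer would pass them to the Readable constructor
// defined further down. Values are arbitrary examples.
function __readableOptionsSketch() {
  return new Readable({
    objectMode: false,        // lengths are byte counts and read(n) honours n
    highWaterMark: 16 * 1024, // when to stop calling _read() preemptively
    encoding: 'utf8',         // attach a StringDecoder so chunks arrive as strings
    read: function () {}      // no-op _read for the sketch
  });
}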
this.sync = true; // whenever we return null, then we set a flag to say // that we're awaiting a 'readable' event emission. this.needReadable = false; this.emittedReadable = false; this.readableListening = false; this.resumeScheduled = false; this.paused = true; // Should close be emitted on destroy. Defaults to true. this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'end' (and potentially 'finish') this.autoDestroy = !!options.autoDestroy; // has it been destroyed this.destroyed = false; // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. // Everything else in the universe uses 'utf8', though. this.defaultEncoding = options.defaultEncoding || 'utf8'; // the number of writers that are awaiting a drain event in .pipe()s this.awaitDrain = 0; // if true, a maybeReadMore has been scheduled this.readingMore = false; this.decoder = null; this.encoding = null; if (options.encoding) { if (!StringDecoder) StringDecoder = (__nccwpck_require__(80634)/* .StringDecoder */ .I); this.decoder = new StringDecoder(options.encoding); this.encoding = options.encoding; } } function Readable(options) { Duplex = Duplex || __nccwpck_require__(82063); if (!(this instanceof Readable)) return new Readable(options); // Checking for a Stream.Duplex instance is faster here instead of inside // the ReadableState constructor, at least with V8 6.5 var isDuplex = this instanceof Duplex; this._readableState = new ReadableState(options, this, isDuplex); // legacy this.readable = true; if (options) { if (typeof options.read === 'function') this._read = options.read; if (typeof options.destroy === 'function') this._destroy = options.destroy; } Stream.call(this); } Object.defineProperty(Readable.prototype, 'destroyed', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { if (this._readableState === undefined) { return false; } return this._readableState.destroyed; }, set: function set(value) { // we ignore the value if the stream // has not been initialized yet if (!this._readableState) { return; } // backward compatibility, the user is explicitly // managing destroyed this._readableState.destroyed = value; } }); Readable.prototype.destroy = destroyImpl.destroy; Readable.prototype._undestroy = destroyImpl.undestroy; Readable.prototype._destroy = function (err, cb) { cb(err); }; // Manually shove something into the read() buffer. // This returns true if the highWaterMark has not been hit yet, // similar to how Writable.write() returns true if you should // write() some more. 
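// Illustrative sketch (never called): a producer that respects the return
// value described above — it keeps pushing until push() returns false (the
// buffer reached highWaterMark) or it hits end-of-data, and resumes when
// _read() is called again. `nextChunkOrNull` is a hypothetical chunk supplier.
function __pushBackpressureSketch(nextChunkOrNull) {
  return new Readable({
    read: function () {
      var chunk;
      do {
        chunk = nextChunkOrNull(); // null signals end-of-stream
      } while (this.push(chunk) && chunk !== null); // stop when told to back off
    }
  });
}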
Readable.prototype.push = function (chunk, encoding) { var state = this._readableState; var skipChunkCheck; if (!state.objectMode) { if (typeof chunk === 'string') { encoding = encoding || state.defaultEncoding; if (encoding !== state.encoding) { chunk = Buffer.from(chunk, encoding); encoding = ''; } skipChunkCheck = true; } } else { skipChunkCheck = true; } return readableAddChunk(this, chunk, encoding, false, skipChunkCheck); }; // Unshift should *always* be something directly out of read() Readable.prototype.unshift = function (chunk) { return readableAddChunk(this, chunk, null, true, false); }; function readableAddChunk(stream, chunk, encoding, addToFront, skipChunkCheck) { debug('readableAddChunk', chunk); var state = stream._readableState; if (chunk === null) { state.reading = false; onEofChunk(stream, state); } else { var er; if (!skipChunkCheck) er = chunkInvalid(state, chunk); if (er) { errorOrDestroy(stream, er); } else if (state.objectMode || chunk && chunk.length > 0) { if (typeof chunk !== 'string' && !state.objectMode && Object.getPrototypeOf(chunk) !== Buffer.prototype) { chunk = _uint8ArrayToBuffer(chunk); } if (addToFront) { if (state.endEmitted) errorOrDestroy(stream, new ERR_STREAM_UNSHIFT_AFTER_END_EVENT());else addChunk(stream, state, chunk, true); } else if (state.ended) { errorOrDestroy(stream, new ERR_STREAM_PUSH_AFTER_EOF()); } else if (state.destroyed) { return false; } else { state.reading = false; if (state.decoder && !encoding) { chunk = state.decoder.write(chunk); if (state.objectMode || chunk.length !== 0) addChunk(stream, state, chunk, false);else maybeReadMore(stream, state); } else { addChunk(stream, state, chunk, false); } } } else if (!addToFront) { state.reading = false; maybeReadMore(stream, state); } } // We can push more data if we are below the highWaterMark. // Also, if we have no data yet, we can stand some more bytes. // This is to work around cases where hwm=0, such as the repl. return !state.ended && (state.length < state.highWaterMark || state.length === 0); } function addChunk(stream, state, chunk, addToFront) { if (state.flowing && state.length === 0 && !state.sync) { state.awaitDrain = 0; stream.emit('data', chunk); } else { // update the buffer info. state.length += state.objectMode ? 1 : chunk.length; if (addToFront) state.buffer.unshift(chunk);else state.buffer.push(chunk); if (state.needReadable) emitReadable(stream); } maybeReadMore(stream, state); } function chunkInvalid(state, chunk) { var er; if (!_isUint8Array(chunk) && typeof chunk !== 'string' && chunk !== undefined && !state.objectMode) { er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer', 'Uint8Array'], chunk); } return er; } Readable.prototype.isPaused = function () { return this._readableState.flowing === false; }; // backwards compatibility. 
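// Usage sketch (left commented out so nothing executes inside this bundle; it
// targets the equivalent public Node.js `stream` API rather than this vendored
// copy): a custom Readable whose _read() pushes data and respects the
// back-pressure signal described above — push() returns false once the
// buffered length reaches highWaterMark, and _read() is invoked again after
// the consumer drains the buffer. Names below (`counter`, `n`) are illustrative.
//
//   const { Readable } = require('stream');
//   let n = 0;
//   const counter = new Readable({
//     highWaterMark: 16,                  // tiny buffer so back-pressure is easy to observe
//     read() {
//       if (n >= 5) {
//         this.push(null);                // null signals end-of-stream
//       } else {
//         this.push(String(++n));         // returns false once the buffer is "full"
//       }
//     }
//   });
//   counter.on('data', (chunk) => console.log('chunk:', chunk.toString()));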
Readable.prototype.setEncoding = function (enc) { if (!StringDecoder) StringDecoder = (__nccwpck_require__(80634)/* .StringDecoder */ .I); var decoder = new StringDecoder(enc); this._readableState.decoder = decoder; // If setEncoding(null), decoder.encoding equals utf8 this._readableState.encoding = this._readableState.decoder.encoding; // Iterate over current buffer to convert already stored Buffers: var p = this._readableState.buffer.head; var content = ''; while (p !== null) { content += decoder.write(p.data); p = p.next; } this._readableState.buffer.clear(); if (content !== '') this._readableState.buffer.push(content); this._readableState.length = content.length; return this; }; // Don't raise the hwm > 1GB var MAX_HWM = 0x40000000; function computeNewHighWaterMark(n) { if (n >= MAX_HWM) { // TODO(ronag): Throw ERR_VALUE_OUT_OF_RANGE. n = MAX_HWM; } else { // Get the next highest power of 2 to prevent increasing hwm excessively in // tiny amounts n--; n |= n >>> 1; n |= n >>> 2; n |= n >>> 4; n |= n >>> 8; n |= n >>> 16; n++; } return n; } // This function is designed to be inlinable, so please take care when making // changes to the function body. function howMuchToRead(n, state) { if (n <= 0 || state.length === 0 && state.ended) return 0; if (state.objectMode) return 1; if (n !== n) { // Only flow one buffer at a time if (state.flowing && state.length) return state.buffer.head.data.length;else return state.length; } // If we're asking for more than the current hwm, then raise the hwm. if (n > state.highWaterMark) state.highWaterMark = computeNewHighWaterMark(n); if (n <= state.length) return n; // Don't have enough if (!state.ended) { state.needReadable = true; return 0; } return state.length; } // you can override either this method, or the async _read(n) below. Readable.prototype.read = function (n) { debug('read', n); n = parseInt(n, 10); var state = this._readableState; var nOrig = n; if (n !== 0) state.emittedReadable = false; // if we're doing read(0) to trigger a readable event, but we // already have a bunch of data in the buffer, then just trigger // the 'readable' event and move on. if (n === 0 && state.needReadable && ((state.highWaterMark !== 0 ? state.length >= state.highWaterMark : state.length > 0) || state.ended)) { debug('read: emitReadable', state.length, state.ended); if (state.length === 0 && state.ended) endReadable(this);else emitReadable(this); return null; } n = howMuchToRead(n, state); // if we've ended, and we're now clear, then finish it up. if (n === 0 && state.ended) { if (state.length === 0) endReadable(this); return null; } // All the actual chunk generation logic needs to be // *below* the call to _read. The reason is that in certain // synthetic stream cases, such as passthrough streams, _read // may be a completely synchronous operation which may change // the state of the read buffer, providing enough data when // before there was *not* enough. // // So, the steps are: // 1. Figure out what the state of things will be after we do // a read from the buffer. // // 2. If that resulting state will trigger a _read, then call _read. // Note that this may be asynchronous, or synchronous. Yes, it is // deeply ugly to write APIs this way, but that still doesn't mean // that the Readable class should behave improperly, as streams are // designed to be sync/async agnostic. // Take note if the _read call is sync or async (ie, if the read call // has returned yet), so that we know whether or not it's safe to emit // 'readable' etc. // // 3. 
Actually pull the requested chunks out of the buffer and return. // if we need a readable event, then we need to do some reading. var doRead = state.needReadable; debug('need readable', doRead); // if we currently have less than the highWaterMark, then also read some if (state.length === 0 || state.length - n < state.highWaterMark) { doRead = true; debug('length less than watermark', doRead); } // however, if we've ended, then there's no point, and if we're already // reading, then it's unnecessary. if (state.ended || state.reading) { doRead = false; debug('reading or ended', doRead); } else if (doRead) { debug('do read'); state.reading = true; state.sync = true; // if the length is currently zero, then we *need* a readable event. if (state.length === 0) state.needReadable = true; // call internal read method this._read(state.highWaterMark); state.sync = false; // If _read pushed data synchronously, then `reading` will be false, // and we need to re-evaluate how much data we can return to the user. if (!state.reading) n = howMuchToRead(nOrig, state); } var ret; if (n > 0) ret = fromList(n, state);else ret = null; if (ret === null) { state.needReadable = state.length <= state.highWaterMark; n = 0; } else { state.length -= n; state.awaitDrain = 0; } if (state.length === 0) { // If we have nothing in the buffer, then we want to know // as soon as we *do* get something into the buffer. if (!state.ended) state.needReadable = true; // If we tried to read() past the EOF, then emit end on the next tick. if (nOrig !== n && state.ended) endReadable(this); } if (ret !== null) this.emit('data', ret); return ret; }; function onEofChunk(stream, state) { debug('onEofChunk'); if (state.ended) return; if (state.decoder) { var chunk = state.decoder.end(); if (chunk && chunk.length) { state.buffer.push(chunk); state.length += state.objectMode ? 1 : chunk.length; } } state.ended = true; if (state.sync) { // if we are sync, wait until next tick to emit the data. // Otherwise we risk emitting data in the flow() // the readable code triggers during a read() call emitReadable(stream); } else { // emit 'readable' now to make sure it gets picked up. state.needReadable = false; if (!state.emittedReadable) { state.emittedReadable = true; emitReadable_(stream); } } } // Don't emit readable right away in sync mode, because this can trigger // another read() call => stack overflow. This way, it might trigger // a nextTick recursion warning, but that's not so bad. function emitReadable(stream) { var state = stream._readableState; debug('emitReadable', state.needReadable, state.emittedReadable); state.needReadable = false; if (!state.emittedReadable) { debug('emitReadable', state.flowing); state.emittedReadable = true; process.nextTick(emitReadable_, stream); } } function emitReadable_(stream) { var state = stream._readableState; debug('emitReadable_', state.destroyed, state.length, state.ended); if (!state.destroyed && (state.length || state.ended)) { stream.emit('readable'); state.emittedReadable = false; } // The stream needs another readable event if // 1. It is not flowing, as the flow mechanism will take // care of it. // 2. It is not ended. // 3. It is below the highWaterMark, so we can schedule // another readable later. state.needReadable = !state.flowing && !state.ended && state.length <= state.highWaterMark; flow(stream); } // at this point, the user has presumably seen the 'readable' event, // and called read() to consume some data. 
that may have triggered // in turn another _read(n) call, in which case reading = true if // it's in progress. // However, if we're not ended, or reading, and the length < hwm, // then go ahead and try to read some more preemptively. function maybeReadMore(stream, state) { if (!state.readingMore) { state.readingMore = true; process.nextTick(maybeReadMore_, stream, state); } } function maybeReadMore_(stream, state) { // Attempt to read more data if we should. // // The conditions for reading more data are (one of): // - Not enough data buffered (state.length < state.highWaterMark). The loop // is responsible for filling the buffer with enough data if such data // is available. If highWaterMark is 0 and we are not in the flowing mode // we should _not_ attempt to buffer any extra data. We'll get more data // when the stream consumer calls read() instead. // - No data in the buffer, and the stream is in flowing mode. In this mode // the loop below is responsible for ensuring read() is called. Failing to // call read here would abort the flow and there's no other mechanism for // continuing the flow if the stream consumer has just subscribed to the // 'data' event. // // In addition to the above conditions to keep reading data, the following // conditions prevent the data from being read: // - The stream has ended (state.ended). // - There is already a pending 'read' operation (state.reading). This is a // case where the the stream has called the implementation defined _read() // method, but they are processing the call asynchronously and have _not_ // called push() with new data. In this case we skip performing more // read()s. The execution ends in this method again after the _read() ends // up calling push() with more data. while (!state.reading && !state.ended && (state.length < state.highWaterMark || state.flowing && state.length === 0)) { var len = state.length; debug('maybeReadMore read 0'); stream.read(0); if (len === state.length) // didn't get any data, stop spinning. break; } state.readingMore = false; } // abstract method. to be overridden in specific implementation classes. // call cb(er, data) where data is <= n in length. // for virtual (non-string, non-buffer) streams, "length" is somewhat // arbitrary, and perhaps not very meaningful. Readable.prototype._read = function (n) { errorOrDestroy(this, new ERR_METHOD_NOT_IMPLEMENTED('_read()')); }; Readable.prototype.pipe = function (dest, pipeOpts) { var src = this; var state = this._readableState; switch (state.pipesCount) { case 0: state.pipes = dest; break; case 1: state.pipes = [state.pipes, dest]; break; default: state.pipes.push(dest); break; } state.pipesCount += 1; debug('pipe count=%d opts=%j', state.pipesCount, pipeOpts); var doEnd = (!pipeOpts || pipeOpts.end !== false) && dest !== process.stdout && dest !== process.stderr; var endFn = doEnd ? onend : unpipe; if (state.endEmitted) process.nextTick(endFn);else src.once('end', endFn); dest.on('unpipe', onunpipe); function onunpipe(readable, unpipeInfo) { debug('onunpipe'); if (readable === src) { if (unpipeInfo && unpipeInfo.hasUnpiped === false) { unpipeInfo.hasUnpiped = true; cleanup(); } } } function onend() { debug('onend'); dest.end(); } // when the dest drains, it reduces the awaitDrain counter // on the source. This would be more elegant with a .once() // handler in flow(), but adding and removing repeatedly is // too slow. 
var ondrain = pipeOnDrain(src); dest.on('drain', ondrain); var cleanedUp = false; function cleanup() { debug('cleanup'); // cleanup event handlers once the pipe is broken dest.removeListener('close', onclose); dest.removeListener('finish', onfinish); dest.removeListener('drain', ondrain); dest.removeListener('error', onerror); dest.removeListener('unpipe', onunpipe); src.removeListener('end', onend); src.removeListener('end', unpipe); src.removeListener('data', ondata); cleanedUp = true; // if the reader is waiting for a drain event from this // specific writer, then it would cause it to never start // flowing again. // So, if this is awaiting a drain, then we just call it now. // If we don't know, then assume that we are waiting for one. if (state.awaitDrain && (!dest._writableState || dest._writableState.needDrain)) ondrain(); } src.on('data', ondata); function ondata(chunk) { debug('ondata'); var ret = dest.write(chunk); debug('dest.write', ret); if (ret === false) { // If the user unpiped during `dest.write()`, it is possible // to get stuck in a permanently paused state if that write // also returned false. // => Check whether `dest` is still a piping destination. if ((state.pipesCount === 1 && state.pipes === dest || state.pipesCount > 1 && indexOf(state.pipes, dest) !== -1) && !cleanedUp) { debug('false write response, pause', state.awaitDrain); state.awaitDrain++; } src.pause(); } } // if the dest has an error, then stop piping into it. // however, don't suppress the throwing behavior for this. function onerror(er) { debug('onerror', er); unpipe(); dest.removeListener('error', onerror); if (EElistenerCount(dest, 'error') === 0) errorOrDestroy(dest, er); } // Make sure our error handler is attached before userland ones. prependListener(dest, 'error', onerror); // Both close and finish should trigger unpipe, but only once. function onclose() { dest.removeListener('finish', onfinish); unpipe(); } dest.once('close', onclose); function onfinish() { debug('onfinish'); dest.removeListener('close', onclose); unpipe(); } dest.once('finish', onfinish); function unpipe() { debug('unpipe'); src.unpipe(dest); } // tell the dest that it's being piped to dest.emit('pipe', src); // start the flow if it hasn't been started already. if (!state.flowing) { debug('pipe resume'); src.resume(); } return dest; }; function pipeOnDrain(src) { return function pipeOnDrainFunctionResult() { var state = src._readableState; debug('pipeOnDrain', state.awaitDrain); if (state.awaitDrain) state.awaitDrain--; if (state.awaitDrain === 0 && EElistenerCount(src, 'data')) { state.flowing = true; flow(src); } }; } Readable.prototype.unpipe = function (dest) { var state = this._readableState; var unpipeInfo = { hasUnpiped: false }; // if we're not piping anywhere, then do nothing. if (state.pipesCount === 0) return this; // just one destination. most common case. if (state.pipesCount === 1) { // passed in one, but it's not the right one. if (dest && dest !== state.pipes) return this; if (!dest) dest = state.pipes; // got a match. state.pipes = null; state.pipesCount = 0; state.flowing = false; if (dest) dest.emit('unpipe', this, unpipeInfo); return this; } // slow case. multiple pipe destinations. if (!dest) { // remove all. var dests = state.pipes; var len = state.pipesCount; state.pipes = null; state.pipesCount = 0; state.flowing = false; for (var i = 0; i < len; i++) dests[i].emit('unpipe', this, { hasUnpiped: false }); return this; } // try to find the right one. 
var index = indexOf(state.pipes, dest); if (index === -1) return this; state.pipes.splice(index, 1); state.pipesCount -= 1; if (state.pipesCount === 1) state.pipes = state.pipes[0]; dest.emit('unpipe', this, unpipeInfo); return this; }; // set up data events if they are asked for // Ensure readable listeners eventually get something Readable.prototype.on = function (ev, fn) { var res = Stream.prototype.on.call(this, ev, fn); var state = this._readableState; if (ev === 'data') { // update readableListening so that resume() may be a no-op // a few lines down. This is needed to support once('readable'). state.readableListening = this.listenerCount('readable') > 0; // Try start flowing on next tick if stream isn't explicitly paused if (state.flowing !== false) this.resume(); } else if (ev === 'readable') { if (!state.endEmitted && !state.readableListening) { state.readableListening = state.needReadable = true; state.flowing = false; state.emittedReadable = false; debug('on readable', state.length, state.reading); if (state.length) { emitReadable(this); } else if (!state.reading) { process.nextTick(nReadingNextTick, this); } } } return res; }; Readable.prototype.addListener = Readable.prototype.on; Readable.prototype.removeListener = function (ev, fn) { var res = Stream.prototype.removeListener.call(this, ev, fn); if (ev === 'readable') { // We need to check if there is someone still listening to // readable and reset the state. However this needs to happen // after readable has been emitted but before I/O (nextTick) to // support once('readable', fn) cycles. This means that calling // resume within the same tick will have no // effect. process.nextTick(updateReadableListening, this); } return res; }; Readable.prototype.removeAllListeners = function (ev) { var res = Stream.prototype.removeAllListeners.apply(this, arguments); if (ev === 'readable' || ev === undefined) { // We need to check if there is someone still listening to // readable and reset the state. However this needs to happen // after readable has been emitted but before I/O (nextTick) to // support once('readable', fn) cycles. This means that calling // resume within the same tick will have no // effect. process.nextTick(updateReadableListening, this); } return res; }; function updateReadableListening(self) { var state = self._readableState; state.readableListening = self.listenerCount('readable') > 0; if (state.resumeScheduled && !state.paused) { // flowing needs to be set to true now, otherwise // the upcoming resume will not flow. state.flowing = true; // crude way to check if we should resume } else if (self.listenerCount('data') > 0) { self.resume(); } } function nReadingNextTick(self) { debug('readable nexttick read 0'); self.read(0); } // pause() and resume() are remnants of the legacy readable stream API // If the user uses them, then switch into old mode. 
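// Usage sketch (commented out; assumes the public Node.js `stream` API): how
// the listener bookkeeping above plays out in practice — attaching a 'data'
// handler switches the stream into flowing mode, pause() suspends 'data'
// emission, and resume() re-enters flowing mode. `src` is an illustrative name.
//
//   const { Readable } = require('stream');
//   const src = Readable.from(['a', 'b', 'c']);
//   src.on('data', (chunk) => {           // adding a 'data' listener starts the flow
//     console.log('data:', chunk);
//     src.pause();                        // isPaused() now reports true
//     setTimeout(() => src.resume(), 50); // re-enter flowing mode; remaining chunks follow
//   });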
Readable.prototype.resume = function () { var state = this._readableState; if (!state.flowing) { debug('resume'); // we flow only if there is no one listening // for readable, but we still have to call // resume() state.flowing = !state.readableListening; resume(this, state); } state.paused = false; return this; }; function resume(stream, state) { if (!state.resumeScheduled) { state.resumeScheduled = true; process.nextTick(resume_, stream, state); } } function resume_(stream, state) { debug('resume', state.reading); if (!state.reading) { stream.read(0); } state.resumeScheduled = false; stream.emit('resume'); flow(stream); if (state.flowing && !state.reading) stream.read(0); } Readable.prototype.pause = function () { debug('call pause flowing=%j', this._readableState.flowing); if (this._readableState.flowing !== false) { debug('pause'); this._readableState.flowing = false; this.emit('pause'); } this._readableState.paused = true; return this; }; function flow(stream) { var state = stream._readableState; debug('flow', state.flowing); while (state.flowing && stream.read() !== null); } // wrap an old-style stream as the async data source. // This is *not* part of the readable stream interface. // It is an ugly unfortunate mess of history. Readable.prototype.wrap = function (stream) { var _this = this; var state = this._readableState; var paused = false; stream.on('end', function () { debug('wrapped end'); if (state.decoder && !state.ended) { var chunk = state.decoder.end(); if (chunk && chunk.length) _this.push(chunk); } _this.push(null); }); stream.on('data', function (chunk) { debug('wrapped data'); if (state.decoder) chunk = state.decoder.write(chunk); // don't skip over falsy values in objectMode if (state.objectMode && (chunk === null || chunk === undefined)) return;else if (!state.objectMode && (!chunk || !chunk.length)) return; var ret = _this.push(chunk); if (!ret) { paused = true; stream.pause(); } }); // proxy all the other methods. // important when wrapping filters and duplexes. for (var i in stream) { if (this[i] === undefined && typeof stream[i] === 'function') { this[i] = function methodWrap(method) { return function methodWrapReturnFunction() { return stream[method].apply(stream, arguments); }; }(i); } } // proxy certain important events. for (var n = 0; n < kProxyEvents.length; n++) { stream.on(kProxyEvents[n], this.emit.bind(this, kProxyEvents[n])); } // when we try to consume some more bytes, simply unpause the // underlying stream. 
this._read = function (n) { debug('wrapped _read', n); if (paused) { paused = false; stream.resume(); } }; return this; }; if (typeof Symbol === 'function') { Readable.prototype[Symbol.asyncIterator] = function () { if (createReadableStreamAsyncIterator === undefined) { createReadableStreamAsyncIterator = __nccwpck_require__(14868); } return createReadableStreamAsyncIterator(this); }; } Object.defineProperty(Readable.prototype, 'readableHighWaterMark', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { return this._readableState.highWaterMark; } }); Object.defineProperty(Readable.prototype, 'readableBuffer', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { return this._readableState && this._readableState.buffer; } }); Object.defineProperty(Readable.prototype, 'readableFlowing', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { return this._readableState.flowing; }, set: function set(state) { if (this._readableState) { this._readableState.flowing = state; } } }); // exposed for testing purposes only. Readable._fromList = fromList; Object.defineProperty(Readable.prototype, 'readableLength', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { return this._readableState.length; } }); // Pluck off n bytes from an array of buffers. // Length is the combined lengths of all the buffers in the list. // This function is designed to be inlinable, so please take care when making // changes to the function body. function fromList(n, state) { // nothing buffered if (state.length === 0) return null; var ret; if (state.objectMode) ret = state.buffer.shift();else if (!n || n >= state.length) { // read it all, truncate the list if (state.decoder) ret = state.buffer.join('');else if (state.buffer.length === 1) ret = state.buffer.first();else ret = state.buffer.concat(state.length); state.buffer.clear(); } else { // read part of list ret = state.buffer.consume(n, state.decoder); } return ret; } function endReadable(stream) { var state = stream._readableState; debug('endReadable', state.endEmitted); if (!state.endEmitted) { state.ended = true; process.nextTick(endReadableNT, state, stream); } } function endReadableNT(state, stream) { debug('endReadableNT', state.endEmitted, state.length); // Check that we didn't get one last unshift. if (!state.endEmitted && state.length === 0) { state.endEmitted = true; stream.readable = false; stream.emit('end'); if (state.autoDestroy) { // In case of duplex streams we need a way to detect // if the writable side is ready for autoDestroy as well var wState = stream._writableState; if (!wState || wState.autoDestroy && wState.finished) { stream.destroy(); } } } } if (typeof Symbol === 'function') { Readable.from = function (iterable, opts) { if (from === undefined) { from = __nccwpck_require__(4659); } return from(Readable, iterable, opts); }; } function indexOf(xs, x) { for (var i = 0, l = xs.length; i < l; i++) { if (xs[i] === x) return i; } return -1; } /***/ }), /***/ 12337: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Copyright Joyent, Inc. 
and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // a transform stream is a readable/writable stream where you do // something with the data. Sometimes it's called a "filter", // but that's not a great name for it, since that implies a thing where // some bits pass through, and others are simply ignored. (That would // be a valid example of a transform, of course.) // // While the output is causally related to the input, it's not a // necessarily symmetric or synchronous transformation. For example, // a zlib stream might take multiple plain-text writes(), and then // emit a single compressed chunk some time in the future. // // Here's how this works: // // The Transform stream has all the aspects of the readable and writable // stream classes. When you write(chunk), that calls _write(chunk,cb) // internally, and returns false if there's a lot of pending writes // buffered up. When you call read(), that calls _read(n) until // there's enough pending readable data buffered up. // // In a transform stream, the written data is placed in a buffer. When // _read(n) is called, it transforms the queued up data, calling the // buffered _write cb's as it consumes chunks. If consuming a single // written chunk would result in multiple output chunks, then the first // outputted bit calls the readcb, and subsequent chunks just go into // the read buffer, and will cause it to emit 'readable' if necessary. // // This way, back-pressure is actually determined by the reading side, // since _read has to be called to start processing a new chunk. However, // a pathological inflate type of transform can cause excessive buffering // here. For example, imagine a stream where every byte of input is // interpreted as an integer from 0-255, and then results in that many // bytes of output. Writing the 4 bytes {ff,ff,ff,ff} would result in // 1kb of data being output. In this case, you could write a very small // amount of input, and end up with a very large amount of output. In // such a pathological inflating mechanism, there'd be no way to tell // the system to stop doing the transform. A single 4MB write could // cause the system to run out of memory. // // However, even in such a pathological case, only a single written chunk // would be consumed, and then the rest would wait (un-transformed) until // the results of the previous transformed chunk were consumed. 
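// Usage sketch (commented out so it is not executed by this bundle; it uses the
// equivalent public Node.js `stream` API): a minimal Transform implementing the
// contract described above — _transform() receives each written chunk, hands the
// transformed output to the readable side, and its callback tells the writable
// side that the chunk has been consumed. `upper` is an illustrative name.
//
//   const { Transform } = require('stream');
//   const upper = new Transform({
//     transform(chunk, encoding, callback) {
//       // callback(err, data) is shorthand for push(data) followed by callback(err)
//       callback(null, chunk.toString().toUpperCase());
//     }
//   });
//   process.stdin.pipe(upper).pipe(process.stdout);   // e.g. `echo hello | node upper.js`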
module.exports = Transform; var _require$codes = (__nccwpck_require__(25500)/* .codes */ .F), ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, ERR_TRANSFORM_ALREADY_TRANSFORMING = _require$codes.ERR_TRANSFORM_ALREADY_TRANSFORMING, ERR_TRANSFORM_WITH_LENGTH_0 = _require$codes.ERR_TRANSFORM_WITH_LENGTH_0; var Duplex = __nccwpck_require__(82063); __nccwpck_require__(39598)(Transform, Duplex); function afterTransform(er, data) { var ts = this._transformState; ts.transforming = false; var cb = ts.writecb; if (cb === null) { return this.emit('error', new ERR_MULTIPLE_CALLBACK()); } ts.writechunk = null; ts.writecb = null; if (data != null) // single equals check for both `null` and `undefined` this.push(data); cb(er); var rs = this._readableState; rs.reading = false; if (rs.needReadable || rs.length < rs.highWaterMark) { this._read(rs.highWaterMark); } } function Transform(options) { if (!(this instanceof Transform)) return new Transform(options); Duplex.call(this, options); this._transformState = { afterTransform: afterTransform.bind(this), needTransform: false, transforming: false, writecb: null, writechunk: null, writeencoding: null }; // start out asking for a readable event once data is transformed. this._readableState.needReadable = true; // we have implemented the _read method, and done the other things // that Readable wants before the first _read call, so unset the // sync guard flag. this._readableState.sync = false; if (options) { if (typeof options.transform === 'function') this._transform = options.transform; if (typeof options.flush === 'function') this._flush = options.flush; } // When the writable side finishes, then flush out anything remaining. this.on('prefinish', prefinish); } function prefinish() { var _this = this; if (typeof this._flush === 'function' && !this._readableState.destroyed) { this._flush(function (er, data) { done(_this, er, data); }); } else { done(this, null, null); } } Transform.prototype.push = function (chunk, encoding) { this._transformState.needTransform = false; return Duplex.prototype.push.call(this, chunk, encoding); }; // This is the part where you do stuff! // override this function in implementation classes. // 'chunk' is an input chunk. // // Call `push(newChunk)` to pass along transformed output // to the readable side. You may call 'push' zero or more times. // // Call `cb(err)` when you are done with this chunk. If you pass // an error, then that'll put the hurt on the whole operation. If you // never call cb(), then you'll never get another chunk. Transform.prototype._transform = function (chunk, encoding, cb) { cb(new ERR_METHOD_NOT_IMPLEMENTED('_transform()')); }; Transform.prototype._write = function (chunk, encoding, cb) { var ts = this._transformState; ts.writecb = cb; ts.writechunk = chunk; ts.writeencoding = encoding; if (!ts.transforming) { var rs = this._readableState; if (ts.needTransform || rs.needReadable || rs.length < rs.highWaterMark) this._read(rs.highWaterMark); } }; // Doesn't matter what the args are here. // _transform does all the work. // That we got here means that the readable side wants more data. Transform.prototype._read = function (n) { var ts = this._transformState; if (ts.writechunk !== null && !ts.transforming) { ts.transforming = true; this._transform(ts.writechunk, ts.writeencoding, ts.afterTransform); } else { // mark that we need a transform, so that any data that comes in // will get processed, now that we've asked for it. 
ts.needTransform = true; } }; Transform.prototype._destroy = function (err, cb) { Duplex.prototype._destroy.call(this, err, function (err2) { cb(err2); }); }; function done(stream, er, data) { if (er) return stream.emit('error', er); if (data != null) // single equals check for both `null` and `undefined` stream.push(data); // TODO(BridgeAR): Write a test for these two error cases // if there's nothing in the write buffer, then that means // that nothing more will ever be provided if (stream._writableState.length) throw new ERR_TRANSFORM_WITH_LENGTH_0(); if (stream._transformState.transforming) throw new ERR_TRANSFORM_ALREADY_TRANSFORMING(); return stream.push(null); } /***/ }), /***/ 38797: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. // A bit simpler than readable streams. // Implement an async ._write(chunk, encoding, cb), and it'll handle all // the drain event emission and buffering. module.exports = Writable; /* */ function WriteReq(chunk, encoding, cb) { this.chunk = chunk; this.encoding = encoding; this.callback = cb; this.next = null; } // It seems a linked list but it is not // there will be only 2 of these for each stream function CorkedRequest(state) { var _this = this; this.next = null; this.entry = null; this.finish = function () { onCorkedFinish(_this, state); }; } /* */ /**/ var Duplex; /**/ Writable.WritableState = WritableState; /**/ var internalUtil = { deprecate: __nccwpck_require__(24488) }; /**/ /**/ var Stream = __nccwpck_require__(63283); /**/ var Buffer = (__nccwpck_require__(20181).Buffer); var OurUint8Array = (typeof global !== 'undefined' ? global : typeof window !== 'undefined' ? window : typeof self !== 'undefined' ? 
self : {}).Uint8Array || function () {}; function _uint8ArrayToBuffer(chunk) { return Buffer.from(chunk); } function _isUint8Array(obj) { return Buffer.isBuffer(obj) || obj instanceof OurUint8Array; } var destroyImpl = __nccwpck_require__(65089); var _require = __nccwpck_require__(54874), getHighWaterMark = _require.getHighWaterMark; var _require$codes = (__nccwpck_require__(25500)/* .codes */ .F), ERR_INVALID_ARG_TYPE = _require$codes.ERR_INVALID_ARG_TYPE, ERR_METHOD_NOT_IMPLEMENTED = _require$codes.ERR_METHOD_NOT_IMPLEMENTED, ERR_MULTIPLE_CALLBACK = _require$codes.ERR_MULTIPLE_CALLBACK, ERR_STREAM_CANNOT_PIPE = _require$codes.ERR_STREAM_CANNOT_PIPE, ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED, ERR_STREAM_NULL_VALUES = _require$codes.ERR_STREAM_NULL_VALUES, ERR_STREAM_WRITE_AFTER_END = _require$codes.ERR_STREAM_WRITE_AFTER_END, ERR_UNKNOWN_ENCODING = _require$codes.ERR_UNKNOWN_ENCODING; var errorOrDestroy = destroyImpl.errorOrDestroy; __nccwpck_require__(39598)(Writable, Stream); function nop() {} function WritableState(options, stream, isDuplex) { Duplex = Duplex || __nccwpck_require__(82063); options = options || {}; // Duplex streams are both readable and writable, but share // the same options object. // However, some cases require setting options to different // values for the readable and the writable sides of the duplex stream, // e.g. options.readableObjectMode vs. options.writableObjectMode, etc. if (typeof isDuplex !== 'boolean') isDuplex = stream instanceof Duplex; // object stream flag to indicate whether or not this stream // contains buffers or objects. this.objectMode = !!options.objectMode; if (isDuplex) this.objectMode = this.objectMode || !!options.writableObjectMode; // the point at which write() starts returning false // Note: 0 is a valid value, means that we always return false if // the entire buffer is not flushed immediately on write() this.highWaterMark = getHighWaterMark(this, options, 'writableHighWaterMark', isDuplex); // if _final has been called this.finalCalled = false; // drain event flag. this.needDrain = false; // at the start of calling end() this.ending = false; // when end() has been called, and returned this.ended = false; // when 'finish' is emitted this.finished = false; // has it been destroyed this.destroyed = false; // should we decode strings into buffers before passing to _write? // this is here so that some node-core streams can optimize string // handling at a lower level. var noDecode = options.decodeStrings === false; this.decodeStrings = !noDecode; // Crypto is kind of old and crusty. Historically, its default string // encoding is 'binary' so we have to make this configurable. // Everything else in the universe uses 'utf8', though. this.defaultEncoding = options.defaultEncoding || 'utf8'; // not an actual buffer we keep track of, but a measurement // of how much we're waiting to get pushed to some underlying // socket or file. this.length = 0; // a flag to see when we're in the middle of a write. this.writing = false; // when true all writes will be buffered until .uncork() call this.corked = 0; // a flag to be able to tell if the onwrite cb is called immediately, // or on a later tick. We set this to true at first, because any // actions that shouldn't happen until "later" should generally also // not happen before the first write call. 
this.sync = true; // a flag to know if we're processing previously buffered items, which // may call the _write() callback in the same tick, so that we don't // end up in an overlapped onwrite situation. this.bufferProcessing = false; // the callback that's passed to _write(chunk,cb) this.onwrite = function (er) { onwrite(stream, er); }; // the callback that the user supplies to write(chunk,encoding,cb) this.writecb = null; // the amount that is being written when _write is called. this.writelen = 0; this.bufferedRequest = null; this.lastBufferedRequest = null; // number of pending user-supplied write callbacks // this must be 0 before 'finish' can be emitted this.pendingcb = 0; // emit prefinish if the only thing we're waiting for is _write cbs // This is relevant for synchronous Transform streams this.prefinished = false; // True if the error was already emitted and should not be thrown again this.errorEmitted = false; // Should close be emitted on destroy. Defaults to true. this.emitClose = options.emitClose !== false; // Should .destroy() be called after 'finish' (and potentially 'end') this.autoDestroy = !!options.autoDestroy; // count buffered requests this.bufferedRequestCount = 0; // allocate the first CorkedRequest, there is always // one allocated and free to use, and we maintain at most two this.corkedRequestsFree = new CorkedRequest(this); } WritableState.prototype.getBuffer = function getBuffer() { var current = this.bufferedRequest; var out = []; while (current) { out.push(current); current = current.next; } return out; }; (function () { try { Object.defineProperty(WritableState.prototype, 'buffer', { get: internalUtil.deprecate(function writableStateBufferGetter() { return this.getBuffer(); }, '_writableState.buffer is deprecated. Use _writableState.getBuffer ' + 'instead.', 'DEP0003') }); } catch (_) {} })(); // Test _writableState for inheritance to account for Duplex streams, // whose prototype chain only points to Readable. var realHasInstance; if (typeof Symbol === 'function' && Symbol.hasInstance && typeof Function.prototype[Symbol.hasInstance] === 'function') { realHasInstance = Function.prototype[Symbol.hasInstance]; Object.defineProperty(Writable, Symbol.hasInstance, { value: function value(object) { if (realHasInstance.call(this, object)) return true; if (this !== Writable) return false; return object && object._writableState instanceof WritableState; } }); } else { realHasInstance = function realHasInstance(object) { return object instanceof this; }; } function Writable(options) { Duplex = Duplex || __nccwpck_require__(82063); // Writable ctor is applied to Duplexes, too. // `realHasInstance` is necessary because using plain `instanceof` // would return false, as no `_writableState` property is attached. // Trying to use the custom `instanceof` for Writable here will also break the // Node.js LazyTransform implementation, which has a non-trivial getter for // `_writableState` that would lead to infinite recursion. // Checking for a Stream.Duplex instance is faster here instead of inside // the WritableState constructor, at least with V8 6.5 var isDuplex = this instanceof Duplex; if (!isDuplex && !realHasInstance.call(Writable, this)) return new Writable(options); this._writableState = new WritableState(options, this, isDuplex); // legacy. 
this.writable = true; if (options) { if (typeof options.write === 'function') this._write = options.write; if (typeof options.writev === 'function') this._writev = options.writev; if (typeof options.destroy === 'function') this._destroy = options.destroy; if (typeof options.final === 'function') this._final = options.final; } Stream.call(this); } // Otherwise people can pipe Writable streams, which is just wrong. Writable.prototype.pipe = function () { errorOrDestroy(this, new ERR_STREAM_CANNOT_PIPE()); }; function writeAfterEnd(stream, cb) { var er = new ERR_STREAM_WRITE_AFTER_END(); // TODO: defer error events consistently everywhere, not just the cb errorOrDestroy(stream, er); process.nextTick(cb, er); } // Checks that a user-supplied chunk is valid, especially for the particular // mode the stream is in. Currently this means that `null` is never accepted // and undefined/non-string values are only allowed in object mode. function validChunk(stream, state, chunk, cb) { var er; if (chunk === null) { er = new ERR_STREAM_NULL_VALUES(); } else if (typeof chunk !== 'string' && !state.objectMode) { er = new ERR_INVALID_ARG_TYPE('chunk', ['string', 'Buffer'], chunk); } if (er) { errorOrDestroy(stream, er); process.nextTick(cb, er); return false; } return true; } Writable.prototype.write = function (chunk, encoding, cb) { var state = this._writableState; var ret = false; var isBuf = !state.objectMode && _isUint8Array(chunk); if (isBuf && !Buffer.isBuffer(chunk)) { chunk = _uint8ArrayToBuffer(chunk); } if (typeof encoding === 'function') { cb = encoding; encoding = null; } if (isBuf) encoding = 'buffer';else if (!encoding) encoding = state.defaultEncoding; if (typeof cb !== 'function') cb = nop; if (state.ending) writeAfterEnd(this, cb);else if (isBuf || validChunk(this, state, chunk, cb)) { state.pendingcb++; ret = writeOrBuffer(this, state, isBuf, chunk, encoding, cb); } return ret; }; Writable.prototype.cork = function () { this._writableState.corked++; }; Writable.prototype.uncork = function () { var state = this._writableState; if (state.corked) { state.corked--; if (!state.writing && !state.corked && !state.bufferProcessing && state.bufferedRequest) clearBuffer(this, state); } }; Writable.prototype.setDefaultEncoding = function setDefaultEncoding(encoding) { // node::ParseEncoding() requires lower case. if (typeof encoding === 'string') encoding = encoding.toLowerCase(); if (!(['hex', 'utf8', 'utf-8', 'ascii', 'binary', 'base64', 'ucs2', 'ucs-2', 'utf16le', 'utf-16le', 'raw'].indexOf((encoding + '').toLowerCase()) > -1)) throw new ERR_UNKNOWN_ENCODING(encoding); this._writableState.defaultEncoding = encoding; return this; }; Object.defineProperty(Writable.prototype, 'writableBuffer', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { return this._writableState && this._writableState.getBuffer(); } }); function decodeChunk(state, chunk, encoding) { if (!state.objectMode && state.decodeStrings !== false && typeof chunk === 'string') { chunk = Buffer.from(chunk, encoding); } return chunk; } Object.defineProperty(Writable.prototype, 'writableHighWaterMark', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { return this._writableState.highWaterMark; } }); // if we're already writing something, then just put this // in the queue, and wait our turn. 
Otherwise, call _write // If we return false, then we need a drain event, so set that flag. function writeOrBuffer(stream, state, isBuf, chunk, encoding, cb) { if (!isBuf) { var newChunk = decodeChunk(state, chunk, encoding); if (chunk !== newChunk) { isBuf = true; encoding = 'buffer'; chunk = newChunk; } } var len = state.objectMode ? 1 : chunk.length; state.length += len; var ret = state.length < state.highWaterMark; // we must ensure that previous needDrain will not be reset to false. if (!ret) state.needDrain = true; if (state.writing || state.corked) { var last = state.lastBufferedRequest; state.lastBufferedRequest = { chunk: chunk, encoding: encoding, isBuf: isBuf, callback: cb, next: null }; if (last) { last.next = state.lastBufferedRequest; } else { state.bufferedRequest = state.lastBufferedRequest; } state.bufferedRequestCount += 1; } else { doWrite(stream, state, false, len, chunk, encoding, cb); } return ret; } function doWrite(stream, state, writev, len, chunk, encoding, cb) { state.writelen = len; state.writecb = cb; state.writing = true; state.sync = true; if (state.destroyed) state.onwrite(new ERR_STREAM_DESTROYED('write'));else if (writev) stream._writev(chunk, state.onwrite);else stream._write(chunk, encoding, state.onwrite); state.sync = false; } function onwriteError(stream, state, sync, er, cb) { --state.pendingcb; if (sync) { // defer the callback if we are being called synchronously // to avoid piling up things on the stack process.nextTick(cb, er); // this can emit finish, and it will always happen // after error process.nextTick(finishMaybe, stream, state); stream._writableState.errorEmitted = true; errorOrDestroy(stream, er); } else { // the caller expect this to happen before if // it is async cb(er); stream._writableState.errorEmitted = true; errorOrDestroy(stream, er); // this can emit finish, but finish must // always follow error finishMaybe(stream, state); } } function onwriteStateUpdate(state) { state.writing = false; state.writecb = null; state.length -= state.writelen; state.writelen = 0; } function onwrite(stream, er) { var state = stream._writableState; var sync = state.sync; var cb = state.writecb; if (typeof cb !== 'function') throw new ERR_MULTIPLE_CALLBACK(); onwriteStateUpdate(state); if (er) onwriteError(stream, state, sync, er, cb);else { // Check if we're actually ready to finish, but don't emit yet var finished = needFinish(state) || stream.destroyed; if (!finished && !state.corked && !state.bufferProcessing && state.bufferedRequest) { clearBuffer(stream, state); } if (sync) { process.nextTick(afterWrite, stream, state, finished, cb); } else { afterWrite(stream, state, finished, cb); } } } function afterWrite(stream, state, finished, cb) { if (!finished) onwriteDrain(stream, state); state.pendingcb--; cb(); finishMaybe(stream, state); } // Must force callback to be called on nextTick, so that we don't // emit 'drain' before the write() consumer gets the 'false' return // value, and has a chance to attach a 'drain' listener. 
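// Usage sketch (commented out; assumes the public Node.js `stream` API): the
// producer-side counterpart of the 'drain' contract described above — stop
// calling write() once it returns false, and continue only after 'drain' fires.
// `sink` and `writeMany` are illustrative names.
//
//   const { Writable } = require('stream');
//   const sink = new Writable({
//     highWaterMark: 4,                   // tiny buffer so write() returns false quickly
//     write(chunk, encoding, callback) {
//       setTimeout(callback, 10);         // simulate a slow destination
//     }
//   });
//   function writeMany(n) {
//     while (n-- > 0) {
//       if (!sink.write('x')) {                        // buffer is at/above highWaterMark
//         sink.once('drain', () => writeMany(n));      // resume after the buffer empties
//         return;
//       }
//     }
//     sink.end();
//   }
//   writeMany(100);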
function onwriteDrain(stream, state) { if (state.length === 0 && state.needDrain) { state.needDrain = false; stream.emit('drain'); } } // if there's something in the buffer waiting, then process it function clearBuffer(stream, state) { state.bufferProcessing = true; var entry = state.bufferedRequest; if (stream._writev && entry && entry.next) { // Fast case, write everything using _writev() var l = state.bufferedRequestCount; var buffer = new Array(l); var holder = state.corkedRequestsFree; holder.entry = entry; var count = 0; var allBuffers = true; while (entry) { buffer[count] = entry; if (!entry.isBuf) allBuffers = false; entry = entry.next; count += 1; } buffer.allBuffers = allBuffers; doWrite(stream, state, true, state.length, buffer, '', holder.finish); // doWrite is almost always async, defer these to save a bit of time // as the hot path ends with doWrite state.pendingcb++; state.lastBufferedRequest = null; if (holder.next) { state.corkedRequestsFree = holder.next; holder.next = null; } else { state.corkedRequestsFree = new CorkedRequest(state); } state.bufferedRequestCount = 0; } else { // Slow case, write chunks one-by-one while (entry) { var chunk = entry.chunk; var encoding = entry.encoding; var cb = entry.callback; var len = state.objectMode ? 1 : chunk.length; doWrite(stream, state, false, len, chunk, encoding, cb); entry = entry.next; state.bufferedRequestCount--; // if we didn't call the onwrite immediately, then // it means that we need to wait until it does. // also, that means that the chunk and cb are currently // being processed, so move the buffer counter past them. if (state.writing) { break; } } if (entry === null) state.lastBufferedRequest = null; } state.bufferedRequest = entry; state.bufferProcessing = false; } Writable.prototype._write = function (chunk, encoding, cb) { cb(new ERR_METHOD_NOT_IMPLEMENTED('_write()')); }; Writable.prototype._writev = null; Writable.prototype.end = function (chunk, encoding, cb) { var state = this._writableState; if (typeof chunk === 'function') { cb = chunk; chunk = null; encoding = null; } else if (typeof encoding === 'function') { cb = encoding; encoding = null; } if (chunk !== null && chunk !== undefined) this.write(chunk, encoding); // .end() fully uncorks if (state.corked) { state.corked = 1; this.uncork(); } // ignore unnecessary end() calls. 
if (!state.ending) endWritable(this, state, cb); return this; }; Object.defineProperty(Writable.prototype, 'writableLength', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { return this._writableState.length; } }); function needFinish(state) { return state.ending && state.length === 0 && state.bufferedRequest === null && !state.finished && !state.writing; } function callFinal(stream, state) { stream._final(function (err) { state.pendingcb--; if (err) { errorOrDestroy(stream, err); } state.prefinished = true; stream.emit('prefinish'); finishMaybe(stream, state); }); } function prefinish(stream, state) { if (!state.prefinished && !state.finalCalled) { if (typeof stream._final === 'function' && !state.destroyed) { state.pendingcb++; state.finalCalled = true; process.nextTick(callFinal, stream, state); } else { state.prefinished = true; stream.emit('prefinish'); } } } function finishMaybe(stream, state) { var need = needFinish(state); if (need) { prefinish(stream, state); if (state.pendingcb === 0) { state.finished = true; stream.emit('finish'); if (state.autoDestroy) { // In case of duplex streams we need a way to detect // if the readable side is ready for autoDestroy as well var rState = stream._readableState; if (!rState || rState.autoDestroy && rState.endEmitted) { stream.destroy(); } } } } return need; } function endWritable(stream, state, cb) { state.ending = true; finishMaybe(stream, state); if (cb) { if (state.finished) process.nextTick(cb);else stream.once('finish', cb); } state.ended = true; stream.writable = false; } function onCorkedFinish(corkReq, state, err) { var entry = corkReq.entry; corkReq.entry = null; while (entry) { var cb = entry.callback; state.pendingcb--; cb(err); entry = entry.next; } // reuse the free corkReq. state.corkedRequestsFree.next = corkReq; } Object.defineProperty(Writable.prototype, 'destroyed', { // making it explicit this property is not enumerable // because otherwise some prototype manipulation in // userland will fail enumerable: false, get: function get() { if (this._writableState === undefined) { return false; } return this._writableState.destroyed; }, set: function set(value) { // we ignore the value if the stream // has not been initialized yet if (!this._writableState) { return; } // backward compatibility, the user is explicitly // managing destroyed this._writableState.destroyed = value; } }); Writable.prototype.destroy = destroyImpl.destroy; Writable.prototype._undestroy = destroyImpl.undestroy; Writable.prototype._destroy = function (err, cb) { cb(err); }; /***/ }), /***/ 14868: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var _Object$setPrototypeO; function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } var finished = __nccwpck_require__(36815); var kLastResolve = Symbol('lastResolve'); var kLastReject = Symbol('lastReject'); var kError = Symbol('error'); var kEnded = Symbol('ended'); var kLastPromise = Symbol('lastPromise'); var kHandlePromise = Symbol('handlePromise'); var kStream = Symbol('stream'); function createIterResult(value, done) { return { value: value, done: done }; } function readAndResolve(iter) { var resolve = iter[kLastResolve]; if (resolve !== null) { var data = iter[kStream].read(); // we defer if data is null // we can be expecting either 'end' or // 'error' if (data !== null) { iter[kLastPromise] = null; iter[kLastResolve] = null; iter[kLastReject] = null; resolve(createIterResult(data, false)); } } } function onReadable(iter) { // we wait for the next tick, because it might // emit an error with process.nextTick process.nextTick(readAndResolve, iter); } function wrapForNext(lastPromise, iter) { return function (resolve, reject) { lastPromise.then(function () { if (iter[kEnded]) { resolve(createIterResult(undefined, true)); return; } iter[kHandlePromise](resolve, reject); }, reject); }; } var AsyncIteratorPrototype = Object.getPrototypeOf(function () {}); var ReadableStreamAsyncIteratorPrototype = Object.setPrototypeOf((_Object$setPrototypeO = { get stream() { return this[kStream]; }, next: function next() { var _this = this; // if we have detected an error in the meanwhile // reject straight away var error = this[kError]; if (error !== null) { return Promise.reject(error); } if (this[kEnded]) { return Promise.resolve(createIterResult(undefined, true)); } if (this[kStream].destroyed) { // We need to defer via nextTick because if .destroy(err) is // called, the error will be emitted via nextTick, and // we cannot guarantee that there is no error lingering around // waiting to be emitted. 
return new Promise(function (resolve, reject) { process.nextTick(function () { if (_this[kError]) { reject(_this[kError]); } else { resolve(createIterResult(undefined, true)); } }); }); } // if we have multiple next() calls // we will wait for the previous Promise to finish // this logic is optimized to support for await loops, // where next() is only called once at a time var lastPromise = this[kLastPromise]; var promise; if (lastPromise) { promise = new Promise(wrapForNext(lastPromise, this)); } else { // fast path needed to support multiple this.push() // without triggering the next() queue var data = this[kStream].read(); if (data !== null) { return Promise.resolve(createIterResult(data, false)); } promise = new Promise(this[kHandlePromise]); } this[kLastPromise] = promise; return promise; } }, _defineProperty(_Object$setPrototypeO, Symbol.asyncIterator, function () { return this; }), _defineProperty(_Object$setPrototypeO, "return", function _return() { var _this2 = this; // destroy(err, cb) is a private API // we can guarantee we have that here, because we control the // Readable class this is attached to return new Promise(function (resolve, reject) { _this2[kStream].destroy(null, function (err) { if (err) { reject(err); return; } resolve(createIterResult(undefined, true)); }); }); }), _Object$setPrototypeO), AsyncIteratorPrototype); var createReadableStreamAsyncIterator = function createReadableStreamAsyncIterator(stream) { var _Object$create; var iterator = Object.create(ReadableStreamAsyncIteratorPrototype, (_Object$create = {}, _defineProperty(_Object$create, kStream, { value: stream, writable: true }), _defineProperty(_Object$create, kLastResolve, { value: null, writable: true }), _defineProperty(_Object$create, kLastReject, { value: null, writable: true }), _defineProperty(_Object$create, kError, { value: null, writable: true }), _defineProperty(_Object$create, kEnded, { value: stream._readableState.endEmitted, writable: true }), _defineProperty(_Object$create, kHandlePromise, { value: function value(resolve, reject) { var data = iterator[kStream].read(); if (data) { iterator[kLastPromise] = null; iterator[kLastResolve] = null; iterator[kLastReject] = null; resolve(createIterResult(data, false)); } else { iterator[kLastResolve] = resolve; iterator[kLastReject] = reject; } }, writable: true }), _Object$create)); iterator[kLastPromise] = null; finished(stream, function (err) { if (err && err.code !== 'ERR_STREAM_PREMATURE_CLOSE') { var reject = iterator[kLastReject]; // reject if we are waiting for data in the Promise // returned by next() and store the error if (reject !== null) { iterator[kLastPromise] = null; iterator[kLastResolve] = null; iterator[kLastReject] = null; reject(err); } iterator[kError] = err; return; } var resolve = iterator[kLastResolve]; if (resolve !== null) { iterator[kLastPromise] = null; iterator[kLastResolve] = null; iterator[kLastReject] = null; resolve(createIterResult(undefined, true)); } iterator[kEnded] = true; }); stream.on('readable', onReadable.bind(null, iterator)); return iterator; }; module.exports = createReadableStreamAsyncIterator; /***/ }), /***/ 77336: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; function ownKeys(object, enumerableOnly) { var keys = Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), 
keys.push.apply(keys, symbols); } return keys; } function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } function _classCallCheck(instance, Constructor) { if (!(instance instanceof Constructor)) { throw new TypeError("Cannot call a class as a function"); } } function _defineProperties(target, props) { for (var i = 0; i < props.length; i++) { var descriptor = props[i]; descriptor.enumerable = descriptor.enumerable || false; descriptor.configurable = true; if ("value" in descriptor) descriptor.writable = true; Object.defineProperty(target, _toPropertyKey(descriptor.key), descriptor); } } function _createClass(Constructor, protoProps, staticProps) { if (protoProps) _defineProperties(Constructor.prototype, protoProps); if (staticProps) _defineProperties(Constructor, staticProps); Object.defineProperty(Constructor, "prototype", { writable: false }); return Constructor; } function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? 
String : Number)(input); } var _require = __nccwpck_require__(20181), Buffer = _require.Buffer; var _require2 = __nccwpck_require__(39023), inspect = _require2.inspect; var custom = inspect && inspect.custom || 'inspect'; function copyBuffer(src, target, offset) { Buffer.prototype.copy.call(src, target, offset); } module.exports = /*#__PURE__*/function () { function BufferList() { _classCallCheck(this, BufferList); this.head = null; this.tail = null; this.length = 0; } _createClass(BufferList, [{ key: "push", value: function push(v) { var entry = { data: v, next: null }; if (this.length > 0) this.tail.next = entry;else this.head = entry; this.tail = entry; ++this.length; } }, { key: "unshift", value: function unshift(v) { var entry = { data: v, next: this.head }; if (this.length === 0) this.tail = entry; this.head = entry; ++this.length; } }, { key: "shift", value: function shift() { if (this.length === 0) return; var ret = this.head.data; if (this.length === 1) this.head = this.tail = null;else this.head = this.head.next; --this.length; return ret; } }, { key: "clear", value: function clear() { this.head = this.tail = null; this.length = 0; } }, { key: "join", value: function join(s) { if (this.length === 0) return ''; var p = this.head; var ret = '' + p.data; while (p = p.next) ret += s + p.data; return ret; } }, { key: "concat", value: function concat(n) { if (this.length === 0) return Buffer.alloc(0); var ret = Buffer.allocUnsafe(n >>> 0); var p = this.head; var i = 0; while (p) { copyBuffer(p.data, ret, i); i += p.data.length; p = p.next; } return ret; } // Consumes a specified amount of bytes or characters from the buffered data. }, { key: "consume", value: function consume(n, hasStrings) { var ret; if (n < this.head.data.length) { // `slice` is the same for buffers and strings. ret = this.head.data.slice(0, n); this.head.data = this.head.data.slice(n); } else if (n === this.head.data.length) { // First chunk is a perfect match. ret = this.shift(); } else { // Result spans more than one buffer. ret = hasStrings ? this._getString(n) : this._getBuffer(n); } return ret; } }, { key: "first", value: function first() { return this.head.data; } // Consumes a specified amount of characters from the buffered data. }, { key: "_getString", value: function _getString(n) { var p = this.head; var c = 1; var ret = p.data; n -= ret.length; while (p = p.next) { var str = p.data; var nb = n > str.length ? str.length : n; if (nb === str.length) ret += str;else ret += str.slice(0, n); n -= nb; if (n === 0) { if (nb === str.length) { ++c; if (p.next) this.head = p.next;else this.head = this.tail = null; } else { this.head = p; p.data = str.slice(nb); } break; } ++c; } this.length -= c; return ret; } // Consumes a specified amount of bytes from the buffered data. }, { key: "_getBuffer", value: function _getBuffer(n) { var ret = Buffer.allocUnsafe(n); var p = this.head; var c = 1; p.data.copy(ret); n -= p.data.length; while (p = p.next) { var buf = p.data; var nb = n > buf.length ? buf.length : n; buf.copy(ret, ret.length - n, 0, nb); n -= nb; if (n === 0) { if (nb === buf.length) { ++c; if (p.next) this.head = p.next;else this.head = this.tail = null; } else { this.head = p; p.data = buf.slice(nb); } break; } ++c; } this.length -= c; return ret; } // Make sure the linked list only shows the minimal necessary information. }, { key: custom, value: function value(_, options) { return inspect(this, _objectSpread(_objectSpread({}, options), {}, { // Only inspect one level. depth: 0, // It should not recurse. 
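/*
 * Illustrative sketch (comment only): BufferList above is the singly linked
 * list readable-stream uses to queue pending chunks. Roughly:
 *
 *   const bl = new BufferList();
 *   bl.push(Buffer.from('ab'));
 *   bl.push(Buffer.from('cd'));
 *   bl.length;            // 2 queued entries
 *   bl.concat(4);         // <Buffer 61 62 63 64> - copies all chunks into one buffer
 *   bl.consume(3, false); // takes 3 bytes, spanning chunks via _getBuffer()
 *   bl.first();           // whatever chunk remains at the head afterwards
 */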
customInspect: false })); } }]); return BufferList; }(); /***/ }), /***/ 65089: /***/ ((module) => { "use strict"; // undocumented cb() API, needed for core, not for public API function destroy(err, cb) { var _this = this; var readableDestroyed = this._readableState && this._readableState.destroyed; var writableDestroyed = this._writableState && this._writableState.destroyed; if (readableDestroyed || writableDestroyed) { if (cb) { cb(err); } else if (err) { if (!this._writableState) { process.nextTick(emitErrorNT, this, err); } else if (!this._writableState.errorEmitted) { this._writableState.errorEmitted = true; process.nextTick(emitErrorNT, this, err); } } return this; } // we set destroyed to true before firing error callbacks in order // to make it re-entrance safe in case destroy() is called within callbacks if (this._readableState) { this._readableState.destroyed = true; } // if this is a duplex stream mark the writable part as destroyed as well if (this._writableState) { this._writableState.destroyed = true; } this._destroy(err || null, function (err) { if (!cb && err) { if (!_this._writableState) { process.nextTick(emitErrorAndCloseNT, _this, err); } else if (!_this._writableState.errorEmitted) { _this._writableState.errorEmitted = true; process.nextTick(emitErrorAndCloseNT, _this, err); } else { process.nextTick(emitCloseNT, _this); } } else if (cb) { process.nextTick(emitCloseNT, _this); cb(err); } else { process.nextTick(emitCloseNT, _this); } }); return this; } function emitErrorAndCloseNT(self, err) { emitErrorNT(self, err); emitCloseNT(self); } function emitCloseNT(self) { if (self._writableState && !self._writableState.emitClose) return; if (self._readableState && !self._readableState.emitClose) return; self.emit('close'); } function undestroy() { if (this._readableState) { this._readableState.destroyed = false; this._readableState.reading = false; this._readableState.ended = false; this._readableState.endEmitted = false; } if (this._writableState) { this._writableState.destroyed = false; this._writableState.ended = false; this._writableState.ending = false; this._writableState.finalCalled = false; this._writableState.prefinished = false; this._writableState.finished = false; this._writableState.errorEmitted = false; } } function emitErrorNT(self, err) { self.emit('error', err); } function errorOrDestroy(stream, err) { // We have tests that rely on errors being emitted // in the same tick, so changing this is semver major. // For now when you opt-in to autoDestroy we allow // the error to be emitted nextTick. In a future // semver major update we should change the default to this. var rState = stream._readableState; var wState = stream._writableState; if (rState && rState.autoDestroy || wState && wState.autoDestroy) stream.destroy(err);else stream.emit('error', err); } module.exports = { destroy: destroy, undestroy: undestroy, errorOrDestroy: errorOrDestroy }; /***/ }), /***/ 36815: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Ported from https://github.com/mafintosh/end-of-stream with // permission from the author, Mathias Buus (@mafintosh). 
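// Illustrative usage sketch for the `eos` helper defined in this module: it
// calls back exactly once when a stream ends, errors, or closes prematurely,
// and returns a function that detaches the listeners it added. Kept as an
// unused function so bundle behaviour is unchanged; `someReadable` is a
// hypothetical stream supplied by the caller.
var __eosUsageSketch = function (eos, someReadable) {
  var detach = eos(someReadable, { readable: true, writable: false }, function (err) {
    if (err) {
      // e.g. ERR_STREAM_PREMATURE_CLOSE when 'close' arrives before 'end'
      console.error('stream ended badly:', err);
    } else {
      console.log('stream ended cleanly');
    }
    detach(); // remove the listeners eos attached
  });
};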
var ERR_STREAM_PREMATURE_CLOSE = (__nccwpck_require__(25500)/* .codes */ .F).ERR_STREAM_PREMATURE_CLOSE; function once(callback) { var called = false; return function () { if (called) return; called = true; for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) { args[_key] = arguments[_key]; } callback.apply(this, args); }; } function noop() {} function isRequest(stream) { return stream.setHeader && typeof stream.abort === 'function'; } function eos(stream, opts, callback) { if (typeof opts === 'function') return eos(stream, null, opts); if (!opts) opts = {}; callback = once(callback || noop); var readable = opts.readable || opts.readable !== false && stream.readable; var writable = opts.writable || opts.writable !== false && stream.writable; var onlegacyfinish = function onlegacyfinish() { if (!stream.writable) onfinish(); }; var writableEnded = stream._writableState && stream._writableState.finished; var onfinish = function onfinish() { writable = false; writableEnded = true; if (!readable) callback.call(stream); }; var readableEnded = stream._readableState && stream._readableState.endEmitted; var onend = function onend() { readable = false; readableEnded = true; if (!writable) callback.call(stream); }; var onerror = function onerror(err) { callback.call(stream, err); }; var onclose = function onclose() { var err; if (readable && !readableEnded) { if (!stream._readableState || !stream._readableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); return callback.call(stream, err); } if (writable && !writableEnded) { if (!stream._writableState || !stream._writableState.ended) err = new ERR_STREAM_PREMATURE_CLOSE(); return callback.call(stream, err); } }; var onrequest = function onrequest() { stream.req.on('finish', onfinish); }; if (isRequest(stream)) { stream.on('complete', onfinish); stream.on('abort', onclose); if (stream.req) onrequest();else stream.on('request', onrequest); } else if (writable && !stream._writableState) { // legacy streams stream.on('end', onlegacyfinish); stream.on('close', onlegacyfinish); } stream.on('end', onend); stream.on('finish', onfinish); if (opts.error !== false) stream.on('error', onerror); stream.on('close', onclose); return function () { stream.removeListener('complete', onfinish); stream.removeListener('abort', onclose); stream.removeListener('request', onrequest); if (stream.req) stream.req.removeListener('finish', onfinish); stream.removeListener('end', onlegacyfinish); stream.removeListener('close', onlegacyfinish); stream.removeListener('finish', onfinish); stream.removeListener('end', onend); stream.removeListener('error', onerror); stream.removeListener('close', onclose); }; } module.exports = eos; /***/ }), /***/ 4659: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; function asyncGeneratorStep(gen, resolve, reject, _next, _throw, key, arg) { try { var info = gen[key](arg); var value = info.value; } catch (error) { reject(error); return; } if (info.done) { resolve(value); } else { Promise.resolve(value).then(_next, _throw); } } function _asyncToGenerator(fn) { return function () { var self = this, args = arguments; return new Promise(function (resolve, reject) { var gen = fn.apply(self, args); function _next(value) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "next", value); } function _throw(err) { asyncGeneratorStep(gen, resolve, reject, _next, _throw, "throw", err); } _next(undefined); }); }; } function ownKeys(object, enumerableOnly) { var keys = 
Object.keys(object); if (Object.getOwnPropertySymbols) { var symbols = Object.getOwnPropertySymbols(object); enumerableOnly && (symbols = symbols.filter(function (sym) { return Object.getOwnPropertyDescriptor(object, sym).enumerable; })), keys.push.apply(keys, symbols); } return keys; } function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = null != arguments[i] ? arguments[i] : {}; i % 2 ? ownKeys(Object(source), !0).forEach(function (key) { _defineProperty(target, key, source[key]); }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) { Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); }); } return target; } function _defineProperty(obj, key, value) { key = _toPropertyKey(key); if (key in obj) { Object.defineProperty(obj, key, { value: value, enumerable: true, configurable: true, writable: true }); } else { obj[key] = value; } return obj; } function _toPropertyKey(arg) { var key = _toPrimitive(arg, "string"); return typeof key === "symbol" ? key : String(key); } function _toPrimitive(input, hint) { if (typeof input !== "object" || input === null) return input; var prim = input[Symbol.toPrimitive]; if (prim !== undefined) { var res = prim.call(input, hint || "default"); if (typeof res !== "object") return res; throw new TypeError("@@toPrimitive must return a primitive value."); } return (hint === "string" ? String : Number)(input); } var ERR_INVALID_ARG_TYPE = (__nccwpck_require__(25500)/* .codes */ .F).ERR_INVALID_ARG_TYPE; function from(Readable, iterable, opts) { var iterator; if (iterable && typeof iterable.next === 'function') { iterator = iterable; } else if (iterable && iterable[Symbol.asyncIterator]) iterator = iterable[Symbol.asyncIterator]();else if (iterable && iterable[Symbol.iterator]) iterator = iterable[Symbol.iterator]();else throw new ERR_INVALID_ARG_TYPE('iterable', ['Iterable'], iterable); var readable = new Readable(_objectSpread({ objectMode: true }, opts)); // Reading boolean to protect against _read // being called before last iteration completion. var reading = false; readable._read = function () { if (!reading) { reading = true; next(); } }; function next() { return _next2.apply(this, arguments); } function _next2() { _next2 = _asyncToGenerator(function* () { try { var _yield$iterator$next = yield iterator.next(), value = _yield$iterator$next.value, done = _yield$iterator$next.done; if (done) { readable.push(null); } else if (readable.push(yield value)) { next(); } else { reading = false; } } catch (err) { readable.destroy(err); } }); return _next2.apply(this, arguments); } return readable; } module.exports = from; /***/ }), /***/ 16701: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Ported from https://github.com/mafintosh/pump with // permission from the author, Mathias Buus (@mafintosh). 
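// Illustrative usage sketch for the `pipeline` helper defined in this module:
// it pipes the given streams together, destroys all of them if any stream
// errors, and reports the first error to the trailing callback. Kept as an
// unused function so bundle behaviour is unchanged; `src`, `transform` and
// `dest` are hypothetical streams supplied by the caller.
var __pipelineUsageSketch = function (pipeline, src, transform, dest) {
  pipeline(src, transform, dest, function (err) {
    if (err) console.error('pipeline failed:', err);
    else console.log('pipeline finished');
  });
};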
var eos; function once(callback) { var called = false; return function () { if (called) return; called = true; callback.apply(void 0, arguments); }; } var _require$codes = (__nccwpck_require__(25500)/* .codes */ .F), ERR_MISSING_ARGS = _require$codes.ERR_MISSING_ARGS, ERR_STREAM_DESTROYED = _require$codes.ERR_STREAM_DESTROYED; function noop(err) { // Rethrow the error if it exists to avoid swallowing it if (err) throw err; } function isRequest(stream) { return stream.setHeader && typeof stream.abort === 'function'; } function destroyer(stream, reading, writing, callback) { callback = once(callback); var closed = false; stream.on('close', function () { closed = true; }); if (eos === undefined) eos = __nccwpck_require__(36815); eos(stream, { readable: reading, writable: writing }, function (err) { if (err) return callback(err); closed = true; callback(); }); var destroyed = false; return function (err) { if (closed) return; if (destroyed) return; destroyed = true; // request.destroy just do .end - .abort is what we want if (isRequest(stream)) return stream.abort(); if (typeof stream.destroy === 'function') return stream.destroy(); callback(err || new ERR_STREAM_DESTROYED('pipe')); }; } function call(fn) { fn(); } function pipe(from, to) { return from.pipe(to); } function popCallback(streams) { if (!streams.length) return noop; if (typeof streams[streams.length - 1] !== 'function') return noop; return streams.pop(); } function pipeline() { for (var _len = arguments.length, streams = new Array(_len), _key = 0; _key < _len; _key++) { streams[_key] = arguments[_key]; } var callback = popCallback(streams); if (Array.isArray(streams[0])) streams = streams[0]; if (streams.length < 2) { throw new ERR_MISSING_ARGS('streams'); } var error; var destroys = streams.map(function (stream, i) { var reading = i < streams.length - 1; var writing = i > 0; return destroyer(stream, reading, writing, function (err) { if (!error) error = err; if (err) destroys.forEach(call); if (reading) return; destroys.forEach(call); callback(error); }); }); return streams.reduce(pipe); } module.exports = pipeline; /***/ }), /***/ 54874: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var ERR_INVALID_OPT_VALUE = (__nccwpck_require__(25500)/* .codes */ .F).ERR_INVALID_OPT_VALUE; function highWaterMarkFrom(options, isDuplex, duplexKey) { return options.highWaterMark != null ? options.highWaterMark : isDuplex ? options[duplexKey] : null; } function getHighWaterMark(state, options, duplexKey, isDuplex) { var hwm = highWaterMarkFrom(options, isDuplex, duplexKey); if (hwm != null) { if (!(isFinite(hwm) && Math.floor(hwm) === hwm) || hwm < 0) { var name = isDuplex ? duplexKey : 'highWaterMark'; throw new ERR_INVALID_OPT_VALUE(name, hwm); } return Math.floor(hwm); } // Default value return state.objectMode ? 
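/*
 * Illustrative sketch (comment only): getHighWaterMark in this module resolves
 * the buffering limit for a stream.
 *
 *   getHighWaterMark(state, { highWaterMark: 1024 }, 'readableHighWaterMark', false)
 *     // -> 1024 (an explicit value wins, but must be a non-negative integer)
 *   getHighWaterMark(state, { highWaterMark: -1 }, 'readableHighWaterMark', false)
 *     // -> throws ERR_INVALID_OPT_VALUE('highWaterMark', -1)
 *   getHighWaterMark(state, {}, 'readableHighWaterMark', false)
 *     // -> falls through to the default returned just below:
 *     //    16 objects in objectMode, otherwise 16 * 1024 bytes
 */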
16 : 16 * 1024; } module.exports = { getHighWaterMark: getHighWaterMark }; /***/ }), /***/ 63283: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = __nccwpck_require__(2203); /***/ }), /***/ 86131: /***/ ((module, exports, __nccwpck_require__) => { var Stream = __nccwpck_require__(2203); if (process.env.READABLE_STREAM === 'disable' && Stream) { module.exports = Stream.Readable; Object.assign(module.exports, Stream); module.exports.Stream = Stream; } else { exports = module.exports = __nccwpck_require__(86893); exports.Stream = Stream || exports; exports.Readable = exports; exports.Writable = __nccwpck_require__(38797); exports.Duplex = __nccwpck_require__(82063); exports.Transform = __nccwpck_require__(12337); exports.PassThrough = __nccwpck_require__(35283); exports.finished = __nccwpck_require__(36815); exports.pipeline = __nccwpck_require__(16701); } /***/ }), /***/ 67842: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const {PassThrough} = __nccwpck_require__(2203); const extend = __nccwpck_require__(23860); let debug = () => {}; if ( typeof process !== 'undefined' && 'env' in process && typeof process.env === 'object' && process.env.DEBUG === 'retry-request' ) { debug = message => { console.log('retry-request:', message); }; } const DEFAULTS = { objectMode: false, retries: 2, /* The maximum time to delay in seconds. If retryDelayMultiplier results in a delay greater than maxRetryDelay, retries should delay by maxRetryDelay seconds instead. */ maxRetryDelay: 64, /* The multiplier by which to increase the delay time between the completion of failed requests, and the initiation of the subsequent retrying request. */ retryDelayMultiplier: 2, /* The length of time to keep retrying in seconds. The last sleep period will be shortened as necessary, so that the last retry runs at deadline (and not considerably beyond it). The total time starting from when the initial request is sent, after which an error will be returned, regardless of the retrying attempts made meanwhile. */ totalTimeout: 600, noResponseRetries: 2, currentRetryAttempt: 0, shouldRetryFn: function (response) { const retryRanges = [ // https://en.wikipedia.org/wiki/List_of_HTTP_status_codes // 1xx - Retry (Informational, request still processing) // 2xx - Do not retry (Success) // 3xx - Do not retry (Redirect) // 4xx - Do not retry (Client errors) // 429 - Retry ("Too Many Requests") // 5xx - Retry (Server errors) [100, 199], [429, 429], [500, 599], ]; const statusCode = response.statusCode; debug(`Response status: ${statusCode}`); let range; while ((range = retryRanges.shift())) { if (statusCode >= range[0] && statusCode <= range[1]) { // Not a successful status or redirect. 
return true; } } }, }; function retryRequest(requestOpts, opts, callback) { if (typeof requestOpts === 'string') { requestOpts = {url: requestOpts}; } const streamMode = typeof arguments[arguments.length - 1] !== 'function'; if (typeof opts === 'function') { callback = opts; } const manualCurrentRetryAttemptWasSet = opts && typeof opts.currentRetryAttempt === 'number'; opts = extend({}, DEFAULTS, opts); if (typeof opts.request === 'undefined') { throw new Error('A request library must be provided to retry-request.'); } let currentRetryAttempt = opts.currentRetryAttempt; let numNoResponseAttempts = 0; let streamResponseHandled = false; let retryStream; let requestStream; let delayStream; let activeRequest; const retryRequest = { abort: function () { if (activeRequest && activeRequest.abort) { activeRequest.abort(); } }, }; if (streamMode) { retryStream = new PassThrough({objectMode: opts.objectMode}); retryStream.abort = resetStreams; } const timeOfFirstRequest = Date.now(); if (currentRetryAttempt > 0) { retryAfterDelay(currentRetryAttempt); } else { makeRequest(); } if (streamMode) { return retryStream; } else { return retryRequest; } function resetStreams() { delayStream = null; if (requestStream) { requestStream.abort && requestStream.abort(); requestStream.cancel && requestStream.cancel(); if (requestStream.destroy) { requestStream.destroy(); } else if (requestStream.end) { requestStream.end(); } } } function makeRequest() { let finishHandled = false; currentRetryAttempt++; debug(`Current retry attempt: ${currentRetryAttempt}`); function handleFinish(args = []) { if (!finishHandled) { finishHandled = true; retryStream.emit('complete', ...args); } } if (streamMode) { streamResponseHandled = false; delayStream = new PassThrough({objectMode: opts.objectMode}); requestStream = opts.request(requestOpts); setImmediate(() => { retryStream.emit('request'); }); requestStream // gRPC via google-cloud-node can emit an `error` as well as a `response` // Whichever it emits, we run with-- we can't run with both. That's what // is up with the `streamResponseHandled` tracking. .on('error', err => { if (streamResponseHandled) { return; } streamResponseHandled = true; onResponse(err); }) .on('response', (resp, body) => { if (streamResponseHandled) { return; } streamResponseHandled = true; onResponse(null, resp, body); }) .on('complete', (...params) => handleFinish(params)) .on('finish', (...params) => handleFinish(params)); requestStream.pipe(delayStream); } else { activeRequest = opts.request(requestOpts, onResponse); } } function retryAfterDelay(currentRetryAttempt) { if (streamMode) { resetStreams(); } const nextRetryDelay = getNextRetryDelay({ maxRetryDelay: opts.maxRetryDelay, retryDelayMultiplier: opts.retryDelayMultiplier, retryNumber: currentRetryAttempt, timeOfFirstRequest, totalTimeout: opts.totalTimeout, }); debug(`Next retry delay: ${nextRetryDelay}`); if (nextRetryDelay <= 0) { numNoResponseAttempts = opts.noResponseRetries + 1; return; } setTimeout(makeRequest, nextRetryDelay); } function onResponse(err, response, body) { // An error such as DNS resolution. if (err) { numNoResponseAttempts++; if (numNoResponseAttempts <= opts.noResponseRetries) { retryAfterDelay(numNoResponseAttempts); } else { if (streamMode) { retryStream.emit('error', err); retryStream.end(); } else { callback(err, response, body); } } return; } // Send the response to see if we should try again. // NOTE: "currentRetryAttempt" isn't accurate by default, as it counts // the very first request sent as the first "retry". 
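/*
 * Worked example (comment only): with the DEFAULTS above
 * (retryDelayMultiplier: 2, maxRetryDelay: 64, totalTimeout: 600),
 * getNextRetryDelay() at the bottom of this module produces roughly
 *
 *   retry #1: 2^1 * 1000 + jitter  ~ 2-3 s
 *   retry #2: 2^2 * 1000 + jitter  ~ 4-5 s
 *   retry #3: 2^3 * 1000 + jitter  ~ 8-9 s
 *
 * where jitter is a random 0-1000 ms, and the result is additionally capped
 * by maxRetryDelay (64 s) and by the time remaining before totalTimeout
 * (600 s counted from the first request). A non-positive result means the
 * request is not retried again.
 */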
It is only accurate // when a user provides their own "currentRetryAttempt" option at // instantiation. const adjustedCurrentRetryAttempt = manualCurrentRetryAttemptWasSet ? currentRetryAttempt : currentRetryAttempt - 1; if ( adjustedCurrentRetryAttempt < opts.retries && opts.shouldRetryFn(response) ) { retryAfterDelay(currentRetryAttempt); return; } // No more attempts need to be made, just continue on. if (streamMode) { retryStream.emit('response', response); delayStream.pipe(retryStream); requestStream.on('error', err => { retryStream.destroy(err); }); } else { callback(err, response, body); } } } module.exports = retryRequest; function getNextRetryDelay(config) { const { maxRetryDelay, retryDelayMultiplier, retryNumber, timeOfFirstRequest, totalTimeout, } = config; const maxRetryDelayMs = maxRetryDelay * 1000; const totalTimeoutMs = totalTimeout * 1000; const jitter = Math.floor(Math.random() * 1000); const calculatedNextRetryDelay = Math.pow(retryDelayMultiplier, retryNumber) * 1000 + jitter; const maxAllowableDelayMs = totalTimeoutMs - (Date.now() - timeOfFirstRequest); return Math.min( calculatedNextRetryDelay, maxAllowableDelayMs, maxRetryDelayMs ); } module.exports.defaults = DEFAULTS; module.exports.getNextRetryDelay = getNextRetryDelay; /***/ }), /***/ 5546: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = __nccwpck_require__(67084); /***/ }), /***/ 67084: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { var RetryOperation = __nccwpck_require__(39538); exports.operation = function(options) { var timeouts = exports.timeouts(options); return new RetryOperation(timeouts, { forever: options && (options.forever || options.retries === Infinity), unref: options && options.unref, maxRetryTime: options && options.maxRetryTime }); }; exports.timeouts = function(options) { if (options instanceof Array) { return [].concat(options); } var opts = { retries: 10, factor: 2, minTimeout: 1 * 1000, maxTimeout: Infinity, randomize: false }; for (var key in options) { opts[key] = options[key]; } if (opts.minTimeout > opts.maxTimeout) { throw new Error('minTimeout is greater than maxTimeout'); } var timeouts = []; for (var i = 0; i < opts.retries; i++) { timeouts.push(this.createTimeout(i, opts)); } if (options && options.forever && !timeouts.length) { timeouts.push(this.createTimeout(i, opts)); } // sort the array numerically ascending timeouts.sort(function(a,b) { return a - b; }); return timeouts; }; exports.createTimeout = function(attempt, opts) { var random = (opts.randomize) ? 
(Math.random() + 1) : 1; var timeout = Math.round(random * Math.max(opts.minTimeout, 1) * Math.pow(opts.factor, attempt)); timeout = Math.min(timeout, opts.maxTimeout); return timeout; }; exports.wrap = function(obj, options, methods) { if (options instanceof Array) { methods = options; options = null; } if (!methods) { methods = []; for (var key in obj) { if (typeof obj[key] === 'function') { methods.push(key); } } } for (var i = 0; i < methods.length; i++) { var method = methods[i]; var original = obj[method]; obj[method] = function retryWrapper(original) { var op = exports.operation(options); var args = Array.prototype.slice.call(arguments, 1); var callback = args.pop(); args.push(function(err) { if (op.retry(err)) { return; } if (err) { arguments[0] = op.mainError(); } callback.apply(this, arguments); }); op.attempt(function() { original.apply(obj, args); }); }.bind(obj, original); obj[method].options = options; } }; /***/ }), /***/ 39538: /***/ ((module) => { function RetryOperation(timeouts, options) { // Compatibility for the old (timeouts, retryForever) signature if (typeof options === 'boolean') { options = { forever: options }; } this._originalTimeouts = JSON.parse(JSON.stringify(timeouts)); this._timeouts = timeouts; this._options = options || {}; this._maxRetryTime = options && options.maxRetryTime || Infinity; this._fn = null; this._errors = []; this._attempts = 1; this._operationTimeout = null; this._operationTimeoutCb = null; this._timeout = null; this._operationStart = null; this._timer = null; if (this._options.forever) { this._cachedTimeouts = this._timeouts.slice(0); } } module.exports = RetryOperation; RetryOperation.prototype.reset = function() { this._attempts = 1; this._timeouts = this._originalTimeouts.slice(0); } RetryOperation.prototype.stop = function() { if (this._timeout) { clearTimeout(this._timeout); } if (this._timer) { clearTimeout(this._timer); } this._timeouts = []; this._cachedTimeouts = null; }; RetryOperation.prototype.retry = function(err) { if (this._timeout) { clearTimeout(this._timeout); } if (!err) { return false; } var currentTime = new Date().getTime(); if (err && currentTime - this._operationStart >= this._maxRetryTime) { this._errors.push(err); this._errors.unshift(new Error('RetryOperation timeout occurred')); return false; } this._errors.push(err); var timeout = this._timeouts.shift(); if (timeout === undefined) { if (this._cachedTimeouts) { // retry forever, only keep last error this._errors.splice(0, this._errors.length - 1); timeout = this._cachedTimeouts.slice(-1); } else { return false; } } var self = this; this._timer = setTimeout(function() { self._attempts++; if (self._operationTimeoutCb) { self._timeout = setTimeout(function() { self._operationTimeoutCb(self._attempts); }, self._operationTimeout); if (self._options.unref) { self._timeout.unref(); } } self._fn(self._attempts); }, timeout); if (this._options.unref) { this._timer.unref(); } return true; }; RetryOperation.prototype.attempt = function(fn, timeoutOps) { this._fn = fn; if (timeoutOps) { if (timeoutOps.timeout) { this._operationTimeout = timeoutOps.timeout; } if (timeoutOps.cb) { this._operationTimeoutCb = timeoutOps.cb; } } var self = this; if (this._operationTimeoutCb) { this._timeout = setTimeout(function() { self._operationTimeoutCb(); }, self._operationTimeout); } this._operationStart = new Date().getTime(); this._fn(this._attempts); }; RetryOperation.prototype.try = function(fn) { console.log('Using RetryOperation.try() is deprecated'); this.attempt(fn); }; 
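// Illustrative usage sketch: RetryOperation above is normally driven through
// the retry module's operation()/attempt()/retry() API. Kept as an unused
// function so bundle behaviour is unchanged; `retry` stands for that module's
// exports and `doWork(cb)` is a hypothetical fallible async function.
var __retryOperationUsageSketch = function (retry, doWork, done) {
  var op = retry.operation({ retries: 3, factor: 2, minTimeout: 1000 });
  op.attempt(function (currentAttempt) {
    doWork(function (err, result) {
      // retry(err) re-schedules the attempt with backoff and returns true
      // while timeouts remain; once exhausted it returns false.
      if (op.retry(err)) return;
      done(err ? op.mainError() : null, result);
    });
  });
};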
RetryOperation.prototype.start = function(fn) { console.log('Using RetryOperation.start() is deprecated'); this.attempt(fn); }; RetryOperation.prototype.start = RetryOperation.prototype.try; RetryOperation.prototype.errors = function() { return this._errors; }; RetryOperation.prototype.attempts = function() { return this._attempts; }; RetryOperation.prototype.mainError = function() { if (this._errors.length === 0) { return null; } var counts = {}; var mainError = null; var mainErrorCount = 0; for (var i = 0; i < this._errors.length; i++) { var error = this._errors[i]; var message = error.message; var count = (counts[message] || 0) + 1; counts[message] = count; if (count >= mainErrorCount) { mainError = error; mainErrorCount = count; } } return mainError; }; /***/ }), /***/ 93058: /***/ ((module, exports, __nccwpck_require__) => { /*! safe-buffer. MIT License. Feross Aboukhadijeh */ /* eslint-disable node/no-deprecated-api */ var buffer = __nccwpck_require__(20181) var Buffer = buffer.Buffer // alternative to using Object.keys for old browsers function copyProps (src, dst) { for (var key in src) { dst[key] = src[key] } } if (Buffer.from && Buffer.alloc && Buffer.allocUnsafe && Buffer.allocUnsafeSlow) { module.exports = buffer } else { // Copy properties from require('buffer') copyProps(buffer, exports) exports.Buffer = SafeBuffer } function SafeBuffer (arg, encodingOrOffset, length) { return Buffer(arg, encodingOrOffset, length) } SafeBuffer.prototype = Object.create(Buffer.prototype) // Copy static methods from Buffer copyProps(Buffer, SafeBuffer) SafeBuffer.from = function (arg, encodingOrOffset, length) { if (typeof arg === 'number') { throw new TypeError('Argument must not be a number') } return Buffer(arg, encodingOrOffset, length) } SafeBuffer.alloc = function (size, fill, encoding) { if (typeof size !== 'number') { throw new TypeError('Argument must be a number') } var buf = Buffer(size) if (fill !== undefined) { if (typeof encoding === 'string') { buf.fill(fill, encoding) } else { buf.fill(fill) } } else { buf.fill(0) } return buf } SafeBuffer.allocUnsafe = function (size) { if (typeof size !== 'number') { throw new TypeError('Argument must be a number') } return Buffer(size) } SafeBuffer.allocUnsafeSlow = function (size) { if (typeof size !== 'number') { throw new TypeError('Argument must be a number') } return buffer.SlowBuffer(size) } /***/ }), /***/ 39318: /***/ ((module, exports) => { exports = module.exports = SemVer var debug /* istanbul ignore next */ if (typeof process === 'object' && process.env && process.env.NODE_DEBUG && /\bsemver\b/i.test(process.env.NODE_DEBUG)) { debug = function () { var args = Array.prototype.slice.call(arguments, 0) args.unshift('SEMVER') console.log.apply(console, args) } } else { debug = function () {} } // Note: this is the semver.org version of the spec that it implements // Not necessarily the package version of this code. exports.SEMVER_SPEC_VERSION = '2.0.0' var MAX_LENGTH = 256 var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || /* istanbul ignore next */ 9007199254740991 // Max safe segment length for coercion. var MAX_SAFE_COMPONENT_LENGTH = 16 var MAX_SAFE_BUILD_LENGTH = MAX_LENGTH - 6 // The actual regexps go on exports.re var re = exports.re = [] var safeRe = exports.safeRe = [] var src = exports.src = [] var t = exports.tokens = {} var R = 0 function tok (n) { t[n] = R++ } var LETTERDASHNUMBER = '[a-zA-Z0-9-]' // Replace some greedy regex tokens to prevent regex dos issues. 
These regex are // used internally via the safeRe object since all inputs in this library get // normalized first to trim and collapse all extra whitespace. The original // regexes are exported for userland consumption and lower level usage. A // future breaking change could export the safer regex only with a note that // all input should have extra whitespace removed. var safeRegexReplacements = [ ['\\s', 1], ['\\d', MAX_LENGTH], [LETTERDASHNUMBER, MAX_SAFE_BUILD_LENGTH], ] function makeSafeRe (value) { for (var i = 0; i < safeRegexReplacements.length; i++) { var token = safeRegexReplacements[i][0] var max = safeRegexReplacements[i][1] value = value .split(token + '*').join(token + '{0,' + max + '}') .split(token + '+').join(token + '{1,' + max + '}') } return value } // The following Regular Expressions can be used for tokenizing, // validating, and parsing SemVer version strings. // ## Numeric Identifier // A single `0`, or a non-zero digit followed by zero or more digits. tok('NUMERICIDENTIFIER') src[t.NUMERICIDENTIFIER] = '0|[1-9]\\d*' tok('NUMERICIDENTIFIERLOOSE') src[t.NUMERICIDENTIFIERLOOSE] = '\\d+' // ## Non-numeric Identifier // Zero or more digits, followed by a letter or hyphen, and then zero or // more letters, digits, or hyphens. tok('NONNUMERICIDENTIFIER') src[t.NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-]' + LETTERDASHNUMBER + '*' // ## Main Version // Three dot-separated numeric identifiers. tok('MAINVERSION') src[t.MAINVERSION] = '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + '(' + src[t.NUMERICIDENTIFIER] + ')\\.' + '(' + src[t.NUMERICIDENTIFIER] + ')' tok('MAINVERSIONLOOSE') src[t.MAINVERSIONLOOSE] = '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')\\.' + '(' + src[t.NUMERICIDENTIFIERLOOSE] + ')' // ## Pre-release Version Identifier // A numeric identifier, or a non-numeric identifier. tok('PRERELEASEIDENTIFIER') src[t.PRERELEASEIDENTIFIER] = '(?:' + src[t.NUMERICIDENTIFIER] + '|' + src[t.NONNUMERICIDENTIFIER] + ')' tok('PRERELEASEIDENTIFIERLOOSE') src[t.PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[t.NUMERICIDENTIFIERLOOSE] + '|' + src[t.NONNUMERICIDENTIFIER] + ')' // ## Pre-release Version // Hyphen, followed by one or more dot-separated pre-release version // identifiers. tok('PRERELEASE') src[t.PRERELEASE] = '(?:-(' + src[t.PRERELEASEIDENTIFIER] + '(?:\\.' + src[t.PRERELEASEIDENTIFIER] + ')*))' tok('PRERELEASELOOSE') src[t.PRERELEASELOOSE] = '(?:-?(' + src[t.PRERELEASEIDENTIFIERLOOSE] + '(?:\\.' + src[t.PRERELEASEIDENTIFIERLOOSE] + ')*))' // ## Build Metadata Identifier // Any combination of digits, letters, or hyphens. tok('BUILDIDENTIFIER') src[t.BUILDIDENTIFIER] = LETTERDASHNUMBER + '+' // ## Build Metadata // Plus sign, followed by one or more period-separated build metadata // identifiers. tok('BUILD') src[t.BUILD] = '(?:\\+(' + src[t.BUILDIDENTIFIER] + '(?:\\.' + src[t.BUILDIDENTIFIER] + ')*))' // ## Full Version String // A main version, followed optionally by a pre-release version and // build metadata. // Note that the only major, minor, patch, and pre-release sections of // the version string are capturing groups. The build metadata is not a // capturing group, because it should not ever be used in version // comparison. tok('FULL') tok('FULLPLAIN') src[t.FULLPLAIN] = 'v?' + src[t.MAINVERSION] + src[t.PRERELEASE] + '?' + src[t.BUILD] + '?' src[t.FULL] = '^' + src[t.FULLPLAIN] + '$' // like full, but allows v1.2.3 and =1.2.3, which people do sometimes. 
// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty // common in the npm registry. tok('LOOSEPLAIN') src[t.LOOSEPLAIN] = '[v=\\s]*' + src[t.MAINVERSIONLOOSE] + src[t.PRERELEASELOOSE] + '?' + src[t.BUILD] + '?' tok('LOOSE') src[t.LOOSE] = '^' + src[t.LOOSEPLAIN] + '$' tok('GTLT') src[t.GTLT] = '((?:<|>)?=?)' // Something like "2.*" or "1.2.x". // Note that "x.x" is a valid xRange identifer, meaning "any version" // Only the first item is strictly required. tok('XRANGEIDENTIFIERLOOSE') src[t.XRANGEIDENTIFIERLOOSE] = src[t.NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' tok('XRANGEIDENTIFIER') src[t.XRANGEIDENTIFIER] = src[t.NUMERICIDENTIFIER] + '|x|X|\\*' tok('XRANGEPLAIN') src[t.XRANGEPLAIN] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIER] + ')' + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + '(?:\\.(' + src[t.XRANGEIDENTIFIER] + ')' + '(?:' + src[t.PRERELEASE] + ')?' + src[t.BUILD] + '?' + ')?)?' tok('XRANGEPLAINLOOSE') src[t.XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + '(?:\\.(' + src[t.XRANGEIDENTIFIERLOOSE] + ')' + '(?:' + src[t.PRERELEASELOOSE] + ')?' + src[t.BUILD] + '?' + ')?)?' tok('XRANGE') src[t.XRANGE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAIN] + '$' tok('XRANGELOOSE') src[t.XRANGELOOSE] = '^' + src[t.GTLT] + '\\s*' + src[t.XRANGEPLAINLOOSE] + '$' // Coercion. // Extract anything that could conceivably be a part of a valid semver tok('COERCE') src[t.COERCE] = '(^|[^\\d])' + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + '(?:$|[^\\d])' tok('COERCERTL') re[t.COERCERTL] = new RegExp(src[t.COERCE], 'g') safeRe[t.COERCERTL] = new RegExp(makeSafeRe(src[t.COERCE]), 'g') // Tilde ranges. // Meaning is "reasonably at or greater than" tok('LONETILDE') src[t.LONETILDE] = '(?:~>?)' tok('TILDETRIM') src[t.TILDETRIM] = '(\\s*)' + src[t.LONETILDE] + '\\s+' re[t.TILDETRIM] = new RegExp(src[t.TILDETRIM], 'g') safeRe[t.TILDETRIM] = new RegExp(makeSafeRe(src[t.TILDETRIM]), 'g') var tildeTrimReplace = '$1~' tok('TILDE') src[t.TILDE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAIN] + '$' tok('TILDELOOSE') src[t.TILDELOOSE] = '^' + src[t.LONETILDE] + src[t.XRANGEPLAINLOOSE] + '$' // Caret ranges. 
// Meaning is "at least and backwards compatible with" tok('LONECARET') src[t.LONECARET] = '(?:\\^)' tok('CARETTRIM') src[t.CARETTRIM] = '(\\s*)' + src[t.LONECARET] + '\\s+' re[t.CARETTRIM] = new RegExp(src[t.CARETTRIM], 'g') safeRe[t.CARETTRIM] = new RegExp(makeSafeRe(src[t.CARETTRIM]), 'g') var caretTrimReplace = '$1^' tok('CARET') src[t.CARET] = '^' + src[t.LONECARET] + src[t.XRANGEPLAIN] + '$' tok('CARETLOOSE') src[t.CARETLOOSE] = '^' + src[t.LONECARET] + src[t.XRANGEPLAINLOOSE] + '$' // A simple gt/lt/eq thing, or just "" to indicate "any version" tok('COMPARATORLOOSE') src[t.COMPARATORLOOSE] = '^' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + ')$|^$' tok('COMPARATOR') src[t.COMPARATOR] = '^' + src[t.GTLT] + '\\s*(' + src[t.FULLPLAIN] + ')$|^$' // An expression to strip any whitespace between the gtlt and the thing // it modifies, so that `> 1.2.3` ==> `>1.2.3` tok('COMPARATORTRIM') src[t.COMPARATORTRIM] = '(\\s*)' + src[t.GTLT] + '\\s*(' + src[t.LOOSEPLAIN] + '|' + src[t.XRANGEPLAIN] + ')' // this one has to use the /g flag re[t.COMPARATORTRIM] = new RegExp(src[t.COMPARATORTRIM], 'g') safeRe[t.COMPARATORTRIM] = new RegExp(makeSafeRe(src[t.COMPARATORTRIM]), 'g') var comparatorTrimReplace = '$1$2$3' // Something like `1.2.3 - 1.2.4` // Note that these all use the loose form, because they'll be // checked against either the strict or loose comparator form // later. tok('HYPHENRANGE') src[t.HYPHENRANGE] = '^\\s*(' + src[t.XRANGEPLAIN] + ')' + '\\s+-\\s+' + '(' + src[t.XRANGEPLAIN] + ')' + '\\s*$' tok('HYPHENRANGELOOSE') src[t.HYPHENRANGELOOSE] = '^\\s*(' + src[t.XRANGEPLAINLOOSE] + ')' + '\\s+-\\s+' + '(' + src[t.XRANGEPLAINLOOSE] + ')' + '\\s*$' // Star ranges basically just allow anything at all. tok('STAR') src[t.STAR] = '(<|>)?=?\\s*\\*' // Compile to actual regexp objects. // All are flag-free, unless they were created above with a flag. for (var i = 0; i < R; i++) { debug(i, src[i]) if (!re[i]) { re[i] = new RegExp(src[i]) // Replace all greedy whitespace to prevent regex dos issues. These regex are // used internally via the safeRe object since all inputs in this library get // normalized first to trim and collapse all extra whitespace. The original // regexes are exported for userland consumption and lower level usage. A // future breaking change could export the safer regex only with a note that // all input should have extra whitespace removed. safeRe[i] = new RegExp(makeSafeRe(src[i])) } } exports.parse = parse function parse (version, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } if (version instanceof SemVer) { return version } if (typeof version !== 'string') { return null } if (version.length > MAX_LENGTH) { return null } var r = options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL] if (!r.test(version)) { return null } try { return new SemVer(version, options) } catch (er) { return null } } exports.valid = valid function valid (version, options) { var v = parse(version, options) return v ? v.version : null } exports.clean = clean function clean (version, options) { var s = parse(version.trim().replace(/^[=v]+/, ''), options) return s ? 
s.version : null } exports.SemVer = SemVer function SemVer (version, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } if (version instanceof SemVer) { if (version.loose === options.loose) { return version } else { version = version.version } } else if (typeof version !== 'string') { throw new TypeError('Invalid Version: ' + version) } if (version.length > MAX_LENGTH) { throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') } if (!(this instanceof SemVer)) { return new SemVer(version, options) } debug('SemVer', version, options) this.options = options this.loose = !!options.loose var m = version.trim().match(options.loose ? safeRe[t.LOOSE] : safeRe[t.FULL]) if (!m) { throw new TypeError('Invalid Version: ' + version) } this.raw = version // these are actually numbers this.major = +m[1] this.minor = +m[2] this.patch = +m[3] if (this.major > MAX_SAFE_INTEGER || this.major < 0) { throw new TypeError('Invalid major version') } if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { throw new TypeError('Invalid minor version') } if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { throw new TypeError('Invalid patch version') } // numberify any prerelease numeric ids if (!m[4]) { this.prerelease = [] } else { this.prerelease = m[4].split('.').map(function (id) { if (/^[0-9]+$/.test(id)) { var num = +id if (num >= 0 && num < MAX_SAFE_INTEGER) { return num } } return id }) } this.build = m[5] ? m[5].split('.') : [] this.format() } SemVer.prototype.format = function () { this.version = this.major + '.' + this.minor + '.' + this.patch if (this.prerelease.length) { this.version += '-' + this.prerelease.join('.') } return this.version } SemVer.prototype.toString = function () { return this.version } SemVer.prototype.compare = function (other) { debug('SemVer.compare', this.version, this.options, other) if (!(other instanceof SemVer)) { other = new SemVer(other, this.options) } return this.compareMain(other) || this.comparePre(other) } SemVer.prototype.compareMain = function (other) { if (!(other instanceof SemVer)) { other = new SemVer(other, this.options) } return compareIdentifiers(this.major, other.major) || compareIdentifiers(this.minor, other.minor) || compareIdentifiers(this.patch, other.patch) } SemVer.prototype.comparePre = function (other) { if (!(other instanceof SemVer)) { other = new SemVer(other, this.options) } // NOT having a prerelease is > having one if (this.prerelease.length && !other.prerelease.length) { return -1 } else if (!this.prerelease.length && other.prerelease.length) { return 1 } else if (!this.prerelease.length && !other.prerelease.length) { return 0 } var i = 0 do { var a = this.prerelease[i] var b = other.prerelease[i] debug('prerelease compare', i, a, b) if (a === undefined && b === undefined) { return 0 } else if (b === undefined) { return 1 } else if (a === undefined) { return -1 } else if (a === b) { continue } else { return compareIdentifiers(a, b) } } while (++i) } SemVer.prototype.compareBuild = function (other) { if (!(other instanceof SemVer)) { other = new SemVer(other, this.options) } var i = 0 do { var a = this.build[i] var b = other.build[i] debug('prerelease compare', i, a, b) if (a === undefined && b === undefined) { return 0 } else if (b === undefined) { return 1 } else if (a === undefined) { return -1 } else if (a === b) { continue } else { return compareIdentifiers(a, b) } } while (++i) } // preminor will bump the version up to the next minor release, and 
immediately // down to pre-release. premajor and prepatch work the same way. SemVer.prototype.inc = function (release, identifier) { switch (release) { case 'premajor': this.prerelease.length = 0 this.patch = 0 this.minor = 0 this.major++ this.inc('pre', identifier) break case 'preminor': this.prerelease.length = 0 this.patch = 0 this.minor++ this.inc('pre', identifier) break case 'prepatch': // If this is already a prerelease, it will bump to the next version // drop any prereleases that might already exist, since they are not // relevant at this point. this.prerelease.length = 0 this.inc('patch', identifier) this.inc('pre', identifier) break // If the input is a non-prerelease version, this acts the same as // prepatch. case 'prerelease': if (this.prerelease.length === 0) { this.inc('patch', identifier) } this.inc('pre', identifier) break case 'major': // If this is a pre-major version, bump up to the same major version. // Otherwise increment major. // 1.0.0-5 bumps to 1.0.0 // 1.1.0 bumps to 2.0.0 if (this.minor !== 0 || this.patch !== 0 || this.prerelease.length === 0) { this.major++ } this.minor = 0 this.patch = 0 this.prerelease = [] break case 'minor': // If this is a pre-minor version, bump up to the same minor version. // Otherwise increment minor. // 1.2.0-5 bumps to 1.2.0 // 1.2.1 bumps to 1.3.0 if (this.patch !== 0 || this.prerelease.length === 0) { this.minor++ } this.patch = 0 this.prerelease = [] break case 'patch': // If this is not a pre-release version, it will increment the patch. // If it is a pre-release it will bump up to the same patch version. // 1.2.0-5 patches to 1.2.0 // 1.2.0 patches to 1.2.1 if (this.prerelease.length === 0) { this.patch++ } this.prerelease = [] break // This probably shouldn't be used publicly. // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. case 'pre': if (this.prerelease.length === 0) { this.prerelease = [0] } else { var i = this.prerelease.length while (--i >= 0) { if (typeof this.prerelease[i] === 'number') { this.prerelease[i]++ i = -2 } } if (i === -1) { // didn't increment anything this.prerelease.push(0) } } if (identifier) { // 1.2.0-beta.1 bumps to 1.2.0-beta.2, // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 if (this.prerelease[0] === identifier) { if (isNaN(this.prerelease[1])) { this.prerelease = [identifier, 0] } } else { this.prerelease = [identifier, 0] } } break default: throw new Error('invalid increment argument: ' + release) } this.format() this.raw = this.version return this } exports.inc = inc function inc (version, release, loose, identifier) { if (typeof (loose) === 'string') { identifier = loose loose = undefined } try { return new SemVer(version, loose).inc(release, identifier).version } catch (er) { return null } } exports.diff = diff function diff (version1, version2) { if (eq(version1, version2)) { return null } else { var v1 = parse(version1) var v2 = parse(version2) var prefix = '' if (v1.prerelease.length || v2.prerelease.length) { prefix = 'pre' var defaultResult = 'prerelease' } for (var key in v1) { if (key === 'major' || key === 'minor' || key === 'patch') { if (v1[key] !== v2[key]) { return prefix + key } } } return defaultResult // may be undefined } } exports.compareIdentifiers = compareIdentifiers var numeric = /^[0-9]+$/ function compareIdentifiers (a, b) { var anum = numeric.test(a) var bnum = numeric.test(b) if (anum && bnum) { a = +a b = +b } return a === b ? 0 : (anum && !bnum) ? -1 : (bnum && !anum) ? 1 : a < b ? 
-1 : 1 } exports.rcompareIdentifiers = rcompareIdentifiers function rcompareIdentifiers (a, b) { return compareIdentifiers(b, a) } exports.major = major function major (a, loose) { return new SemVer(a, loose).major } exports.minor = minor function minor (a, loose) { return new SemVer(a, loose).minor } exports.patch = patch function patch (a, loose) { return new SemVer(a, loose).patch } exports.compare = compare function compare (a, b, loose) { return new SemVer(a, loose).compare(new SemVer(b, loose)) } exports.compareLoose = compareLoose function compareLoose (a, b) { return compare(a, b, true) } exports.compareBuild = compareBuild function compareBuild (a, b, loose) { var versionA = new SemVer(a, loose) var versionB = new SemVer(b, loose) return versionA.compare(versionB) || versionA.compareBuild(versionB) } exports.rcompare = rcompare function rcompare (a, b, loose) { return compare(b, a, loose) } exports.sort = sort function sort (list, loose) { return list.sort(function (a, b) { return exports.compareBuild(a, b, loose) }) } exports.rsort = rsort function rsort (list, loose) { return list.sort(function (a, b) { return exports.compareBuild(b, a, loose) }) } exports.gt = gt function gt (a, b, loose) { return compare(a, b, loose) > 0 } exports.lt = lt function lt (a, b, loose) { return compare(a, b, loose) < 0 } exports.eq = eq function eq (a, b, loose) { return compare(a, b, loose) === 0 } exports.neq = neq function neq (a, b, loose) { return compare(a, b, loose) !== 0 } exports.gte = gte function gte (a, b, loose) { return compare(a, b, loose) >= 0 } exports.lte = lte function lte (a, b, loose) { return compare(a, b, loose) <= 0 } exports.cmp = cmp function cmp (a, op, b, loose) { switch (op) { case '===': if (typeof a === 'object') a = a.version if (typeof b === 'object') b = b.version return a === b case '!==': if (typeof a === 'object') a = a.version if (typeof b === 'object') b = b.version return a !== b case '': case '=': case '==': return eq(a, b, loose) case '!=': return neq(a, b, loose) case '>': return gt(a, b, loose) case '>=': return gte(a, b, loose) case '<': return lt(a, b, loose) case '<=': return lte(a, b, loose) default: throw new TypeError('Invalid operator: ' + op) } } exports.Comparator = Comparator function Comparator (comp, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } if (comp instanceof Comparator) { if (comp.loose === !!options.loose) { return comp } else { comp = comp.value } } if (!(this instanceof Comparator)) { return new Comparator(comp, options) } comp = comp.trim().split(/\s+/).join(' ') debug('comparator', comp, options) this.options = options this.loose = !!options.loose this.parse(comp) if (this.semver === ANY) { this.value = '' } else { this.value = this.operator + this.semver.version } debug('comp', this) } var ANY = {} Comparator.prototype.parse = function (comp) { var r = this.options.loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var m = comp.match(r) if (!m) { throw new TypeError('Invalid comparator: ' + comp) } this.operator = m[1] !== undefined ? m[1] : '' if (this.operator === '=') { this.operator = '' } // if it literally is just '>' or '' then allow anything. 
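/*
 * Illustrative sketch (comment only): a few parsed comparators, using the
 * branch just below.
 *
 *   new Comparator('>=1.2.3') // operator '>=', semver 1.2.3
 *   new Comparator('=1.2.3')  // '=' is normalized to '', value '1.2.3'
 *   new Comparator('')        // empty: semver becomes ANY, so test()
 *                             // accepts every version
 *
 * Comparator#test(version) then defers to cmp(version, operator, semver).
 */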
if (!m[2]) { this.semver = ANY } else { this.semver = new SemVer(m[2], this.options.loose) } } Comparator.prototype.toString = function () { return this.value } Comparator.prototype.test = function (version) { debug('Comparator.test', version, this.options.loose) if (this.semver === ANY || version === ANY) { return true } if (typeof version === 'string') { try { version = new SemVer(version, this.options) } catch (er) { return false } } return cmp(version, this.operator, this.semver, this.options) } Comparator.prototype.intersects = function (comp, options) { if (!(comp instanceof Comparator)) { throw new TypeError('a Comparator is required') } if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } var rangeTmp if (this.operator === '') { if (this.value === '') { return true } rangeTmp = new Range(comp.value, options) return satisfies(this.value, rangeTmp, options) } else if (comp.operator === '') { if (comp.value === '') { return true } rangeTmp = new Range(this.value, options) return satisfies(comp.semver, rangeTmp, options) } var sameDirectionIncreasing = (this.operator === '>=' || this.operator === '>') && (comp.operator === '>=' || comp.operator === '>') var sameDirectionDecreasing = (this.operator === '<=' || this.operator === '<') && (comp.operator === '<=' || comp.operator === '<') var sameSemVer = this.semver.version === comp.semver.version var differentDirectionsInclusive = (this.operator === '>=' || this.operator === '<=') && (comp.operator === '>=' || comp.operator === '<=') var oppositeDirectionsLessThan = cmp(this.semver, '<', comp.semver, options) && ((this.operator === '>=' || this.operator === '>') && (comp.operator === '<=' || comp.operator === '<')) var oppositeDirectionsGreaterThan = cmp(this.semver, '>', comp.semver, options) && ((this.operator === '<=' || this.operator === '<') && (comp.operator === '>=' || comp.operator === '>')) return sameDirectionIncreasing || sameDirectionDecreasing || (sameSemVer && differentDirectionsInclusive) || oppositeDirectionsLessThan || oppositeDirectionsGreaterThan } exports.Range = Range function Range (range, options) { if (!options || typeof options !== 'object') { options = { loose: !!options, includePrerelease: false } } if (range instanceof Range) { if (range.loose === !!options.loose && range.includePrerelease === !!options.includePrerelease) { return range } else { return new Range(range.raw, options) } } if (range instanceof Comparator) { return new Range(range.value, options) } if (!(this instanceof Range)) { return new Range(range, options) } this.options = options this.loose = !!options.loose this.includePrerelease = !!options.includePrerelease // First reduce all whitespace as much as possible so we do not have to rely // on potentially slow regexes like \s*. This is then stored and used for // future error messages as well. 
this.raw = range .trim() .split(/\s+/) .join(' ') // First, split based on boolean or || this.set = this.raw.split('||').map(function (range) { return this.parseRange(range.trim()) }, this).filter(function (c) { // throw out any that are not relevant for whatever reason return c.length }) if (!this.set.length) { throw new TypeError('Invalid SemVer Range: ' + this.raw) } this.format() } Range.prototype.format = function () { this.range = this.set.map(function (comps) { return comps.join(' ').trim() }).join('||').trim() return this.range } Range.prototype.toString = function () { return this.range } Range.prototype.parseRange = function (range) { var loose = this.options.loose // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` var hr = loose ? safeRe[t.HYPHENRANGELOOSE] : safeRe[t.HYPHENRANGE] range = range.replace(hr, hyphenReplace) debug('hyphen replace', range) // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` range = range.replace(safeRe[t.COMPARATORTRIM], comparatorTrimReplace) debug('comparator trim', range, safeRe[t.COMPARATORTRIM]) // `~ 1.2.3` => `~1.2.3` range = range.replace(safeRe[t.TILDETRIM], tildeTrimReplace) // `^ 1.2.3` => `^1.2.3` range = range.replace(safeRe[t.CARETTRIM], caretTrimReplace) // normalize spaces range = range.split(/\s+/).join(' ') // At this point, the range is completely trimmed and // ready to be split into comparators. var compRe = loose ? safeRe[t.COMPARATORLOOSE] : safeRe[t.COMPARATOR] var set = range.split(' ').map(function (comp) { return parseComparator(comp, this.options) }, this).join(' ').split(/\s+/) if (this.options.loose) { // in loose mode, throw out any that are not valid comparators set = set.filter(function (comp) { return !!comp.match(compRe) }) } set = set.map(function (comp) { return new Comparator(comp, this.options) }, this) return set } Range.prototype.intersects = function (range, options) { if (!(range instanceof Range)) { throw new TypeError('a Range is required') } return this.set.some(function (thisComparators) { return ( isSatisfiable(thisComparators, options) && range.set.some(function (rangeComparators) { return ( isSatisfiable(rangeComparators, options) && thisComparators.every(function (thisComparator) { return rangeComparators.every(function (rangeComparator) { return thisComparator.intersects(rangeComparator, options) }) }) ) }) ) }) } // take a set of comparators and determine whether there // exists a version which can satisfy it function isSatisfiable (comparators, options) { var result = true var remainingComparators = comparators.slice() var testComparator = remainingComparators.pop() while (result && remainingComparators.length) { result = remainingComparators.every(function (otherComparator) { return testComparator.intersects(otherComparator, options) }) testComparator = remainingComparators.pop() } return result } // Mostly just for testing and legacy API reasons exports.toComparators = toComparators function toComparators (range, options) { return new Range(range, options).set.map(function (comp) { return comp.map(function (c) { return c.value }).join(' ').trim().split(' ') }) } // comprised of xranges, tildes, stars, and gtlt's at this point. // already replaced the hyphen ranges // turn into a set of JUST comparators. 
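/*
 * Worked examples (comment only): parseRange above plus the replace* helpers
 * below rewrite the shorthand operators into plain comparators, e.g.
 *
 *   '1.2.3 - 1.2.4'  -> '>=1.2.3 <=1.2.4'  (hyphenReplace)
 *   '~1.2.3'         -> '>=1.2.3 <1.3.0'   (replaceTilde)
 *   '^1.2.3'         -> '>=1.2.3 <2.0.0'   (replaceCaret)
 *   '1.2.x'          -> '>=1.2.0 <1.3.0'   (replaceXRange)
 *   '*'              -> ''                 (replaceStars, i.e. any version)
 *
 * so a composite range such as '^1.2.3 || 1.2.x' ends up as two comparator
 * sets that Range#test() checks in turn.
 */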
function parseComparator (comp, options) { debug('comp', comp, options) comp = replaceCarets(comp, options) debug('caret', comp) comp = replaceTildes(comp, options) debug('tildes', comp) comp = replaceXRanges(comp, options) debug('xrange', comp) comp = replaceStars(comp, options) debug('stars', comp) return comp } function isX (id) { return !id || id.toLowerCase() === 'x' || id === '*' } // ~, ~> --> * (any, kinda silly) // ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 // ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 // ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 // ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 // ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 function replaceTildes (comp, options) { return comp.trim().split(/\s+/).map(function (comp) { return replaceTilde(comp, options) }).join(' ') } function replaceTilde (comp, options) { var r = options.loose ? safeRe[t.TILDELOOSE] : safeRe[t.TILDE] return comp.replace(r, function (_, M, m, p, pr) { debug('tilde', comp, _, M, m, p, pr) var ret if (isX(M)) { ret = '' } else if (isX(m)) { ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' } else if (isX(p)) { // ~1.2 == >=1.2.0 <1.3.0 ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' } else if (pr) { debug('replaceTilde pr', pr) ret = '>=' + M + '.' + m + '.' + p + '-' + pr + ' <' + M + '.' + (+m + 1) + '.0' } else { // ~1.2.3 == >=1.2.3 <1.3.0 ret = '>=' + M + '.' + m + '.' + p + ' <' + M + '.' + (+m + 1) + '.0' } debug('tilde return', ret) return ret }) } // ^ --> * (any, kinda silly) // ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 // ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 // ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 // ^1.2.3 --> >=1.2.3 <2.0.0 // ^1.2.0 --> >=1.2.0 <2.0.0 function replaceCarets (comp, options) { return comp.trim().split(/\s+/).map(function (comp) { return replaceCaret(comp, options) }).join(' ') } function replaceCaret (comp, options) { debug('caret', comp, options) var r = options.loose ? safeRe[t.CARETLOOSE] : safeRe[t.CARET] return comp.replace(r, function (_, M, m, p, pr) { debug('caret', comp, _, M, m, p, pr) var ret if (isX(M)) { ret = '' } else if (isX(m)) { ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' } else if (isX(p)) { if (M === '0') { ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' } else { ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' } } else if (pr) { debug('replaceCaret pr', pr) if (M === '0') { if (m === '0') { ret = '>=' + M + '.' + m + '.' + p + '-' + pr + ' <' + M + '.' + m + '.' + (+p + 1) } else { ret = '>=' + M + '.' + m + '.' + p + '-' + pr + ' <' + M + '.' + (+m + 1) + '.0' } } else { ret = '>=' + M + '.' + m + '.' + p + '-' + pr + ' <' + (+M + 1) + '.0.0' } } else { debug('no pr') if (M === '0') { if (m === '0') { ret = '>=' + M + '.' + m + '.' + p + ' <' + M + '.' + m + '.' + (+p + 1) } else { ret = '>=' + M + '.' + m + '.' + p + ' <' + M + '.' + (+m + 1) + '.0' } } else { ret = '>=' + M + '.' + m + '.' + p + ' <' + (+M + 1) + '.0.0' } } debug('caret return', ret) return ret }) } function replaceXRanges (comp, options) { debug('replaceXRanges', comp, options) return comp.split(/\s+/).map(function (comp) { return replaceXRange(comp, options) }).join(' ') } function replaceXRange (comp, options) { comp = comp.trim() var r = options.loose ? 
safeRe[t.XRANGELOOSE] : safeRe[t.XRANGE] return comp.replace(r, function (ret, gtlt, M, m, p, pr) { debug('xRange', comp, ret, gtlt, M, m, p, pr) var xM = isX(M) var xm = xM || isX(m) var xp = xm || isX(p) var anyX = xp if (gtlt === '=' && anyX) { gtlt = '' } // if we're including prereleases in the match, then we need // to fix this to -0, the lowest possible prerelease value pr = options.includePrerelease ? '-0' : '' if (xM) { if (gtlt === '>' || gtlt === '<') { // nothing is allowed ret = '<0.0.0-0' } else { // nothing is forbidden ret = '*' } } else if (gtlt && anyX) { // we know patch is an x, because we have any x at all. // replace X with 0 if (xm) { m = 0 } p = 0 if (gtlt === '>') { // >1 => >=2.0.0 // >1.2 => >=1.3.0 // >1.2.3 => >= 1.2.4 gtlt = '>=' if (xm) { M = +M + 1 m = 0 p = 0 } else { m = +m + 1 p = 0 } } else if (gtlt === '<=') { // <=0.7.x is actually <0.8.0, since any 0.7.x should // pass. Similarly, <=7.x is actually <8.0.0, etc. gtlt = '<' if (xm) { M = +M + 1 } else { m = +m + 1 } } ret = gtlt + M + '.' + m + '.' + p + pr } else if (xm) { ret = '>=' + M + '.0.0' + pr + ' <' + (+M + 1) + '.0.0' + pr } else if (xp) { ret = '>=' + M + '.' + m + '.0' + pr + ' <' + M + '.' + (+m + 1) + '.0' + pr } debug('xRange return', ret) return ret }) } // Because * is AND-ed with everything else in the comparator, // and '' means "any version", just remove the *s entirely. function replaceStars (comp, options) { debug('replaceStars', comp, options) // Looseness is ignored here. star is always as loose as it gets! return comp.trim().replace(safeRe[t.STAR], '') } // This function is passed to string.replace(re[t.HYPHENRANGE]) // M, m, patch, prerelease, build // 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 // 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do // 1.2 - 3.4 => >=1.2.0 <3.5.0 function hyphenReplace ($0, from, fM, fm, fp, fpr, fb, to, tM, tm, tp, tpr, tb) { if (isX(fM)) { from = '' } else if (isX(fm)) { from = '>=' + fM + '.0.0' } else if (isX(fp)) { from = '>=' + fM + '.' + fm + '.0' } else { from = '>=' + from } if (isX(tM)) { to = '' } else if (isX(tm)) { to = '<' + (+tM + 1) + '.0.0' } else if (isX(tp)) { to = '<' + tM + '.' + (+tm + 1) + '.0' } else if (tpr) { to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr } else { to = '<=' + to } return (from + ' ' + to).trim() } // if ANY of the sets match ALL of its comparators, then pass Range.prototype.test = function (version) { if (!version) { return false } if (typeof version === 'string') { try { version = new SemVer(version, this.options) } catch (er) { return false } } for (var i = 0; i < this.set.length; i++) { if (testSet(this.set[i], version, this.options)) { return true } } return false } function testSet (set, version, options) { for (var i = 0; i < set.length; i++) { if (!set[i].test(version)) { return false } } if (version.prerelease.length && !options.includePrerelease) { // Find the set of versions that are allowed to have prereleases // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 // That should allow `1.2.3-pr.2` to pass. // However, `1.2.4-alpha.notready` should NOT be allowed, // even though it's within the range set by the comparators. for (i = 0; i < set.length; i++) { debug(set[i].semver) if (set[i].semver === ANY) { continue } if (set[i].semver.prerelease.length > 0) { var allowed = set[i].semver if (allowed.major === version.major && allowed.minor === version.minor && allowed.patch === version.patch) { return true } } } // Version has a -pre, but it's not one of the ones we like. 
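// For example (illustrative versions), with the default options:
//   satisfies('1.2.3-pr.2', '^1.2.3-pr.1')            // true  - same major.minor.patch as the prerelease comparator
//   satisfies('1.2.4-alpha.notready', '^1.2.3-pr.1')  // false - prerelease of a different patch level
// Passing { includePrerelease: true } skips this check entirely.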
return false } return true } exports.satisfies = satisfies function satisfies (version, range, options) { try { range = new Range(range, options) } catch (er) { return false } return range.test(version) } exports.maxSatisfying = maxSatisfying function maxSatisfying (versions, range, options) { var max = null var maxSV = null try { var rangeObj = new Range(range, options) } catch (er) { return null } versions.forEach(function (v) { if (rangeObj.test(v)) { // satisfies(v, range, options) if (!max || maxSV.compare(v) === -1) { // compare(max, v, true) max = v maxSV = new SemVer(max, options) } } }) return max } exports.minSatisfying = minSatisfying function minSatisfying (versions, range, options) { var min = null var minSV = null try { var rangeObj = new Range(range, options) } catch (er) { return null } versions.forEach(function (v) { if (rangeObj.test(v)) { // satisfies(v, range, options) if (!min || minSV.compare(v) === 1) { // compare(min, v, true) min = v minSV = new SemVer(min, options) } } }) return min } exports.minVersion = minVersion function minVersion (range, loose) { range = new Range(range, loose) var minver = new SemVer('0.0.0') if (range.test(minver)) { return minver } minver = new SemVer('0.0.0-0') if (range.test(minver)) { return minver } minver = null for (var i = 0; i < range.set.length; ++i) { var comparators = range.set[i] comparators.forEach(function (comparator) { // Clone to avoid manipulating the comparator's semver object. var compver = new SemVer(comparator.semver.version) switch (comparator.operator) { case '>': if (compver.prerelease.length === 0) { compver.patch++ } else { compver.prerelease.push(0) } compver.raw = compver.format() /* fallthrough */ case '': case '>=': if (!minver || gt(minver, compver)) { minver = compver } break case '<': case '<=': /* Ignore maximum versions */ break /* istanbul ignore next */ default: throw new Error('Unexpected operation: ' + comparator.operator) } }) } if (minver && range.test(minver)) { return minver } return null } exports.validRange = validRange function validRange (range, options) { try { // Return '*' instead of '' so that truthiness works. // This will throw if it's invalid anyway return new Range(range, options).range || '*' } catch (er) { return null } } // Determine if version is less than all the versions possible in the range exports.ltr = ltr function ltr (version, range, options) { return outside(version, range, '<', options) } // Determine if version is greater than all the versions possible in the range. exports.gtr = gtr function gtr (version, range, options) { return outside(version, range, '>', options) } exports.outside = outside function outside (version, range, hilo, options) { version = new SemVer(version, options) range = new Range(range, options) var gtfn, ltefn, ltfn, comp, ecomp switch (hilo) { case '>': gtfn = gt ltefn = lte ltfn = lt comp = '>' ecomp = '>=' break case '<': gtfn = lt ltefn = gte ltfn = gt comp = '<' ecomp = '<=' break default: throw new TypeError('Must provide a hilo val of "<" or ">"') } // If it satisifes the range it is not outside if (satisfies(version, range, options)) { return false } // From now on, variable terms are as if we're in "gtr" mode. // but note that everything is flipped for the "ltr" function. 
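// For example (illustrative versions):
//   gtr('2.0.0', '^1.2.3')  // true  - above every version in >=1.2.3 <2.0.0
//   ltr('1.0.0', '^1.2.3')  // true  - below every version in the range
//   gtr('1.5.0', '^1.2.3')  // false - it satisfies the range, so it is not outside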
for (var i = 0; i < range.set.length; ++i) { var comparators = range.set[i] var high = null var low = null comparators.forEach(function (comparator) { if (comparator.semver === ANY) { comparator = new Comparator('>=0.0.0') } high = high || comparator low = low || comparator if (gtfn(comparator.semver, high.semver, options)) { high = comparator } else if (ltfn(comparator.semver, low.semver, options)) { low = comparator } }) // If the edge version comparator has a operator then our version // isn't outside it if (high.operator === comp || high.operator === ecomp) { return false } // If the lowest version comparator has an operator and our version // is less than it then it isn't higher than the range if ((!low.operator || low.operator === comp) && ltefn(version, low.semver)) { return false } else if (low.operator === ecomp && ltfn(version, low.semver)) { return false } } return true } exports.prerelease = prerelease function prerelease (version, options) { var parsed = parse(version, options) return (parsed && parsed.prerelease.length) ? parsed.prerelease : null } exports.intersects = intersects function intersects (r1, r2, options) { r1 = new Range(r1, options) r2 = new Range(r2, options) return r1.intersects(r2) } exports.coerce = coerce function coerce (version, options) { if (version instanceof SemVer) { return version } if (typeof version === 'number') { version = String(version) } if (typeof version !== 'string') { return null } options = options || {} var match = null if (!options.rtl) { match = version.match(safeRe[t.COERCE]) } else { // Find the right-most coercible string that does not share // a terminus with a more left-ward coercible string. // Eg, '1.2.3.4' wants to coerce '2.3.4', not '3.4' or '4' // // Walk through the string checking with a /g regexp // Manually set the index so as to pick up overlapping matches. // Stop when we get a match that ends at the string end, since no // coercible string can be more right-ward without the same terminus. var next while ((next = safeRe[t.COERCERTL].exec(version)) && (!match || match.index + match[0].length !== version.length) ) { if (!match || next.index + next[0].length !== match.index + match[0].length) { match = next } safeRe[t.COERCERTL].lastIndex = next.index + next[1].length + next[2].length } // leave it in a clean state safeRe[t.COERCERTL].lastIndex = -1 } if (match === null) { return null } return parse(match[2] + '.' + (match[3] || '0') + '.' + (match[4] || '0'), options) } /***/ }), /***/ 71546: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var stubs = __nccwpck_require__(33897) /* * StreamEvents can be used 2 ways: * * 1: * function MyStream() { * require('stream-events').call(this) * } * * 2: * require('stream-events')(myStream) */ function StreamEvents(stream) { stream = stream || this var cfg = { callthrough: true, calls: 1 } stubs(stream, '_read', cfg, stream.emit.bind(stream, 'reading')) stubs(stream, '_write', cfg, stream.emit.bind(stream, 'writing')) return stream } module.exports = StreamEvents /***/ }), /***/ 34633: /***/ ((module) => { module.exports = shift function shift (stream) { var rs = stream._readableState if (!rs) return null return (rs.objectMode || typeof stream._duplexState === 'number') ? 
stream.read() : stream.read(getStateLength(rs)) } function getStateLength (state) { if (state.buffer.length) { var idx = state.bufferIndex || 0 // Since node 6.3.0 state.buffer is a BufferList not an array if (state.buffer.head) { return state.buffer.head.data.length } else if (state.buffer.length - idx > 0 && state.buffer[idx]) { return state.buffer[idx].length } } return state.length } /***/ }), /***/ 80634: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright Joyent, Inc. and other Node contributors. // // Permission is hereby granted, free of charge, to any person obtaining a // copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to permit // persons to whom the Software is furnished to do so, subject to the // following conditions: // // The above copyright notice and this permission notice shall be included // in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS // OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN // NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, // DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR // OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE // USE OR OTHER DEALINGS IN THE SOFTWARE. /**/ var Buffer = (__nccwpck_require__(93058).Buffer); /**/ var isEncoding = Buffer.isEncoding || function (encoding) { encoding = '' + encoding; switch (encoding && encoding.toLowerCase()) { case 'hex':case 'utf8':case 'utf-8':case 'ascii':case 'binary':case 'base64':case 'ucs2':case 'ucs-2':case 'utf16le':case 'utf-16le':case 'raw': return true; default: return false; } }; function _normalizeEncoding(enc) { if (!enc) return 'utf8'; var retried; while (true) { switch (enc) { case 'utf8': case 'utf-8': return 'utf8'; case 'ucs2': case 'ucs-2': case 'utf16le': case 'utf-16le': return 'utf16le'; case 'latin1': case 'binary': return 'latin1'; case 'base64': case 'ascii': case 'hex': return enc; default: if (retried) return; // undefined enc = ('' + enc).toLowerCase(); retried = true; } } }; // Do not cache `Buffer.isEncoding` when checking encoding names as some // modules monkey-patch it to support additional encodings function normalizeEncoding(enc) { var nenc = _normalizeEncoding(enc); if (typeof nenc !== 'string' && (Buffer.isEncoding === isEncoding || !isEncoding(enc))) throw new Error('Unknown encoding: ' + enc); return nenc || enc; } // StringDecoder provides an interface for efficiently splitting a series of // buffers into a series of JS strings without breaking apart multi-byte // characters. 
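// For example (illustrative chunks), a '€' (0xe2 0x82 0xac) split across two
// buffers is reassembled instead of producing a broken character:
//   const decoder = new StringDecoder('utf8');
//   decoder.write(Buffer.from([0xe2, 0x82]));  // ''   (incomplete sequence is buffered)
//   decoder.write(Buffer.from([0xac]));        // '€'  (completed by the next chunk)
//   decoder.end();                             // ''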
exports.I = StringDecoder; function StringDecoder(encoding) { this.encoding = normalizeEncoding(encoding); var nb; switch (this.encoding) { case 'utf16le': this.text = utf16Text; this.end = utf16End; nb = 4; break; case 'utf8': this.fillLast = utf8FillLast; nb = 4; break; case 'base64': this.text = base64Text; this.end = base64End; nb = 3; break; default: this.write = simpleWrite; this.end = simpleEnd; return; } this.lastNeed = 0; this.lastTotal = 0; this.lastChar = Buffer.allocUnsafe(nb); } StringDecoder.prototype.write = function (buf) { if (buf.length === 0) return ''; var r; var i; if (this.lastNeed) { r = this.fillLast(buf); if (r === undefined) return ''; i = this.lastNeed; this.lastNeed = 0; } else { i = 0; } if (i < buf.length) return r ? r + this.text(buf, i) : this.text(buf, i); return r || ''; }; StringDecoder.prototype.end = utf8End; // Returns only complete characters in a Buffer StringDecoder.prototype.text = utf8Text; // Attempts to complete a partial non-UTF-8 character using bytes from a Buffer StringDecoder.prototype.fillLast = function (buf) { if (this.lastNeed <= buf.length) { buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, this.lastNeed); return this.lastChar.toString(this.encoding, 0, this.lastTotal); } buf.copy(this.lastChar, this.lastTotal - this.lastNeed, 0, buf.length); this.lastNeed -= buf.length; }; // Checks the type of a UTF-8 byte, whether it's ASCII, a leading byte, or a // continuation byte. If an invalid byte is detected, -2 is returned. function utf8CheckByte(byte) { if (byte <= 0x7F) return 0;else if (byte >> 5 === 0x06) return 2;else if (byte >> 4 === 0x0E) return 3;else if (byte >> 3 === 0x1E) return 4; return byte >> 6 === 0x02 ? -1 : -2; } // Checks at most 3 bytes at the end of a Buffer in order to detect an // incomplete multi-byte UTF-8 character. The total number of bytes (2, 3, or 4) // needed to complete the UTF-8 character (if applicable) are returned. function utf8CheckIncomplete(self, buf, i) { var j = buf.length - 1; if (j < i) return 0; var nb = utf8CheckByte(buf[j]); if (nb >= 0) { if (nb > 0) self.lastNeed = nb - 1; return nb; } if (--j < i || nb === -2) return 0; nb = utf8CheckByte(buf[j]); if (nb >= 0) { if (nb > 0) self.lastNeed = nb - 2; return nb; } if (--j < i || nb === -2) return 0; nb = utf8CheckByte(buf[j]); if (nb >= 0) { if (nb > 0) { if (nb === 2) nb = 0;else self.lastNeed = nb - 3; } return nb; } return 0; } // Validates as many continuation bytes for a multi-byte UTF-8 character as // needed or are available. If we see a non-continuation byte where we expect // one, we "replace" the validated continuation bytes we've seen so far with // a single UTF-8 replacement character ('\ufffd'), to match v8's UTF-8 decoding // behavior. The continuation byte check is included three times in the case // where all of the continuation bytes for a character exist in the same buffer. // It is also done this way as a slight performance increase instead of using a // loop. function utf8CheckExtraBytes(self, buf, p) { if ((buf[0] & 0xC0) !== 0x80) { self.lastNeed = 0; return '\ufffd'; } if (self.lastNeed > 1 && buf.length > 1) { if ((buf[1] & 0xC0) !== 0x80) { self.lastNeed = 1; return '\ufffd'; } if (self.lastNeed > 2 && buf.length > 2) { if ((buf[2] & 0xC0) !== 0x80) { self.lastNeed = 2; return '\ufffd'; } } } } // Attempts to complete a multi-byte UTF-8 character using bytes from a Buffer. 
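// Continuing the split-'€' example above: at this point lastChar holds [0xe2, 0x82],
// lastTotal is 3 and lastNeed is 1, so the single 0xac byte from the next chunk is
// copied into lastChar and the completed character is returned.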
function utf8FillLast(buf) { var p = this.lastTotal - this.lastNeed; var r = utf8CheckExtraBytes(this, buf, p); if (r !== undefined) return r; if (this.lastNeed <= buf.length) { buf.copy(this.lastChar, p, 0, this.lastNeed); return this.lastChar.toString(this.encoding, 0, this.lastTotal); } buf.copy(this.lastChar, p, 0, buf.length); this.lastNeed -= buf.length; } // Returns all complete UTF-8 characters in a Buffer. If the Buffer ended on a // partial character, the character's bytes are buffered until the required // number of bytes are available. function utf8Text(buf, i) { var total = utf8CheckIncomplete(this, buf, i); if (!this.lastNeed) return buf.toString('utf8', i); this.lastTotal = total; var end = buf.length - (total - this.lastNeed); buf.copy(this.lastChar, 0, end); return buf.toString('utf8', i, end); } // For UTF-8, a replacement character is added when ending on a partial // character. function utf8End(buf) { var r = buf && buf.length ? this.write(buf) : ''; if (this.lastNeed) return r + '\ufffd'; return r; } // UTF-16LE typically needs two bytes per character, but even if we have an even // number of bytes available, we need to check if we end on a leading/high // surrogate. In that case, we need to wait for the next two bytes in order to // decode the last character properly. function utf16Text(buf, i) { if ((buf.length - i) % 2 === 0) { var r = buf.toString('utf16le', i); if (r) { var c = r.charCodeAt(r.length - 1); if (c >= 0xD800 && c <= 0xDBFF) { this.lastNeed = 2; this.lastTotal = 4; this.lastChar[0] = buf[buf.length - 2]; this.lastChar[1] = buf[buf.length - 1]; return r.slice(0, -1); } } return r; } this.lastNeed = 1; this.lastTotal = 2; this.lastChar[0] = buf[buf.length - 1]; return buf.toString('utf16le', i, buf.length - 1); } // For UTF-16LE we do not explicitly append special replacement characters if we // end on a partial character, we simply let v8 handle that. function utf16End(buf) { var r = buf && buf.length ? this.write(buf) : ''; if (this.lastNeed) { var end = this.lastTotal - this.lastNeed; return r + this.lastChar.toString('utf16le', 0, end); } return r; } function base64Text(buf, i) { var n = (buf.length - i) % 3; if (n === 0) return buf.toString('base64', i); this.lastNeed = 3 - n; this.lastTotal = 3; if (n === 1) { this.lastChar[0] = buf[buf.length - 1]; } else { this.lastChar[0] = buf[buf.length - 2]; this.lastChar[1] = buf[buf.length - 1]; } return buf.toString('base64', i, buf.length - n); } function base64End(buf) { var r = buf && buf.length ? this.write(buf) : ''; if (this.lastNeed) return r + this.lastChar.toString('base64', 0, 3 - this.lastNeed); return r; } // Pass bytes on through for single-byte encodings (e.g. ascii, latin1, hex) function simpleWrite(buf) { return buf.toString(this.encoding); } function simpleEnd(buf) { return buf && buf.length ? 
this.write(buf) : ''; } /***/ }), /***/ 56496: /***/ ((module) => { const hexRegex = /^[-+]?0x[a-fA-F0-9]+$/; const numRegex = /^([\-\+])?(0*)(\.[0-9]+([eE]\-?[0-9]+)?|[0-9]+(\.[0-9]+([eE]\-?[0-9]+)?)?)$/; // const octRegex = /0x[a-z0-9]+/; // const binRegex = /0x[a-z0-9]+/; //polyfill if (!Number.parseInt && window.parseInt) { Number.parseInt = window.parseInt; } if (!Number.parseFloat && window.parseFloat) { Number.parseFloat = window.parseFloat; } const consider = { hex : true, leadingZeros: true, decimalPoint: "\.", eNotation: true //skipLike: /regex/ }; function toNumber(str, options = {}){ // const options = Object.assign({}, consider); // if(opt.leadingZeros === false){ // options.leadingZeros = false; // }else if(opt.hex === false){ // options.hex = false; // } options = Object.assign({}, consider, options ); if(!str || typeof str !== "string" ) return str; let trimmedStr = str.trim(); // if(trimmedStr === "0.0") return 0; // else if(trimmedStr === "+0.0") return 0; // else if(trimmedStr === "-0.0") return -0; if(options.skipLike !== undefined && options.skipLike.test(trimmedStr)) return str; else if (options.hex && hexRegex.test(trimmedStr)) { return Number.parseInt(trimmedStr, 16); // } else if (options.parseOct && octRegex.test(str)) { // return Number.parseInt(val, 8); // }else if (options.parseBin && binRegex.test(str)) { // return Number.parseInt(val, 2); }else{ //separate negative sign, leading zeros, and rest number const match = numRegex.exec(trimmedStr); if(match){ const sign = match[1]; const leadingZeros = match[2]; let numTrimmedByZeros = trimZeros(match[3]); //complete num without leading zeros //trim ending zeros for floating number const eNotation = match[4] || match[6]; if(!options.leadingZeros && leadingZeros.length > 0 && sign && trimmedStr[2] !== ".") return str; //-0123 else if(!options.leadingZeros && leadingZeros.length > 0 && !sign && trimmedStr[1] !== ".") return str; //0123 else{//no leading zeros or leading zeros are allowed const num = Number(trimmedStr); const numStr = "" + num; if(numStr.search(/[eE]/) !== -1){ //given number is long and parsed to eNotation if(options.eNotation) return num; else return str; }else if(eNotation){ //given number has enotation if(options.eNotation) return num; else return str; }else if(trimmedStr.indexOf(".") !== -1){ //floating number // const decimalPart = match[5].substr(1); // const intPart = trimmedStr.substr(0,trimmedStr.indexOf(".")); // const p = numStr.indexOf("."); // const givenIntPart = numStr.substr(0,p); // const givenDecPart = numStr.substr(p+1); if(numStr === "0" && (numTrimmedByZeros === "") ) return num; //0.0 else if(numStr === numTrimmedByZeros) return num; //0.456. 
0.79000 else if( sign && numStr === "-"+numTrimmedByZeros) return num; else return str; } if(leadingZeros){ // if(numTrimmedByZeros === numStr){ // if(options.leadingZeros) return num; // else return str; // }else return str; if(numTrimmedByZeros === numStr) return num; else if(sign+numTrimmedByZeros === numStr) return num; else return str; } if(trimmedStr === numStr) return num; else if(trimmedStr === sign+numStr) return num; // else{ // //number with +/- sign // trimmedStr.test(/[-+][0-9]); // } return str; } // else if(!eNotation && trimmedStr && trimmedStr !== Number(trimmedStr) ) return str; }else{ //non-numeric string return str; } } } /** * * @param {string} numStr without leading zeros * @returns */ function trimZeros(numStr){ if(numStr && numStr.indexOf(".") !== -1){//float numStr = numStr.replace(/0+$/, ""); //remove ending zeros if(numStr === ".") numStr = "0"; else if(numStr[0] === ".") numStr = "0"+numStr; else if(numStr[numStr.length-1] === ".") numStr = numStr.substr(0,numStr.length-1); return numStr; } return numStr; } module.exports = toNumber /***/ }), /***/ 33897: /***/ ((module) => { "use strict"; module.exports = function stubs(obj, method, cfg, stub) { if (!obj || !method || !obj[method]) throw new Error('You must provide an object and a key for an existing method') if (!stub) { stub = cfg cfg = {} } stub = stub || function() {} cfg.callthrough = cfg.callthrough || false cfg.calls = cfg.calls || 0 var norevert = cfg.calls === 0 var cached = obj[method].bind(obj) obj[method] = function() { var args = [].slice.call(arguments) var returnVal if (cfg.callthrough) returnVal = cached.apply(obj, args) returnVal = stub.apply(obj, args) || returnVal if (!norevert && --cfg.calls === 0) obj[method] = cached return returnVal } } /***/ }), /***/ 97745: /***/ ((__unused_webpack_module, exports) => { "use strict"; /** * @license * Copyright 2020 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.TeenyStatistics = exports.TeenyStatisticsWarning = void 0; /** * @class TeenyStatisticsWarning * @extends Error * @description While an error, is used for emitting warnings when * meeting certain configured thresholds. * @see process.emitWarning */ class TeenyStatisticsWarning extends Error { /** * @param {string} message */ constructor(message) { super(message); this.threshold = 0; this.type = ''; this.value = 0; this.name = this.constructor.name; Error.captureStackTrace(this, this.constructor); } } exports.TeenyStatisticsWarning = TeenyStatisticsWarning; TeenyStatisticsWarning.CONCURRENT_REQUESTS = 'ConcurrentRequestsExceededWarning'; /** * @class TeenyStatistics * @description Maintain various statistics internal to teeny-request. Tracking * is not automatic and must be instrumented within teeny-request. 
*/ class TeenyStatistics { /** * @param {TeenyStatisticsOptions} [opts] */ constructor(opts) { /** * @type {number} * @private * @default 0 */ this._concurrentRequests = 0; /** * @type {boolean} * @private * @default false */ this._didConcurrentRequestWarn = false; this._options = TeenyStatistics._prepareOptions(opts); } /** * Returns a copy of the current options. * @return {TeenyStatisticsOptions} */ getOptions() { return Object.assign({}, this._options); } /** * Change configured statistics options. This will not preserve unspecified * options that were previously specified, i.e. this is a reset of options. * @param {TeenyStatisticsOptions} [opts] * @returns {TeenyStatisticsConfig} The previous options. * @see _prepareOptions */ setOptions(opts) { const oldOpts = this._options; this._options = TeenyStatistics._prepareOptions(opts); return oldOpts; } /** * @readonly * @return {TeenyStatisticsCounters} */ get counters() { return { concurrentRequests: this._concurrentRequests, }; } /** * @description Should call this right before making a request. */ requestStarting() { this._concurrentRequests++; if (this._options.concurrentRequests > 0 && this._concurrentRequests >= this._options.concurrentRequests && !this._didConcurrentRequestWarn) { this._didConcurrentRequestWarn = true; const warning = new TeenyStatisticsWarning('Possible excessive concurrent requests detected. ' + this._concurrentRequests + ' requests in-flight, which exceeds the configured threshold of ' + this._options.concurrentRequests + '. Use the TEENY_REQUEST_WARN_CONCURRENT_REQUESTS environment ' + 'variable or the concurrentRequests option of teeny-request to ' + 'increase or disable (0) this warning.'); warning.type = TeenyStatisticsWarning.CONCURRENT_REQUESTS; warning.value = this._concurrentRequests; warning.threshold = this._options.concurrentRequests; process.emitWarning(warning); } } /** * @description When using `requestStarting`, call this after the request * has finished. */ requestFinished() { // TODO negative? this._concurrentRequests--; } /** * Configuration Precedence: * 1. Dependency inversion via defined option. * 2. Global numeric environment variable. * 3. Built-in default. * This will not preserve unspecified options previously specified. * @param {TeenyStatisticsOptions} [opts] * @returns {TeenyStatisticsOptions} * @private */ static _prepareOptions({ concurrentRequests: diConcurrentRequests, } = {}) { let concurrentRequests = this.DEFAULT_WARN_CONCURRENT_REQUESTS; const envConcurrentRequests = Number(process.env.TEENY_REQUEST_WARN_CONCURRENT_REQUESTS); if (diConcurrentRequests !== undefined) { concurrentRequests = diConcurrentRequests; } else if (!Number.isNaN(envConcurrentRequests)) { concurrentRequests = envConcurrentRequests; } return { concurrentRequests }; } } exports.TeenyStatistics = TeenyStatistics; /** * @description A default threshold representing when to warn about excessive * in-flight/concurrent requests. * @type {number} * @static * @readonly * @default 5000 */ TeenyStatistics.DEFAULT_WARN_CONCURRENT_REQUESTS = 5000; //# sourceMappingURL=TeenyStatistics.js.map /***/ }), /***/ 34003: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; /** * @license * Copyright 2019 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getAgent = exports.pool = void 0; const http_1 = __nccwpck_require__(58611); const https_1 = __nccwpck_require__(65692); // eslint-disable-next-line node/no-deprecated-api const url_1 = __nccwpck_require__(87016); exports.pool = new Map(); /** * Determines if a proxy should be considered based on the environment. * * @param uri The request uri * @returns {boolean} */ function shouldUseProxyForURI(uri) { const noProxyEnv = process.env.NO_PROXY || process.env.no_proxy; if (!noProxyEnv) { return true; } const givenURI = new URL(uri); for (const noProxyRaw of noProxyEnv.split(',')) { const noProxy = noProxyRaw.trim(); if (noProxy === givenURI.origin || noProxy === givenURI.hostname) { return false; } else if (noProxy.startsWith('*.') || noProxy.startsWith('.')) { const noProxyWildcard = noProxy.replace(/^\*\./, '.'); if (givenURI.hostname.endsWith(noProxyWildcard)) { return false; } } } return true; } /** * Returns a custom request Agent if one is found, otherwise returns undefined * which will result in the global http(s) Agent being used. * @private * @param {string} uri The request uri * @param {Options} reqOpts The request options * @returns {HttpAnyAgent|undefined} */ function getAgent(uri, reqOpts) { const isHttp = uri.startsWith('http://'); const proxy = reqOpts.proxy || process.env.HTTP_PROXY || process.env.http_proxy || process.env.HTTPS_PROXY || process.env.https_proxy; const poolOptions = Object.assign({}, reqOpts.pool); const manuallyProvidedProxy = !!reqOpts.proxy; const shouldUseProxy = manuallyProvidedProxy || shouldUseProxyForURI(uri); if (proxy && shouldUseProxy) { // tslint:disable-next-line variable-name const Agent = isHttp ? __nccwpck_require__(875) : __nccwpck_require__(6518); const proxyOpts = { ...(0, url_1.parse)(proxy), ...poolOptions }; return new Agent(proxyOpts); } let key = isHttp ? 'http' : 'https'; if (reqOpts.forever) { key += ':forever'; if (!exports.pool.has(key)) { // tslint:disable-next-line variable-name const Agent = isHttp ? http_1.Agent : https_1.Agent; exports.pool.set(key, new Agent({ ...poolOptions, keepAlive: true })); } } return exports.pool.get(key); } exports.getAgent = getAgent; //# sourceMappingURL=agents.js.map /***/ }), /***/ 80321: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; /** * @license * Copyright 2018 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. 
*/ Object.defineProperty(exports, "__esModule", ({ value: true })); exports.teenyRequest = exports.RequestError = void 0; const node_fetch_1 = __nccwpck_require__(26705); const stream_1 = __nccwpck_require__(2203); const uuid = __nccwpck_require__(58993); const agents_1 = __nccwpck_require__(34003); const TeenyStatistics_1 = __nccwpck_require__(97745); // eslint-disable-next-line @typescript-eslint/no-var-requires const streamEvents = __nccwpck_require__(71546); class RequestError extends Error { } exports.RequestError = RequestError; /** * Convert options from Request to Fetch format * @private * @param reqOpts Request options */ function requestToFetchOptions(reqOpts) { const options = { method: reqOpts.method || 'GET', ...(reqOpts.timeout && { timeout: reqOpts.timeout }), ...(typeof reqOpts.gzip === 'boolean' && { compress: reqOpts.gzip }), }; if (typeof reqOpts.json === 'object') { // Add Content-type: application/json header reqOpts.headers = reqOpts.headers || {}; reqOpts.headers['Content-Type'] = 'application/json'; // Set body to JSON representation of value options.body = JSON.stringify(reqOpts.json); } else { if (Buffer.isBuffer(reqOpts.body)) { options.body = reqOpts.body; } else if (typeof reqOpts.body !== 'string') { options.body = JSON.stringify(reqOpts.body); } else { options.body = reqOpts.body; } } // eslint-disable-next-line @typescript-eslint/no-explicit-any options.headers = reqOpts.headers; let uri = (reqOpts.uri || reqOpts.url); if (!uri) { throw new Error('Missing uri or url in reqOpts.'); } if (reqOpts.useQuerystring === true || typeof reqOpts.qs === 'object') { // eslint-disable-next-line @typescript-eslint/no-var-requires const qs = __nccwpck_require__(83480); const params = qs.stringify(reqOpts.qs); uri = uri + '?' + params; } options.agent = (0, agents_1.getAgent)(uri, reqOpts); return { uri, options }; } /** * Convert a response from `fetch` to `request` format. * @private * @param opts The `request` options used to create the request. 
* @param res The Fetch response * @returns A `request` response object */ function fetchToRequestResponse(opts, res) { const request = {}; request.agent = opts.agent || false; request.headers = (opts.headers || {}); request.href = res.url; // headers need to be converted from a map to an obj const resHeaders = {}; res.headers.forEach((value, key) => (resHeaders[key] = value)); const response = Object.assign(res.body, { statusCode: res.status, statusMessage: res.statusText, request, body: res.body, headers: resHeaders, toJSON: () => ({ headers: resHeaders }), }); return response; } /** * Create POST body from two parts as multipart/related content-type * @private * @param boundary * @param multipart */ function createMultipartStream(boundary, multipart) { const finale = `--${boundary}--`; const stream = new stream_1.PassThrough(); for (const part of multipart) { const preamble = `--${boundary}\r\nContent-Type: ${part['Content-Type']}\r\n\r\n`; stream.write(preamble); if (typeof part.body === 'string') { stream.write(part.body); stream.write('\r\n'); } else { part.body.pipe(stream, { end: false }); part.body.on('end', () => { stream.write('\r\n'); stream.write(finale); stream.end(); }); } } return stream; } function teenyRequest(reqOpts, callback) { const { uri, options } = requestToFetchOptions(reqOpts); const multipart = reqOpts.multipart; if (reqOpts.multipart && multipart.length === 2) { if (!callback) { // TODO: add support for multipart uploads through streaming throw new Error('Multipart without callback is not implemented.'); } const boundary = uuid.v4(); options.headers['Content-Type'] = `multipart/related; boundary=${boundary}`; options.body = createMultipartStream(boundary, multipart); // Multipart upload teenyRequest.stats.requestStarting(); (0, node_fetch_1.default)(uri, options).then(res => { teenyRequest.stats.requestFinished(); const header = res.headers.get('content-type'); const response = fetchToRequestResponse(options, res); const body = response.body; if (header === 'application/json' || header === 'application/json; charset=utf-8') { res.json().then(json => { response.body = json; callback(null, response, json); }, (err) => { callback(err, response, body); }); return; } res.text().then(text => { response.body = text; callback(null, response, text); }, err => { callback(err, response, body); }); }, err => { teenyRequest.stats.requestFinished(); callback(err, null, null); }); return; } if (callback === undefined) { // Stream mode const requestStream = streamEvents(new stream_1.PassThrough()); // eslint-disable-next-line @typescript-eslint/no-explicit-any let responseStream; requestStream.once('reading', () => { if (responseStream) { (0, stream_1.pipeline)(responseStream, requestStream, () => { }); } else { requestStream.once('response', () => { (0, stream_1.pipeline)(responseStream, requestStream, () => { }); }); } }); options.compress = false; teenyRequest.stats.requestStarting(); (0, node_fetch_1.default)(uri, options).then(res => { teenyRequest.stats.requestFinished(); responseStream = res.body; responseStream.on('error', (err) => { requestStream.emit('error', err); }); const response = fetchToRequestResponse(options, res); requestStream.emit('response', response); }, err => { teenyRequest.stats.requestFinished(); requestStream.emit('error', err); }); // fetch doesn't supply the raw HTTP stream, instead it // returns a PassThrough piped from the HTTP response // stream. 
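// For example (illustrative call, placeholder URL), omitting the callback selects
// this stream mode, which mimics the `request` streaming API:
//   teenyRequest({ uri: 'https://example.com/' })
//     .on('response', res => console.log(res.statusCode))
//     .on('error', console.error)
//     .pipe(process.stdout);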
return requestStream; } // GET or POST with callback teenyRequest.stats.requestStarting(); (0, node_fetch_1.default)(uri, options).then(res => { teenyRequest.stats.requestFinished(); const header = res.headers.get('content-type'); const response = fetchToRequestResponse(options, res); const body = response.body; if (header === 'application/json' || header === 'application/json; charset=utf-8') { if (response.statusCode === 204) { // Probably a DELETE callback(null, response, body); return; } res.json().then(json => { response.body = json; callback(null, response, json); }, err => { callback(err, response, body); }); return; } res.text().then(text => { const response = fetchToRequestResponse(options, res); response.body = text; callback(null, response, text); }, err => { callback(err, response, body); }); }, err => { teenyRequest.stats.requestFinished(); callback(err, null, null); }); return; } exports.teenyRequest = teenyRequest; teenyRequest.defaults = (defaults) => { return (reqOpts, callback) => { const opts = { ...defaults, ...reqOpts }; if (callback === undefined) { return teenyRequest(opts); } teenyRequest(opts, callback); }; }; /** * Single instance of an interface for keeping track of things. */ teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(); teenyRequest.resetStats = () => { teenyRequest.stats = new TeenyStatistics_1.TeenyStatistics(teenyRequest.stats.getOptions()); }; //# sourceMappingURL=index.js.map /***/ }), /***/ 77954: /***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { "use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; const events_1 = __nccwpck_require__(24434); const debug_1 = __importDefault(__nccwpck_require__(2830)); const promisify_1 = __importDefault(__nccwpck_require__(44446)); const debug = debug_1.default('agent-base'); function isAgent(v) { return Boolean(v) && typeof v.addRequest === 'function'; } function isSecureEndpoint() { const { stack } = new Error(); if (typeof stack !== 'string') return false; return stack.split('\n').some(l => l.indexOf('(https.js:') !== -1 || l.indexOf('node:https:') !== -1); } function createAgent(callback, opts) { return new createAgent.Agent(callback, opts); } (function (createAgent) { /** * Base `http.Agent` implementation. * No pooling/keep-alive is implemented by default. * * @param {Function} callback * @api public */ class Agent extends events_1.EventEmitter { constructor(callback, _opts) { super(); let opts = _opts; if (typeof callback === 'function') { this.callback = callback; } else if (callback) { opts = callback; } // Timeout for the socket to be returned from the callback this.timeout = null; if (opts && typeof opts.timeout === 'number') { this.timeout = opts.timeout; } // These aren't actually used by `agent-base`, but are required // for the TypeScript definition files in `@types/node` :/ this.maxFreeSockets = 1; this.maxSockets = 1; this.maxTotalSockets = Infinity; this.sockets = {}; this.freeSockets = {}; this.requests = {}; this.options = {}; } get defaultPort() { if (typeof this.explicitDefaultPort === 'number') { return this.explicitDefaultPort; } return isSecureEndpoint() ? 443 : 80; } set defaultPort(v) { this.explicitDefaultPort = v; } get protocol() { if (typeof this.explicitProtocol === 'string') { return this.explicitProtocol; } return isSecureEndpoint() ? 
'https:' : 'http:'; } set protocol(v) { this.explicitProtocol = v; } callback(req, opts, fn) { throw new Error('"agent-base" has no default implementation, you must subclass and override `callback()`'); } /** * Called by node-core's "_http_client.js" module when creating * a new HTTP request with this Agent instance. * * @api public */ addRequest(req, _opts) { const opts = Object.assign({}, _opts); if (typeof opts.secureEndpoint !== 'boolean') { opts.secureEndpoint = isSecureEndpoint(); } if (opts.host == null) { opts.host = 'localhost'; } if (opts.port == null) { opts.port = opts.secureEndpoint ? 443 : 80; } if (opts.protocol == null) { opts.protocol = opts.secureEndpoint ? 'https:' : 'http:'; } if (opts.host && opts.path) { // If both a `host` and `path` are specified then it's most // likely the result of a `url.parse()` call... we need to // remove the `path` portion so that `net.connect()` doesn't // attempt to open that as a unix socket file. delete opts.path; } delete opts.agent; delete opts.hostname; delete opts._defaultAgent; delete opts.defaultPort; delete opts.createConnection; // Hint to use "Connection: close" // XXX: non-documented `http` module API :( req._last = true; req.shouldKeepAlive = false; let timedOut = false; let timeoutId = null; const timeoutMs = opts.timeout || this.timeout; const onerror = (err) => { if (req._hadError) return; req.emit('error', err); // For Safety. Some additional errors might fire later on // and we need to make sure we don't double-fire the error event. req._hadError = true; }; const ontimeout = () => { timeoutId = null; timedOut = true; const err = new Error(`A "socket" was not created for HTTP request before ${timeoutMs}ms`); err.code = 'ETIMEOUT'; onerror(err); }; const callbackError = (err) => { if (timedOut) return; if (timeoutId !== null) { clearTimeout(timeoutId); timeoutId = null; } onerror(err); }; const onsocket = (socket) => { if (timedOut) return; if (timeoutId != null) { clearTimeout(timeoutId); timeoutId = null; } if (isAgent(socket)) { // `socket` is actually an `http.Agent` instance, so // relinquish responsibility for this `req` to the Agent // from here on debug('Callback returned another Agent instance %o', socket.constructor.name); socket.addRequest(req, opts); return; } if (socket) { socket.once('free', () => { this.freeSocket(socket, opts); }); req.onSocket(socket); return; } const err = new Error(`no Duplex stream was returned to agent-base for \`${req.method} ${req.path}\``); onerror(err); }; if (typeof this.callback !== 'function') { onerror(new Error('`callback` is not defined')); return; } if (!this.promisifiedCallback) { if (this.callback.length >= 3) { debug('Converting legacy callback function to promise'); this.promisifiedCallback = promisify_1.default(this.callback); } else { this.promisifiedCallback = this.callback; } } if (typeof timeoutMs === 'number' && timeoutMs > 0) { timeoutId = setTimeout(ontimeout, timeoutMs); } if ('port' in opts && typeof opts.port !== 'number') { opts.port = Number(opts.port); } try { debug('Resolving socket for %o request: %o', opts.protocol, `${req.method} ${req.path}`); Promise.resolve(this.promisifiedCallback(req, opts)).then(onsocket, callbackError); } catch (err) { Promise.reject(err).catch(callbackError); } } freeSocket(socket, opts) { debug('Freeing socket %o %o', socket.constructor.name, opts); socket.destroy(); } destroy() { debug('Destroying agent %o', this.constructor.name); } } createAgent.Agent = Agent; // So that `instanceof` works correctly createAgent.prototype = 
createAgent.Agent.prototype; })(createAgent || (createAgent = {})); module.exports = createAgent; //# sourceMappingURL=index.js.map /***/ }), /***/ 44446: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function promisify(fn) { return function (req, opts) { return new Promise((resolve, reject) => { fn.call(this, req, opts, (err, rtn) => { if (err) { reject(err); } else { resolve(rtn); } }); }); }; } exports["default"] = promisify; //# sourceMappingURL=promisify.js.map /***/ }), /***/ 23034: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); const net_1 = __importDefault(__nccwpck_require__(69278)); const tls_1 = __importDefault(__nccwpck_require__(64756)); const url_1 = __importDefault(__nccwpck_require__(87016)); const debug_1 = __importDefault(__nccwpck_require__(2830)); const once_1 = __importDefault(__nccwpck_require__(48662)); const agent_base_1 = __nccwpck_require__(77954); const debug = (0, debug_1.default)('http-proxy-agent'); function isHTTPS(protocol) { return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; } /** * The `HttpProxyAgent` implements an HTTP Agent subclass that connects * to the specified "HTTP proxy server" in order to proxy HTTP requests. * * @api public */ class HttpProxyAgent extends agent_base_1.Agent { constructor(_opts) { let opts; if (typeof _opts === 'string') { opts = url_1.default.parse(_opts); } else { opts = _opts; } if (!opts) { throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); } debug('Creating new HttpProxyAgent instance: %o', opts); super(opts); const proxy = Object.assign({}, opts); // If `true`, then connect to the proxy server over TLS. // Defaults to `false`. this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); // Prefer `hostname` over `host`, and set the `port` if needed. proxy.host = proxy.hostname || proxy.host; if (typeof proxy.port === 'string') { proxy.port = parseInt(proxy.port, 10); } if (!proxy.port && proxy.host) { proxy.port = this.secureProxy ? 443 : 80; } if (proxy.host && proxy.path) { // If both a `host` and `path` are specified then it's most likely // the result of a `url.parse()` call... we need to remove the // `path` portion so that `net.connect()` doesn't attempt to open // that as a Unix socket file. delete proxy.path; delete proxy.pathname; } this.proxy = proxy; } /** * Called when the node-core HTTP client library is creating a * new HTTP request. 
* * @api protected */ callback(req, opts) { return __awaiter(this, void 0, void 0, function* () { const { proxy, secureProxy } = this; const parsed = url_1.default.parse(req.path); if (!parsed.protocol) { parsed.protocol = 'http:'; } if (!parsed.hostname) { parsed.hostname = opts.hostname || opts.host || null; } if (parsed.port == null && typeof opts.port) { parsed.port = String(opts.port); } if (parsed.port === '80') { // if port is 80, then we can remove the port so that the // ":80" portion is not on the produced URL parsed.port = ''; } // Change the `http.ClientRequest` instance's "path" field // to the absolute path of the URL that will be requested. req.path = url_1.default.format(parsed); // Inject the `Proxy-Authorization` header if necessary. if (proxy.auth) { req.setHeader('Proxy-Authorization', `Basic ${Buffer.from(proxy.auth).toString('base64')}`); } // Create a socket connection to the proxy server. let socket; if (secureProxy) { debug('Creating `tls.Socket`: %o', proxy); socket = tls_1.default.connect(proxy); } else { debug('Creating `net.Socket`: %o', proxy); socket = net_1.default.connect(proxy); } // At this point, the http ClientRequest's internal `_header` field // might have already been set. If this is the case then we'll need // to re-generate the string since we just changed the `req.path`. if (req._header) { let first; let endOfHeaders; debug('Regenerating stored HTTP header string for request'); req._header = null; req._implicitHeader(); if (req.output && req.output.length > 0) { // Node < 12 debug('Patching connection write() output buffer with updated header'); first = req.output[0]; endOfHeaders = first.indexOf('\r\n\r\n') + 4; req.output[0] = req._header + first.substring(endOfHeaders); debug('Output buffer: %o', req.output); } else if (req.outputData && req.outputData.length > 0) { // Node >= 12 debug('Patching connection write() output buffer with updated header'); first = req.outputData[0].data; endOfHeaders = first.indexOf('\r\n\r\n') + 4; req.outputData[0].data = req._header + first.substring(endOfHeaders); debug('Output buffer: %o', req.outputData[0].data); } } // Wait for the socket's `connect` event, so that this `callback()` // function throws instead of the `http` request machinery. This is // important for i.e. `PacProxyAgent` which determines a failed proxy // connection via the `callback()` function throwing. yield (0, once_1.default)(socket, 'connect'); return socket; }); } } exports["default"] = HttpProxyAgent; //# sourceMappingURL=agent.js.map /***/ }), /***/ 875: /***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { "use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; const agent_1 = __importDefault(__nccwpck_require__(23034)); function createHttpProxyAgent(opts) { return new agent_1.default(opts); } (function (createHttpProxyAgent) { createHttpProxyAgent.HttpProxyAgent = agent_1.default; createHttpProxyAgent.prototype = agent_1.default.prototype; })(createHttpProxyAgent || (createHttpProxyAgent = {})); module.exports = createHttpProxyAgent; //# sourceMappingURL=index.js.map /***/ }), /***/ 15299: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); const net_1 = __importDefault(__nccwpck_require__(69278)); const tls_1 = __importDefault(__nccwpck_require__(64756)); const url_1 = __importDefault(__nccwpck_require__(87016)); const assert_1 = __importDefault(__nccwpck_require__(42613)); const debug_1 = __importDefault(__nccwpck_require__(2830)); const agent_base_1 = __nccwpck_require__(77954); const parse_proxy_response_1 = __importDefault(__nccwpck_require__(27742)); const debug = debug_1.default('https-proxy-agent:agent'); /** * The `HttpsProxyAgent` implements an HTTP Agent subclass that connects to * the specified "HTTP(s) proxy server" in order to proxy HTTPS requests. * * Outgoing HTTP requests are first tunneled through the proxy server using the * `CONNECT` HTTP request method to establish a connection to the proxy server, * and then the proxy server connects to the destination target and issues the * HTTP request from the proxy server. * * `https:` requests have their socket connection upgraded to TLS once * the connection to the proxy server has been established. * * @api public */ class HttpsProxyAgent extends agent_base_1.Agent { constructor(_opts) { let opts; if (typeof _opts === 'string') { opts = url_1.default.parse(_opts); } else { opts = _opts; } if (!opts) { throw new Error('an HTTP(S) proxy server `host` and `port` must be specified!'); } debug('creating new HttpsProxyAgent instance: %o', opts); super(opts); const proxy = Object.assign({}, opts); // If `true`, then connect to the proxy server over TLS. // Defaults to `false`. this.secureProxy = opts.secureProxy || isHTTPS(proxy.protocol); // Prefer `hostname` over `host`, and set the `port` if needed. proxy.host = proxy.hostname || proxy.host; if (typeof proxy.port === 'string') { proxy.port = parseInt(proxy.port, 10); } if (!proxy.port && proxy.host) { proxy.port = this.secureProxy ? 443 : 80; } // ALPN is supported by Node.js >= v5. // attempt to negotiate http/1.1 for proxy servers that support http/2 if (this.secureProxy && !('ALPNProtocols' in proxy)) { proxy.ALPNProtocols = ['http 1.1']; } if (proxy.host && proxy.path) { // If both a `host` and `path` are specified then it's most likely // the result of a `url.parse()` call... we need to remove the // `path` portion so that `net.connect()` doesn't attempt to open // that as a Unix socket file. delete proxy.path; delete proxy.pathname; } this.proxy = proxy; } /** * Called when the node-core HTTP client library is creating a * new HTTP request. * * @api protected */ callback(req, opts) { return __awaiter(this, void 0, void 0, function* () { const { proxy, secureProxy } = this; // Create a socket connection to the proxy server. 
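// For example (hypothetical hosts), a request to https://example.com/ tunneled
// through the proxy http://proxy.local:3128 connects a plain socket to
// proxy.local:3128 and writes roughly:
//   CONNECT example.com:443 HTTP/1.1
//   Host: example.com
//   Connection: close
// then, on a 200 reply, upgrades that same socket to TLS for the target host.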
let socket; if (secureProxy) { debug('Creating `tls.Socket`: %o', proxy); socket = tls_1.default.connect(proxy); } else { debug('Creating `net.Socket`: %o', proxy); socket = net_1.default.connect(proxy); } const headers = Object.assign({}, proxy.headers); const hostname = `${opts.host}:${opts.port}`; let payload = `CONNECT ${hostname} HTTP/1.1\r\n`; // Inject the `Proxy-Authorization` header if necessary. if (proxy.auth) { headers['Proxy-Authorization'] = `Basic ${Buffer.from(proxy.auth).toString('base64')}`; } // The `Host` header should only include the port // number when it is not the default port. let { host, port, secureEndpoint } = opts; if (!isDefaultPort(port, secureEndpoint)) { host += `:${port}`; } headers.Host = host; headers.Connection = 'close'; for (const name of Object.keys(headers)) { payload += `${name}: ${headers[name]}\r\n`; } const proxyResponsePromise = parse_proxy_response_1.default(socket); socket.write(`${payload}\r\n`); const { statusCode, buffered } = yield proxyResponsePromise; if (statusCode === 200) { req.once('socket', resume); if (opts.secureEndpoint) { // The proxy is connecting to a TLS server, so upgrade // this socket connection to a TLS connection. debug('Upgrading socket connection to TLS'); const servername = opts.servername || opts.host; return tls_1.default.connect(Object.assign(Object.assign({}, omit(opts, 'host', 'hostname', 'path', 'port')), { socket, servername })); } return socket; } // Some other status code that's not 200... need to re-play the HTTP // header "data" events onto the socket once the HTTP machinery is // attached so that the node core `http` can parse and handle the // error status code. // Close the original socket, and a new "fake" socket is returned // instead, so that the proxy doesn't get the HTTP request // written to it (which may contain `Authorization` headers or other // sensitive data). // // See: https://hackerone.com/reports/541502 socket.destroy(); const fakeSocket = new net_1.default.Socket({ writable: false }); fakeSocket.readable = true; // Need to wait for the "socket" event to re-play the "data" events. req.once('socket', (s) => { debug('replaying proxy buffer for failed request'); assert_1.default(s.listenerCount('data') > 0); // Replay the "buffered" Buffer onto the fake `socket`, since at // this point the HTTP module machinery has been hooked up for // the user. s.push(buffered); s.push(null); }); return fakeSocket; }); } } exports["default"] = HttpsProxyAgent; function resume(socket) { socket.resume(); } function isDefaultPort(port, secure) { return Boolean((!secure && port === 80) || (secure && port === 443)); } function isHTTPS(protocol) { return typeof protocol === 'string' ? /^https:?$/i.test(protocol) : false; } function omit(obj, ...keys) { const ret = {}; let key; for (key in obj) { if (!keys.includes(key)) { ret[key] = obj[key]; } } return ret; } //# sourceMappingURL=agent.js.map /***/ }), /***/ 6518: /***/ (function(module, __unused_webpack_exports, __nccwpck_require__) { "use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; const agent_1 = __importDefault(__nccwpck_require__(15299)); function createHttpsProxyAgent(opts) { return new agent_1.default(opts); } (function (createHttpsProxyAgent) { createHttpsProxyAgent.HttpsProxyAgent = agent_1.default; createHttpsProxyAgent.prototype = agent_1.default.prototype; })(createHttpsProxyAgent || (createHttpsProxyAgent = {})); module.exports = createHttpsProxyAgent; //# sourceMappingURL=index.js.map /***/ }), /***/ 27742: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); const debug_1 = __importDefault(__nccwpck_require__(2830)); const debug = debug_1.default('https-proxy-agent:parse-proxy-response'); function parseProxyResponse(socket) { return new Promise((resolve, reject) => { // we need to buffer any HTTP traffic that happens with the proxy before we get // the CONNECT response, so that if the response is anything other than an "200" // response code, then we can re-play the "data" events on the socket once the // HTTP parser is hooked up... let buffersLength = 0; const buffers = []; function read() { const b = socket.read(); if (b) ondata(b); else socket.once('readable', read); } function cleanup() { socket.removeListener('end', onend); socket.removeListener('error', onerror); socket.removeListener('close', onclose); socket.removeListener('readable', read); } function onclose(err) { debug('onclose had error %o', err); } function onend() { debug('onend'); } function onerror(err) { cleanup(); debug('onerror %o', err); reject(err); } function ondata(b) { buffers.push(b); buffersLength += b.length; const buffered = Buffer.concat(buffers, buffersLength); const endOfHeaders = buffered.indexOf('\r\n\r\n'); if (endOfHeaders === -1) { // keep buffering debug('have not received end of HTTP headers yet...'); read(); return; } const firstLine = buffered.toString('ascii', 0, buffered.indexOf('\r\n')); const statusCode = +firstLine.split(' ')[1]; debug('got proxy server response: %o', firstLine); resolve({ statusCode, buffered }); } socket.on('error', onerror); socket.on('close', onclose); socket.on('end', onend); read(); }); } exports["default"] = parseProxyResponse; //# sourceMappingURL=parse-proxy-response.js.map /***/ }), /***/ 58993: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "NIL", ({ enumerable: true, get: function () { return _nil.default; } })); Object.defineProperty(exports, "parse", ({ enumerable: true, get: function () { return _parse.default; } })); Object.defineProperty(exports, "stringify", ({ enumerable: true, get: function () { return _stringify.default; } })); Object.defineProperty(exports, "v1", ({ enumerable: true, get: function () { return _v.default; } })); Object.defineProperty(exports, "v3", ({ enumerable: true, get: function () { return _v2.default; } })); Object.defineProperty(exports, "v4", ({ enumerable: true, get: function () { return _v3.default; } })); Object.defineProperty(exports, "v5", ({ enumerable: true, get: function () { return _v4.default; } })); Object.defineProperty(exports, "validate", ({ enumerable: true, get: function () { return _validate.default; } })); Object.defineProperty(exports, "version", ({ enumerable: true, get: function () { return 
_version.default; } })); var _v = _interopRequireDefault(__nccwpck_require__(64684)); var _v2 = _interopRequireDefault(__nccwpck_require__(53142)); var _v3 = _interopRequireDefault(__nccwpck_require__(69655)); var _v4 = _interopRequireDefault(__nccwpck_require__(70880)); var _nil = _interopRequireDefault(__nccwpck_require__(40194)); var _version = _interopRequireDefault(__nccwpck_require__(66257)); var _validate = _interopRequireDefault(__nccwpck_require__(61579)); var _stringify = _interopRequireDefault(__nccwpck_require__(71400)); var _parse = _interopRequireDefault(__nccwpck_require__(7814)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /***/ }), /***/ 88061: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function md5(bytes) { if (Array.isArray(bytes)) { bytes = Buffer.from(bytes); } else if (typeof bytes === 'string') { bytes = Buffer.from(bytes, 'utf8'); } return _crypto.default.createHash('md5').update(bytes).digest(); } var _default = md5; exports["default"] = _default; /***/ }), /***/ 7966: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } var _default = { randomUUID: _crypto.default.randomUUID }; exports["default"] = _default; /***/ }), /***/ 40194: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _default = '00000000-0000-0000-0000-000000000000'; exports["default"] = _default; /***/ }), /***/ 7814: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _validate = _interopRequireDefault(__nccwpck_require__(61579)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function parse(uuid) { if (!(0, _validate.default)(uuid)) { throw TypeError('Invalid UUID'); } let v; const arr = new Uint8Array(16); // Parse ########-....-....-....-............ arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; arr[1] = v >>> 16 & 0xff; arr[2] = v >>> 8 & 0xff; arr[3] = v & 0xff; // Parse ........-####-....-....-............ arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; arr[5] = v & 0xff; // Parse ........-....-####-....-............ arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
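/* Editor's note (illustrative sketch only): parse() above slices the
 * canonical UUID string into its five hex fields and packs them into a
 * Uint8Array(16). The remaining assignments below cover the low 64 bits;
 * because the 48-bit node field overflows 32-bit shifts, its two high
 * bytes are extracted with division instead of ">>>". For example, parsing
 * the DNS namespace constant defined later in this bundle:
 *
 *   parse('6ba7b810-9dad-11d1-80b4-00c04fd430c8');
 *   // -> Uint8Array [0x6b, 0xa7, 0xb8, 0x10, 0x9d, 0xad, 0x11, 0xd1,
 *   //                0x80, 0xb4, 0x00, 0xc0, 0x4f, 0xd4, 0x30, 0xc8]
 */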
arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; arr[9] = v & 0xff; // Parse ........-....-....-....-############ // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; arr[11] = v / 0x100000000 & 0xff; arr[12] = v >>> 24 & 0xff; arr[13] = v >>> 16 & 0xff; arr[14] = v >>> 8 & 0xff; arr[15] = v & 0xff; return arr; } var _default = parse; exports["default"] = _default; /***/ }), /***/ 1118: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; exports["default"] = _default; /***/ }), /***/ 88540: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = rng; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate let poolPtr = rnds8Pool.length; function rng() { if (poolPtr > rnds8Pool.length - 16) { _crypto.default.randomFillSync(rnds8Pool); poolPtr = 0; } return rnds8Pool.slice(poolPtr, poolPtr += 16); } /***/ }), /***/ 11352: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _crypto = _interopRequireDefault(__nccwpck_require__(76982)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function sha1(bytes) { if (Array.isArray(bytes)) { bytes = Buffer.from(bytes); } else if (typeof bytes === 'string') { bytes = Buffer.from(bytes, 'utf8'); } return _crypto.default.createHash('sha1').update(bytes).digest(); } var _default = sha1; exports["default"] = _default; /***/ }), /***/ 71400: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; exports.unsafeStringify = unsafeStringify; var _validate = _interopRequireDefault(__nccwpck_require__(61579)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } /** * Convert array of 16 byte values to UUID string format of the form: * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ const byteToHex = []; for (let i = 0; i < 256; ++i) { byteToHex.push((i + 0x100).toString(16).slice(1)); } function unsafeStringify(arr, offset = 0) { // Note: Be careful editing this code! It's been tuned for performance // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 return byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]; } function stringify(arr, offset = 0) { const uuid = unsafeStringify(arr, offset); // Consistency check for valid UUID. 
If this throws, it's likely due to one // of the following: // - One or more input array values don't map to a hex octet (leading to // "undefined" in the uuid) // - Invalid input values for the RFC `version` or `variant` fields if (!(0, _validate.default)(uuid)) { throw TypeError('Stringified UUID is invalid'); } return uuid; } var _default = stringify; exports["default"] = _default; /***/ }), /***/ 64684: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _rng = _interopRequireDefault(__nccwpck_require__(88540)); var _stringify = __nccwpck_require__(71400); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } // **`v1()` - Generate time-based UUID** // // Inspired by https://github.com/LiosK/UUID.js // and http://docs.python.org/library/uuid.html let _nodeId; let _clockseq; // Previous uuid creation time let _lastMSecs = 0; let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details function v1(options, buf, offset) { let i = buf && offset || 0; const b = buf || new Array(16); options = options || {}; let node = options.node || _nodeId; let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not // specified. We do this lazily to minimize issues related to insufficient // system entropy. See #189 if (node == null || clockseq == null) { const seedBytes = options.random || (options.rng || _rng.default)(); if (node == null) { // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; } if (clockseq == null) { // Per 4.2.2, randomize (14 bit) clockseq clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; } } // UUID timestamps are 100 nano-second units since the Gregorian epoch, // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock // cycle to simulate higher resolution clock let nsecs = options.nsecs !== undefined ? 
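/* Editor's note (illustrative sketch only): the surrounding v1() generator
 * converts Date.now() milliseconds into an RFC 4122 timestamp, which counts
 * 100-nanosecond intervals since the Gregorian epoch (1582-10-15). The
 * constant added to msecs a few lines below is simply that offset expressed
 * in milliseconds:
 *
 *   141427 days * 86400 s per day * 1000 ms per s = 12219292800000 ms
 *
 * The 60-bit timestamp is then (msecs + 12219292800000) * 10000 + nsecs,
 * split across the time_low, time_mid and time_high fields.
 */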
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression if (dt < 0 && options.clockseq === undefined) { clockseq = clockseq + 1 & 0x3fff; } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new // time interval if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { nsecs = 0; } // Per 4.2.1.2 Throw error if too many uuids are requested if (nsecs >= 10000) { throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); } _lastMSecs = msecs; _lastNSecs = nsecs; _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch msecs += 12219292800000; // `time_low` const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; b[i++] = tl >>> 24 & 0xff; b[i++] = tl >>> 16 & 0xff; b[i++] = tl >>> 8 & 0xff; b[i++] = tl & 0xff; // `time_mid` const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; b[i++] = tmh >>> 8 & 0xff; b[i++] = tmh & 0xff; // `time_high_and_version` b[i++] = tmh >>> 24 & 0xf | 0x10; // include version b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` b[i++] = clockseq & 0xff; // `node` for (let n = 0; n < 6; ++n) { b[i + n] = node[n]; } return buf || (0, _stringify.unsafeStringify)(b); } var _default = v1; exports["default"] = _default; /***/ }), /***/ 53142: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _v = _interopRequireDefault(__nccwpck_require__(44575)); var _md = _interopRequireDefault(__nccwpck_require__(88061)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } const v3 = (0, _v.default)('v3', 0x30, _md.default); var _default = v3; exports["default"] = _default; /***/ }), /***/ 44575: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.URL = exports.DNS = void 0; exports["default"] = v35; var _stringify = __nccwpck_require__(71400); var _parse = _interopRequireDefault(__nccwpck_require__(7814)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function stringToBytes(str) { str = unescape(encodeURIComponent(str)); // UTF8 escape const bytes = []; for (let i = 0; i < str.length; ++i) { bytes.push(str.charCodeAt(i)); } return bytes; } const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; exports.DNS = DNS; const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; exports.URL = URL; function v35(name, version, hashfunc) { function generateUUID(value, namespace, buf, offset) { var _namespace; if (typeof value === 'string') { value = stringToBytes(value); } if (typeof namespace === 'string') { namespace = (0, _parse.default)(namespace); } if (((_namespace = namespace) === null || _namespace === void 0 ? void 0 : _namespace.length) !== 16) { throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); } // Compute hash of namespace and value, Per 4.3 // Future: Use spread syntax when supported on all platforms, e.g. `bytes = // hashfunc([...namespace, ... 
value])` let bytes = new Uint8Array(16 + value.length); bytes.set(namespace); bytes.set(value, namespace.length); bytes = hashfunc(bytes); bytes[6] = bytes[6] & 0x0f | version; bytes[8] = bytes[8] & 0x3f | 0x80; if (buf) { offset = offset || 0; for (let i = 0; i < 16; ++i) { buf[offset + i] = bytes[i]; } return buf; } return (0, _stringify.unsafeStringify)(bytes); } // Function#name is not settable on some platforms (#270) try { generateUUID.name = name; // eslint-disable-next-line no-empty } catch (err) {} // For CommonJS default export support generateUUID.DNS = DNS; generateUUID.URL = URL; return generateUUID; } /***/ }), /***/ 69655: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _native = _interopRequireDefault(__nccwpck_require__(7966)); var _rng = _interopRequireDefault(__nccwpck_require__(88540)); var _stringify = __nccwpck_require__(71400); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function v4(options, buf, offset) { if (_native.default.randomUUID && !buf && !options) { return _native.default.randomUUID(); } options = options || {}; const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` rnds[6] = rnds[6] & 0x0f | 0x40; rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided if (buf) { offset = offset || 0; for (let i = 0; i < 16; ++i) { buf[offset + i] = rnds[i]; } return buf; } return (0, _stringify.unsafeStringify)(rnds); } var _default = v4; exports["default"] = _default; /***/ }), /***/ 70880: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _v = _interopRequireDefault(__nccwpck_require__(44575)); var _sha = _interopRequireDefault(__nccwpck_require__(11352)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } const v5 = (0, _v.default)('v5', 0x50, _sha.default); var _default = v5; exports["default"] = _default; /***/ }), /***/ 61579: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _regex = _interopRequireDefault(__nccwpck_require__(1118)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } function validate(uuid) { return typeof uuid === 'string' && _regex.default.test(uuid); } var _default = validate; exports["default"] = _default; /***/ }), /***/ 66257: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; var _validate = _interopRequireDefault(__nccwpck_require__(61579)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
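/* Editor's note (illustrative sketch only): v4() above takes the native
 * crypto.randomUUID() fast path when neither options nor an output buffer
 * are supplied; otherwise it draws 16 random bytes and forces the version
 * (0x40) and variant (0x80) bits before stringifying. A hedged usage
 * sketch:
 *
 *   v4();                        // e.g. '9b1deb4d-3b7d-4bad-9bdd-2b0d7b3dcb6d'
 *   v4({}, new Uint8Array(16));  // fills and returns the provided buffer
 */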
obj : { default: obj }; } function version(uuid) { if (!(0, _validate.default)(uuid)) { throw TypeError('Invalid UUID'); } return parseInt(uuid.slice(14, 15), 16); } var _default = version; exports["default"] = _default; /***/ }), /***/ 1552: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; var punycode = __nccwpck_require__(24876); var mappingTable = __nccwpck_require__(92472); var PROCESSING_OPTIONS = { TRANSITIONAL: 0, NONTRANSITIONAL: 1 }; function normalize(str) { // fix bug in v8 return str.split('\u0000').map(function (s) { return s.normalize('NFC'); }).join('\u0000'); } function findStatus(val) { var start = 0; var end = mappingTable.length - 1; while (start <= end) { var mid = Math.floor((start + end) / 2); var target = mappingTable[mid]; if (target[0][0] <= val && target[0][1] >= val) { return target; } else if (target[0][0] > val) { end = mid - 1; } else { start = mid + 1; } } return null; } var regexAstralSymbols = /[\uD800-\uDBFF][\uDC00-\uDFFF]/g; function countSymbols(string) { return string // replace every surrogate pair with a BMP symbol .replace(regexAstralSymbols, '_') // then get the length .length; } function mapChars(domain_name, useSTD3, processing_option) { var hasError = false; var processed = ""; var len = countSymbols(domain_name); for (var i = 0; i < len; ++i) { var codePoint = domain_name.codePointAt(i); var status = findStatus(codePoint); switch (status[1]) { case "disallowed": hasError = true; processed += String.fromCodePoint(codePoint); break; case "ignored": break; case "mapped": processed += String.fromCodePoint.apply(String, status[2]); break; case "deviation": if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) { processed += String.fromCodePoint.apply(String, status[2]); } else { processed += String.fromCodePoint(codePoint); } break; case "valid": processed += String.fromCodePoint(codePoint); break; case "disallowed_STD3_mapped": if (useSTD3) { hasError = true; processed += String.fromCodePoint(codePoint); } else { processed += String.fromCodePoint.apply(String, status[2]); } break; case "disallowed_STD3_valid": if (useSTD3) { hasError = true; } processed += String.fromCodePoint(codePoint); break; } } return { string: processed, error: hasError }; } var combiningMarksRegex = 
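/* Editor's note (illustrative sketch only): findStatus() above
 * binary-searches the IDNA mappingTable by code-point range, and mapChars()
 * applies the resulting status (valid / mapped / ignored / disallowed /
 * deviation) character by character. The module's exported toASCII() and
 * toUnicode() entry points, defined further down, drive this; a hedged
 * sketch with an arbitrary domain:
 *
 *   tr46.toASCII('münchen.example', false,
 *                tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, true);
 *   // should yield roughly 'xn--mnchen-3ya.example'
 */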
/[\u0300-\u036F\u0483-\u0489\u0591-\u05BD\u05BF\u05C1\u05C2\u05C4\u05C5\u05C7\u0610-\u061A\u064B-\u065F\u0670\u06D6-\u06DC\u06DF-\u06E4\u06E7\u06E8\u06EA-\u06ED\u0711\u0730-\u074A\u07A6-\u07B0\u07EB-\u07F3\u0816-\u0819\u081B-\u0823\u0825-\u0827\u0829-\u082D\u0859-\u085B\u08E4-\u0903\u093A-\u093C\u093E-\u094F\u0951-\u0957\u0962\u0963\u0981-\u0983\u09BC\u09BE-\u09C4\u09C7\u09C8\u09CB-\u09CD\u09D7\u09E2\u09E3\u0A01-\u0A03\u0A3C\u0A3E-\u0A42\u0A47\u0A48\u0A4B-\u0A4D\u0A51\u0A70\u0A71\u0A75\u0A81-\u0A83\u0ABC\u0ABE-\u0AC5\u0AC7-\u0AC9\u0ACB-\u0ACD\u0AE2\u0AE3\u0B01-\u0B03\u0B3C\u0B3E-\u0B44\u0B47\u0B48\u0B4B-\u0B4D\u0B56\u0B57\u0B62\u0B63\u0B82\u0BBE-\u0BC2\u0BC6-\u0BC8\u0BCA-\u0BCD\u0BD7\u0C00-\u0C03\u0C3E-\u0C44\u0C46-\u0C48\u0C4A-\u0C4D\u0C55\u0C56\u0C62\u0C63\u0C81-\u0C83\u0CBC\u0CBE-\u0CC4\u0CC6-\u0CC8\u0CCA-\u0CCD\u0CD5\u0CD6\u0CE2\u0CE3\u0D01-\u0D03\u0D3E-\u0D44\u0D46-\u0D48\u0D4A-\u0D4D\u0D57\u0D62\u0D63\u0D82\u0D83\u0DCA\u0DCF-\u0DD4\u0DD6\u0DD8-\u0DDF\u0DF2\u0DF3\u0E31\u0E34-\u0E3A\u0E47-\u0E4E\u0EB1\u0EB4-\u0EB9\u0EBB\u0EBC\u0EC8-\u0ECD\u0F18\u0F19\u0F35\u0F37\u0F39\u0F3E\u0F3F\u0F71-\u0F84\u0F86\u0F87\u0F8D-\u0F97\u0F99-\u0FBC\u0FC6\u102B-\u103E\u1056-\u1059\u105E-\u1060\u1062-\u1064\u1067-\u106D\u1071-\u1074\u1082-\u108D\u108F\u109A-\u109D\u135D-\u135F\u1712-\u1714\u1732-\u1734\u1752\u1753\u1772\u1773\u17B4-\u17D3\u17DD\u180B-\u180D\u18A9\u1920-\u192B\u1930-\u193B\u19B0-\u19C0\u19C8\u19C9\u1A17-\u1A1B\u1A55-\u1A5E\u1A60-\u1A7C\u1A7F\u1AB0-\u1ABE\u1B00-\u1B04\u1B34-\u1B44\u1B6B-\u1B73\u1B80-\u1B82\u1BA1-\u1BAD\u1BE6-\u1BF3\u1C24-\u1C37\u1CD0-\u1CD2\u1CD4-\u1CE8\u1CED\u1CF2-\u1CF4\u1CF8\u1CF9\u1DC0-\u1DF5\u1DFC-\u1DFF\u20D0-\u20F0\u2CEF-\u2CF1\u2D7F\u2DE0-\u2DFF\u302A-\u302F\u3099\u309A\uA66F-\uA672\uA674-\uA67D\uA69F\uA6F0\uA6F1\uA802\uA806\uA80B\uA823-\uA827\uA880\uA881\uA8B4-\uA8C4\uA8E0-\uA8F1\uA926-\uA92D\uA947-\uA953\uA980-\uA983\uA9B3-\uA9C0\uA9E5\uAA29-\uAA36\uAA43\uAA4C\uAA4D\uAA7B-\uAA7D\uAAB0\uAAB2-\uAAB4\uAAB7\uAAB8\uAABE\uAABF\uAAC1\uAAEB-\uAAEF\uAAF5\uAAF6\uABE3-\uABEA\uABEC\uABED\uFB1E\uFE00-\uFE0F\uFE20-\uFE2D]|\uD800[\uDDFD\uDEE0\uDF76-\uDF7A]|\uD802[\uDE01-\uDE03\uDE05\uDE06\uDE0C-\uDE0F\uDE38-\uDE3A\uDE3F\uDEE5\uDEE6]|\uD804[\uDC00-\uDC02\uDC38-\uDC46\uDC7F-\uDC82\uDCB0-\uDCBA\uDD00-\uDD02\uDD27-\uDD34\uDD73\uDD80-\uDD82\uDDB3-\uDDC0\uDE2C-\uDE37\uDEDF-\uDEEA\uDF01-\uDF03\uDF3C\uDF3E-\uDF44\uDF47\uDF48\uDF4B-\uDF4D\uDF57\uDF62\uDF63\uDF66-\uDF6C\uDF70-\uDF74]|\uD805[\uDCB0-\uDCC3\uDDAF-\uDDB5\uDDB8-\uDDC0\uDE30-\uDE40\uDEAB-\uDEB7]|\uD81A[\uDEF0-\uDEF4\uDF30-\uDF36]|\uD81B[\uDF51-\uDF7E\uDF8F-\uDF92]|\uD82F[\uDC9D\uDC9E]|\uD834[\uDD65-\uDD69\uDD6D-\uDD72\uDD7B-\uDD82\uDD85-\uDD8B\uDDAA-\uDDAD\uDE42-\uDE44]|\uD83A[\uDCD0-\uDCD6]|\uDB40[\uDD00-\uDDEF]/; function validateLabel(label, processing_option) { if (label.substr(0, 4) === "xn--") { label = punycode.toUnicode(label); processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL; } var error = false; if (normalize(label) !== label || (label[3] === "-" && label[4] === "-") || label[0] === "-" || label[label.length - 1] === "-" || label.indexOf(".") !== -1 || label.search(combiningMarksRegex) === 0) { error = true; } var len = countSymbols(label); for (var i = 0; i < len; ++i) { var status = findStatus(label.codePointAt(i)); if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== "valid") || (processing === PROCESSING_OPTIONS.NONTRANSITIONAL && status[1] !== "valid" && status[1] !== "deviation")) { error = true; break; } } return { label: label, error: error }; } function processing(domain_name, useSTD3, 
processing_option) { var result = mapChars(domain_name, useSTD3, processing_option); result.string = normalize(result.string); var labels = result.string.split("."); for (var i = 0; i < labels.length; ++i) { try { var validation = validateLabel(labels[i]); labels[i] = validation.label; result.error = result.error || validation.error; } catch(e) { result.error = true; } } return { string: labels.join("."), error: result.error }; } module.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) { var result = processing(domain_name, useSTD3, processing_option); var labels = result.string.split("."); labels = labels.map(function(l) { try { return punycode.toASCII(l); } catch(e) { result.error = true; return l; } }); if (verifyDnsLength) { var total = labels.slice(0, labels.length - 1).join(".").length; if (total.length > 253 || total.length === 0) { result.error = true; } for (var i=0; i < labels.length; ++i) { if (labels.length > 63 || labels.length === 0) { result.error = true; break; } } } if (result.error) return null; return labels.join("."); }; module.exports.toUnicode = function(domain_name, useSTD3) { var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL); return { domain: result.string, error: result.error }; }; module.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS; /***/ }), /***/ 61860: /***/ ((module) => { /****************************************************************************** Copyright (c) Microsoft Corporation. Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted. THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. ***************************************************************************** */ /* global global, define, Symbol, Reflect, Promise, SuppressedError, Iterator */ var __extends; var __assign; var __rest; var __decorate; var __param; var __esDecorate; var __runInitializers; var __propKey; var __setFunctionName; var __metadata; var __awaiter; var __generator; var __exportStar; var __values; var __read; var __spread; var __spreadArrays; var __spreadArray; var __await; var __asyncGenerator; var __asyncDelegator; var __asyncValues; var __makeTemplateObject; var __importStar; var __importDefault; var __classPrivateFieldGet; var __classPrivateFieldSet; var __classPrivateFieldIn; var __createBinding; var __addDisposableResource; var __disposeResources; var __rewriteRelativeImportExtension; (function (factory) { var root = typeof global === "object" ? global : typeof self === "object" ? self : typeof this === "object" ? 
this : {}; if (typeof define === "function" && define.amd) { define("tslib", ["exports"], function (exports) { factory(createExporter(root, createExporter(exports))); }); } else if ( true && typeof module.exports === "object") { factory(createExporter(root, createExporter(module.exports))); } else { factory(createExporter(root)); } function createExporter(exports, previous) { if (exports !== root) { if (typeof Object.create === "function") { Object.defineProperty(exports, "__esModule", { value: true }); } else { exports.__esModule = true; } } return function (id, v) { return exports[id] = previous ? previous(id, v) : v; }; } }) (function (exporter) { var extendStatics = Object.setPrototypeOf || ({ __proto__: [] } instanceof Array && function (d, b) { d.__proto__ = b; }) || function (d, b) { for (var p in b) if (Object.prototype.hasOwnProperty.call(b, p)) d[p] = b[p]; }; __extends = function (d, b) { if (typeof b !== "function" && b !== null) throw new TypeError("Class extends value " + String(b) + " is not a constructor or null"); extendStatics(d, b); function __() { this.constructor = d; } d.prototype = b === null ? Object.create(b) : (__.prototype = b.prototype, new __()); }; __assign = Object.assign || function (t) { for (var s, i = 1, n = arguments.length; i < n; i++) { s = arguments[i]; for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; } return t; }; __rest = function (s, e) { var t = {}; for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p]; if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]]; } return t; }; __decorate = function (decorators, target, key, desc) { var c = arguments.length, r = c < 3 ? target : desc === null ? desc = Object.getOwnPropertyDescriptor(target, key) : desc, d; if (typeof Reflect === "object" && typeof Reflect.decorate === "function") r = Reflect.decorate(decorators, target, key, desc); else for (var i = decorators.length - 1; i >= 0; i--) if (d = decorators[i]) r = (c < 3 ? d(r) : c > 3 ? d(target, key, r) : d(target, key)) || r; return c > 3 && r && Object.defineProperty(target, key, r), r; }; __param = function (paramIndex, decorator) { return function (target, key) { decorator(target, key, paramIndex); } }; __esDecorate = function (ctor, descriptorIn, decorators, contextIn, initializers, extraInitializers) { function accept(f) { if (f !== void 0 && typeof f !== "function") throw new TypeError("Function expected"); return f; } var kind = contextIn.kind, key = kind === "getter" ? "get" : kind === "setter" ? "set" : "value"; var target = !descriptorIn && ctor ? contextIn["static"] ? ctor : ctor.prototype : null; var descriptor = descriptorIn || (target ? Object.getOwnPropertyDescriptor(target, contextIn.name) : {}); var _, done = false; for (var i = decorators.length - 1; i >= 0; i--) { var context = {}; for (var p in contextIn) context[p] = p === "access" ? {} : contextIn[p]; for (var p in contextIn.access) context.access[p] = contextIn.access[p]; context.addInitializer = function (f) { if (done) throw new TypeError("Cannot add initializers after decoration has completed"); extraInitializers.push(accept(f || null)); }; var result = (0, decorators[i])(kind === "accessor" ? 
{ get: descriptor.get, set: descriptor.set } : descriptor[key], context); if (kind === "accessor") { if (result === void 0) continue; if (result === null || typeof result !== "object") throw new TypeError("Object expected"); if (_ = accept(result.get)) descriptor.get = _; if (_ = accept(result.set)) descriptor.set = _; if (_ = accept(result.init)) initializers.unshift(_); } else if (_ = accept(result)) { if (kind === "field") initializers.unshift(_); else descriptor[key] = _; } } if (target) Object.defineProperty(target, contextIn.name, descriptor); done = true; }; __runInitializers = function (thisArg, initializers, value) { var useValue = arguments.length > 2; for (var i = 0; i < initializers.length; i++) { value = useValue ? initializers[i].call(thisArg, value) : initializers[i].call(thisArg); } return useValue ? value : void 0; }; __propKey = function (x) { return typeof x === "symbol" ? x : "".concat(x); }; __setFunctionName = function (f, name, prefix) { if (typeof name === "symbol") name = name.description ? "[".concat(name.description, "]") : ""; return Object.defineProperty(f, "name", { configurable: true, value: prefix ? "".concat(prefix, " ", name) : name }); }; __metadata = function (metadataKey, metadataValue) { if (typeof Reflect === "object" && typeof Reflect.metadata === "function") return Reflect.metadata(metadataKey, metadataValue); }; __awaiter = function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; __generator = function (thisArg, body) { var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g = Object.create((typeof Iterator === "function" ? Iterator : Object).prototype); return g.next = verb(0), g["throw"] = verb(1), g["return"] = verb(2), typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g; function verb(n) { return function (v) { return step([n, v]); }; } function step(op) { if (f) throw new TypeError("Generator is already executing."); while (g && (g = 0, op[0] && (_ = 0)), _) try { if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t; if (y = 0, t) op = [op[0] & 2, t.value]; switch (op[0]) { case 0: case 1: t = op; break; case 4: _.label++; return { value: op[1], done: false }; case 5: _.label++; y = op[1]; op = [0]; continue; case 7: op = _.ops.pop(); _.trys.pop(); continue; default: if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; } if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; } if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; } if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; } if (t[2]) _.ops.pop(); _.trys.pop(); continue; } op = body.call(thisArg, _); } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; } if (op[0] & 5) throw op[1]; return { value: op[0] ? 
op[1] : void 0, done: true }; } }; __exportStar = function(m, o) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(o, p)) __createBinding(o, m, p); }; __createBinding = Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; }); __values = function (o) { var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0; if (m) return m.call(o); if (o && typeof o.length === "number") return { next: function () { if (o && i >= o.length) o = void 0; return { value: o && o[i++], done: !o }; } }; throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined."); }; __read = function (o, n) { var m = typeof Symbol === "function" && o[Symbol.iterator]; if (!m) return o; var i = m.call(o), r, ar = [], e; try { while ((n === void 0 || n-- > 0) && !(r = i.next()).done) ar.push(r.value); } catch (error) { e = { error: error }; } finally { try { if (r && !r.done && (m = i["return"])) m.call(i); } finally { if (e) throw e.error; } } return ar; }; /** @deprecated */ __spread = function () { for (var ar = [], i = 0; i < arguments.length; i++) ar = ar.concat(__read(arguments[i])); return ar; }; /** @deprecated */ __spreadArrays = function () { for (var s = 0, i = 0, il = arguments.length; i < il; i++) s += arguments[i].length; for (var r = Array(s), k = 0, i = 0; i < il; i++) for (var a = arguments[i], j = 0, jl = a.length; j < jl; j++, k++) r[k] = a[j]; return r; }; __spreadArray = function (to, from, pack) { if (pack || arguments.length === 2) for (var i = 0, l = from.length, ar; i < l; i++) { if (ar || !(i in from)) { if (!ar) ar = Array.prototype.slice.call(from, 0, i); ar[i] = from[i]; } } return to.concat(ar || Array.prototype.slice.call(from)); }; __await = function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); }; __asyncGenerator = function (thisArg, _arguments, generator) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var g = generator.apply(thisArg, _arguments || []), i, q = []; return i = Object.create((typeof AsyncIterator === "function" ? AsyncIterator : Object).prototype), verb("next"), verb("throw"), verb("return", awaitReturn), i[Symbol.asyncIterator] = function () { return this; }, i; function awaitReturn(f) { return function (v) { return Promise.resolve(v).then(f, reject); }; } function verb(n, f) { if (g[n]) { i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; if (f) i[n] = f(i[n]); } } function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } function fulfill(value) { resume("next", value); } function reject(value) { resume("throw", value); } function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } }; __asyncDelegator = function (o) { var i, p; return i = {}, verb("next"), verb("throw", function (e) { throw e; }), verb("return"), i[Symbol.iterator] = function () { return this; }, i; function verb(n, f) { i[n] = o[n] ? function (v) { return (p = !p) ? { value: __await(o[n](v)), done: false } : f ? 
f(v) : v; } : f; } }; __asyncValues = function (o) { if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); var m = o[Symbol.asyncIterator], i; return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } }; __makeTemplateObject = function (cooked, raw) { if (Object.defineProperty) { Object.defineProperty(cooked, "raw", { value: raw }); } else { cooked.raw = raw; } return cooked; }; var __setModuleDefault = Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }; var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; __importStar = function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; __importDefault = function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; __classPrivateFieldGet = function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; __classPrivateFieldSet = function (receiver, state, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; }; __classPrivateFieldIn = function (state, receiver) { if (receiver === null || (typeof receiver !== "object" && typeof receiver !== "function")) throw new TypeError("Cannot use 'in' operator on non-object"); return typeof state === "function" ? 
receiver === state : state.has(receiver); }; __addDisposableResource = function (env, value, async) { if (value !== null && value !== void 0) { if (typeof value !== "object" && typeof value !== "function") throw new TypeError("Object expected."); var dispose, inner; if (async) { if (!Symbol.asyncDispose) throw new TypeError("Symbol.asyncDispose is not defined."); dispose = value[Symbol.asyncDispose]; } if (dispose === void 0) { if (!Symbol.dispose) throw new TypeError("Symbol.dispose is not defined."); dispose = value[Symbol.dispose]; if (async) inner = dispose; } if (typeof dispose !== "function") throw new TypeError("Object not disposable."); if (inner) dispose = function() { try { inner.call(this); } catch (e) { return Promise.reject(e); } }; env.stack.push({ value: value, dispose: dispose, async: async }); } else if (async) { env.stack.push({ async: true }); } return value; }; var _SuppressedError = typeof SuppressedError === "function" ? SuppressedError : function (error, suppressed, message) { var e = new Error(message); return e.name = "SuppressedError", e.error = error, e.suppressed = suppressed, e; }; __disposeResources = function (env) { function fail(e) { env.error = env.hasError ? new _SuppressedError(e, env.error, "An error was suppressed during disposal.") : e; env.hasError = true; } var r, s = 0; function next() { while (r = env.stack.pop()) { try { if (!r.async && s === 1) return s = 0, env.stack.push(r), Promise.resolve().then(next); if (r.dispose) { var result = r.dispose.call(r.value); if (r.async) return s |= 2, Promise.resolve(result).then(next, function(e) { fail(e); return next(); }); } else s |= 1; } catch (e) { fail(e); } } if (s === 1) return env.hasError ? Promise.reject(env.error) : Promise.resolve(); if (env.hasError) throw env.error; } return next(); }; __rewriteRelativeImportExtension = function (path, preserveJsx) { if (typeof path === "string" && /^\.\.?\//.test(path)) { return path.replace(/\.(tsx)$|((?:\.d)?)((?:\.[^./]+?)?)\.([cm]?)ts$/i, function (m, tsx, d, ext, cm) { return tsx ? preserveJsx ? ".jsx" : ".js" : d && (!ext || !cm) ? m : (d + ext + "." 
+ cm.toLowerCase() + "js"); }); } return path; }; exporter("__extends", __extends); exporter("__assign", __assign); exporter("__rest", __rest); exporter("__decorate", __decorate); exporter("__param", __param); exporter("__esDecorate", __esDecorate); exporter("__runInitializers", __runInitializers); exporter("__propKey", __propKey); exporter("__setFunctionName", __setFunctionName); exporter("__metadata", __metadata); exporter("__awaiter", __awaiter); exporter("__generator", __generator); exporter("__exportStar", __exportStar); exporter("__createBinding", __createBinding); exporter("__values", __values); exporter("__read", __read); exporter("__spread", __spread); exporter("__spreadArrays", __spreadArrays); exporter("__spreadArray", __spreadArray); exporter("__await", __await); exporter("__asyncGenerator", __asyncGenerator); exporter("__asyncDelegator", __asyncDelegator); exporter("__asyncValues", __asyncValues); exporter("__makeTemplateObject", __makeTemplateObject); exporter("__importStar", __importStar); exporter("__importDefault", __importDefault); exporter("__classPrivateFieldGet", __classPrivateFieldGet); exporter("__classPrivateFieldSet", __classPrivateFieldSet); exporter("__classPrivateFieldIn", __classPrivateFieldIn); exporter("__addDisposableResource", __addDisposableResource); exporter("__disposeResources", __disposeResources); exporter("__rewriteRelativeImportExtension", __rewriteRelativeImportExtension); }); 0 && (0); /***/ }), /***/ 20770: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { module.exports = __nccwpck_require__(20218); /***/ }), /***/ 20218: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; var net = __nccwpck_require__(69278); var tls = __nccwpck_require__(64756); var http = __nccwpck_require__(58611); var https = __nccwpck_require__(65692); var events = __nccwpck_require__(24434); var assert = __nccwpck_require__(42613); var util = __nccwpck_require__(39023); exports.httpOverHttp = httpOverHttp; exports.httpsOverHttp = httpsOverHttp; exports.httpOverHttps = httpOverHttps; exports.httpsOverHttps = httpsOverHttps; function httpOverHttp(options) { var agent = new TunnelingAgent(options); agent.request = http.request; return agent; } function httpsOverHttp(options) { var agent = new TunnelingAgent(options); agent.request = http.request; agent.createSocket = createSecureSocket; agent.defaultPort = 443; return agent; } function httpOverHttps(options) { var agent = new TunnelingAgent(options); agent.request = https.request; return agent; } function httpsOverHttps(options) { var agent = new TunnelingAgent(options); agent.request = https.request; agent.createSocket = createSecureSocket; agent.defaultPort = 443; return agent; } function TunnelingAgent(options) { var self = this; self.options = options || {}; self.proxyOptions = self.options.proxy || {}; self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; self.requests = []; self.sockets = []; self.on('free', function onFree(socket, host, port, localAddress) { var options = toOptions(host, port, localAddress); for (var i = 0, len = self.requests.length; i < len; ++i) { var pending = self.requests[i]; if (pending.host === options.host && pending.port === options.port) { // Detect the request to connect same origin server, // reuse the connection. 
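/* Editor's note (illustrative sketch only): the tunnel helpers exported
 * above (httpOverHttp, httpsOverHttp, ...) return an agent that issues a
 * CONNECT request through the configured proxy and then hands the
 * established socket to http/https. A minimal, hedged usage sketch (the
 * proxy host and port are assumptions):
 *
 *   const https = require('https');
 *   const tunnel = require('tunnel');
 *   const agent = tunnel.httpsOverHttp({
 *     proxy: { host: '127.0.0.1', port: 3128 }
 *   });
 *   https.get('https://example.com/', { agent }, (res) => {
 *     console.log(res.statusCode);
 *   });
 */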
self.requests.splice(i, 1); pending.request.onSocket(socket); return; } } socket.destroy(); self.removeSocket(socket); }); } util.inherits(TunnelingAgent, events.EventEmitter); TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { var self = this; var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); if (self.sockets.length >= this.maxSockets) { // We are over limit so we'll add it to the queue. self.requests.push(options); return; } // If we are under maxSockets create a new one. self.createSocket(options, function(socket) { socket.on('free', onFree); socket.on('close', onCloseOrRemove); socket.on('agentRemove', onCloseOrRemove); req.onSocket(socket); function onFree() { self.emit('free', socket, options); } function onCloseOrRemove(err) { self.removeSocket(socket); socket.removeListener('free', onFree); socket.removeListener('close', onCloseOrRemove); socket.removeListener('agentRemove', onCloseOrRemove); } }); }; TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { var self = this; var placeholder = {}; self.sockets.push(placeholder); var connectOptions = mergeOptions({}, self.proxyOptions, { method: 'CONNECT', path: options.host + ':' + options.port, agent: false, headers: { host: options.host + ':' + options.port } }); if (options.localAddress) { connectOptions.localAddress = options.localAddress; } if (connectOptions.proxyAuth) { connectOptions.headers = connectOptions.headers || {}; connectOptions.headers['Proxy-Authorization'] = 'Basic ' + new Buffer(connectOptions.proxyAuth).toString('base64'); } debug('making CONNECT request'); var connectReq = self.request(connectOptions); connectReq.useChunkedEncodingByDefault = false; // for v0.6 connectReq.once('response', onResponse); // for v0.6 connectReq.once('upgrade', onUpgrade); // for v0.6 connectReq.once('connect', onConnect); // for v0.7 or later connectReq.once('error', onError); connectReq.end(); function onResponse(res) { // Very hacky. This is necessary to avoid http-parser leaks. res.upgrade = true; } function onUpgrade(res, socket, head) { // Hacky. 
process.nextTick(function() { onConnect(res, socket, head); }); } function onConnect(res, socket, head) { connectReq.removeAllListeners(); socket.removeAllListeners(); if (res.statusCode !== 200) { debug('tunneling socket could not be established, statusCode=%d', res.statusCode); socket.destroy(); var error = new Error('tunneling socket could not be established, ' + 'statusCode=' + res.statusCode); error.code = 'ECONNRESET'; options.request.emit('error', error); self.removeSocket(placeholder); return; } if (head.length > 0) { debug('got illegal response body from proxy'); socket.destroy(); var error = new Error('got illegal response body from proxy'); error.code = 'ECONNRESET'; options.request.emit('error', error); self.removeSocket(placeholder); return; } debug('tunneling connection has established'); self.sockets[self.sockets.indexOf(placeholder)] = socket; return cb(socket); } function onError(cause) { connectReq.removeAllListeners(); debug('tunneling socket could not be established, cause=%s\n', cause.message, cause.stack); var error = new Error('tunneling socket could not be established, ' + 'cause=' + cause.message); error.code = 'ECONNRESET'; options.request.emit('error', error); self.removeSocket(placeholder); } }; TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { var pos = this.sockets.indexOf(socket) if (pos === -1) { return; } this.sockets.splice(pos, 1); var pending = this.requests.shift(); if (pending) { // If we have pending requests and a socket gets closed a new one // needs to be created to take over in the pool for the one that closed. this.createSocket(pending, function(socket) { pending.request.onSocket(socket); }); } }; function createSecureSocket(options, cb) { var self = this; TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { var hostHeader = options.request.getHeader('host'); var tlsOptions = mergeOptions({}, self.options, { socket: socket, servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host }); // 0 is dummy port for v0.6 var secureSocket = tls.connect(0, tlsOptions); self.sockets[self.sockets.indexOf(socket)] = secureSocket; cb(secureSocket); }); } function toOptions(host, port, localAddress) { if (typeof host === 'string') { // since v0.10 return { host: host, port: port, localAddress: localAddress }; } return host; // for v0.11 or later } function mergeOptions(target) { for (var i = 1, len = arguments.length; i < len; ++i) { var overrides = arguments[i]; if (typeof overrides === 'object') { var keys = Object.keys(overrides); for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { var k = keys[j]; if (overrides[k] !== undefined) { target[k] = overrides[k]; } } } } return target; } var debug; if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { debug = function() { var args = Array.prototype.slice.call(arguments); if (typeof args[0] === 'string') { args[0] = 'TUNNEL: ' + args[0]; } else { args.unshift('TUNNEL:'); } console.error.apply(console, args); } } else { debug = function() {}; } exports.debug = debug; // for test /***/ }), /***/ 45497: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); /***/ }), /***/ 13315: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isValidErrorCode = exports.httpStatusFromErrorCode = exports.TwirpErrorCode = exports.BadRouteError = exports.InternalServerErrorWith = exports.InternalServerError = exports.RequiredArgumentError = exports.InvalidArgumentError = exports.NotFoundError = exports.TwirpError = void 0; /** * Represents a twirp error */ class TwirpError extends Error { constructor(code, msg) { super(msg); this.code = TwirpErrorCode.Internal; this.meta = {}; this.code = code; this.msg = msg; Object.setPrototypeOf(this, TwirpError.prototype); } /** * Adds a metadata kv to the error * @param key * @param value */ withMeta(key, value) { this.meta[key] = value; return this; } /** * Returns a single metadata value * return "" if not found * @param key */ getMeta(key) { return this.meta[key] || ""; } /** * Add the original error cause * @param err * @param addMeta */ withCause(err, addMeta = false) { this._originalCause = err; if (addMeta) { this.withMeta("cause", err.message); } return this; } cause() { return this._originalCause; } /** * Returns the error representation to JSON */ toJSON() { try { return JSON.stringify({ code: this.code, msg: this.msg, meta: this.meta, }); } catch (e) { return `{"code": "internal", "msg": "There was an error but it could not be serialized into JSON"}`; } } /** * Create a twirp error from an object * @param obj */ static fromObject(obj) { const code = obj["code"] || TwirpErrorCode.Unknown; const msg = obj["msg"] || "unknown"; const error = new TwirpError(code, msg); if (obj["meta"]) { Object.keys(obj["meta"]).forEach((key) => { error.withMeta(key, obj["meta"][key]); }); } return error; } } exports.TwirpError = TwirpError; /** * NotFoundError constructor for the common NotFound error. */ class NotFoundError extends TwirpError { constructor(msg) { super(TwirpErrorCode.NotFound, msg); } } exports.NotFoundError = NotFoundError; /** * InvalidArgumentError constructor for the common InvalidArgument error. Can be * used when an argument has invalid format, is a number out of range, is a bad * option, etc). 
*/ class InvalidArgumentError extends TwirpError { constructor(argument, validationMsg) { super(TwirpErrorCode.InvalidArgument, argument + " " + validationMsg); this.withMeta("argument", argument); } } exports.InvalidArgumentError = InvalidArgumentError; /** * RequiredArgumentError is a more specific constructor for InvalidArgument * error. Should be used when the argument is required (expected to have a * non-zero value). */ class RequiredArgumentError extends InvalidArgumentError { constructor(argument) { super(argument, "is required"); } } exports.RequiredArgumentError = RequiredArgumentError; /** * InternalError constructor for the common Internal error. Should be used to * specify that something bad or unexpected happened. */ class InternalServerError extends TwirpError { constructor(msg) { super(TwirpErrorCode.Internal, msg); } } exports.InternalServerError = InternalServerError; /** * InternalErrorWith makes an internal error, wrapping the original error and using it * for the error message, and with metadata "cause" with the original error type. * This function is used by Twirp services to wrap non-Twirp errors as internal errors. * The wrapped error can be extracted later with err.cause() */ class InternalServerErrorWith extends InternalServerError { constructor(err) { super(err.message); this.withMeta("cause", err.name); this.withCause(err); } } exports.InternalServerErrorWith = InternalServerErrorWith; /** * A standard BadRoute Error */ class BadRouteError extends TwirpError { constructor(msg, method, url) { super(TwirpErrorCode.BadRoute, msg); this.withMeta("twirp_invalid_route", method + " " + url); } } exports.BadRouteError = BadRouteError; var TwirpErrorCode; (function (TwirpErrorCode) { // Canceled indicates the operation was cancelled (typically by the caller). TwirpErrorCode["Canceled"] = "canceled"; // Unknown error. For example when handling errors raised by APIs that do not // return enough error information. TwirpErrorCode["Unknown"] = "unknown"; // InvalidArgument indicates client specified an invalid argument. It // indicates arguments that are problematic regardless of the state of the // system (i.e. a malformed file name, required argument, number out of range, // etc.). TwirpErrorCode["InvalidArgument"] = "invalid_argument"; // Malformed indicates an error occurred while decoding the client's request. // This may mean that the message was encoded improperly, or that there is a // disagreement in message format between the client and server. TwirpErrorCode["Malformed"] = "malformed"; // DeadlineExceeded means operation expired before completion. For operations // that change the state of the system, this error may be returned even if the // operation has completed successfully (timeout). TwirpErrorCode["DeadlineExceeded"] = "deadline_exceeded"; // NotFound means some requested entity was not found. TwirpErrorCode["NotFound"] = "not_found"; // BadRoute means that the requested URL path wasn't routable to a Twirp // service and method. This is returned by the generated server, and usually // shouldn't be returned by applications. Instead, applications should use // NotFound or Unimplemented. TwirpErrorCode["BadRoute"] = "bad_route"; // AlreadyExists means an attempt to create an entity failed because one // already exists. TwirpErrorCode["AlreadyExists"] = "already_exists"; // PermissionDenied indicates the caller does not have permission to execute // the specified operation. It must not be used if the caller cannot be // identified (Unauthenticated). 
TwirpErrorCode["PermissionDenied"] = "permission_denied"; // Unauthenticated indicates the request does not have valid authentication // credentials for the operation. TwirpErrorCode["Unauthenticated"] = "unauthenticated"; // ResourceExhausted indicates some resource has been exhausted, perhaps a // per-user quota, or perhaps the entire file system is out of space. TwirpErrorCode["ResourceExhausted"] = "resource_exhausted"; // FailedPrecondition indicates operation was rejected because the system is // not in a state required for the operation's execution. For example, doing // an rmdir operation on a directory that is non-empty, or on a non-directory // object, or when having conflicting read-modify-write on the same resource. TwirpErrorCode["FailedPrecondition"] = "failed_precondition"; // Aborted indicates the operation was aborted, typically due to a concurrency // issue like sequencer check failures, transaction aborts, etc. TwirpErrorCode["Aborted"] = "aborted"; // OutOfRange means operation was attempted past the valid range. For example, // seeking or reading past end of a paginated collection. // // Unlike InvalidArgument, this error indicates a problem that may be fixed if // the system state changes (i.e. adding more items to the collection). // // There is a fair bit of overlap between FailedPrecondition and OutOfRange. // We recommend using OutOfRange (the more specific error) when it applies so // that callers who are iterating through a space can easily look for an // OutOfRange error to detect when they are done. TwirpErrorCode["OutOfRange"] = "out_of_range"; // Unimplemented indicates operation is not implemented or not // supported/enabled in this service. TwirpErrorCode["Unimplemented"] = "unimplemented"; // Internal errors. When some invariants expected by the underlying system // have been broken. In other words, something bad happened in the library or // backend service. Do not confuse with HTTP Internal Server Error; an // Internal error could also happen on the client code, i.e. when parsing a // server response. TwirpErrorCode["Internal"] = "internal"; // Unavailable indicates the service is currently unavailable. This is a most // likely a transient condition and may be corrected by retrying with a // backoff. TwirpErrorCode["Unavailable"] = "unavailable"; // DataLoss indicates unrecoverable data loss or corruption. TwirpErrorCode["DataLoss"] = "data_loss"; })(TwirpErrorCode = exports.TwirpErrorCode || (exports.TwirpErrorCode = {})); // ServerHTTPStatusFromErrorCode maps a Twirp error type into a similar HTTP // response status. It is used by the Twirp server handler to set the HTTP // response status code. Returns 0 if the ErrorCode is invalid. 
function httpStatusFromErrorCode(code) { switch (code) { case TwirpErrorCode.Canceled: return 408; // RequestTimeout case TwirpErrorCode.Unknown: return 500; // Internal Server Error case TwirpErrorCode.InvalidArgument: return 400; // BadRequest case TwirpErrorCode.Malformed: return 400; // BadRequest case TwirpErrorCode.DeadlineExceeded: return 408; // RequestTimeout case TwirpErrorCode.NotFound: return 404; // Not Found case TwirpErrorCode.BadRoute: return 404; // Not Found case TwirpErrorCode.AlreadyExists: return 409; // Conflict case TwirpErrorCode.PermissionDenied: return 403; // Forbidden case TwirpErrorCode.Unauthenticated: return 401; // Unauthorized case TwirpErrorCode.ResourceExhausted: return 429; // Too Many Requests case TwirpErrorCode.FailedPrecondition: return 412; // Precondition Failed case TwirpErrorCode.Aborted: return 409; // Conflict case TwirpErrorCode.OutOfRange: return 400; // Bad Request case TwirpErrorCode.Unimplemented: return 501; // Not Implemented case TwirpErrorCode.Internal: return 500; // Internal Server Error case TwirpErrorCode.Unavailable: return 503; // Service Unavailable case TwirpErrorCode.DataLoss: return 500; // Internal Server Error default: return 0; // Invalid! } } exports.httpStatusFromErrorCode = httpStatusFromErrorCode; // IsValidErrorCode returns true if is one of the valid predefined constants. function isValidErrorCode(code) { return httpStatusFromErrorCode(code) != 0; } exports.isValidErrorCode = isValidErrorCode; /***/ }), /***/ 69636: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __rest = (this && this.__rest) || function (s, e) { var t = {}; for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p) && e.indexOf(p) < 0) t[p] = s[p]; if (s != null && typeof Object.getOwnPropertySymbols === "function") for (var i = 0, p = Object.getOwnPropertySymbols(s); i < p.length; i++) { if (e.indexOf(p[i]) < 0 && Object.prototype.propertyIsEnumerable.call(s, p[i])) t[p[i]] = s[p[i]]; } return t; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Gateway = exports.Pattern = void 0; const querystring_1 = __nccwpck_require__(83480); const dotObject = __importStar(__nccwpck_require__(95129)); const request_1 = __nccwpck_require__(9647); const errors_1 = __nccwpck_require__(13315); const http_client_1 = __nccwpck_require__(15683); const server_1 = __nccwpck_require__(11035); var Pattern; (function (Pattern) { Pattern["POST"] = "post"; Pattern["GET"] = "get"; Pattern["PATCH"] = "patch"; Pattern["PUT"] = "put"; Pattern["DELETE"] = "delete"; })(Pattern = exports.Pattern || (exports.Pattern = {})); /** * The Gateway proxies http requests to Twirp Compliant * handlers */ class Gateway { constructor(routes) { this.routes = routes; } /** * Middleware that rewrite the current request * to a Twirp compliant request */ twirpRewrite(prefix = "/twirp") { return (req, resp, next) => { this.rewrite(req, resp, prefix) .then(() => next()) .catch((e) => { if (e instanceof errors_1.TwirpError) { if (e.code !== errors_1.TwirpErrorCode.NotFound) { server_1.writeError(resp, e); } else { next(); } } }); }; } /** * Rewrite an incoming request to a Twirp compliant request * @param req * @param resp * @param prefix */ rewrite(req, resp, prefix = "/twirp") { return __awaiter(this, void 0, void 0, function* () { const [match, route] = this.matchRoute(req); const body = yield this.prepareTwirpBody(req, match, route); const twirpUrl = `${prefix}/${route.packageName}.${route.serviceName}/${route.methodName}`; req.url = twirpUrl; req.originalUrl = twirpUrl; req.method = "POST"; req.headers["content-type"] = "application/json"; req.rawBody = Buffer.from(JSON.stringify(body)); if (route.responseBodyKey) { const endFn = resp.end.bind(resp); resp.end = function (chunk) { if (resp.statusCode === 200) { endFn(`{ "${route.responseBodyKey}": ${chunk} }`); } else { endFn(chunk); } }; } }); } /** * Create a reverse proxy handler to * proxy http requests to Twirp Compliant handlers * @param httpClientOption */ reverseProxy(httpClientOption) { const client = http_client_1.NodeHttpRPC(httpClientOption); return (req, res) => __awaiter(this, void 0, void 0, function* () { try { const [match, route] = this.matchRoute(req); const body = yield this.prepareTwirpBody(req, match, route); const response = yield client.request(`${route.packageName}.${route.serviceName}`, route.methodName, "application/json", body); res.statusCode = 200; res.setHeader("content-type", "application/json"); let jsonResponse; if (route.responseBodyKey) { jsonResponse = JSON.stringify({ [route.responseBodyKey]: response }); } else { jsonResponse = JSON.stringify(response); } res.end(jsonResponse); } catch (e) { server_1.writeError(res, e); } }); } /** * Prepares twirp body requests using http.google.annotions * compliant spec * * @param req * @param match * @param route * @protected */ prepareTwirpBody(req, match, route) { return __awaiter(this, void 0, void 0, function* () { 
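// The steps below assemble the Twirp request body from three sources, in
// increasing precedence: the JSON payload (placed under route.bodyKey, or
// used as-is when bodyKey is "*"), then query-string parameters, then path
// parameters captured by the route matcher. Hypothetical example for a route
// with bodyKey "hat" matched against /hats/:id?color=red:
//
//   path params   { id: "123" }
//   query string  { color: "red" }
//   JSON payload  { size: 12 }
//   merged body   { hat: { size: 12 }, color: "red", id: "123" }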
const _a = match.params, { query_string } = _a, params = __rest(_a, ["query_string"]); let requestBody = Object.assign({}, params); if (query_string && route.bodyKey !== "*") { const queryParams = this.parseQueryString(query_string); requestBody = Object.assign(Object.assign({}, queryParams), requestBody); } let body = {}; if (route.bodyKey) { const data = yield request_1.getRequestData(req); try { const jsonBody = JSON.parse(data.toString() || "{}"); if (route.bodyKey === "*") { body = jsonBody; } else { body[route.bodyKey] = jsonBody; } } catch (e) { const msg = "the json request could not be decoded"; throw new errors_1.TwirpError(errors_1.TwirpErrorCode.Malformed, msg).withCause(e, true); } } return Object.assign(Object.assign({}, body), requestBody); }); } /** * Matches a route * @param req */ matchRoute(req) { var _a; const httpMethod = (_a = req.method) === null || _a === void 0 ? void 0 : _a.toLowerCase(); if (!httpMethod) { throw new errors_1.BadRouteError(`method not allowed`, req.method || "", req.url || ""); } const routes = this.routes[httpMethod]; for (const route of routes) { const match = route.matcher(req.url || "/"); if (match) { return [match, route]; } } throw new errors_1.NotFoundError(`url ${req.url} not found`); } /** * Parse query string * @param queryString */ parseQueryString(queryString) { const queryParams = querystring_1.parse(queryString.replace("?", "")); return dotObject.object(queryParams); } } exports.Gateway = Gateway; /***/ }), /***/ 3898: /***/ (function(__unused_webpack_module, exports) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isHook = exports.chainHooks = void 0; // ChainHooks creates a new ServerHook which chains the callbacks in // each of the constituent hooks passed in. Each hook function will be // called in the order of the ServerHooks values passed in. // // For the erroring hooks, RequestReceived and RequestRouted, any returned // errors prevent processing by later hooks. function chainHooks(...hooks) { if (hooks.length === 0) { return null; } if (hooks.length === 1) { return hooks[0]; } const serverHook = { requestReceived(ctx) { return __awaiter(this, void 0, void 0, function* () { for (const hook of hooks) { if (!hook.requestReceived) { continue; } yield hook.requestReceived(ctx); } }); }, requestPrepared(ctx) { return __awaiter(this, void 0, void 0, function* () { for (const hook of hooks) { if (!hook.requestPrepared) { continue; } console.warn("hook requestPrepared is deprecated and will be removed in the next release. 
" + "Please use responsePrepared instead."); yield hook.requestPrepared(ctx); } }); }, responsePrepared(ctx) { return __awaiter(this, void 0, void 0, function* () { for (const hook of hooks) { if (!hook.responsePrepared) { continue; } yield hook.responsePrepared(ctx); } }); }, requestSent(ctx) { return __awaiter(this, void 0, void 0, function* () { for (const hook of hooks) { if (!hook.requestSent) { continue; } console.warn("hook requestSent is deprecated and will be removed in the next release. " + "Please use responseSent instead."); yield hook.requestSent(ctx); } }); }, responseSent(ctx) { return __awaiter(this, void 0, void 0, function* () { for (const hook of hooks) { if (!hook.responseSent) { continue; } yield hook.responseSent(ctx); } }); }, requestRouted(ctx) { return __awaiter(this, void 0, void 0, function* () { for (const hook of hooks) { if (!hook.requestRouted) { continue; } yield hook.requestRouted(ctx); } }); }, error(ctx, err) { return __awaiter(this, void 0, void 0, function* () { for (const hook of hooks) { if (!hook.error) { continue; } yield hook.error(ctx, err); } }); }, }; return serverHook; } exports.chainHooks = chainHooks; function isHook(object) { return ("requestReceived" in object || "requestPrepared" in object || "requestSent" in object || "requestRouted" in object || "responsePrepared" in object || "responseSent" in object || "error" in object); } exports.isHook = isHook; /***/ }), /***/ 15683: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.FetchRPC = exports.wrapErrorResponseToTwirpError = exports.NodeHttpRPC = void 0; const http = __importStar(__nccwpck_require__(58611)); const https = __importStar(__nccwpck_require__(65692)); const url_1 = __nccwpck_require__(87016); const errors_1 = __nccwpck_require__(13315); /** * a node HTTP RPC implementation * @param options * @constructor */ const NodeHttpRPC = (options) => ({ request(service, method, contentType, data) { let client; return new Promise((resolve, rejected) => { const responseChunks = []; const requestData = contentType === "application/protobuf" ? Buffer.from(data) : JSON.stringify(data); const url = new url_1.URL(options.baseUrl); const isHttps = url.protocol === "https:"; if (isHttps) { client = https; } else { client = http; } const prefix = url.pathname !== "/" ? url.pathname : ""; const req = client .request(Object.assign(Object.assign({}, (options ? options : {})), { method: "POST", protocol: url.protocol, host: url.hostname, port: url.port ? url.port : isHttps ? 443 : 80, path: `${prefix}/${service}/${method}`, headers: Object.assign(Object.assign({}, (options.headers ? options.headers : {})), { "Content-Type": contentType, "Content-Length": contentType === "application/protobuf" ? Buffer.byteLength(requestData) : Buffer.from(requestData).byteLength }) }), (res) => { res.on("data", (chunk) => responseChunks.push(chunk)); res.on("end", () => { const data = Buffer.concat(responseChunks); if (res.statusCode != 200) { rejected(wrapErrorResponseToTwirpError(data.toString())); } else { if (contentType === "application/json") { resolve(JSON.parse(data.toString())); } else { resolve(data); } } }); res.on("error", (err) => { rejected(err); }); }) .on("error", (err) => { rejected(err); }); req.end(requestData); }); }, }); exports.NodeHttpRPC = NodeHttpRPC; function wrapErrorResponseToTwirpError(errorResponse) { return errors_1.TwirpError.fromObject(JSON.parse(errorResponse)); } exports.wrapErrorResponseToTwirpError = wrapErrorResponseToTwirpError; /** * a browser fetch RPC implementation */ const FetchRPC = (options) => ({ request(service, method, contentType, data) { return __awaiter(this, void 0, void 0, function* () { const headers = new Headers(options.headers); headers.set("content-type", contentType); const response = yield fetch(`${options.baseUrl}/${service}/${method}`, Object.assign(Object.assign({}, options), { method: "POST", headers, body: data instanceof Uint8Array ? data : JSON.stringify(data) })); if (response.status === 200) { if (contentType === "application/json") { return yield response.json(); } return new Uint8Array(yield response.arrayBuffer()); } throw errors_1.TwirpError.fromObject(yield response.json()); }); }, }); exports.FetchRPC = FetchRPC; /***/ }), /***/ 30430: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? 
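// Illustrative usage sketch for the two RPC clients exported above; the
// service, method and payload are hypothetical:
//
//   const client = NodeHttpRPC({ baseUrl: "http://localhost:8080/twirp" });
//   const reply = await client.request(
//     "example.v1.Haberdasher",   // <package>.<service>
//     "MakeHat",                  // method name
//     "application/json",
//     { inches: 12 }
//   );
//
// FetchRPC({ baseUrl, headers }) exposes the same request() signature but
// uses the browser fetch API, resolving parsed JSON for "application/json"
// and a Uint8Array for protobuf payloads.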
(function(o, m, k, k2) { if (k2 === undefined) k2 = k; Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.TwirpContentType = void 0; __exportStar(__nccwpck_require__(45497), exports); __exportStar(__nccwpck_require__(11035), exports); __exportStar(__nccwpck_require__(84036), exports); __exportStar(__nccwpck_require__(3898), exports); __exportStar(__nccwpck_require__(13315), exports); __exportStar(__nccwpck_require__(69636), exports); __exportStar(__nccwpck_require__(15683), exports); var request_1 = __nccwpck_require__(9647); Object.defineProperty(exports, "TwirpContentType", ({ enumerable: true, get: function () { return request_1.TwirpContentType; } })); /***/ }), /***/ 84036: /***/ (function(__unused_webpack_module, exports) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.chainInterceptors = void 0; // chains multiple Interceptors into a single Interceptor. // The first interceptor wraps the second one, and so on. // Returns null if interceptors is empty. function chainInterceptors(...interceptors) { if (interceptors.length === 0) { return; } if (interceptors.length === 1) { return interceptors[0]; } const first = interceptors[0]; return (ctx, request, handler) => __awaiter(this, void 0, void 0, function* () { let next = handler; for (let i = interceptors.length - 1; i > 0; i--) { next = ((next) => (ctx, typedRequest) => { return interceptors[i](ctx, typedRequest, next); })(next); } return first(ctx, request, next); }); } exports.chainInterceptors = chainInterceptors; /***/ }), /***/ 9647: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? 
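// Illustrative sketch of the interceptor chaining defined above: interceptors
// wrap the handler outside-in, so the first interceptor runs first and sees
// the rest of the chain as its `next`. The interceptor bodies are
// hypothetical:
//
//   const logging = async (ctx, req, next) => {
//     console.time("rpc");
//     try { return await next(ctx, req); } finally { console.timeEnd("rpc"); }
//   };
//   const auth = async (ctx, req, next) => {
//     // verify credentials on ctx before delegating
//     return next(ctx, req);
//   };
//   const chained = chainInterceptors(logging, auth);
//   // chained(ctx, request, handler) runs logging -> auth -> handler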
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.parseTwirpPath = exports.getRequestData = exports.validateRequest = exports.getContentType = exports.TwirpContentType = void 0; const errors_1 = __nccwpck_require__(13315); /** * Supported Twirp Content-Type */ var TwirpContentType; (function (TwirpContentType) { TwirpContentType[TwirpContentType["Protobuf"] = 0] = "Protobuf"; TwirpContentType[TwirpContentType["JSON"] = 1] = "JSON"; TwirpContentType[TwirpContentType["Unknown"] = 2] = "Unknown"; })(TwirpContentType = exports.TwirpContentType || (exports.TwirpContentType = {})); /** * Get supported content-type * @param mimeType */ function getContentType(mimeType) { switch (mimeType) { case "application/protobuf": return TwirpContentType.Protobuf; case "application/json": return TwirpContentType.JSON; default: return TwirpContentType.Unknown; } } exports.getContentType = getContentType; /** * Validate a twirp request * @param ctx * @param request * @param pathPrefix */ function validateRequest(ctx, request, pathPrefix) { if (request.method !== "POST") { const msg = `unsupported method ${request.method} (only POST is allowed)`; throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); } const path = parseTwirpPath(request.url || ""); if (path.pkgService !== (ctx.packageName ? ctx.packageName + "." : "") + ctx.serviceName) { const msg = `no handler for path ${request.url}`; throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); } if (path.prefix !== pathPrefix) { const msg = `invalid path prefix ${path.prefix}, expected ${pathPrefix}, on path ${request.url}`; throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); } const mimeContentType = request.headers["content-type"] || ""; if (ctx.contentType === TwirpContentType.Unknown) { const msg = `unexpected Content-Type: ${request.headers["content-type"]}`; throw new errors_1.BadRouteError(msg, request.method || "", request.url || ""); } return Object.assign(Object.assign({}, path), { mimeContentType, contentType: ctx.contentType }); } exports.validateRequest = validateRequest; /** * Get request data from the body * @param req */ function getRequestData(req) { return new Promise((resolve, reject) => { const reqWithRawBody = req; if (reqWithRawBody.rawBody instanceof Buffer) { resolve(reqWithRawBody.rawBody); return; } const chunks = []; req.on("data", (chunk) => chunks.push(chunk)); req.on("end", () => __awaiter(this, void 0, void 0, function* () { const data = Buffer.concat(chunks); resolve(data); })); req.on("error", (err) => { if (req.aborted) { reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.DeadlineExceeded, "failed to read request: deadline exceeded")); } else { reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.Malformed, err.message).withCause(err)); } }); req.on("close", () => { reject(new errors_1.TwirpError(errors_1.TwirpErrorCode.Canceled, "failed to read request: context canceled")); }); }); } exports.getRequestData = getRequestData; /** * Parses twirp url path * @param path */ function parseTwirpPath(path) { const parts = path.split("/"); if (parts.length < 2) { return { pkgService: "", method: "", prefix: "", }; } return { method: parts[parts.length - 1], pkgService: parts[parts.length - 2], prefix: parts.slice(0, parts.length - 2).join("/"), }; } exports.parseTwirpPath = 
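// Illustrative results for the request helpers defined above (the service
// name is hypothetical):
//
//   getContentType("application/json");   // TwirpContentType.JSON
//   getContentType("text/plain");         // TwirpContentType.Unknown
//   parseTwirpPath("/twirp/example.v1.Haberdasher/MakeHat");
//   // => { method: "MakeHat",
//   //      pkgService: "example.v1.Haberdasher",
//   //      prefix: "/twirp" }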
parseTwirpPath; /***/ }), /***/ 11035: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.writeError = exports.TwirpServer = void 0; const hooks_1 = __nccwpck_require__(3898); const request_1 = __nccwpck_require__(9647); const errors_1 = __nccwpck_require__(13315); /** * Runtime server implementation of a TwirpServer */ class TwirpServer { constructor(options) { this.pathPrefix = "/twirp"; this.hooks = []; this.interceptors = []; this.packageName = options.packageName; this.serviceName = options.serviceName; this.methodList = options.methodList; this.matchRoute = options.matchRoute; this.service = options.service; } /** * Returns the prefix for this server */ get prefix() { return this.pathPrefix; } /** * The http handler for twirp complaint endpoints * @param options */ httpHandler(options) { return (req, resp) => { // setup prefix if ((options === null || options === void 0 ? void 0 : options.prefix) !== undefined) { this.withPrefix(options.prefix); } return this._httpHandler(req, resp); }; } /** * Adds interceptors or hooks to the request stack * @param middlewares */ use(...middlewares) { middlewares.forEach((middleware) => { if (hooks_1.isHook(middleware)) { this.hooks.push(middleware); return this; } this.interceptors.push(middleware); }); return this; } /** * Adds a prefix to the service url path * @param prefix */ withPrefix(prefix) { if (prefix === false) { this.pathPrefix = ""; } else { this.pathPrefix = prefix; } return this; } /** * Returns the regex matching path for this twirp server */ matchingPath() { const baseRegex = this.baseURI().replace(/\./g, "\\."); return new RegExp(`${baseRegex}\/(${this.methodList.join("|")})`); } /** * Returns the base URI for this twirp server */ baseURI() { return `${this.pathPrefix}/${this.packageName ? this.packageName + "." 
: ""}${this.serviceName}`; } /** * Create a twirp context * @param req * @param res * @private */ createContext(req, res) { return { packageName: this.packageName, serviceName: this.serviceName, methodName: "", contentType: request_1.getContentType(req.headers["content-type"]), req: req, res: res, }; } /** * Twrip server http handler implementation * @param req * @param resp * @private */ _httpHandler(req, resp) { return __awaiter(this, void 0, void 0, function* () { const ctx = this.createContext(req, resp); try { yield this.invokeHook("requestReceived", ctx); const { method, mimeContentType } = request_1.validateRequest(ctx, req, this.pathPrefix || ""); const handler = this.matchRoute(method, { onMatch: (ctx) => { return this.invokeHook("requestRouted", ctx); }, onNotFound: () => { const msg = `no handler for path ${req.url}`; throw new errors_1.BadRouteError(msg, req.method || "", req.url || ""); }, }); const body = yield request_1.getRequestData(req); const response = yield handler(ctx, this.service, body, this.interceptors); yield Promise.all([ this.invokeHook("responsePrepared", ctx), // keep backwards compatibility till next release this.invokeHook("requestPrepared", ctx), ]); resp.statusCode = 200; resp.setHeader("Content-Type", mimeContentType); resp.end(response); } catch (e) { yield this.invokeHook("error", ctx, mustBeTwirpError(e)); if (!resp.headersSent) { writeError(resp, e); } } finally { yield Promise.all([ this.invokeHook("responseSent", ctx), // keep backwards compatibility till next release this.invokeHook("requestSent", ctx), ]); } }); } /** * Invoke a hook * @param hookName * @param ctx * @param err * @protected */ invokeHook(hookName, ctx, err) { return __awaiter(this, void 0, void 0, function* () { if (this.hooks.length === 0) { return; } const chainedHooks = hooks_1.chainHooks(...this.hooks); const hook = chainedHooks === null || chainedHooks === void 0 ? 
void 0 : chainedHooks[hookName]; if (hook) { yield hook(ctx, err || new errors_1.InternalServerError("internal server error")); } }); } } exports.TwirpServer = TwirpServer; /** * Write http error response * @param res * @param error */ function writeError(res, error) { const twirpError = mustBeTwirpError(error); res.setHeader("Content-Type", "application/json"); res.statusCode = errors_1.httpStatusFromErrorCode(twirpError.code); res.end(twirpError.toJSON()); } exports.writeError = writeError; /** * Make sure that the error passed is a TwirpError * otherwise it will wrap it into an InternalError * @param err */ function mustBeTwirpError(err) { if (err instanceof errors_1.TwirpError) { return err; } return new errors_1.InternalServerErrorWith(err); } /***/ }), /***/ 46752: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const Client = __nccwpck_require__(86197) const Dispatcher = __nccwpck_require__(28611) const errors = __nccwpck_require__(68707) const Pool = __nccwpck_require__(35076) const BalancedPool = __nccwpck_require__(81093) const Agent = __nccwpck_require__(59965) const util = __nccwpck_require__(3440) const { InvalidArgumentError } = errors const api = __nccwpck_require__(56615) const buildConnector = __nccwpck_require__(59136) const MockClient = __nccwpck_require__(47365) const MockAgent = __nccwpck_require__(47501) const MockPool = __nccwpck_require__(94004) const mockErrors = __nccwpck_require__(52429) const ProxyAgent = __nccwpck_require__(22720) const RetryHandler = __nccwpck_require__(53573) const { getGlobalDispatcher, setGlobalDispatcher } = __nccwpck_require__(32581) const DecoratorHandler = __nccwpck_require__(78840) const RedirectHandler = __nccwpck_require__(48299) const createRedirectInterceptor = __nccwpck_require__(64415) let hasCrypto try { __nccwpck_require__(76982) hasCrypto = true } catch { hasCrypto = false } Object.assign(Dispatcher.prototype, api) module.exports.Dispatcher = Dispatcher module.exports.Client = Client module.exports.Pool = Pool module.exports.BalancedPool = BalancedPool module.exports.Agent = Agent module.exports.ProxyAgent = ProxyAgent module.exports.RetryHandler = RetryHandler module.exports.DecoratorHandler = DecoratorHandler module.exports.RedirectHandler = RedirectHandler module.exports.createRedirectInterceptor = createRedirectInterceptor module.exports.buildConnector = buildConnector module.exports.errors = errors function makeDispatcher (fn) { return (url, opts, handler) => { if (typeof opts === 'function') { handler = opts opts = null } if (!url || (typeof url !== 'string' && typeof url !== 'object' && !(url instanceof URL))) { throw new InvalidArgumentError('invalid url') } if (opts != null && typeof opts !== 'object') { throw new InvalidArgumentError('invalid opts') } if (opts && opts.path != null) { if (typeof opts.path !== 'string') { throw new InvalidArgumentError('invalid opts.path') } let path = opts.path if (!opts.path.startsWith('/')) { path = `/${path}` } url = new URL(util.parseOrigin(url).origin + path) } else { if (!opts) { opts = typeof url === 'object' ? url : {} } url = util.parseURL(url) } const { agent, dispatcher = getGlobalDispatcher() } = opts if (agent) { throw new InvalidArgumentError('unsupported opts.agent. Did you mean opts.client?') } return fn.call(dispatcher, { ...opts, origin: url.origin, path: url.search ? `${url.pathname}${url.search}` : url.pathname, method: opts.method || (opts.body ? 
'PUT' : 'GET') }, handler) } } module.exports.setGlobalDispatcher = setGlobalDispatcher module.exports.getGlobalDispatcher = getGlobalDispatcher if (util.nodeMajor > 16 || (util.nodeMajor === 16 && util.nodeMinor >= 8)) { let fetchImpl = null module.exports.fetch = async function fetch (resource) { if (!fetchImpl) { fetchImpl = (__nccwpck_require__(12315).fetch) } try { return await fetchImpl(...arguments) } catch (err) { if (typeof err === 'object') { Error.captureStackTrace(err, this) } throw err } } module.exports.Headers = __nccwpck_require__(26349).Headers module.exports.Response = __nccwpck_require__(48676).Response module.exports.Request = __nccwpck_require__(25194).Request module.exports.FormData = __nccwpck_require__(43073).FormData module.exports.File = __nccwpck_require__(63041).File module.exports.FileReader = __nccwpck_require__(82160).FileReader const { setGlobalOrigin, getGlobalOrigin } = __nccwpck_require__(75628) module.exports.setGlobalOrigin = setGlobalOrigin module.exports.getGlobalOrigin = getGlobalOrigin const { CacheStorage } = __nccwpck_require__(44738) const { kConstruct } = __nccwpck_require__(80296) // Cache & CacheStorage are tightly coupled with fetch. Even if it may run // in an older version of Node, it doesn't have any use without fetch. module.exports.caches = new CacheStorage(kConstruct) } if (util.nodeMajor >= 16) { const { deleteCookie, getCookies, getSetCookies, setCookie } = __nccwpck_require__(53168) module.exports.deleteCookie = deleteCookie module.exports.getCookies = getCookies module.exports.getSetCookies = getSetCookies module.exports.setCookie = setCookie const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(94322) module.exports.parseMIMEType = parseMIMEType module.exports.serializeAMimeType = serializeAMimeType } if (util.nodeMajor >= 18 && hasCrypto) { const { WebSocket } = __nccwpck_require__(55171) module.exports.WebSocket = WebSocket } module.exports.request = makeDispatcher(api.request) module.exports.stream = makeDispatcher(api.stream) module.exports.pipeline = makeDispatcher(api.pipeline) module.exports.connect = makeDispatcher(api.connect) module.exports.upgrade = makeDispatcher(api.upgrade) module.exports.MockClient = MockClient module.exports.MockPool = MockPool module.exports.MockAgent = MockAgent module.exports.mockErrors = mockErrors /***/ }), /***/ 59965: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { InvalidArgumentError } = __nccwpck_require__(68707) const { kClients, kRunning, kClose, kDestroy, kDispatch, kInterceptors } = __nccwpck_require__(36443) const DispatcherBase = __nccwpck_require__(50001) const Pool = __nccwpck_require__(35076) const Client = __nccwpck_require__(86197) const util = __nccwpck_require__(3440) const createRedirectInterceptor = __nccwpck_require__(64415) const { WeakRef, FinalizationRegistry } = __nccwpck_require__(13194)() const kOnConnect = Symbol('onConnect') const kOnDisconnect = Symbol('onDisconnect') const kOnConnectionError = Symbol('onConnectionError') const kMaxRedirections = Symbol('maxRedirections') const kOnDrain = Symbol('onDrain') const kFactory = Symbol('factory') const kFinalizer = Symbol('finalizer') const kOptions = Symbol('options') function defaultFactory (origin, opts) { return opts && opts.connections === 1 ? 
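// defaultFactory (continued below) hands back a single Client when the agent
// is configured with `connections: 1` and a Pool otherwise. Illustrative
// sketch of the top-level API assembled in the preceding module, as consumed
// from the standalone undici package (URL is hypothetical, inside an async
// function):
//
//   const { request, Agent, setGlobalDispatcher } = require('undici');
//   setGlobalDispatcher(new Agent({ connections: 1 }));
//   const { statusCode, body } = await request('http://localhost:3000/hats');
//   const hats = await body.json();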
new Client(origin, opts) : new Pool(origin, opts) } class Agent extends DispatcherBase { constructor ({ factory = defaultFactory, maxRedirections = 0, connect, ...options } = {}) { super() if (typeof factory !== 'function') { throw new InvalidArgumentError('factory must be a function.') } if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { throw new InvalidArgumentError('connect must be a function or an object') } if (!Number.isInteger(maxRedirections) || maxRedirections < 0) { throw new InvalidArgumentError('maxRedirections must be a positive number') } if (connect && typeof connect !== 'function') { connect = { ...connect } } this[kInterceptors] = options.interceptors && options.interceptors.Agent && Array.isArray(options.interceptors.Agent) ? options.interceptors.Agent : [createRedirectInterceptor({ maxRedirections })] this[kOptions] = { ...util.deepClone(options), connect } this[kOptions].interceptors = options.interceptors ? { ...options.interceptors } : undefined this[kMaxRedirections] = maxRedirections this[kFactory] = factory this[kClients] = new Map() this[kFinalizer] = new FinalizationRegistry(/* istanbul ignore next: gc is undeterministic */ key => { const ref = this[kClients].get(key) if (ref !== undefined && ref.deref() === undefined) { this[kClients].delete(key) } }) const agent = this this[kOnDrain] = (origin, targets) => { agent.emit('drain', origin, [agent, ...targets]) } this[kOnConnect] = (origin, targets) => { agent.emit('connect', origin, [agent, ...targets]) } this[kOnDisconnect] = (origin, targets, err) => { agent.emit('disconnect', origin, [agent, ...targets], err) } this[kOnConnectionError] = (origin, targets, err) => { agent.emit('connectionError', origin, [agent, ...targets], err) } } get [kRunning] () { let ret = 0 for (const ref of this[kClients].values()) { const client = ref.deref() /* istanbul ignore next: gc is undeterministic */ if (client) { ret += client[kRunning] } } return ret } [kDispatch] (opts, handler) { let key if (opts.origin && (typeof opts.origin === 'string' || opts.origin instanceof URL)) { key = String(opts.origin) } else { throw new InvalidArgumentError('opts.origin must be a non-empty string or URL.') } const ref = this[kClients].get(key) let dispatcher = ref ? 
ref.deref() : null if (!dispatcher) { dispatcher = this[kFactory](opts.origin, this[kOptions]) .on('drain', this[kOnDrain]) .on('connect', this[kOnConnect]) .on('disconnect', this[kOnDisconnect]) .on('connectionError', this[kOnConnectionError]) this[kClients].set(key, new WeakRef(dispatcher)) this[kFinalizer].register(dispatcher, key) } return dispatcher.dispatch(opts, handler) } async [kClose] () { const closePromises = [] for (const ref of this[kClients].values()) { const client = ref.deref() /* istanbul ignore else: gc is undeterministic */ if (client) { closePromises.push(client.close()) } } await Promise.all(closePromises) } async [kDestroy] (err) { const destroyPromises = [] for (const ref of this[kClients].values()) { const client = ref.deref() /* istanbul ignore else: gc is undeterministic */ if (client) { destroyPromises.push(client.destroy(err)) } } await Promise.all(destroyPromises) } } module.exports = Agent /***/ }), /***/ 80158: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const { addAbortListener } = __nccwpck_require__(3440) const { RequestAbortedError } = __nccwpck_require__(68707) const kListener = Symbol('kListener') const kSignal = Symbol('kSignal') function abort (self) { if (self.abort) { self.abort() } else { self.onError(new RequestAbortedError()) } } function addSignal (self, signal) { self[kSignal] = null self[kListener] = null if (!signal) { return } if (signal.aborted) { abort(self) return } self[kSignal] = signal self[kListener] = () => { abort(self) } addAbortListener(self[kSignal], self[kListener]) } function removeSignal (self) { if (!self[kSignal]) { return } if ('removeEventListener' in self[kSignal]) { self[kSignal].removeEventListener('abort', self[kListener]) } else { self[kSignal].removeListener('abort', self[kListener]) } self[kSignal] = null self[kListener] = null } module.exports = { addSignal, removeSignal } /***/ }), /***/ 34660: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { AsyncResource } = __nccwpck_require__(90290) const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(68707) const util = __nccwpck_require__(3440) const { addSignal, removeSignal } = __nccwpck_require__(80158) class ConnectHandler extends AsyncResource { constructor (opts, callback) { if (!opts || typeof opts !== 'object') { throw new InvalidArgumentError('invalid opts') } if (typeof callback !== 'function') { throw new InvalidArgumentError('invalid callback') } const { signal, opaque, responseHeaders } = opts if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') } super('UNDICI_CONNECT') this.opaque = opaque || null this.responseHeaders = responseHeaders || null this.callback = callback this.abort = null addSignal(this, signal) } onConnect (abort, context) { if (!this.callback) { throw new RequestAbortedError() } this.abort = abort this.context = context } onHeaders () { throw new SocketError('bad connect', null) } onUpgrade (statusCode, rawHeaders, socket) { const { callback, opaque, context } = this removeSignal(this) this.callback = null let headers = rawHeaders // Indicates is an HTTP2Session if (headers != null) { headers = this.responseHeaders === 'raw' ? 
util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) } this.runInAsyncScope(callback, null, null, { statusCode, headers, socket, opaque, context }) } onError (err) { const { callback, opaque } = this removeSignal(this) if (callback) { this.callback = null queueMicrotask(() => { this.runInAsyncScope(callback, null, err, { opaque }) }) } } } function connect (opts, callback) { if (callback === undefined) { return new Promise((resolve, reject) => { connect.call(this, opts, (err, data) => { return err ? reject(err) : resolve(data) }) }) } try { const connectHandler = new ConnectHandler(opts, callback) this.dispatch({ ...opts, method: 'CONNECT' }, connectHandler) } catch (err) { if (typeof callback !== 'function') { throw err } const opaque = opts && opts.opaque queueMicrotask(() => callback(err, { opaque })) } } module.exports = connect /***/ }), /***/ 76862: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { Readable, Duplex, PassThrough } = __nccwpck_require__(2203) const { InvalidArgumentError, InvalidReturnValueError, RequestAbortedError } = __nccwpck_require__(68707) const util = __nccwpck_require__(3440) const { AsyncResource } = __nccwpck_require__(90290) const { addSignal, removeSignal } = __nccwpck_require__(80158) const assert = __nccwpck_require__(42613) const kResume = Symbol('resume') class PipelineRequest extends Readable { constructor () { super({ autoDestroy: true }) this[kResume] = null } _read () { const { [kResume]: resume } = this if (resume) { this[kResume] = null resume() } } _destroy (err, callback) { this._read() callback(err) } } class PipelineResponse extends Readable { constructor (resume) { super({ autoDestroy: true }) this[kResume] = resume } _read () { this[kResume]() } _destroy (err, callback) { if (!err && !this._readableState.endEmitted) { err = new RequestAbortedError() } callback(err) } } class PipelineHandler extends AsyncResource { constructor (opts, handler) { if (!opts || typeof opts !== 'object') { throw new InvalidArgumentError('invalid opts') } if (typeof handler !== 'function') { throw new InvalidArgumentError('invalid handler') } const { signal, method, opaque, onInfo, responseHeaders } = opts if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') } if (method === 'CONNECT') { throw new InvalidArgumentError('invalid method') } if (onInfo && typeof onInfo !== 'function') { throw new InvalidArgumentError('invalid onInfo callback') } super('UNDICI_PIPELINE') this.opaque = opaque || null this.responseHeaders = responseHeaders || null this.handler = handler this.abort = null this.context = null this.onInfo = onInfo || null this.req = new PipelineRequest().on('error', util.nop) this.ret = new Duplex({ readableObjectMode: opts.objectMode, autoDestroy: true, read: () => { const { body } = this if (body && body.resume) { body.resume() } }, write: (chunk, encoding, callback) => { const { req } = this if (req.push(chunk, encoding) || req._readableState.destroyed) { callback() } else { req[kResume] = callback } }, destroy: (err, callback) => { const { body, req, res, ret, abort } = this if (!err && !ret._readableState.endEmitted) { err = new RequestAbortedError() } if (abort && err) { abort() } util.destroy(body, err) util.destroy(req, err) util.destroy(res, err) removeSignal(this) callback(err) } }).on('prefinish', () => { const { req } = this // Node < 15 does not call _final in same tick. 
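// Once the writable side of the returned Duplex finishes, the request body
// stream is ended below so the dispatch can complete the upload.
// Illustrative use of the pipeline API built on this handler (origin, path
// and file names are hypothetical, using the standalone undici package):
//
//   const fs = require('fs');
//   const { pipeline: streamPipeline } = require('stream');
//   const { pipeline } = require('undici');
//   streamPipeline(
//     fs.createReadStream('upload.bin'),
//     pipeline('http://localhost:3000/echo', { method: 'POST' }, ({ body }) => body),
//     fs.createWriteStream('reply.bin'),
//     (err) => { if (err) console.error(err); }
//   );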
req.push(null) }) this.res = null addSignal(this, signal) } onConnect (abort, context) { const { ret, res } = this assert(!res, 'pipeline cannot be retried') if (ret.destroyed) { throw new RequestAbortedError() } this.abort = abort this.context = context } onHeaders (statusCode, rawHeaders, resume) { const { opaque, handler, context } = this if (statusCode < 200) { if (this.onInfo) { const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) this.onInfo({ statusCode, headers }) } return } this.res = new PipelineResponse(resume) let body try { this.handler = null const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) body = this.runInAsyncScope(handler, null, { statusCode, headers, opaque, body: this.res, context }) } catch (err) { this.res.on('error', util.nop) throw err } if (!body || typeof body.on !== 'function') { throw new InvalidReturnValueError('expected Readable') } body .on('data', (chunk) => { const { ret, body } = this if (!ret.push(chunk) && body.pause) { body.pause() } }) .on('error', (err) => { const { ret } = this util.destroy(ret, err) }) .on('end', () => { const { ret } = this ret.push(null) }) .on('close', () => { const { ret } = this if (!ret._readableState.ended) { util.destroy(ret, new RequestAbortedError()) } }) this.body = body } onData (chunk) { const { res } = this return res.push(chunk) } onComplete (trailers) { const { res } = this res.push(null) } onError (err) { const { ret } = this this.handler = null util.destroy(ret, err) } } function pipeline (opts, handler) { try { const pipelineHandler = new PipelineHandler(opts, handler) this.dispatch({ ...opts, body: pipelineHandler.req }, pipelineHandler) return pipelineHandler.ret } catch (err) { return new PassThrough().destroy(err) } } module.exports = pipeline /***/ }), /***/ 14043: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const Readable = __nccwpck_require__(49927) const { InvalidArgumentError, RequestAbortedError } = __nccwpck_require__(68707) const util = __nccwpck_require__(3440) const { getResolveErrorBodyCallback } = __nccwpck_require__(87655) const { AsyncResource } = __nccwpck_require__(90290) const { addSignal, removeSignal } = __nccwpck_require__(80158) class RequestHandler extends AsyncResource { constructor (opts, callback) { if (!opts || typeof opts !== 'object') { throw new InvalidArgumentError('invalid opts') } const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError, highWaterMark } = opts try { if (typeof callback !== 'function') { throw new InvalidArgumentError('invalid callback') } if (highWaterMark && (typeof highWaterMark !== 'number' || highWaterMark < 0)) { throw new InvalidArgumentError('invalid highWaterMark') } if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') } if (method === 'CONNECT') { throw new InvalidArgumentError('invalid method') } if (onInfo && typeof onInfo !== 'function') { throw new InvalidArgumentError('invalid onInfo callback') } super('UNDICI_REQUEST') } catch (err) { if (util.isStream(body)) { util.destroy(body.on('error', util.nop), err) } throw err } this.responseHeaders = responseHeaders || null this.opaque = opaque || null this.callback = callback this.res = null this.abort = null this.body = body this.trailers = {} this.context = null this.onInfo = onInfo || null 
this.throwOnError = throwOnError this.highWaterMark = highWaterMark if (util.isStream(body)) { body.on('error', (err) => { this.onError(err) }) } addSignal(this, signal) } onConnect (abort, context) { if (!this.callback) { throw new RequestAbortedError() } this.abort = abort this.context = context } onHeaders (statusCode, rawHeaders, resume, statusMessage) { const { callback, opaque, abort, context, responseHeaders, highWaterMark } = this const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) if (statusCode < 200) { if (this.onInfo) { this.onInfo({ statusCode, headers }) } return } const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers const contentType = parsedHeaders['content-type'] const body = new Readable({ resume, abort, contentType, highWaterMark }) this.callback = null this.res = body if (callback !== null) { if (this.throwOnError && statusCode >= 400) { this.runInAsyncScope(getResolveErrorBodyCallback, null, { callback, body, contentType, statusCode, statusMessage, headers } ) } else { this.runInAsyncScope(callback, null, null, { statusCode, headers, trailers: this.trailers, opaque, body, context }) } } } onData (chunk) { const { res } = this return res.push(chunk) } onComplete (trailers) { const { res } = this removeSignal(this) util.parseHeaders(trailers, this.trailers) res.push(null) } onError (err) { const { res, callback, body, opaque } = this removeSignal(this) if (callback) { // TODO: Does this need queueMicrotask? this.callback = null queueMicrotask(() => { this.runInAsyncScope(callback, null, err, { opaque }) }) } if (res) { this.res = null // Ensure all queued handlers are invoked before destroying res. queueMicrotask(() => { util.destroy(res, err) }) } if (body) { this.body = null util.destroy(body, err) } } } function request (opts, callback) { if (callback === undefined) { return new Promise((resolve, reject) => { request.call(this, opts, (err, data) => { return err ? 
reject(err) : resolve(data) }) }) } try { this.dispatch(opts, new RequestHandler(opts, callback)) } catch (err) { if (typeof callback !== 'function') { throw err } const opaque = opts && opts.opaque queueMicrotask(() => callback(err, { opaque })) } } module.exports = request module.exports.RequestHandler = RequestHandler /***/ }), /***/ 3560: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { finished, PassThrough } = __nccwpck_require__(2203) const { InvalidArgumentError, InvalidReturnValueError, RequestAbortedError } = __nccwpck_require__(68707) const util = __nccwpck_require__(3440) const { getResolveErrorBodyCallback } = __nccwpck_require__(87655) const { AsyncResource } = __nccwpck_require__(90290) const { addSignal, removeSignal } = __nccwpck_require__(80158) class StreamHandler extends AsyncResource { constructor (opts, factory, callback) { if (!opts || typeof opts !== 'object') { throw new InvalidArgumentError('invalid opts') } const { signal, method, opaque, body, onInfo, responseHeaders, throwOnError } = opts try { if (typeof callback !== 'function') { throw new InvalidArgumentError('invalid callback') } if (typeof factory !== 'function') { throw new InvalidArgumentError('invalid factory') } if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') } if (method === 'CONNECT') { throw new InvalidArgumentError('invalid method') } if (onInfo && typeof onInfo !== 'function') { throw new InvalidArgumentError('invalid onInfo callback') } super('UNDICI_STREAM') } catch (err) { if (util.isStream(body)) { util.destroy(body.on('error', util.nop), err) } throw err } this.responseHeaders = responseHeaders || null this.opaque = opaque || null this.factory = factory this.callback = callback this.res = null this.abort = null this.context = null this.trailers = null this.body = body this.onInfo = onInfo || null this.throwOnError = throwOnError || false if (util.isStream(body)) { body.on('error', (err) => { this.onError(err) }) } addSignal(this, signal) } onConnect (abort, context) { if (!this.callback) { throw new RequestAbortedError() } this.abort = abort this.context = context } onHeaders (statusCode, rawHeaders, resume, statusMessage) { const { factory, opaque, context, callback, responseHeaders } = this const headers = responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) if (statusCode < 200) { if (this.onInfo) { this.onInfo({ statusCode, headers }) } return } this.factory = null let res if (this.throwOnError && statusCode >= 400) { const parsedHeaders = responseHeaders === 'raw' ? util.parseHeaders(rawHeaders) : headers const contentType = parsedHeaders['content-type'] res = new PassThrough() this.callback = null this.runInAsyncScope(getResolveErrorBodyCallback, null, { callback, body: res, contentType, statusCode, statusMessage, headers } ) } else { if (factory === null) { return } res = this.runInAsyncScope(factory, null, { statusCode, headers, opaque, context }) if ( !res || typeof res.write !== 'function' || typeof res.end !== 'function' || typeof res.on !== 'function' ) { throw new InvalidReturnValueError('expected Writable') } // TODO: Avoid finished. It registers an unnecessary amount of listeners. 
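// finished() below tears the handler down once the user-supplied writable
// ends: it clears this.res, destroys the writable if an error occurred,
// invokes the user callback with any error plus the trailers, and aborts the
// request on failure. Illustrative use of the stream API built on this
// handler (URL and file name are hypothetical, inside an async function):
//
//   const fs = require('fs');
//   const { stream } = require('undici');
//   await stream(
//     'http://localhost:3000/hats',
//     { method: 'GET' },
//     ({ statusCode }) => fs.createWriteStream(`hats-${statusCode}.json`)
//   );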
finished(res, { readable: false }, (err) => { const { callback, res, opaque, trailers, abort } = this this.res = null if (err || !res.readable) { util.destroy(res, err) } this.callback = null this.runInAsyncScope(callback, null, err || null, { opaque, trailers }) if (err) { abort() } }) } res.on('drain', resume) this.res = res const needDrain = res.writableNeedDrain !== undefined ? res.writableNeedDrain : res._writableState && res._writableState.needDrain return needDrain !== true } onData (chunk) { const { res } = this return res ? res.write(chunk) : true } onComplete (trailers) { const { res } = this removeSignal(this) if (!res) { return } this.trailers = util.parseHeaders(trailers) res.end() } onError (err) { const { res, callback, opaque, body } = this removeSignal(this) this.factory = null if (res) { this.res = null util.destroy(res, err) } else if (callback) { this.callback = null queueMicrotask(() => { this.runInAsyncScope(callback, null, err, { opaque }) }) } if (body) { this.body = null util.destroy(body, err) } } } function stream (opts, factory, callback) { if (callback === undefined) { return new Promise((resolve, reject) => { stream.call(this, opts, factory, (err, data) => { return err ? reject(err) : resolve(data) }) }) } try { this.dispatch(opts, new StreamHandler(opts, factory, callback)) } catch (err) { if (typeof callback !== 'function') { throw err } const opaque = opts && opts.opaque queueMicrotask(() => callback(err, { opaque })) } } module.exports = stream /***/ }), /***/ 61882: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { InvalidArgumentError, RequestAbortedError, SocketError } = __nccwpck_require__(68707) const { AsyncResource } = __nccwpck_require__(90290) const util = __nccwpck_require__(3440) const { addSignal, removeSignal } = __nccwpck_require__(80158) const assert = __nccwpck_require__(42613) class UpgradeHandler extends AsyncResource { constructor (opts, callback) { if (!opts || typeof opts !== 'object') { throw new InvalidArgumentError('invalid opts') } if (typeof callback !== 'function') { throw new InvalidArgumentError('invalid callback') } const { signal, opaque, responseHeaders } = opts if (signal && typeof signal.on !== 'function' && typeof signal.addEventListener !== 'function') { throw new InvalidArgumentError('signal must be an EventEmitter or EventTarget') } super('UNDICI_UPGRADE') this.responseHeaders = responseHeaders || null this.opaque = opaque || null this.callback = callback this.abort = null this.context = null addSignal(this, signal) } onConnect (abort, context) { if (!this.callback) { throw new RequestAbortedError() } this.abort = abort this.context = null } onHeaders () { throw new SocketError('bad upgrade', null) } onUpgrade (statusCode, rawHeaders, socket) { const { callback, opaque, context } = this assert.strictEqual(statusCode, 101) removeSignal(this) this.callback = null const headers = this.responseHeaders === 'raw' ? util.parseRawHeaders(rawHeaders) : util.parseHeaders(rawHeaders) this.runInAsyncScope(callback, null, null, { headers, socket, opaque, context }) } onError (err) { const { callback, opaque } = this removeSignal(this) if (callback) { this.callback = null queueMicrotask(() => { this.runInAsyncScope(callback, null, err, { opaque }) }) } } } function upgrade (opts, callback) { if (callback === undefined) { return new Promise((resolve, reject) => { upgrade.call(this, opts, (err, data) => { return err ? 
reject(err) : resolve(data) }) }) } try { const upgradeHandler = new UpgradeHandler(opts, callback) this.dispatch({ ...opts, method: opts.method || 'GET', upgrade: opts.protocol || 'Websocket' }, upgradeHandler) } catch (err) { if (typeof callback !== 'function') { throw err } const opaque = opts && opts.opaque queueMicrotask(() => callback(err, { opaque })) } } module.exports = upgrade /***/ }), /***/ 56615: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; module.exports.request = __nccwpck_require__(14043) module.exports.stream = __nccwpck_require__(3560) module.exports.pipeline = __nccwpck_require__(76862) module.exports.upgrade = __nccwpck_require__(61882) module.exports.connect = __nccwpck_require__(34660) /***/ }), /***/ 49927: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // Ported from https://github.com/nodejs/undici/pull/907 const assert = __nccwpck_require__(42613) const { Readable } = __nccwpck_require__(2203) const { RequestAbortedError, NotSupportedError, InvalidArgumentError } = __nccwpck_require__(68707) const util = __nccwpck_require__(3440) const { ReadableStreamFrom, toUSVString } = __nccwpck_require__(3440) let Blob const kConsume = Symbol('kConsume') const kReading = Symbol('kReading') const kBody = Symbol('kBody') const kAbort = Symbol('abort') const kContentType = Symbol('kContentType') const noop = () => {} module.exports = class BodyReadable extends Readable { constructor ({ resume, abort, contentType = '', highWaterMark = 64 * 1024 // Same as nodejs fs streams. }) { super({ autoDestroy: true, read: resume, highWaterMark }) this._readableState.dataEmitted = false this[kAbort] = abort this[kConsume] = null this[kBody] = null this[kContentType] = contentType // Is stream being consumed through Readable API? // This is an optimization so that we avoid checking // for 'data' and 'readable' listeners in the hot path // inside push(). this[kReading] = false } destroy (err) { if (this.destroyed) { // Node < 16 return this } if (!err && !this._readableState.endEmitted) { err = new RequestAbortedError() } if (err) { this[kAbort]() } return super.destroy(err) } emit (ev, ...args) { if (ev === 'data') { // Node < 16.7 this._readableState.dataEmitted = true } else if (ev === 'error') { // Node < 16 this._readableState.errorEmitted = true } return super.emit(ev, ...args) } on (ev, ...args) { if (ev === 'data' || ev === 'readable') { this[kReading] = true } return super.on(ev, ...args) } addListener (ev, ...args) { return this.on(ev, ...args) } off (ev, ...args) { const ret = super.off(ev, ...args) if (ev === 'data' || ev === 'readable') { this[kReading] = ( this.listenerCount('data') > 0 || this.listenerCount('readable') > 0 ) } return ret } removeListener (ev, ...args) { return this.off(ev, ...args) } push (chunk) { if (this[kConsume] && chunk !== null && this.readableLength === 0) { consumePush(this[kConsume], chunk) return this[kReading] ? super.push(chunk) : true } return super.push(chunk) } // https://fetch.spec.whatwg.org/#dom-body-text async text () { return consume(this, 'text') } // https://fetch.spec.whatwg.org/#dom-body-json async json () { return consume(this, 'json') } // https://fetch.spec.whatwg.org/#dom-body-blob async blob () { return consume(this, 'blob') } // https://fetch.spec.whatwg.org/#dom-body-arraybuffer async arrayBuffer () { return consume(this, 'arrayBuffer') } // https://fetch.spec.whatwg.org/#dom-body-formdata async formData () { // TODO: Implement. 
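// Illustrative usage sketch (kept commented out): consuming a BodyReadable through the
// mix-in methods defined above. A body can be consumed at most once; `consume()` rejects
// with TypeError('unusable') otherwise. Names and URLs are hypothetical; assumes an
// async context.
/*
const { body, headers } = await request('https://example.com/data.json')

if (headers['content-type'] && headers['content-type'].startsWith('application/json')) {
  const data = await body.json()     // buffers the stream, then JSON.parse
  console.log(data)
} else {
  await body.dump({ limit: 1024 })   // discard the body, destroying it after ~`limit` bytes
}
*/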
throw new NotSupportedError() } // https://fetch.spec.whatwg.org/#dom-body-bodyused get bodyUsed () { return util.isDisturbed(this) } // https://fetch.spec.whatwg.org/#dom-body-body get body () { if (!this[kBody]) { this[kBody] = ReadableStreamFrom(this) if (this[kConsume]) { // TODO: Is this the best way to force a lock? this[kBody].getReader() // Ensure stream is locked. assert(this[kBody].locked) } } return this[kBody] } dump (opts) { let limit = opts && Number.isFinite(opts.limit) ? opts.limit : 262144 const signal = opts && opts.signal if (signal) { try { if (typeof signal !== 'object' || !('aborted' in signal)) { throw new InvalidArgumentError('signal must be an AbortSignal') } util.throwIfAborted(signal) } catch (err) { return Promise.reject(err) } } if (this.closed) { return Promise.resolve(null) } return new Promise((resolve, reject) => { const signalListenerCleanup = signal ? util.addAbortListener(signal, () => { this.destroy() }) : noop this .on('close', function () { signalListenerCleanup() if (signal && signal.aborted) { reject(signal.reason || Object.assign(new Error('The operation was aborted'), { name: 'AbortError' })) } else { resolve(null) } }) .on('error', noop) .on('data', function (chunk) { limit -= chunk.length if (limit <= 0) { this.destroy() } }) .resume() }) } } // https://streams.spec.whatwg.org/#readablestream-locked function isLocked (self) { // Consume is an implicit lock. return (self[kBody] && self[kBody].locked === true) || self[kConsume] } // https://fetch.spec.whatwg.org/#body-unusable function isUnusable (self) { return util.isDisturbed(self) || isLocked(self) } async function consume (stream, type) { if (isUnusable(stream)) { throw new TypeError('unusable') } assert(!stream[kConsume]) return new Promise((resolve, reject) => { stream[kConsume] = { type, stream, resolve, reject, length: 0, body: [] } stream .on('error', function (err) { consumeFinish(this[kConsume], err) }) .on('close', function () { if (this[kConsume].body !== null) { consumeFinish(this[kConsume], new RequestAbortedError()) } }) process.nextTick(consumeStart, stream[kConsume]) }) } function consumeStart (consume) { if (consume.body === null) { return } const { _readableState: state } = consume.stream for (const chunk of state.buffer) { consumePush(consume, chunk) } if (state.endEmitted) { consumeEnd(this[kConsume]) } else { consume.stream.on('end', function () { consumeEnd(this[kConsume]) }) } consume.stream.resume() while (consume.stream.read() != null) { // Loop } } function consumeEnd (consume) { const { type, body, resolve, stream, length } = consume try { if (type === 'text') { resolve(toUSVString(Buffer.concat(body))) } else if (type === 'json') { resolve(JSON.parse(Buffer.concat(body))) } else if (type === 'arrayBuffer') { const dst = new Uint8Array(length) let pos = 0 for (const buf of body) { dst.set(buf, pos) pos += buf.byteLength } resolve(dst.buffer) } else if (type === 'blob') { if (!Blob) { Blob = (__nccwpck_require__(20181).Blob) } resolve(new Blob(body, { type: stream[kContentType] })) } consumeFinish(consume) } catch (err) { stream.destroy(err) } } function consumePush (consume, chunk) { consume.length += chunk.length consume.body.push(chunk) } function consumeFinish (consume, err) { if (consume.body === null) { return } if (err) { consume.reject(err) } else { consume.resolve() } consume.type = null consume.stream = null consume.resolve = null consume.reject = null consume.length = 0 consume.body = null } /***/ }), /***/ 87655: /***/ ((module, __unused_webpack_exports, 
__nccwpck_require__) => {

const assert = __nccwpck_require__(42613)
const { ResponseStatusCodeError } = __nccwpck_require__(68707)
const { toUSVString } = __nccwpck_require__(3440)

async function getResolveErrorBodyCallback ({ callback, body, contentType, statusCode, statusMessage, headers }) {
  assert(body)

  let chunks = []
  let limit = 0

  for await (const chunk of body) {
    chunks.push(chunk)
    limit += chunk.length
    if (limit > 128 * 1024) {
      chunks = null
      break
    }
  }

  if (statusCode === 204 || !contentType || !chunks) {
    process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
    return
  }

  try {
    if (contentType.startsWith('application/json')) {
      const payload = JSON.parse(toUSVString(Buffer.concat(chunks)))
      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
      return
    }

    if (contentType.startsWith('text/')) {
      const payload = toUSVString(Buffer.concat(chunks))
      process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers, payload))
      return
    }
  } catch (err) {
    // Process in a fallback if error
  }

  process.nextTick(callback, new ResponseStatusCodeError(`Response status code ${statusCode}${statusMessage ? `: ${statusMessage}` : ''}`, statusCode, headers))
}

module.exports = { getResolveErrorBodyCallback }


/***/ }),

/***/ 81093:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

const { BalancedPoolMissingUpstreamError, InvalidArgumentError } = __nccwpck_require__(68707)
const { PoolBase, kClients, kNeedDrain, kAddClient, kRemoveClient, kGetDispatcher } = __nccwpck_require__(58640)
const Pool = __nccwpck_require__(35076)
const { kUrl, kInterceptors } = __nccwpck_require__(36443)
const { parseOrigin } = __nccwpck_require__(3440)
const kFactory = Symbol('factory')

const kOptions = Symbol('options')
const kGreatestCommonDivisor = Symbol('kGreatestCommonDivisor')
const kCurrentWeight = Symbol('kCurrentWeight')
const kIndex = Symbol('kIndex')
const kWeight = Symbol('kWeight')
const kMaxWeightPerServer = Symbol('kMaxWeightPerServer')
const kErrorPenalty = Symbol('kErrorPenalty')

function getGreatestCommonDivisor (a, b) {
  if (b === 0) return a
  return getGreatestCommonDivisor(b, a % b)
}

function defaultFactory (origin, opts) {
  return new Pool(origin, opts)
}

class BalancedPool extends PoolBase {
  constructor (upstreams = [], { factory = defaultFactory, ...opts } = {}) {
    super()

    this[kOptions] = opts
    this[kIndex] = -1
    this[kCurrentWeight] = 0

    this[kMaxWeightPerServer] = this[kOptions].maxWeightPerServer || 100
    this[kErrorPenalty] = this[kOptions].errorPenalty || 15

    if (!Array.isArray(upstreams)) {
      upstreams = [upstreams]
    }

    if (typeof factory !== 'function') {
      throw new InvalidArgumentError('factory must be a function.')
    }

    this[kInterceptors] = opts.interceptors && opts.interceptors.BalancedPool && Array.isArray(opts.interceptors.BalancedPool) ?
opts.interceptors.BalancedPool : [] this[kFactory] = factory for (const upstream of upstreams) { this.addUpstream(upstream) } this._updateBalancedPoolStats() } addUpstream (upstream) { const upstreamOrigin = parseOrigin(upstream).origin if (this[kClients].find((pool) => ( pool[kUrl].origin === upstreamOrigin && pool.closed !== true && pool.destroyed !== true ))) { return this } const pool = this[kFactory](upstreamOrigin, Object.assign({}, this[kOptions])) this[kAddClient](pool) pool.on('connect', () => { pool[kWeight] = Math.min(this[kMaxWeightPerServer], pool[kWeight] + this[kErrorPenalty]) }) pool.on('connectionError', () => { pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) this._updateBalancedPoolStats() }) pool.on('disconnect', (...args) => { const err = args[2] if (err && err.code === 'UND_ERR_SOCKET') { // decrease the weight of the pool. pool[kWeight] = Math.max(1, pool[kWeight] - this[kErrorPenalty]) this._updateBalancedPoolStats() } }) for (const client of this[kClients]) { client[kWeight] = this[kMaxWeightPerServer] } this._updateBalancedPoolStats() return this } _updateBalancedPoolStats () { this[kGreatestCommonDivisor] = this[kClients].map(p => p[kWeight]).reduce(getGreatestCommonDivisor, 0) } removeUpstream (upstream) { const upstreamOrigin = parseOrigin(upstream).origin const pool = this[kClients].find((pool) => ( pool[kUrl].origin === upstreamOrigin && pool.closed !== true && pool.destroyed !== true )) if (pool) { this[kRemoveClient](pool) } return this } get upstreams () { return this[kClients] .filter(dispatcher => dispatcher.closed !== true && dispatcher.destroyed !== true) .map((p) => p[kUrl].origin) } [kGetDispatcher] () { // We validate that pools is greater than 0, // otherwise we would have to wait until an upstream // is added, which might never happen. if (this[kClients].length === 0) { throw new BalancedPoolMissingUpstreamError() } const dispatcher = this[kClients].find(dispatcher => ( !dispatcher[kNeedDrain] && dispatcher.closed !== true && dispatcher.destroyed !== true )) if (!dispatcher) { return } const allClientsBusy = this[kClients].map(pool => pool[kNeedDrain]).reduce((a, b) => a && b, true) if (allClientsBusy) { return } let counter = 0 let maxWeightIndex = this[kClients].findIndex(pool => !pool[kNeedDrain]) while (counter++ < this[kClients].length) { this[kIndex] = (this[kIndex] + 1) % this[kClients].length const pool = this[kClients][this[kIndex]] // find pool index with the largest weight if (pool[kWeight] > this[kClients][maxWeightIndex][kWeight] && !pool[kNeedDrain]) { maxWeightIndex = this[kIndex] } // decrease the current weight every `this[kClients].length`. if (this[kIndex] === 0) { // Set the current weight to the next lower weight. 
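// Illustrative usage sketch (kept commented out): how the BalancedPool defined in this
// module spreads requests over its upstreams. Upstream addresses and values are
// hypothetical; `maxWeightPerServer` and `errorPenalty` are the options read in the
// constructor above.
/*
const { BalancedPool } = require('undici')

const pool = new BalancedPool([
  'http://10.0.0.1:3000',
  'http://10.0.0.2:3000'
], { maxWeightPerServer: 100, errorPenalty: 15 })

// kGetDispatcher walks the upstreams in an interleaved weighted round-robin:
// a pool is only eligible while its weight >= the current weight, and weights
// are lowered by `errorPenalty` on 'connectionError' / UND_ERR_SOCKET events.
const { statusCode, body } = await pool.request({ path: '/healthz', method: 'GET' })
console.log(statusCode, await body.text())
*/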
this[kCurrentWeight] = this[kCurrentWeight] - this[kGreatestCommonDivisor] if (this[kCurrentWeight] <= 0) { this[kCurrentWeight] = this[kMaxWeightPerServer] } } if (pool[kWeight] >= this[kCurrentWeight] && (!pool[kNeedDrain])) { return pool } } this[kCurrentWeight] = this[kClients][maxWeightIndex][kWeight] this[kIndex] = maxWeightIndex return this[kClients][maxWeightIndex] } } module.exports = BalancedPool /***/ }), /***/ 50479: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { kConstruct } = __nccwpck_require__(80296) const { urlEquals, fieldValues: getFieldValues } = __nccwpck_require__(23993) const { kEnumerableProperty, isDisturbed } = __nccwpck_require__(3440) const { kHeadersList } = __nccwpck_require__(36443) const { webidl } = __nccwpck_require__(74222) const { Response, cloneResponse } = __nccwpck_require__(48676) const { Request } = __nccwpck_require__(25194) const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(89710) const { fetching } = __nccwpck_require__(12315) const { urlIsHttpHttpsScheme, createDeferredPromise, readAllBytes } = __nccwpck_require__(15523) const assert = __nccwpck_require__(42613) const { getGlobalDispatcher } = __nccwpck_require__(32581) /** * @see https://w3c.github.io/ServiceWorker/#dfn-cache-batch-operation * @typedef {Object} CacheBatchOperation * @property {'delete' | 'put'} type * @property {any} request * @property {any} response * @property {import('../../types/cache').CacheQueryOptions} options */ /** * @see https://w3c.github.io/ServiceWorker/#dfn-request-response-list * @typedef {[any, any][]} requestResponseList */ class Cache { /** * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-request-response-list * @type {requestResponseList} */ #relevantRequestResponseList constructor () { if (arguments[0] !== kConstruct) { webidl.illegalConstructor() } this.#relevantRequestResponseList = arguments[1] } async match (request, options = {}) { webidl.brandCheck(this, Cache) webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.match' }) request = webidl.converters.RequestInfo(request) options = webidl.converters.CacheQueryOptions(options) const p = await this.matchAll(request, options) if (p.length === 0) { return } return p[0] } async matchAll (request = undefined, options = {}) { webidl.brandCheck(this, Cache) if (request !== undefined) request = webidl.converters.RequestInfo(request) options = webidl.converters.CacheQueryOptions(options) // 1. let r = null // 2. if (request !== undefined) { if (request instanceof Request) { // 2.1.1 r = request[kState] // 2.1.2 if (r.method !== 'GET' && !options.ignoreMethod) { return [] } } else if (typeof request === 'string') { // 2.2.1 r = new Request(request)[kState] } } // 5. // 5.1 const responses = [] // 5.2 if (request === undefined) { // 5.2.1 for (const requestResponse of this.#relevantRequestResponseList) { responses.push(requestResponse[1]) } } else { // 5.3 // 5.3.1 const requestResponses = this.#queryCache(r, options) // 5.3.2 for (const requestResponse of requestResponses) { responses.push(requestResponse[1]) } } // 5.4 // We don't implement CORs so we don't need to loop over the responses, yay! // 5.5.1 const responseList = [] // 5.5.2 for (const response of responses) { // 5.5.2.1 const responseObject = new Response(response.body?.source ?? 
null) const body = responseObject[kState].body responseObject[kState] = response responseObject[kState].body = body responseObject[kHeaders][kHeadersList] = response.headersList responseObject[kHeaders][kGuard] = 'immutable' responseList.push(responseObject) } // 6. return Object.freeze(responseList) } async add (request) { webidl.brandCheck(this, Cache) webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.add' }) request = webidl.converters.RequestInfo(request) // 1. const requests = [request] // 2. const responseArrayPromise = this.addAll(requests) // 3. return await responseArrayPromise } async addAll (requests) { webidl.brandCheck(this, Cache) webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.addAll' }) requests = webidl.converters['sequence'](requests) // 1. const responsePromises = [] // 2. const requestList = [] // 3. for (const request of requests) { if (typeof request === 'string') { continue } // 3.1 const r = request[kState] // 3.2 if (!urlIsHttpHttpsScheme(r.url) || r.method !== 'GET') { throw webidl.errors.exception({ header: 'Cache.addAll', message: 'Expected http/s scheme when method is not GET.' }) } } // 4. /** @type {ReturnType[]} */ const fetchControllers = [] // 5. for (const request of requests) { // 5.1 const r = new Request(request)[kState] // 5.2 if (!urlIsHttpHttpsScheme(r.url)) { throw webidl.errors.exception({ header: 'Cache.addAll', message: 'Expected http/s scheme.' }) } // 5.4 r.initiator = 'fetch' r.destination = 'subresource' // 5.5 requestList.push(r) // 5.6 const responsePromise = createDeferredPromise() // 5.7 fetchControllers.push(fetching({ request: r, dispatcher: getGlobalDispatcher(), processResponse (response) { // 1. if (response.type === 'error' || response.status === 206 || response.status < 200 || response.status > 299) { responsePromise.reject(webidl.errors.exception({ header: 'Cache.addAll', message: 'Received an invalid status code or the request failed.' })) } else if (response.headersList.contains('vary')) { // 2. // 2.1 const fieldValues = getFieldValues(response.headersList.get('vary')) // 2.2 for (const fieldValue of fieldValues) { // 2.2.1 if (fieldValue === '*') { responsePromise.reject(webidl.errors.exception({ header: 'Cache.addAll', message: 'invalid vary field value' })) for (const controller of fetchControllers) { controller.abort() } return } } } }, processResponseEndOfBody (response) { // 1. if (response.aborted) { responsePromise.reject(new DOMException('aborted', 'AbortError')) return } // 2. responsePromise.resolve(response) } })) // 5.8 responsePromises.push(responsePromise.promise) } // 6. const p = Promise.all(responsePromises) // 7. 
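// Illustrative usage sketch (kept commented out) of the Cache.addAll flow implemented
// above: fetch every request, then apply the collected 'put' operations as one batch.
// `caches` stands for an instance of the CacheStorage class defined later in this
// bundle (module 44738); the URLs are placeholders.
/*
const cache = await caches.open('v1')

// Each entry is fetched via `fetching(...)`; any error/206/non-2xx response or a
// `Vary: *` header rejects the whole addAll and aborts the remaining fetches.
await cache.addAll([
  'https://example.com/app.js',
  'https://example.com/style.css'
])

const hit = await cache.match('https://example.com/app.js')
*/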
const responses = await p // 7.1 const operations = [] // 7.2 let index = 0 // 7.3 for (const response of responses) { // 7.3.1 /** @type {CacheBatchOperation} */ const operation = { type: 'put', // 7.3.2 request: requestList[index], // 7.3.3 response // 7.3.4 } operations.push(operation) // 7.3.5 index++ // 7.3.6 } // 7.5 const cacheJobPromise = createDeferredPromise() // 7.6.1 let errorData = null // 7.6.2 try { this.#batchCacheOperations(operations) } catch (e) { errorData = e } // 7.6.3 queueMicrotask(() => { // 7.6.3.1 if (errorData === null) { cacheJobPromise.resolve(undefined) } else { // 7.6.3.2 cacheJobPromise.reject(errorData) } }) // 7.7 return cacheJobPromise.promise } async put (request, response) { webidl.brandCheck(this, Cache) webidl.argumentLengthCheck(arguments, 2, { header: 'Cache.put' }) request = webidl.converters.RequestInfo(request) response = webidl.converters.Response(response) // 1. let innerRequest = null // 2. if (request instanceof Request) { innerRequest = request[kState] } else { // 3. innerRequest = new Request(request)[kState] } // 4. if (!urlIsHttpHttpsScheme(innerRequest.url) || innerRequest.method !== 'GET') { throw webidl.errors.exception({ header: 'Cache.put', message: 'Expected an http/s scheme when method is not GET' }) } // 5. const innerResponse = response[kState] // 6. if (innerResponse.status === 206) { throw webidl.errors.exception({ header: 'Cache.put', message: 'Got 206 status' }) } // 7. if (innerResponse.headersList.contains('vary')) { // 7.1. const fieldValues = getFieldValues(innerResponse.headersList.get('vary')) // 7.2. for (const fieldValue of fieldValues) { // 7.2.1 if (fieldValue === '*') { throw webidl.errors.exception({ header: 'Cache.put', message: 'Got * vary field value' }) } } } // 8. if (innerResponse.body && (isDisturbed(innerResponse.body.stream) || innerResponse.body.stream.locked)) { throw webidl.errors.exception({ header: 'Cache.put', message: 'Response body is locked or disturbed' }) } // 9. const clonedResponse = cloneResponse(innerResponse) // 10. const bodyReadPromise = createDeferredPromise() // 11. if (innerResponse.body != null) { // 11.1 const stream = innerResponse.body.stream // 11.2 const reader = stream.getReader() // 11.3 readAllBytes(reader).then(bodyReadPromise.resolve, bodyReadPromise.reject) } else { bodyReadPromise.resolve(undefined) } // 12. /** @type {CacheBatchOperation[]} */ const operations = [] // 13. /** @type {CacheBatchOperation} */ const operation = { type: 'put', // 14. request: innerRequest, // 15. response: clonedResponse // 16. } // 17. operations.push(operation) // 19. 
const bytes = await bodyReadPromise.promise if (clonedResponse.body != null) { clonedResponse.body.source = bytes } // 19.1 const cacheJobPromise = createDeferredPromise() // 19.2.1 let errorData = null // 19.2.2 try { this.#batchCacheOperations(operations) } catch (e) { errorData = e } // 19.2.3 queueMicrotask(() => { // 19.2.3.1 if (errorData === null) { cacheJobPromise.resolve() } else { // 19.2.3.2 cacheJobPromise.reject(errorData) } }) return cacheJobPromise.promise } async delete (request, options = {}) { webidl.brandCheck(this, Cache) webidl.argumentLengthCheck(arguments, 1, { header: 'Cache.delete' }) request = webidl.converters.RequestInfo(request) options = webidl.converters.CacheQueryOptions(options) /** * @type {Request} */ let r = null if (request instanceof Request) { r = request[kState] if (r.method !== 'GET' && !options.ignoreMethod) { return false } } else { assert(typeof request === 'string') r = new Request(request)[kState] } /** @type {CacheBatchOperation[]} */ const operations = [] /** @type {CacheBatchOperation} */ const operation = { type: 'delete', request: r, options } operations.push(operation) const cacheJobPromise = createDeferredPromise() let errorData = null let requestResponses try { requestResponses = this.#batchCacheOperations(operations) } catch (e) { errorData = e } queueMicrotask(() => { if (errorData === null) { cacheJobPromise.resolve(!!requestResponses?.length) } else { cacheJobPromise.reject(errorData) } }) return cacheJobPromise.promise } /** * @see https://w3c.github.io/ServiceWorker/#dom-cache-keys * @param {any} request * @param {import('../../types/cache').CacheQueryOptions} options * @returns {readonly Request[]} */ async keys (request = undefined, options = {}) { webidl.brandCheck(this, Cache) if (request !== undefined) request = webidl.converters.RequestInfo(request) options = webidl.converters.CacheQueryOptions(options) // 1. let r = null // 2. if (request !== undefined) { // 2.1 if (request instanceof Request) { // 2.1.1 r = request[kState] // 2.1.2 if (r.method !== 'GET' && !options.ignoreMethod) { return [] } } else if (typeof request === 'string') { // 2.2 r = new Request(request)[kState] } } // 4. const promise = createDeferredPromise() // 5. // 5.1 const requests = [] // 5.2 if (request === undefined) { // 5.2.1 for (const requestResponse of this.#relevantRequestResponseList) { // 5.2.1.1 requests.push(requestResponse[0]) } } else { // 5.3 // 5.3.1 const requestResponses = this.#queryCache(r, options) // 5.3.2 for (const requestResponse of requestResponses) { // 5.3.2.1 requests.push(requestResponse[0]) } } // 5.4 queueMicrotask(() => { // 5.4.1 const requestList = [] // 5.4.2 for (const request of requests) { const requestObject = new Request('https://a') requestObject[kState] = request requestObject[kHeaders][kHeadersList] = request.headersList requestObject[kHeaders][kGuard] = 'immutable' requestObject[kRealm] = request.client // 5.4.2.1 requestList.push(requestObject) } // 5.4.3 promise.resolve(Object.freeze(requestList)) }) return promise.promise } /** * @see https://w3c.github.io/ServiceWorker/#batch-cache-operations-algorithm * @param {CacheBatchOperation[]} operations * @returns {requestResponseList} */ #batchCacheOperations (operations) { // 1. const cache = this.#relevantRequestResponseList // 2. const backupCache = [...cache] // 3. 
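// Generic sketch (kept commented out) of the snapshot-and-rollback pattern used by
// #batchCacheOperations below: the request/response list is copied before any mutation
// so a validation error can restore it (see the catch block at the end of that method).
// The names here are illustrative, not taken from this file.
/*
const backup = [...list]
try {
  applyOperations(list)   // mutate `list` in place, operation by operation
} catch (err) {
  list.length = 0         // discard the partially applied mutations
  restoreFrom(backup)     // reinstate the snapshot, then rethrow
  throw err
}
*/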
const addedItems = [] // 4.1 const resultList = [] try { // 4.2 for (const operation of operations) { // 4.2.1 if (operation.type !== 'delete' && operation.type !== 'put') { throw webidl.errors.exception({ header: 'Cache.#batchCacheOperations', message: 'operation type does not match "delete" or "put"' }) } // 4.2.2 if (operation.type === 'delete' && operation.response != null) { throw webidl.errors.exception({ header: 'Cache.#batchCacheOperations', message: 'delete operation should not have an associated response' }) } // 4.2.3 if (this.#queryCache(operation.request, operation.options, addedItems).length) { throw new DOMException('???', 'InvalidStateError') } // 4.2.4 let requestResponses // 4.2.5 if (operation.type === 'delete') { // 4.2.5.1 requestResponses = this.#queryCache(operation.request, operation.options) // TODO: the spec is wrong, this is needed to pass WPTs if (requestResponses.length === 0) { return [] } // 4.2.5.2 for (const requestResponse of requestResponses) { const idx = cache.indexOf(requestResponse) assert(idx !== -1) // 4.2.5.2.1 cache.splice(idx, 1) } } else if (operation.type === 'put') { // 4.2.6 // 4.2.6.1 if (operation.response == null) { throw webidl.errors.exception({ header: 'Cache.#batchCacheOperations', message: 'put operation should have an associated response' }) } // 4.2.6.2 const r = operation.request // 4.2.6.3 if (!urlIsHttpHttpsScheme(r.url)) { throw webidl.errors.exception({ header: 'Cache.#batchCacheOperations', message: 'expected http or https scheme' }) } // 4.2.6.4 if (r.method !== 'GET') { throw webidl.errors.exception({ header: 'Cache.#batchCacheOperations', message: 'not get method' }) } // 4.2.6.5 if (operation.options != null) { throw webidl.errors.exception({ header: 'Cache.#batchCacheOperations', message: 'options must not be defined' }) } // 4.2.6.6 requestResponses = this.#queryCache(operation.request) // 4.2.6.7 for (const requestResponse of requestResponses) { const idx = cache.indexOf(requestResponse) assert(idx !== -1) // 4.2.6.7.1 cache.splice(idx, 1) } // 4.2.6.8 cache.push([operation.request, operation.response]) // 4.2.6.10 addedItems.push([operation.request, operation.response]) } // 4.2.7 resultList.push([operation.request, operation.response]) } // 4.3 return resultList } catch (e) { // 5. // 5.1 this.#relevantRequestResponseList.length = 0 // 5.2 this.#relevantRequestResponseList = backupCache // 5.3 throw e } } /** * @see https://w3c.github.io/ServiceWorker/#query-cache * @param {any} requestQuery * @param {import('../../types/cache').CacheQueryOptions} options * @param {requestResponseList} targetStorage * @returns {requestResponseList} */ #queryCache (requestQuery, options, targetStorage) { /** @type {requestResponseList} */ const resultList = [] const storage = targetStorage ?? 
this.#relevantRequestResponseList for (const requestResponse of storage) { const [cachedRequest, cachedResponse] = requestResponse if (this.#requestMatchesCachedItem(requestQuery, cachedRequest, cachedResponse, options)) { resultList.push(requestResponse) } } return resultList } /** * @see https://w3c.github.io/ServiceWorker/#request-matches-cached-item-algorithm * @param {any} requestQuery * @param {any} request * @param {any | null} response * @param {import('../../types/cache').CacheQueryOptions | undefined} options * @returns {boolean} */ #requestMatchesCachedItem (requestQuery, request, response = null, options) { // if (options?.ignoreMethod === false && request.method === 'GET') { // return false // } const queryURL = new URL(requestQuery.url) const cachedURL = new URL(request.url) if (options?.ignoreSearch) { cachedURL.search = '' queryURL.search = '' } if (!urlEquals(queryURL, cachedURL, true)) { return false } if ( response == null || options?.ignoreVary || !response.headersList.contains('vary') ) { return true } const fieldValues = getFieldValues(response.headersList.get('vary')) for (const fieldValue of fieldValues) { if (fieldValue === '*') { return false } const requestValue = request.headersList.get(fieldValue) const queryValue = requestQuery.headersList.get(fieldValue) // If one has the header and the other doesn't, or one has // a different value than the other, return false if (requestValue !== queryValue) { return false } } return true } } Object.defineProperties(Cache.prototype, { [Symbol.toStringTag]: { value: 'Cache', configurable: true }, match: kEnumerableProperty, matchAll: kEnumerableProperty, add: kEnumerableProperty, addAll: kEnumerableProperty, put: kEnumerableProperty, delete: kEnumerableProperty, keys: kEnumerableProperty }) const cacheQueryOptionConverters = [ { key: 'ignoreSearch', converter: webidl.converters.boolean, defaultValue: false }, { key: 'ignoreMethod', converter: webidl.converters.boolean, defaultValue: false }, { key: 'ignoreVary', converter: webidl.converters.boolean, defaultValue: false } ] webidl.converters.CacheQueryOptions = webidl.dictionaryConverter(cacheQueryOptionConverters) webidl.converters.MultiCacheQueryOptions = webidl.dictionaryConverter([ ...cacheQueryOptionConverters, { key: 'cacheName', converter: webidl.converters.DOMString } ]) webidl.converters.Response = webidl.interfaceConverter(Response) webidl.converters['sequence'] = webidl.sequenceConverter( webidl.converters.RequestInfo ) module.exports = { Cache } /***/ }), /***/ 44738: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { kConstruct } = __nccwpck_require__(80296) const { Cache } = __nccwpck_require__(50479) const { webidl } = __nccwpck_require__(74222) const { kEnumerableProperty } = __nccwpck_require__(3440) class CacheStorage { /** * @see https://w3c.github.io/ServiceWorker/#dfn-relevant-name-to-cache-map * @type {Map} */ async has (cacheName) { webidl.brandCheck(this, CacheStorage) webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.has' }) cacheName = webidl.converters.DOMString(cacheName) // 2.1.1 // 2.2 return this.#caches.has(cacheName) } /** * @see https://w3c.github.io/ServiceWorker/#dom-cachestorage-open * @param {string} cacheName * @returns {Promise} */ async open (cacheName) { webidl.brandCheck(this, CacheStorage) webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.open' }) cacheName = webidl.converters.DOMString(cacheName) // 2.1 if (this.#caches.has(cacheName)) { // await caches.open('v1') 
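// Illustrative usage sketch (kept commented out) for the CacheStorage class defined in
// this module. The comment split across the surrounding lines is pointing out that two
// open() calls with the same name return distinct Cache objects backed by the same
// request/response list. `caches` stands for an instance of this class; the cache name
// is a placeholder.
/*
await caches.has('v1')              // false until the cache has been opened
const a = await caches.open('v1')   // creates and memoizes the backing list
const b = await caches.open('v1')   // a !== b, but both wrap the same list
console.log(await caches.keys())    // ['v1']
await caches.delete('v1')           // true
*/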
!== await caches.open('v1') // 2.1.1 const cache = this.#caches.get(cacheName) // 2.1.1.1 return new Cache(kConstruct, cache) } // 2.2 const cache = [] // 2.3 this.#caches.set(cacheName, cache) // 2.4 return new Cache(kConstruct, cache) } /** * @see https://w3c.github.io/ServiceWorker/#cache-storage-delete * @param {string} cacheName * @returns {Promise} */ async delete (cacheName) { webidl.brandCheck(this, CacheStorage) webidl.argumentLengthCheck(arguments, 1, { header: 'CacheStorage.delete' }) cacheName = webidl.converters.DOMString(cacheName) return this.#caches.delete(cacheName) } /** * @see https://w3c.github.io/ServiceWorker/#cache-storage-keys * @returns {string[]} */ async keys () { webidl.brandCheck(this, CacheStorage) // 2.1 const keys = this.#caches.keys() // 2.2 return [...keys] } } Object.defineProperties(CacheStorage.prototype, { [Symbol.toStringTag]: { value: 'CacheStorage', configurable: true }, match: kEnumerableProperty, has: kEnumerableProperty, open: kEnumerableProperty, delete: kEnumerableProperty, keys: kEnumerableProperty }) module.exports = { CacheStorage } /***/ }), /***/ 80296: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; module.exports = { kConstruct: (__nccwpck_require__(36443).kConstruct) } /***/ }), /***/ 23993: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const assert = __nccwpck_require__(42613) const { URLSerializer } = __nccwpck_require__(94322) const { isValidHeaderName } = __nccwpck_require__(15523) /** * @see https://url.spec.whatwg.org/#concept-url-equals * @param {URL} A * @param {URL} B * @param {boolean | undefined} excludeFragment * @returns {boolean} */ function urlEquals (A, B, excludeFragment = false) { const serializedA = URLSerializer(A, excludeFragment) const serializedB = URLSerializer(B, excludeFragment) return serializedA === serializedB } /** * @see https://github.com/chromium/chromium/blob/694d20d134cb553d8d89e5500b9148012b1ba299/content/browser/cache_storage/cache_storage_cache.cc#L260-L262 * @param {string} header */ function fieldValues (header) { assert(header !== null) const values = [] for (let value of header.split(',')) { value = value.trim() if (!value.length) { continue } else if (!isValidHeaderName(value)) { continue } values.push(value) } return values } module.exports = { urlEquals, fieldValues } /***/ }), /***/ 86197: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // @ts-check /* global WebAssembly */ const assert = __nccwpck_require__(42613) const net = __nccwpck_require__(69278) const http = __nccwpck_require__(58611) const { pipeline } = __nccwpck_require__(2203) const util = __nccwpck_require__(3440) const timers = __nccwpck_require__(28804) const Request = __nccwpck_require__(44655) const DispatcherBase = __nccwpck_require__(50001) const { RequestContentLengthMismatchError, ResponseContentLengthMismatchError, InvalidArgumentError, RequestAbortedError, HeadersTimeoutError, HeadersOverflowError, SocketError, InformationalError, BodyTimeoutError, HTTPParserError, ResponseExceededMaxSizeError, ClientDestroyedError } = __nccwpck_require__(68707) const buildConnector = __nccwpck_require__(59136) const { kUrl, kReset, kServerName, kClient, kBusy, kParser, kConnect, kBlocking, kResuming, kRunning, kPending, kSize, kWriting, kQueue, kConnected, kConnecting, kNeedDrain, kNoRef, kKeepAliveDefaultTimeout, kHostHeader, kPendingIdx, kRunningIdx, kError, kPipelining, kSocket, kKeepAliveTimeoutValue, kMaxHeadersSize, 
kKeepAliveMaxTimeout, kKeepAliveTimeoutThreshold, kHeadersTimeout, kBodyTimeout, kStrictContentLength, kConnector, kMaxRedirections, kMaxRequests, kCounter, kClose, kDestroy, kDispatch, kInterceptors, kLocalAddress, kMaxResponseSize, kHTTPConnVersion, // HTTP2 kHost, kHTTP2Session, kHTTP2SessionState, kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(36443) /** @type {import('http2')} */ let http2 try { http2 = __nccwpck_require__(85675) } catch { // @ts-ignore http2 = { constants: {} } } const { constants: { HTTP2_HEADER_AUTHORITY, HTTP2_HEADER_METHOD, HTTP2_HEADER_PATH, HTTP2_HEADER_SCHEME, HTTP2_HEADER_CONTENT_LENGTH, HTTP2_HEADER_EXPECT, HTTP2_HEADER_STATUS } } = http2 // Experimental let h2ExperimentalWarned = false const FastBuffer = Buffer[Symbol.species] const kClosedResolve = Symbol('kClosedResolve') const channels = {} try { const diagnosticsChannel = __nccwpck_require__(31637) channels.sendHeaders = diagnosticsChannel.channel('undici:client:sendHeaders') channels.beforeConnect = diagnosticsChannel.channel('undici:client:beforeConnect') channels.connectError = diagnosticsChannel.channel('undici:client:connectError') channels.connected = diagnosticsChannel.channel('undici:client:connected') } catch { channels.sendHeaders = { hasSubscribers: false } channels.beforeConnect = { hasSubscribers: false } channels.connectError = { hasSubscribers: false } channels.connected = { hasSubscribers: false } } /** * @type {import('../types/client').default} */ class Client extends DispatcherBase { /** * * @param {string|URL} url * @param {import('../types/client').Client.Options} options */ constructor (url, { interceptors, maxHeaderSize, headersTimeout, socketTimeout, requestTimeout, connectTimeout, bodyTimeout, idleTimeout, keepAlive, keepAliveTimeout, maxKeepAliveTimeout, keepAliveMaxTimeout, keepAliveTimeoutThreshold, socketPath, pipelining, tls, strictContentLength, maxCachedSessions, maxRedirections, connect, maxRequestsPerClient, localAddress, maxResponseSize, autoSelectFamily, autoSelectFamilyAttemptTimeout, // h2 allowH2, maxConcurrentStreams } = {}) { super() if (keepAlive !== undefined) { throw new InvalidArgumentError('unsupported keepAlive, use pipelining=0 instead') } if (socketTimeout !== undefined) { throw new InvalidArgumentError('unsupported socketTimeout, use headersTimeout & bodyTimeout instead') } if (requestTimeout !== undefined) { throw new InvalidArgumentError('unsupported requestTimeout, use headersTimeout & bodyTimeout instead') } if (idleTimeout !== undefined) { throw new InvalidArgumentError('unsupported idleTimeout, use keepAliveTimeout instead') } if (maxKeepAliveTimeout !== undefined) { throw new InvalidArgumentError('unsupported maxKeepAliveTimeout, use keepAliveMaxTimeout instead') } if (maxHeaderSize != null && !Number.isFinite(maxHeaderSize)) { throw new InvalidArgumentError('invalid maxHeaderSize') } if (socketPath != null && typeof socketPath !== 'string') { throw new InvalidArgumentError('invalid socketPath') } if (connectTimeout != null && (!Number.isFinite(connectTimeout) || connectTimeout < 0)) { throw new InvalidArgumentError('invalid connectTimeout') } if (keepAliveTimeout != null && (!Number.isFinite(keepAliveTimeout) || keepAliveTimeout <= 0)) { throw new InvalidArgumentError('invalid keepAliveTimeout') } if (keepAliveMaxTimeout != null && (!Number.isFinite(keepAliveMaxTimeout) || keepAliveMaxTimeout <= 0)) { throw new InvalidArgumentError('invalid keepAliveMaxTimeout') } if (keepAliveTimeoutThreshold != null && 
!Number.isFinite(keepAliveTimeoutThreshold)) { throw new InvalidArgumentError('invalid keepAliveTimeoutThreshold') } if (headersTimeout != null && (!Number.isInteger(headersTimeout) || headersTimeout < 0)) { throw new InvalidArgumentError('headersTimeout must be a positive integer or zero') } if (bodyTimeout != null && (!Number.isInteger(bodyTimeout) || bodyTimeout < 0)) { throw new InvalidArgumentError('bodyTimeout must be a positive integer or zero') } if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { throw new InvalidArgumentError('connect must be a function or an object') } if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { throw new InvalidArgumentError('maxRedirections must be a positive number') } if (maxRequestsPerClient != null && (!Number.isInteger(maxRequestsPerClient) || maxRequestsPerClient < 0)) { throw new InvalidArgumentError('maxRequestsPerClient must be a positive number') } if (localAddress != null && (typeof localAddress !== 'string' || net.isIP(localAddress) === 0)) { throw new InvalidArgumentError('localAddress must be valid string IP address') } if (maxResponseSize != null && (!Number.isInteger(maxResponseSize) || maxResponseSize < -1)) { throw new InvalidArgumentError('maxResponseSize must be a positive number') } if ( autoSelectFamilyAttemptTimeout != null && (!Number.isInteger(autoSelectFamilyAttemptTimeout) || autoSelectFamilyAttemptTimeout < -1) ) { throw new InvalidArgumentError('autoSelectFamilyAttemptTimeout must be a positive number') } // h2 if (allowH2 != null && typeof allowH2 !== 'boolean') { throw new InvalidArgumentError('allowH2 must be a valid boolean value') } if (maxConcurrentStreams != null && (typeof maxConcurrentStreams !== 'number' || maxConcurrentStreams < 1)) { throw new InvalidArgumentError('maxConcurrentStreams must be a possitive integer, greater than 0') } if (typeof connect !== 'function') { connect = buildConnector({ ...tls, maxCachedSessions, allowH2, socketPath, timeout: connectTimeout, ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), ...connect }) } this[kInterceptors] = interceptors && interceptors.Client && Array.isArray(interceptors.Client) ? interceptors.Client : [createRedirectInterceptor({ maxRedirections })] this[kUrl] = util.parseOrigin(url) this[kConnector] = connect this[kSocket] = null this[kPipelining] = pipelining != null ? pipelining : 1 this[kMaxHeadersSize] = maxHeaderSize || http.maxHeaderSize this[kKeepAliveDefaultTimeout] = keepAliveTimeout == null ? 4e3 : keepAliveTimeout this[kKeepAliveMaxTimeout] = keepAliveMaxTimeout == null ? 600e3 : keepAliveMaxTimeout this[kKeepAliveTimeoutThreshold] = keepAliveTimeoutThreshold == null ? 1e3 : keepAliveTimeoutThreshold this[kKeepAliveTimeoutValue] = this[kKeepAliveDefaultTimeout] this[kServerName] = null this[kLocalAddress] = localAddress != null ? localAddress : null this[kResuming] = 0 // 0, idle, 1, scheduled, 2 resuming this[kNeedDrain] = 0 // 0, idle, 1, scheduled, 2 resuming this[kHostHeader] = `host: ${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}\r\n` this[kBodyTimeout] = bodyTimeout != null ? bodyTimeout : 300e3 this[kHeadersTimeout] = headersTimeout != null ? headersTimeout : 300e3 this[kStrictContentLength] = strictContentLength == null ? 
true : strictContentLength this[kMaxRedirections] = maxRedirections this[kMaxRequests] = maxRequestsPerClient this[kClosedResolve] = null this[kMaxResponseSize] = maxResponseSize > -1 ? maxResponseSize : -1 this[kHTTPConnVersion] = 'h1' // HTTP/2 this[kHTTP2Session] = null this[kHTTP2SessionState] = !allowH2 ? null : { // streams: null, // Fixed queue of streams - For future support of `push` openStreams: 0, // Keep track of them to decide wether or not unref the session maxConcurrentStreams: maxConcurrentStreams != null ? maxConcurrentStreams : 100 // Max peerConcurrentStreams for a Node h2 server } this[kHost] = `${this[kUrl].hostname}${this[kUrl].port ? `:${this[kUrl].port}` : ''}` // kQueue is built up of 3 sections separated by // the kRunningIdx and kPendingIdx indices. // | complete | running | pending | // ^ kRunningIdx ^ kPendingIdx ^ kQueue.length // kRunningIdx points to the first running element. // kPendingIdx points to the first pending element. // This implements a fast queue with an amortized // time of O(1). this[kQueue] = [] this[kRunningIdx] = 0 this[kPendingIdx] = 0 } get pipelining () { return this[kPipelining] } set pipelining (value) { this[kPipelining] = value resume(this, true) } get [kPending] () { return this[kQueue].length - this[kPendingIdx] } get [kRunning] () { return this[kPendingIdx] - this[kRunningIdx] } get [kSize] () { return this[kQueue].length - this[kRunningIdx] } get [kConnected] () { return !!this[kSocket] && !this[kConnecting] && !this[kSocket].destroyed } get [kBusy] () { const socket = this[kSocket] return ( (socket && (socket[kReset] || socket[kWriting] || socket[kBlocking])) || (this[kSize] >= (this[kPipelining] || 1)) || this[kPending] > 0 ) } /* istanbul ignore: only used for test */ [kConnect] (cb) { connect(this) this.once('connect', cb) } [kDispatch] (opts, handler) { const origin = opts.origin || this[kUrl].origin const request = this[kHTTPConnVersion] === 'h2' ? Request[kHTTP2BuildRequest](origin, opts, handler) : Request[kHTTP1BuildRequest](origin, opts, handler) this[kQueue].push(request) if (this[kResuming]) { // Do nothing. } else if (util.bodyLength(request.body) == null && util.isIterable(request.body)) { // Wait a tick in case stream/iterator is ended in the same tick. this[kResuming] = 1 process.nextTick(resume, this) } else { resume(this, true) } if (this[kResuming] && this[kNeedDrain] !== 2 && this[kBusy]) { this[kNeedDrain] = 2 } return this[kNeedDrain] < 2 } async [kClose] () { // TODO: for H2 we need to gracefully flush the remaining enqueued // request and close each stream. return new Promise((resolve) => { if (!this[kSize]) { resolve(null) } else { this[kClosedResolve] = resolve } }) } async [kDestroy] (err) { return new Promise((resolve) => { const requests = this[kQueue].splice(this[kPendingIdx]) for (let i = 0; i < requests.length; i++) { const request = requests[i] errorRequest(this, request, err) } const callback = () => { if (this[kClosedResolve]) { // TODO (fix): Should we error here with ClientDestroyedError? 
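// Illustrative usage sketch (kept commented out): a Client pins a single origin and
// multiplexes requests over one socket, bounded by `pipelining`; pending work lives in
// the kQueue described above, partitioned into completed / running / pending by
// kRunningIdx and kPendingIdx. Values are hypothetical; assumes an async context.
/*
const { Client } = require('undici')

const client = new Client('https://example.com', {
  pipelining: 2,          // 0 disables keep-alive / pipelining entirely
  headersTimeout: 30e3,
  bodyTimeout: 30e3,
  keepAliveTimeout: 4e3
})

const { statusCode, body } = await client.request({ path: '/', method: 'GET' })
console.log(statusCode, await body.text())
await client.close()      // resolves once queued requests have drained
*/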
this[kClosedResolve]() this[kClosedResolve] = null } resolve() } if (this[kHTTP2Session] != null) { util.destroy(this[kHTTP2Session], err) this[kHTTP2Session] = null this[kHTTP2SessionState] = null } if (!this[kSocket]) { queueMicrotask(callback) } else { util.destroy(this[kSocket].on('close', callback), err) } resume(this) }) } } function onHttp2SessionError (err) { assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') this[kSocket][kError] = err onError(this[kClient], err) } function onHttp2FrameError (type, code, id) { const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) if (id === 0) { this[kSocket][kError] = err onError(this[kClient], err) } } function onHttp2SessionEnd () { util.destroy(this, new SocketError('other side closed')) util.destroy(this[kSocket], new SocketError('other side closed')) } function onHTTP2GoAway (code) { const client = this[kClient] const err = new InformationalError(`HTTP/2: "GOAWAY" frame received with code ${code}`) client[kSocket] = null client[kHTTP2Session] = null if (client.destroyed) { assert(this[kPending] === 0) // Fail entire queue. const requests = client[kQueue].splice(client[kRunningIdx]) for (let i = 0; i < requests.length; i++) { const request = requests[i] errorRequest(this, request, err) } } else if (client[kRunning] > 0) { // Fail head of pipeline. const request = client[kQueue][client[kRunningIdx]] client[kQueue][client[kRunningIdx]++] = null errorRequest(client, request, err) } client[kPendingIdx] = client[kRunningIdx] assert(client[kRunning] === 0) client.emit('disconnect', client[kUrl], [client], err ) resume(client) } const constants = __nccwpck_require__(52824) const createRedirectInterceptor = __nccwpck_require__(64415) const EMPTY_BUF = Buffer.alloc(0) async function lazyllhttp () { const llhttpWasmData = process.env.JEST_WORKER_ID ? __nccwpck_require__(63870) : undefined let mod try { mod = await WebAssembly.compile(Buffer.from(__nccwpck_require__(53434), 'base64')) } catch (e) { /* istanbul ignore next */ // We could check if the error was caused by the simd option not // being enabled, but the occurring of this other error // * https://github.com/emscripten-core/emscripten/issues/11495 // got me to remove that check to avoid breaking Node 12. 
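// Illustrative observability sketch (kept commented out): subscribing to the diagnostics
// channels registered earlier in this module ('undici:client:sendHeaders',
// 'undici:client:beforeConnect', 'undici:client:connectError', 'undici:client:connected').
// The payload fields shown are indicative only.
/*
const diagnosticsChannel = require('node:diagnostics_channel')

diagnosticsChannel.channel('undici:client:beforeConnect').subscribe((message) => {
  console.log('connecting', message && message.connectParams)
})

diagnosticsChannel.channel('undici:client:connectError').subscribe((message) => {
  console.error('connect failed', message && message.error)
})
*/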
mod = await WebAssembly.compile(Buffer.from(llhttpWasmData || __nccwpck_require__(63870), 'base64')) } return await WebAssembly.instantiate(mod, { env: { /* eslint-disable camelcase */ wasm_on_url: (p, at, len) => { /* istanbul ignore next */ return 0 }, wasm_on_status: (p, at, len) => { assert.strictEqual(currentParser.ptr, p) const start = at - currentBufferPtr + currentBufferRef.byteOffset return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 }, wasm_on_message_begin: (p) => { assert.strictEqual(currentParser.ptr, p) return currentParser.onMessageBegin() || 0 }, wasm_on_header_field: (p, at, len) => { assert.strictEqual(currentParser.ptr, p) const start = at - currentBufferPtr + currentBufferRef.byteOffset return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 }, wasm_on_header_value: (p, at, len) => { assert.strictEqual(currentParser.ptr, p) const start = at - currentBufferPtr + currentBufferRef.byteOffset return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 }, wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { assert.strictEqual(currentParser.ptr, p) return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0 }, wasm_on_body: (p, at, len) => { assert.strictEqual(currentParser.ptr, p) const start = at - currentBufferPtr + currentBufferRef.byteOffset return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 }, wasm_on_message_complete: (p) => { assert.strictEqual(currentParser.ptr, p) return currentParser.onMessageComplete() || 0 } /* eslint-enable camelcase */ } }) } let llhttpInstance = null let llhttpPromise = lazyllhttp() llhttpPromise.catch() let currentParser = null let currentBufferRef = null let currentBufferSize = 0 let currentBufferPtr = null const TIMEOUT_HEADERS = 1 const TIMEOUT_BODY = 2 const TIMEOUT_IDLE = 3 class Parser { constructor (client, socket, { exports }) { assert(Number.isFinite(client[kMaxHeadersSize]) && client[kMaxHeadersSize] > 0) this.llhttp = exports this.ptr = this.llhttp.llhttp_alloc(constants.TYPE.RESPONSE) this.client = client this.socket = socket this.timeout = null this.timeoutValue = null this.timeoutType = null this.statusCode = null this.statusText = '' this.upgrade = false this.headers = [] this.headersSize = 0 this.headersMaxSize = client[kMaxHeadersSize] this.shouldKeepAlive = false this.paused = false this.resume = this.resume.bind(this) this.bytesRead = 0 this.keepAlive = '' this.contentLength = '' this.connection = '' this.maxResponseSize = client[kMaxResponseSize] } setTimeout (value, type) { this.timeoutType = type if (value !== this.timeoutValue) { timers.clearTimeout(this.timeout) if (value) { this.timeout = timers.setTimeout(onParserTimeout, value, this) // istanbul ignore else: only for jest if (this.timeout.unref) { this.timeout.unref() } } else { this.timeout = null } this.timeoutValue = value } else if (this.timeout) { // istanbul ignore else: only for jest if (this.timeout.refresh) { this.timeout.refresh() } } } resume () { if (this.socket.destroyed || !this.paused) { return } assert(this.ptr != null) assert(currentParser == null) this.llhttp.llhttp_resume(this.ptr) assert(this.timeoutType === TIMEOUT_BODY) if (this.timeout) { // istanbul ignore else: only for jest if (this.timeout.refresh) { this.timeout.refresh() } } this.paused = false this.execute(this.socket.read() || EMPTY_BUF) // Flush parser. 
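// Note on the three timeout types used by this parser, and the client options that
// feed them (a descriptive sketch, kept commented out; values are hypothetical):
//   TIMEOUT_HEADERS (1) -> armed while waiting for response headers (headersTimeout)
//   TIMEOUT_BODY    (2) -> refreshed while the response body streams in (bodyTimeout)
//   TIMEOUT_IDLE    (3) -> armed between requests on a kept-alive socket
//                          (the negotiated kKeepAliveTimeoutValue)
/*
const client = new Client('https://example.com', {
  headersTimeout: 30e3,   // feeds TIMEOUT_HEADERS
  bodyTimeout: 300e3,     // feeds TIMEOUT_BODY
  keepAliveTimeout: 4e3   // baseline for TIMEOUT_IDLE (may be adjusted by the server's hint)
})
*/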
this.readMore() } readMore () { while (!this.paused && this.ptr) { const chunk = this.socket.read() if (chunk === null) { break } this.execute(chunk) } } execute (data) { assert(this.ptr != null) assert(currentParser == null) assert(!this.paused) const { socket, llhttp } = this if (data.length > currentBufferSize) { if (currentBufferPtr) { llhttp.free(currentBufferPtr) } currentBufferSize = Math.ceil(data.length / 4096) * 4096 currentBufferPtr = llhttp.malloc(currentBufferSize) } new Uint8Array(llhttp.memory.buffer, currentBufferPtr, currentBufferSize).set(data) // Call `execute` on the wasm parser. // We pass the `llhttp_parser` pointer address, the pointer address of buffer view data, // and finally the length of bytes to parse. // The return value is an error code or `constants.ERROR.OK`. try { let ret try { currentBufferRef = data currentParser = this ret = llhttp.llhttp_execute(this.ptr, currentBufferPtr, data.length) /* eslint-disable-next-line no-useless-catch */ } catch (err) { /* istanbul ignore next: difficult to make a test case for */ throw err } finally { currentParser = null currentBufferRef = null } const offset = llhttp.llhttp_get_error_pos(this.ptr) - currentBufferPtr if (ret === constants.ERROR.PAUSED_UPGRADE) { this.onUpgrade(data.slice(offset)) } else if (ret === constants.ERROR.PAUSED) { this.paused = true socket.unshift(data.slice(offset)) } else if (ret !== constants.ERROR.OK) { const ptr = llhttp.llhttp_get_error_reason(this.ptr) let message = '' /* istanbul ignore else: difficult to make a test case for */ if (ptr) { const len = new Uint8Array(llhttp.memory.buffer, ptr).indexOf(0) message = 'Response does not match the HTTP/1.1 protocol (' + Buffer.from(llhttp.memory.buffer, ptr, len).toString() + ')' } throw new HTTPParserError(message, constants.ERROR[ret], data.slice(offset)) } } catch (err) { util.destroy(socket, err) } } destroy () { assert(this.ptr != null) assert(currentParser == null) this.llhttp.llhttp_free(this.ptr) this.ptr = null timers.clearTimeout(this.timeout) this.timeout = null this.timeoutValue = null this.timeoutType = null this.paused = false } onStatus (buf) { this.statusText = buf.toString() } onMessageBegin () { const { socket, client } = this /* istanbul ignore next: difficult to make a test case for */ if (socket.destroyed) { return -1 } const request = client[kQueue][client[kRunningIdx]] if (!request) { return -1 } } onHeaderField (buf) { const len = this.headers.length if ((len & 1) === 0) { this.headers.push(buf) } else { this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) } this.trackHeader(buf.length) } onHeaderValue (buf) { let len = this.headers.length if ((len & 1) === 1) { this.headers.push(buf) len += 1 } else { this.headers[len - 1] = Buffer.concat([this.headers[len - 1], buf]) } const key = this.headers[len - 2] if (key.length === 10 && key.toString().toLowerCase() === 'keep-alive') { this.keepAlive += buf.toString() } else if (key.length === 10 && key.toString().toLowerCase() === 'connection') { this.connection += buf.toString() } else if (key.length === 14 && key.toString().toLowerCase() === 'content-length') { this.contentLength += buf.toString() } this.trackHeader(buf.length) } trackHeader (len) { this.headersSize += len if (this.headersSize >= this.headersMaxSize) { util.destroy(this.socket, new HeadersOverflowError()) } } onUpgrade (head) { const { upgrade, client, socket, headers, statusCode } = this assert(upgrade) const request = client[kQueue][client[kRunningIdx]] assert(request) 
assert(!socket.destroyed) assert(socket === client[kSocket]) assert(!this.paused) assert(request.upgrade || request.method === 'CONNECT') this.statusCode = null this.statusText = '' this.shouldKeepAlive = null assert(this.headers.length % 2 === 0) this.headers = [] this.headersSize = 0 socket.unshift(head) socket[kParser].destroy() socket[kParser] = null socket[kClient] = null socket[kError] = null socket .removeListener('error', onSocketError) .removeListener('readable', onSocketReadable) .removeListener('end', onSocketEnd) .removeListener('close', onSocketClose) client[kSocket] = null client[kQueue][client[kRunningIdx]++] = null client.emit('disconnect', client[kUrl], [client], new InformationalError('upgrade')) try { request.onUpgrade(statusCode, headers, socket) } catch (err) { util.destroy(socket, err) } resume(client) } onHeadersComplete (statusCode, upgrade, shouldKeepAlive) { const { client, socket, headers, statusText } = this /* istanbul ignore next: difficult to make a test case for */ if (socket.destroyed) { return -1 } const request = client[kQueue][client[kRunningIdx]] /* istanbul ignore next: difficult to make a test case for */ if (!request) { return -1 } assert(!this.upgrade) assert(this.statusCode < 200) if (statusCode === 100) { util.destroy(socket, new SocketError('bad response', util.getSocketInfo(socket))) return -1 } /* this can only happen if server is misbehaving */ if (upgrade && !request.upgrade) { util.destroy(socket, new SocketError('bad upgrade', util.getSocketInfo(socket))) return -1 } assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS) this.statusCode = statusCode this.shouldKeepAlive = ( shouldKeepAlive || // Override llhttp value which does not allow keepAlive for HEAD. (request.method === 'HEAD' && !socket[kReset] && this.connection.toLowerCase() === 'keep-alive') ) if (this.statusCode >= 200) { const bodyTimeout = request.bodyTimeout != null ? request.bodyTimeout : client[kBodyTimeout] this.setTimeout(bodyTimeout, TIMEOUT_BODY) } else if (this.timeout) { // istanbul ignore else: only for jest if (this.timeout.refresh) { this.timeout.refresh() } } if (request.method === 'CONNECT') { assert(client[kRunning] === 1) this.upgrade = true return 2 } if (upgrade) { assert(client[kRunning] === 1) this.upgrade = true return 2 } assert(this.headers.length % 2 === 0) this.headers = [] this.headersSize = 0 if (this.shouldKeepAlive && client[kPipelining]) { const keepAliveTimeout = this.keepAlive ? util.parseKeepAliveTimeout(this.keepAlive) : null if (keepAliveTimeout != null) { const timeout = Math.min( keepAliveTimeout - client[kKeepAliveTimeoutThreshold], client[kKeepAliveMaxTimeout] ) if (timeout <= 0) { socket[kReset] = true } else { client[kKeepAliveTimeoutValue] = timeout } } else { client[kKeepAliveTimeoutValue] = client[kKeepAliveDefaultTimeout] } } else { // Stop more requests from being dispatched. socket[kReset] = true } const pause = request.onHeaders(statusCode, headers, this.resume, statusText) === false if (request.aborted) { return -1 } if (request.method === 'HEAD') { return 1 } if (statusCode < 200) { return 1 } if (socket[kBlocking]) { socket[kBlocking] = false resume(client) } return pause ? 
constants.ERROR.PAUSED : 0 } onBody (buf) { const { client, socket, statusCode, maxResponseSize } = this if (socket.destroyed) { return -1 } const request = client[kQueue][client[kRunningIdx]] assert(request) assert.strictEqual(this.timeoutType, TIMEOUT_BODY) if (this.timeout) { // istanbul ignore else: only for jest if (this.timeout.refresh) { this.timeout.refresh() } } assert(statusCode >= 200) if (maxResponseSize > -1 && this.bytesRead + buf.length > maxResponseSize) { util.destroy(socket, new ResponseExceededMaxSizeError()) return -1 } this.bytesRead += buf.length if (request.onData(buf) === false) { return constants.ERROR.PAUSED } } onMessageComplete () { const { client, socket, statusCode, upgrade, headers, contentLength, bytesRead, shouldKeepAlive } = this if (socket.destroyed && (!statusCode || shouldKeepAlive)) { return -1 } if (upgrade) { return } const request = client[kQueue][client[kRunningIdx]] assert(request) assert(statusCode >= 100) this.statusCode = null this.statusText = '' this.bytesRead = 0 this.contentLength = '' this.keepAlive = '' this.connection = '' assert(this.headers.length % 2 === 0) this.headers = [] this.headersSize = 0 if (statusCode < 200) { return } /* istanbul ignore next: should be handled by llhttp? */ if (request.method !== 'HEAD' && contentLength && bytesRead !== parseInt(contentLength, 10)) { util.destroy(socket, new ResponseContentLengthMismatchError()) return -1 } request.onComplete(headers) client[kQueue][client[kRunningIdx]++] = null if (socket[kWriting]) { assert.strictEqual(client[kRunning], 0) // Response completed before request. util.destroy(socket, new InformationalError('reset')) return constants.ERROR.PAUSED } else if (!shouldKeepAlive) { util.destroy(socket, new InformationalError('reset')) return constants.ERROR.PAUSED } else if (socket[kReset] && client[kRunning] === 0) { // Destroy socket once all requests have completed. // The request at the tail of the pipeline is the one // that requested reset and no further requests should // have been queued since then. util.destroy(socket, new InformationalError('reset')) return constants.ERROR.PAUSED } else if (client[kPipelining] === 1) { // We must wait a full event loop cycle to reuse this socket to make sure // that non-spec compliant servers are not closing the connection even if they // said they won't. setImmediate(resume, client) } else { resume(client) } } } function onParserTimeout (parser) { const { socket, timeoutType, client } = parser /* istanbul ignore else */ if (timeoutType === TIMEOUT_HEADERS) { if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { assert(!parser.paused, 'cannot be paused while waiting for headers') util.destroy(socket, new HeadersTimeoutError()) } } else if (timeoutType === TIMEOUT_BODY) { if (!parser.paused) { util.destroy(socket, new BodyTimeoutError()) } } else if (timeoutType === TIMEOUT_IDLE) { assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]) util.destroy(socket, new InformationalError('socket idle timeout')) } } function onSocketReadable () { const { [kParser]: parser } = this if (parser) { parser.readMore() } } function onSocketError (err) { const { [kClient]: client, [kParser]: parser } = this assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') if (client[kHTTPConnVersion] !== 'h2') { // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded // to the user. 
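/*
 * For illustration, the clamping onHeadersComplete applies above when a server
 * advertises `Keep-Alive: timeout=N`: the advertised value (assumed here to be
 * in milliseconds after parsing) is reduced by the client's threshold and
 * capped at its maximum, and a non-positive result marks the socket with
 * kReset instead of reusing it. A sketch with hypothetical configuration:
 *
 *   const advertisedMs = 5000   // e.g. parsed from "Keep-Alive: timeout=5"
 *   const thresholdMs = 1000    // hypothetical kKeepAliveTimeoutThreshold
 *   const maxMs = 600000        // hypothetical kKeepAliveMaxTimeout
 *   Math.min(advertisedMs - thresholdMs, maxMs) // 4000
 *
 * The clamped value later drives the TIMEOUT_IDLE timer, so the client gives
 * the socket up slightly before the server's own deadline.
 */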
    if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) {
      // We treat all incoming data so far as a valid response.
      parser.onMessageComplete()
      return
    }
  }

  this[kError] = err

  onError(this[kClient], err)
}

function onError (client, err) {
  if (
    client[kRunning] === 0 &&
    err.code !== 'UND_ERR_INFO' &&
    err.code !== 'UND_ERR_SOCKET'
  ) {
    // Error is not caused by running request and not a recoverable
    // socket error.

    assert(client[kPendingIdx] === client[kRunningIdx])

    const requests = client[kQueue].splice(client[kRunningIdx])
    for (let i = 0; i < requests.length; i++) {
      const request = requests[i]
      errorRequest(client, request, err)
    }
    assert(client[kSize] === 0)
  }
}

function onSocketEnd () {
  const { [kParser]: parser, [kClient]: client } = this

  if (client[kHTTPConnVersion] !== 'h2') {
    if (parser.statusCode && !parser.shouldKeepAlive) {
      // We treat all incoming data so far as a valid response.
      parser.onMessageComplete()
      return
    }
  }

  util.destroy(this, new SocketError('other side closed', util.getSocketInfo(this)))
}

function onSocketClose () {
  const { [kClient]: client, [kParser]: parser } = this

  if (client[kHTTPConnVersion] === 'h1' && parser) {
    if (!this[kError] && parser.statusCode && !parser.shouldKeepAlive) {
      // We treat all incoming data so far as a valid response.
      parser.onMessageComplete()
    }

    this[kParser].destroy()
    this[kParser] = null
  }

  const err = this[kError] || new SocketError('closed', util.getSocketInfo(this))

  client[kSocket] = null

  if (client.destroyed) {
    assert(client[kPending] === 0)

    // Fail entire queue.
    const requests = client[kQueue].splice(client[kRunningIdx])
    for (let i = 0; i < requests.length; i++) {
      const request = requests[i]
      errorRequest(client, request, err)
    }
  } else if (client[kRunning] > 0 && err.code !== 'UND_ERR_INFO') {
    // Fail head of pipeline.
const request = client[kQueue][client[kRunningIdx]] client[kQueue][client[kRunningIdx]++] = null errorRequest(client, request, err) } client[kPendingIdx] = client[kRunningIdx] assert(client[kRunning] === 0) client.emit('disconnect', client[kUrl], [client], err) resume(client) } async function connect (client) { assert(!client[kConnecting]) assert(!client[kSocket]) let { host, hostname, protocol, port } = client[kUrl] // Resolve ipv6 if (hostname[0] === '[') { const idx = hostname.indexOf(']') assert(idx !== -1) const ip = hostname.substring(1, idx) assert(net.isIP(ip)) hostname = ip } client[kConnecting] = true if (channels.beforeConnect.hasSubscribers) { channels.beforeConnect.publish({ connectParams: { host, hostname, protocol, port, servername: client[kServerName], localAddress: client[kLocalAddress] }, connector: client[kConnector] }) } try { const socket = await new Promise((resolve, reject) => { client[kConnector]({ host, hostname, protocol, port, servername: client[kServerName], localAddress: client[kLocalAddress] }, (err, socket) => { if (err) { reject(err) } else { resolve(socket) } }) }) if (client.destroyed) { util.destroy(socket.on('error', () => {}), new ClientDestroyedError()) return } client[kConnecting] = false assert(socket) const isH2 = socket.alpnProtocol === 'h2' if (isH2) { if (!h2ExperimentalWarned) { h2ExperimentalWarned = true process.emitWarning('H2 support is experimental, expect them to change at any time.', { code: 'UNDICI-H2' }) } const session = http2.connect(client[kUrl], { createConnection: () => socket, peerMaxConcurrentStreams: client[kHTTP2SessionState].maxConcurrentStreams }) client[kHTTPConnVersion] = 'h2' session[kClient] = client session[kSocket] = socket session.on('error', onHttp2SessionError) session.on('frameError', onHttp2FrameError) session.on('end', onHttp2SessionEnd) session.on('goaway', onHTTP2GoAway) session.on('close', onSocketClose) session.unref() client[kHTTP2Session] = session socket[kHTTP2Session] = session } else { if (!llhttpInstance) { llhttpInstance = await llhttpPromise llhttpPromise = null } socket[kNoRef] = false socket[kWriting] = false socket[kReset] = false socket[kBlocking] = false socket[kParser] = new Parser(client, socket, llhttpInstance) } socket[kCounter] = 0 socket[kMaxRequests] = client[kMaxRequests] socket[kClient] = client socket[kError] = null socket .on('error', onSocketError) .on('readable', onSocketReadable) .on('end', onSocketEnd) .on('close', onSocketClose) client[kSocket] = socket if (channels.connected.hasSubscribers) { channels.connected.publish({ connectParams: { host, hostname, protocol, port, servername: client[kServerName], localAddress: client[kLocalAddress] }, connector: client[kConnector], socket }) } client.emit('connect', client[kUrl], [client]) } catch (err) { if (client.destroyed) { return } client[kConnecting] = false if (channels.connectError.hasSubscribers) { channels.connectError.publish({ connectParams: { host, hostname, protocol, port, servername: client[kServerName], localAddress: client[kLocalAddress] }, connector: client[kConnector], error: err }) } if (err.code === 'ERR_TLS_CERT_ALTNAME_INVALID') { assert(client[kRunning] === 0) while (client[kPending] > 0 && client[kQueue][client[kPendingIdx]].servername === client[kServerName]) { const request = client[kQueue][client[kPendingIdx]++] errorRequest(client, request, err) } } else { onError(client, err) } client.emit('connectionError', client[kUrl], [client], err) } resume(client) } function emitDrain (client) { client[kNeedDrain] = 0 
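/*
 * For illustration, the IPv6 handling in connect() above: a bracketed literal
 * taken from the URL is unwrapped before being handed to the connector, since
 * the underlying dial wants the bare address. A minimal sketch:
 *
 *   const hostname = '[::1]'   // as it appears in client[kUrl].hostname
 *   const ip = hostname.substring(1, hostname.indexOf(']')) // '::1'
 *   // net.isIP('::1') === 6, so the assertion passes and '::1' is what gets dialed.
 */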
client.emit('drain', client[kUrl], [client]) } function resume (client, sync) { if (client[kResuming] === 2) { return } client[kResuming] = 2 _resume(client, sync) client[kResuming] = 0 if (client[kRunningIdx] > 256) { client[kQueue].splice(0, client[kRunningIdx]) client[kPendingIdx] -= client[kRunningIdx] client[kRunningIdx] = 0 } } function _resume (client, sync) { while (true) { if (client.destroyed) { assert(client[kPending] === 0) return } if (client[kClosedResolve] && !client[kSize]) { client[kClosedResolve]() client[kClosedResolve] = null return } const socket = client[kSocket] if (socket && !socket.destroyed && socket.alpnProtocol !== 'h2') { if (client[kSize] === 0) { if (!socket[kNoRef] && socket.unref) { socket.unref() socket[kNoRef] = true } } else if (socket[kNoRef] && socket.ref) { socket.ref() socket[kNoRef] = false } if (client[kSize] === 0) { if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE) } } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { const request = client[kQueue][client[kRunningIdx]] const headersTimeout = request.headersTimeout != null ? request.headersTimeout : client[kHeadersTimeout] socket[kParser].setTimeout(headersTimeout, TIMEOUT_HEADERS) } } } if (client[kBusy]) { client[kNeedDrain] = 2 } else if (client[kNeedDrain] === 2) { if (sync) { client[kNeedDrain] = 1 process.nextTick(emitDrain, client) } else { emitDrain(client) } continue } if (client[kPending] === 0) { return } if (client[kRunning] >= (client[kPipelining] || 1)) { return } const request = client[kQueue][client[kPendingIdx]] if (client[kUrl].protocol === 'https:' && client[kServerName] !== request.servername) { if (client[kRunning] > 0) { return } client[kServerName] = request.servername if (socket && socket.servername !== request.servername) { util.destroy(socket, new InformationalError('servername changed')) return } } if (client[kConnecting]) { return } if (!socket && !client[kHTTP2Session]) { connect(client) return } if (socket.destroyed || socket[kWriting] || socket[kReset] || socket[kBlocking]) { return } if (client[kRunning] > 0 && !request.idempotent) { // Non-idempotent request cannot be retried. // Ensure that no other requests are inflight and // could cause failure. return } if (client[kRunning] > 0 && (request.upgrade || request.method === 'CONNECT')) { // Don't dispatch an upgrade until all preceding requests have completed. // A misbehaving server might upgrade the connection before all pipelined // request has completed. return } if (client[kRunning] > 0 && util.bodyLength(request.body) !== 0 && (util.isStream(request.body) || util.isAsyncIterable(request.body))) { // Request with stream or iterator body can error while other requests // are inflight and indirectly error those as well. // Ensure this doesn't happen by waiting for inflight // to complete before dispatching. // Request with stream or iterator body cannot be retried. // Ensure that no other requests are inflight and // could cause failure. 
return } if (!request.aborted && write(client, request)) { client[kPendingIdx]++ } else { client[kQueue].splice(client[kPendingIdx], 1) } } } // https://www.rfc-editor.org/rfc/rfc7230#section-3.3.2 function shouldSendContentLength (method) { return method !== 'GET' && method !== 'HEAD' && method !== 'OPTIONS' && method !== 'TRACE' && method !== 'CONNECT' } function write (client, request) { if (client[kHTTPConnVersion] === 'h2') { writeH2(client, client[kHTTP2Session], request) return } const { body, method, path, host, upgrade, headers, blocking, reset } = request // https://tools.ietf.org/html/rfc7231#section-4.3.1 // https://tools.ietf.org/html/rfc7231#section-4.3.2 // https://tools.ietf.org/html/rfc7231#section-4.3.5 // Sending a payload body on a request that does not // expect it can cause undefined behavior on some // servers and corrupt connection state. Do not // re-use the connection for further requests. const expectsPayload = ( method === 'PUT' || method === 'POST' || method === 'PATCH' ) if (body && typeof body.read === 'function') { // Try to read EOF in order to get length. body.read(0) } const bodyLength = util.bodyLength(body) let contentLength = bodyLength if (contentLength === null) { contentLength = request.contentLength } if (contentLength === 0 && !expectsPayload) { // https://tools.ietf.org/html/rfc7230#section-3.3.2 // A user agent SHOULD NOT send a Content-Length header field when // the request message does not contain a payload body and the method // semantics do not anticipate such a body. contentLength = null } // https://github.com/nodejs/undici/issues/2046 // A user agent may send a Content-Length header with 0 value, this should be allowed. if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength !== null && request.contentLength !== contentLength) { if (client[kStrictContentLength]) { errorRequest(client, request, new RequestContentLengthMismatchError()) return false } process.emitWarning(new RequestContentLengthMismatchError()) } const socket = client[kSocket] try { request.onConnect((err) => { if (request.aborted || request.completed) { return } errorRequest(client, request, err || new RequestAbortedError()) util.destroy(socket, new InformationalError('aborted')) }) } catch (err) { errorRequest(client, request, err) } if (request.aborted) { return false } if (method === 'HEAD') { // https://github.com/mcollina/undici/issues/258 // Close after a HEAD request to interop with misbehaving servers // that may send a body in the response. socket[kReset] = true } if (upgrade || method === 'CONNECT') { // On CONNECT or upgrade, block pipeline from dispatching further // requests on this connection. 
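/*
 * For illustration, how write() above decides whether a content-length header
 * is emitted, for a few hypothetical requests:
 *
 *   shouldSendContentLength('GET')   // false: GET/HEAD/OPTIONS/TRACE/CONNECT are excluded
 *   shouldSendContentLength('POST')  // true
 *
 *   // GET with no body: expectsPayload is false and the length is 0, so
 *   // contentLength is cleared to null and no content-length header is sent.
 *   // POST with an empty body: contentLength stays 0, "content-length: 0" is sent.
 *   // POST with a 5-byte Buffer: "content-length: 5" is sent.
 *
 * If the caller supplied request.contentLength and it disagrees with the
 * measured body length, strict mode fails the request with
 * RequestContentLengthMismatchError; otherwise the mismatch only emits a warning.
 */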
socket[kReset] = true } if (reset != null) { socket[kReset] = reset } if (client[kMaxRequests] && socket[kCounter]++ >= client[kMaxRequests]) { socket[kReset] = true } if (blocking) { socket[kBlocking] = true } let header = `${method} ${path} HTTP/1.1\r\n` if (typeof host === 'string') { header += `host: ${host}\r\n` } else { header += client[kHostHeader] } if (upgrade) { header += `connection: upgrade\r\nupgrade: ${upgrade}\r\n` } else if (client[kPipelining] && !socket[kReset]) { header += 'connection: keep-alive\r\n' } else { header += 'connection: close\r\n' } if (headers) { header += headers } if (channels.sendHeaders.hasSubscribers) { channels.sendHeaders.publish({ request, headers: header, socket }) } /* istanbul ignore else: assertion */ if (!body || bodyLength === 0) { if (contentLength === 0) { socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') } else { assert(contentLength === null, 'no body must not have content length') socket.write(`${header}\r\n`, 'latin1') } request.onRequestSent() } else if (util.isBuffer(body)) { assert(contentLength === body.byteLength, 'buffer body must have content length') socket.cork() socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') socket.write(body) socket.uncork() request.onBodySent(body) request.onRequestSent() if (!expectsPayload) { socket[kReset] = true } } else if (util.isBlobLike(body)) { if (typeof body.stream === 'function') { writeIterable({ body: body.stream(), client, request, socket, contentLength, header, expectsPayload }) } else { writeBlob({ body, client, request, socket, contentLength, header, expectsPayload }) } } else if (util.isStream(body)) { writeStream({ body, client, request, socket, contentLength, header, expectsPayload }) } else if (util.isIterable(body)) { writeIterable({ body, client, request, socket, contentLength, header, expectsPayload }) } else { assert(false) } return true } function writeH2 (client, session, request) { const { body, method, path, host, upgrade, expectContinue, signal, headers: reqHeaders } = request let headers if (typeof reqHeaders === 'string') headers = Request[kHTTP2CopyHeaders](reqHeaders.trim()) else headers = reqHeaders if (upgrade) { errorRequest(client, request, new Error('Upgrade not supported for H2')) return false } try { // TODO(HTTP/2): Should we call onConnect immediately or on stream ready event? request.onConnect((err) => { if (request.aborted || request.completed) { return } errorRequest(client, request, err || new RequestAbortedError()) }) } catch (err) { errorRequest(client, request, err) } if (request.aborted) { return false } /** @type {import('node:http2').ClientHttp2Stream} */ let stream const h2State = client[kHTTP2SessionState] headers[HTTP2_HEADER_AUTHORITY] = host || client[kHost] headers[HTTP2_HEADER_METHOD] = method if (method === 'CONNECT') { session.ref() // we are already connected, streams are pending, first request // will create a new stream. 
We trigger a request to create the stream and wait until // `ready` event is triggered // We disabled endStream to allow the user to write to the stream stream = session.request(headers, { endStream: false, signal }) if (stream.id && !stream.pending) { request.onUpgrade(null, null, stream) ++h2State.openStreams } else { stream.once('ready', () => { request.onUpgrade(null, null, stream) ++h2State.openStreams }) } stream.once('close', () => { h2State.openStreams -= 1 // TODO(HTTP/2): unref only if current streams count is 0 if (h2State.openStreams === 0) session.unref() }) return true } // https://tools.ietf.org/html/rfc7540#section-8.3 // :path and :scheme headers must be omited when sending CONNECT headers[HTTP2_HEADER_PATH] = path headers[HTTP2_HEADER_SCHEME] = 'https' // https://tools.ietf.org/html/rfc7231#section-4.3.1 // https://tools.ietf.org/html/rfc7231#section-4.3.2 // https://tools.ietf.org/html/rfc7231#section-4.3.5 // Sending a payload body on a request that does not // expect it can cause undefined behavior on some // servers and corrupt connection state. Do not // re-use the connection for further requests. const expectsPayload = ( method === 'PUT' || method === 'POST' || method === 'PATCH' ) if (body && typeof body.read === 'function') { // Try to read EOF in order to get length. body.read(0) } let contentLength = util.bodyLength(body) if (contentLength == null) { contentLength = request.contentLength } if (contentLength === 0 || !expectsPayload) { // https://tools.ietf.org/html/rfc7230#section-3.3.2 // A user agent SHOULD NOT send a Content-Length header field when // the request message does not contain a payload body and the method // semantics do not anticipate such a body. contentLength = null } // https://github.com/nodejs/undici/issues/2046 // A user agent may send a Content-Length header with 0 value, this should be allowed. 
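/*
 * For illustration, the header translation writeH2 performs above for a
 * hypothetical non-CONNECT request to https://example.com:
 *
 *   headers[':authority'] = 'example.com'  // host || client[kHost]
 *   headers[':method']    = 'GET'
 *   headers[':path']      = '/index.html'
 *   headers[':scheme']    = 'https'
 *
 * For CONNECT, :path and :scheme are omitted per RFC 7540 section 8.3 and the
 * pending stream is handed to request.onUpgrade() instead.
 */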
if (shouldSendContentLength(method) && contentLength > 0 && request.contentLength != null && request.contentLength !== contentLength) { if (client[kStrictContentLength]) { errorRequest(client, request, new RequestContentLengthMismatchError()) return false } process.emitWarning(new RequestContentLengthMismatchError()) } if (contentLength != null) { assert(body, 'no body must not have content length') headers[HTTP2_HEADER_CONTENT_LENGTH] = `${contentLength}` } session.ref() const shouldEndStream = method === 'GET' || method === 'HEAD' if (expectContinue) { headers[HTTP2_HEADER_EXPECT] = '100-continue' stream = session.request(headers, { endStream: shouldEndStream, signal }) stream.once('continue', writeBodyH2) } else { stream = session.request(headers, { endStream: shouldEndStream, signal }) writeBodyH2() } // Increment counter as we have new several streams open ++h2State.openStreams stream.once('response', headers => { const { [HTTP2_HEADER_STATUS]: statusCode, ...realHeaders } = headers if (request.onHeaders(Number(statusCode), realHeaders, stream.resume.bind(stream), '') === false) { stream.pause() } }) stream.once('end', () => { request.onComplete([]) }) stream.on('data', (chunk) => { if (request.onData(chunk) === false) { stream.pause() } }) stream.once('close', () => { h2State.openStreams -= 1 // TODO(HTTP/2): unref only if current streams count is 0 if (h2State.openStreams === 0) { session.unref() } }) stream.once('error', function (err) { if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { h2State.streams -= 1 util.destroy(stream, err) } }) stream.once('frameError', (type, code) => { const err = new InformationalError(`HTTP/2: "frameError" received - type ${type}, code ${code}`) errorRequest(client, request, err) if (client[kHTTP2Session] && !client[kHTTP2Session].destroyed && !this.closed && !this.destroyed) { h2State.streams -= 1 util.destroy(stream, err) } }) // stream.on('aborted', () => { // // TODO(HTTP/2): Support aborted // }) // stream.on('timeout', () => { // // TODO(HTTP/2): Support timeout // }) // stream.on('push', headers => { // // TODO(HTTP/2): Suppor push // }) // stream.on('trailers', headers => { // // TODO(HTTP/2): Support trailers // }) return true function writeBodyH2 () { /* istanbul ignore else: assertion */ if (!body) { request.onRequestSent() } else if (util.isBuffer(body)) { assert(contentLength === body.byteLength, 'buffer body must have content length') stream.cork() stream.write(body) stream.uncork() stream.end() request.onBodySent(body) request.onRequestSent() } else if (util.isBlobLike(body)) { if (typeof body.stream === 'function') { writeIterable({ client, request, contentLength, h2stream: stream, expectsPayload, body: body.stream(), socket: client[kSocket], header: '' }) } else { writeBlob({ body, client, request, contentLength, expectsPayload, h2stream: stream, header: '', socket: client[kSocket] }) } } else if (util.isStream(body)) { writeStream({ body, client, request, contentLength, expectsPayload, socket: client[kSocket], h2stream: stream, header: '' }) } else if (util.isIterable(body)) { writeIterable({ body, client, request, contentLength, expectsPayload, header: '', h2stream: stream, socket: client[kSocket] }) } else { assert(false) } } } function writeStream ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { assert(contentLength !== 0 || client[kRunning] === 0, 'stream body cannot be pipelined') if (client[kHTTPConnVersion] === 'h2') { // For HTTP/2, is 
enough to pipe the stream const pipe = pipeline( body, h2stream, (err) => { if (err) { util.destroy(body, err) util.destroy(h2stream, err) } else { request.onRequestSent() } } ) pipe.on('data', onPipeData) pipe.once('end', () => { pipe.removeListener('data', onPipeData) util.destroy(pipe) }) function onPipeData (chunk) { request.onBodySent(chunk) } return } let finished = false const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) const onData = function (chunk) { if (finished) { return } try { if (!writer.write(chunk) && this.pause) { this.pause() } } catch (err) { util.destroy(this, err) } } const onDrain = function () { if (finished) { return } if (body.resume) { body.resume() } } const onAbort = function () { if (finished) { return } const err = new RequestAbortedError() queueMicrotask(() => onFinished(err)) } const onFinished = function (err) { if (finished) { return } finished = true assert(socket.destroyed || (socket[kWriting] && client[kRunning] <= 1)) socket .off('drain', onDrain) .off('error', onFinished) body .removeListener('data', onData) .removeListener('end', onFinished) .removeListener('error', onFinished) .removeListener('close', onAbort) if (!err) { try { writer.end() } catch (er) { err = er } } writer.destroy(err) if (err && (err.code !== 'UND_ERR_INFO' || err.message !== 'reset')) { util.destroy(body, err) } else { util.destroy(body) } } body .on('data', onData) .on('end', onFinished) .on('error', onFinished) .on('close', onAbort) if (body.resume) { body.resume() } socket .on('drain', onDrain) .on('error', onFinished) } async function writeBlob ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { assert(contentLength === body.size, 'blob body must have content length') const isH2 = client[kHTTPConnVersion] === 'h2' try { if (contentLength != null && contentLength !== body.size) { throw new RequestContentLengthMismatchError() } const buffer = Buffer.from(await body.arrayBuffer()) if (isH2) { h2stream.cork() h2stream.write(buffer) h2stream.uncork() } else { socket.cork() socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') socket.write(buffer) socket.uncork() } request.onBodySent(buffer) request.onRequestSent() if (!expectsPayload) { socket[kReset] = true } resume(client) } catch (err) { util.destroy(isH2 ? h2stream : socket, err) } } async function writeIterable ({ h2stream, body, client, request, socket, contentLength, header, expectsPayload }) { assert(contentLength !== 0 || client[kRunning] === 0, 'iterator body cannot be pipelined') let callback = null function onDrain () { if (callback) { const cb = callback callback = null cb() } } const waitForDrain = () => new Promise((resolve, reject) => { assert(callback === null) if (socket[kError]) { reject(socket[kError]) } else { callback = resolve } }) if (client[kHTTPConnVersion] === 'h2') { h2stream .on('close', onDrain) .on('drain', onDrain) try { // It's up to the user to somehow abort the async iterable. 
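/*
 * For illustration, the backpressure handshake these writers rely on, reduced
 * to its core (a generic sketch, not tied to this client's bookkeeping):
 *
 *   async function pump (writable, iterable) {
 *     for await (const chunk of iterable) {
 *       if (!writable.write(chunk)) {
 *         // write() returned false: the internal buffer is full, so wait for
 *         // a single 'drain' event before producing more data.
 *         await new Promise(resolve => writable.once('drain', resolve))
 *       }
 *     }
 *     writable.end()
 *   }
 *
 * The real waitForDrain additionally rejects when the socket already holds an
 * error, so a broken connection cannot stall the iterator forever.
 */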
for await (const chunk of body) { if (socket[kError]) { throw socket[kError] } const res = h2stream.write(chunk) request.onBodySent(chunk) if (!res) { await waitForDrain() } } } catch (err) { h2stream.destroy(err) } finally { request.onRequestSent() h2stream.end() h2stream .off('close', onDrain) .off('drain', onDrain) } return } socket .on('close', onDrain) .on('drain', onDrain) const writer = new AsyncWriter({ socket, request, contentLength, client, expectsPayload, header }) try { // It's up to the user to somehow abort the async iterable. for await (const chunk of body) { if (socket[kError]) { throw socket[kError] } if (!writer.write(chunk)) { await waitForDrain() } } writer.end() } catch (err) { writer.destroy(err) } finally { socket .off('close', onDrain) .off('drain', onDrain) } } class AsyncWriter { constructor ({ socket, request, contentLength, client, expectsPayload, header }) { this.socket = socket this.request = request this.contentLength = contentLength this.client = client this.bytesWritten = 0 this.expectsPayload = expectsPayload this.header = header socket[kWriting] = true } write (chunk) { const { socket, request, contentLength, client, bytesWritten, expectsPayload, header } = this if (socket[kError]) { throw socket[kError] } if (socket.destroyed) { return false } const len = Buffer.byteLength(chunk) if (!len) { return true } // We should defer writing chunks. if (contentLength !== null && bytesWritten + len > contentLength) { if (client[kStrictContentLength]) { throw new RequestContentLengthMismatchError() } process.emitWarning(new RequestContentLengthMismatchError()) } socket.cork() if (bytesWritten === 0) { if (!expectsPayload) { socket[kReset] = true } if (contentLength === null) { socket.write(`${header}transfer-encoding: chunked\r\n`, 'latin1') } else { socket.write(`${header}content-length: ${contentLength}\r\n\r\n`, 'latin1') } } if (contentLength === null) { socket.write(`\r\n${len.toString(16)}\r\n`, 'latin1') } this.bytesWritten += len const ret = socket.write(chunk) socket.uncork() request.onBodySent(chunk) if (!ret) { if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { // istanbul ignore else: only for jest if (socket[kParser].timeout.refresh) { socket[kParser].timeout.refresh() } } } return ret } end () { const { socket, contentLength, client, bytesWritten, expectsPayload, header, request } = this request.onRequestSent() socket[kWriting] = false if (socket[kError]) { throw socket[kError] } if (socket.destroyed) { return } if (bytesWritten === 0) { if (expectsPayload) { // https://tools.ietf.org/html/rfc7230#section-3.3.2 // A user agent SHOULD send a Content-Length in a request message when // no Transfer-Encoding is sent and the request method defines a meaning // for an enclosed payload body. 
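/*
 * For illustration, the chunked framing AsyncWriter produces above when no
 * content-length is known: each chunk size is written in hex, and the CRLF
 * that opens a size line doubles as the terminator of whatever preceded it
 * (the header block before the first chunk, the previous chunk's data after
 * that). For two hypothetical chunks 'Hello' and 'Hi!' the socket sees, after
 * the request headers:
 *
 *   transfer-encoding: chunked\r\n
 *   \r\n5\r\nHello
 *   \r\n3\r\nHi!
 *   \r\n0\r\n\r\n   // written by end(): the zero-length chunk terminates the body
 *
 * i.e. the standard HTTP/1.1 chunked body "5\r\nHello\r\n3\r\nHi!\r\n0\r\n\r\n"
 * once the blank line ending the headers is accounted for. The hex sizes come
 * from len.toString(16), e.g. (255).toString(16) === 'ff'.
 */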
socket.write(`${header}content-length: 0\r\n\r\n`, 'latin1') } else { socket.write(`${header}\r\n`, 'latin1') } } else if (contentLength === null) { socket.write('\r\n0\r\n\r\n', 'latin1') } if (contentLength !== null && bytesWritten !== contentLength) { if (client[kStrictContentLength]) { throw new RequestContentLengthMismatchError() } else { process.emitWarning(new RequestContentLengthMismatchError()) } } if (socket[kParser].timeout && socket[kParser].timeoutType === TIMEOUT_HEADERS) { // istanbul ignore else: only for jest if (socket[kParser].timeout.refresh) { socket[kParser].timeout.refresh() } } resume(client) } destroy (err) { const { socket, client } = this socket[kWriting] = false if (err) { assert(client[kRunning] <= 1, 'pipeline should only contain this request') util.destroy(socket, err) } } } function errorRequest (client, request, err) { try { request.onError(err) assert(request.aborted) } catch (err) { client.emit('error', err) } } module.exports = Client /***/ }), /***/ 13194: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /* istanbul ignore file: only for Node 12 */ const { kConnected, kSize } = __nccwpck_require__(36443) class CompatWeakRef { constructor (value) { this.value = value } deref () { return this.value[kConnected] === 0 && this.value[kSize] === 0 ? undefined : this.value } } class CompatFinalizer { constructor (finalizer) { this.finalizer = finalizer } register (dispatcher, key) { if (dispatcher.on) { dispatcher.on('disconnect', () => { if (dispatcher[kConnected] === 0 && dispatcher[kSize] === 0) { this.finalizer(key) } }) } } } module.exports = function () { // FIXME: remove workaround when the Node bug is fixed // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 if (process.env.NODE_V8_COVERAGE) { return { WeakRef: CompatWeakRef, FinalizationRegistry: CompatFinalizer } } return { WeakRef: global.WeakRef || CompatWeakRef, FinalizationRegistry: global.FinalizationRegistry || CompatFinalizer } } /***/ }), /***/ 19237: /***/ ((module) => { "use strict"; // https://wicg.github.io/cookie-store/#cookie-maximum-attribute-value-size const maxAttributeValueSize = 1024 // https://wicg.github.io/cookie-store/#cookie-maximum-name-value-pair-size const maxNameValuePairSize = 4096 module.exports = { maxAttributeValueSize, maxNameValuePairSize } /***/ }), /***/ 53168: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { parseSetCookie } = __nccwpck_require__(8915) const { stringify } = __nccwpck_require__(3834) const { webidl } = __nccwpck_require__(74222) const { Headers } = __nccwpck_require__(26349) /** * @typedef {Object} Cookie * @property {string} name * @property {string} value * @property {Date|number|undefined} expires * @property {number|undefined} maxAge * @property {string|undefined} domain * @property {string|undefined} path * @property {boolean|undefined} secure * @property {boolean|undefined} httpOnly * @property {'Strict'|'Lax'|'None'} sameSite * @property {string[]} unparsed */ /** * @param {Headers} headers * @returns {Record} */ function getCookies (headers) { webidl.argumentLengthCheck(arguments, 1, { header: 'getCookies' }) webidl.brandCheck(headers, Headers, { strict: false }) const cookie = headers.get('cookie') const out = {} if (!cookie) { return out } for (const piece of cookie.split(';')) { const [name, ...value] = piece.split('=') out[name.trim()] = value.join('=') } return out } /** * @param {Headers} headers * @param {string} name * @param {{ path?: 
string, domain?: string }|undefined} attributes * @returns {void} */ function deleteCookie (headers, name, attributes) { webidl.argumentLengthCheck(arguments, 2, { header: 'deleteCookie' }) webidl.brandCheck(headers, Headers, { strict: false }) name = webidl.converters.DOMString(name) attributes = webidl.converters.DeleteCookieAttributes(attributes) // Matches behavior of // https://github.com/denoland/deno_std/blob/63827b16330b82489a04614027c33b7904e08be5/http/cookie.ts#L278 setCookie(headers, { name, value: '', expires: new Date(0), ...attributes }) } /** * @param {Headers} headers * @returns {Cookie[]} */ function getSetCookies (headers) { webidl.argumentLengthCheck(arguments, 1, { header: 'getSetCookies' }) webidl.brandCheck(headers, Headers, { strict: false }) const cookies = headers.getSetCookie() if (!cookies) { return [] } return cookies.map((pair) => parseSetCookie(pair)) } /** * @param {Headers} headers * @param {Cookie} cookie * @returns {void} */ function setCookie (headers, cookie) { webidl.argumentLengthCheck(arguments, 2, { header: 'setCookie' }) webidl.brandCheck(headers, Headers, { strict: false }) cookie = webidl.converters.Cookie(cookie) const str = stringify(cookie) if (str) { headers.append('Set-Cookie', stringify(cookie)) } } webidl.converters.DeleteCookieAttributes = webidl.dictionaryConverter([ { converter: webidl.nullableConverter(webidl.converters.DOMString), key: 'path', defaultValue: null }, { converter: webidl.nullableConverter(webidl.converters.DOMString), key: 'domain', defaultValue: null } ]) webidl.converters.Cookie = webidl.dictionaryConverter([ { converter: webidl.converters.DOMString, key: 'name' }, { converter: webidl.converters.DOMString, key: 'value' }, { converter: webidl.nullableConverter((value) => { if (typeof value === 'number') { return webidl.converters['unsigned long long'](value) } return new Date(value) }), key: 'expires', defaultValue: null }, { converter: webidl.nullableConverter(webidl.converters['long long']), key: 'maxAge', defaultValue: null }, { converter: webidl.nullableConverter(webidl.converters.DOMString), key: 'domain', defaultValue: null }, { converter: webidl.nullableConverter(webidl.converters.DOMString), key: 'path', defaultValue: null }, { converter: webidl.nullableConverter(webidl.converters.boolean), key: 'secure', defaultValue: null }, { converter: webidl.nullableConverter(webidl.converters.boolean), key: 'httpOnly', defaultValue: null }, { converter: webidl.converters.USVString, key: 'sameSite', allowedValues: ['Strict', 'Lax', 'None'] }, { converter: webidl.sequenceConverter(webidl.converters.DOMString), key: 'unparsed', defaultValue: [] } ]) module.exports = { getCookies, deleteCookie, getSetCookies, setCookie } /***/ }), /***/ 8915: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { maxNameValuePairSize, maxAttributeValueSize } = __nccwpck_require__(19237) const { isCTLExcludingHtab } = __nccwpck_require__(3834) const { collectASequenceOfCodePointsFast } = __nccwpck_require__(94322) const assert = __nccwpck_require__(42613) /** * @description Parses the field-value attributes of a set-cookie header string. * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 * @param {string} header * @returns if the header is invalid, null will be returned */ function parseSetCookie (header) { // 1. 
If the set-cookie-string contains a %x00-08 / %x0A-1F / %x7F // character (CTL characters excluding HTAB): Abort these steps and // ignore the set-cookie-string entirely. if (isCTLExcludingHtab(header)) { return null } let nameValuePair = '' let unparsedAttributes = '' let name = '' let value = '' // 2. If the set-cookie-string contains a %x3B (";") character: if (header.includes(';')) { // 1. The name-value-pair string consists of the characters up to, // but not including, the first %x3B (";"), and the unparsed- // attributes consist of the remainder of the set-cookie-string // (including the %x3B (";") in question). const position = { position: 0 } nameValuePair = collectASequenceOfCodePointsFast(';', header, position) unparsedAttributes = header.slice(position.position) } else { // Otherwise: // 1. The name-value-pair string consists of all the characters // contained in the set-cookie-string, and the unparsed- // attributes is the empty string. nameValuePair = header } // 3. If the name-value-pair string lacks a %x3D ("=") character, then // the name string is empty, and the value string is the value of // name-value-pair. if (!nameValuePair.includes('=')) { value = nameValuePair } else { // Otherwise, the name string consists of the characters up to, but // not including, the first %x3D ("=") character, and the (possibly // empty) value string consists of the characters after the first // %x3D ("=") character. const position = { position: 0 } name = collectASequenceOfCodePointsFast( '=', nameValuePair, position ) value = nameValuePair.slice(position.position + 1) } // 4. Remove any leading or trailing WSP characters from the name // string and the value string. name = name.trim() value = value.trim() // 5. If the sum of the lengths of the name string and the value string // is more than 4096 octets, abort these steps and ignore the set- // cookie-string entirely. if (name.length + value.length > maxNameValuePairSize) { return null } // 6. The cookie-name is the name string, and the cookie-value is the // value string. return { name, value, ...parseUnparsedAttributes(unparsedAttributes) } } /** * Parses the remaining attributes of a set-cookie header * @see https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4 * @param {string} unparsedAttributes * @param {[Object.]={}} cookieAttributeList */ function parseUnparsedAttributes (unparsedAttributes, cookieAttributeList = {}) { // 1. If the unparsed-attributes string is empty, skip the rest of // these steps. if (unparsedAttributes.length === 0) { return cookieAttributeList } // 2. Discard the first character of the unparsed-attributes (which // will be a %x3B (";") character). assert(unparsedAttributes[0] === ';') unparsedAttributes = unparsedAttributes.slice(1) let cookieAv = '' // 3. If the remaining unparsed-attributes contains a %x3B (";") // character: if (unparsedAttributes.includes(';')) { // 1. Consume the characters of the unparsed-attributes up to, but // not including, the first %x3B (";") character. cookieAv = collectASequenceOfCodePointsFast( ';', unparsedAttributes, { position: 0 } ) unparsedAttributes = unparsedAttributes.slice(cookieAv.length) } else { // Otherwise: // 1. Consume the remainder of the unparsed-attributes. cookieAv = unparsedAttributes unparsedAttributes = '' } // Let the cookie-av string be the characters consumed in this step. let attributeName = '' let attributeValue = '' // 4. If the cookie-av string contains a %x3D ("=") character: if (cookieAv.includes('=')) { // 1. 
The (possibly empty) attribute-name string consists of the // characters up to, but not including, the first %x3D ("=") // character, and the (possibly empty) attribute-value string // consists of the characters after the first %x3D ("=") // character. const position = { position: 0 } attributeName = collectASequenceOfCodePointsFast( '=', cookieAv, position ) attributeValue = cookieAv.slice(position.position + 1) } else { // Otherwise: // 1. The attribute-name string consists of the entire cookie-av // string, and the attribute-value string is empty. attributeName = cookieAv } // 5. Remove any leading or trailing WSP characters from the attribute- // name string and the attribute-value string. attributeName = attributeName.trim() attributeValue = attributeValue.trim() // 6. If the attribute-value is longer than 1024 octets, ignore the // cookie-av string and return to Step 1 of this algorithm. if (attributeValue.length > maxAttributeValueSize) { return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) } // 7. Process the attribute-name and attribute-value according to the // requirements in the following subsections. (Notice that // attributes with unrecognized attribute-names are ignored.) const attributeNameLowercase = attributeName.toLowerCase() // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.1 // If the attribute-name case-insensitively matches the string // "Expires", the user agent MUST process the cookie-av as follows. if (attributeNameLowercase === 'expires') { // 1. Let the expiry-time be the result of parsing the attribute-value // as cookie-date (see Section 5.1.1). const expiryTime = new Date(attributeValue) // 2. If the attribute-value failed to parse as a cookie date, ignore // the cookie-av. cookieAttributeList.expires = expiryTime } else if (attributeNameLowercase === 'max-age') { // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.2 // If the attribute-name case-insensitively matches the string "Max- // Age", the user agent MUST process the cookie-av as follows. // 1. If the first character of the attribute-value is not a DIGIT or a // "-" character, ignore the cookie-av. const charCode = attributeValue.charCodeAt(0) if ((charCode < 48 || charCode > 57) && attributeValue[0] !== '-') { return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) } // 2. If the remainder of attribute-value contains a non-DIGIT // character, ignore the cookie-av. if (!/^\d+$/.test(attributeValue)) { return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList) } // 3. Let delta-seconds be the attribute-value converted to an integer. const deltaSeconds = Number(attributeValue) // 4. Let cookie-age-limit be the maximum age of the cookie (which // SHOULD be 400 days or less, see Section 4.1.2.2). // 5. Set delta-seconds to the smaller of its present value and cookie- // age-limit. // deltaSeconds = Math.min(deltaSeconds * 1000, maxExpiresMs) // 6. If delta-seconds is less than or equal to zero (0), let expiry- // time be the earliest representable date and time. Otherwise, let // the expiry-time be the current date and time plus delta-seconds // seconds. // const expiryTime = deltaSeconds <= 0 ? Date.now() : Date.now() + deltaSeconds // 7. Append an attribute to the cookie-attribute-list with an // attribute-name of Max-Age and an attribute-value of expiry-time. 
cookieAttributeList.maxAge = deltaSeconds } else if (attributeNameLowercase === 'domain') { // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.3 // If the attribute-name case-insensitively matches the string "Domain", // the user agent MUST process the cookie-av as follows. // 1. Let cookie-domain be the attribute-value. let cookieDomain = attributeValue // 2. If cookie-domain starts with %x2E ("."), let cookie-domain be // cookie-domain without its leading %x2E ("."). if (cookieDomain[0] === '.') { cookieDomain = cookieDomain.slice(1) } // 3. Convert the cookie-domain to lower case. cookieDomain = cookieDomain.toLowerCase() // 4. Append an attribute to the cookie-attribute-list with an // attribute-name of Domain and an attribute-value of cookie-domain. cookieAttributeList.domain = cookieDomain } else if (attributeNameLowercase === 'path') { // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.4 // If the attribute-name case-insensitively matches the string "Path", // the user agent MUST process the cookie-av as follows. // 1. If the attribute-value is empty or if the first character of the // attribute-value is not %x2F ("/"): let cookiePath = '' if (attributeValue.length === 0 || attributeValue[0] !== '/') { // 1. Let cookie-path be the default-path. cookiePath = '/' } else { // Otherwise: // 1. Let cookie-path be the attribute-value. cookiePath = attributeValue } // 2. Append an attribute to the cookie-attribute-list with an // attribute-name of Path and an attribute-value of cookie-path. cookieAttributeList.path = cookiePath } else if (attributeNameLowercase === 'secure') { // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.5 // If the attribute-name case-insensitively matches the string "Secure", // the user agent MUST append an attribute to the cookie-attribute-list // with an attribute-name of Secure and an empty attribute-value. cookieAttributeList.secure = true } else if (attributeNameLowercase === 'httponly') { // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.6 // If the attribute-name case-insensitively matches the string // "HttpOnly", the user agent MUST append an attribute to the cookie- // attribute-list with an attribute-name of HttpOnly and an empty // attribute-value. cookieAttributeList.httpOnly = true } else if (attributeNameLowercase === 'samesite') { // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-rfc6265bis#section-5.4.7 // If the attribute-name case-insensitively matches the string // "SameSite", the user agent MUST process the cookie-av as follows: // 1. Let enforcement be "Default". let enforcement = 'Default' const attributeValueLowercase = attributeValue.toLowerCase() // 2. If cookie-av's attribute-value is a case-insensitive match for // "None", set enforcement to "None". if (attributeValueLowercase.includes('none')) { enforcement = 'None' } // 3. If cookie-av's attribute-value is a case-insensitive match for // "Strict", set enforcement to "Strict". if (attributeValueLowercase.includes('strict')) { enforcement = 'Strict' } // 4. If cookie-av's attribute-value is a case-insensitive match for // "Lax", set enforcement to "Lax". if (attributeValueLowercase.includes('lax')) { enforcement = 'Lax' } // 5. Append an attribute to the cookie-attribute-list with an // attribute-name of "SameSite" and an attribute-value of // enforcement. 
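/*
 * For illustration, what parseSetCookie above produces for a typical header
 * (hypothetical values):
 *
 *   parseSetCookie('sid=abc123; Max-Age=3600; Path=/; Secure; HttpOnly; SameSite=Lax')
 *   // => {
 *   //      name: 'sid',
 *   //      value: 'abc123',
 *   //      maxAge: 3600,   // raw delta-seconds; turning this into an expiry time is left to the caller
 *   //      path: '/',
 *   //      secure: true,
 *   //      httpOnly: true,
 *   //      sameSite: 'Lax'
 *   //    }
 *
 * Attributes with unrecognized names are collected as 'name=value' strings in
 * the `unparsed` list instead of being dropped.
 */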
    cookieAttributeList.sameSite = enforcement
  } else {
    cookieAttributeList.unparsed ??= []
    cookieAttributeList.unparsed.push(`${attributeName}=${attributeValue}`)
  }

  // 8. Return to Step 1 of this algorithm.
  return parseUnparsedAttributes(unparsedAttributes, cookieAttributeList)
}

module.exports = {
  parseSetCookie,
  parseUnparsedAttributes
}


/***/ }),

/***/ 3834:
/***/ ((module) => {

"use strict";

/**
 * @param {string} value
 * @returns {boolean}
 */
function isCTLExcludingHtab (value) {
  if (value.length === 0) {
    return false
  }

  for (const char of value) {
    const code = char.charCodeAt(0)

    if (
      (code >= 0x00 && code <= 0x08) ||
      (code >= 0x0A && code <= 0x1F) ||
      code === 0x7F
    ) {
      return true
    }
  }

  return false
}

/**
 CHAR           = <any US-ASCII character (octets 0 - 127)>
 token          = 1*<any CHAR except CTLs or separators>
 separators     = "(" | ")" | "<" | ">" | "@"
                | "," | ";" | ":" | "\" | <">
                | "/" | "[" | "]" | "?" | "="
                | "{" | "}" | SP | HT
 * @param {string} name
 */
function validateCookieName (name) {
  for (const char of name) {
    const code = char.charCodeAt(0)

    if (
      (code <= 0x20 || code > 0x7F) ||
      char === '(' ||
      char === ')' ||
      char === '>' ||
      char === '<' ||
      char === '@' ||
      char === ',' ||
      char === ';' ||
      char === ':' ||
      char === '\\' ||
      char === '"' ||
      char === '/' ||
      char === '[' ||
      char === ']' ||
      char === '?' ||
      char === '=' ||
      char === '{' ||
      char === '}'
    ) {
      throw new Error('Invalid cookie name')
    }
  }
}

/**
 cookie-value      = *cookie-octet / ( DQUOTE *cookie-octet DQUOTE )
 cookie-octet      = %x21 / %x23-2B / %x2D-3A / %x3C-5B / %x5D-7E
                       ; US-ASCII characters excluding CTLs,
                       ; whitespace DQUOTE, comma, semicolon,
                       ; and backslash
 * @param {string} value
 */
function validateCookieValue (value) {
  for (const char of value) {
    const code = char.charCodeAt(0)

    if (
      code < 0x21 || // exclude CTLs (0-31)
      code === 0x22 ||
      code === 0x2C ||
      code === 0x3B ||
      code === 0x5C ||
      code > 0x7E // non-ascii
    ) {
      throw new Error('Invalid header value')
    }
  }
}

/**
 * path-value = <any CHAR except CTLs or ";">
 * @param {string} path
 */
function validateCookiePath (path) {
  for (const char of path) {
    const code = char.charCodeAt(0)

    if (code < 0x21 || char === ';') {
      throw new Error('Invalid cookie path')
    }
  }
}

/**
 * I have no idea why these values aren't allowed to be honest,
 * but Deno tests these.
- Khafra * @param {string} domain */ function validateCookieDomain (domain) { if ( domain.startsWith('-') || domain.endsWith('.') || domain.endsWith('-') ) { throw new Error('Invalid cookie domain') } } /** * @see https://www.rfc-editor.org/rfc/rfc7231#section-7.1.1.1 * @param {number|Date} date IMF-fixdate = day-name "," SP date1 SP time-of-day SP GMT ; fixed length/zone/capitalization subset of the format ; see Section 3.3 of [RFC5322] day-name = %x4D.6F.6E ; "Mon", case-sensitive / %x54.75.65 ; "Tue", case-sensitive / %x57.65.64 ; "Wed", case-sensitive / %x54.68.75 ; "Thu", case-sensitive / %x46.72.69 ; "Fri", case-sensitive / %x53.61.74 ; "Sat", case-sensitive / %x53.75.6E ; "Sun", case-sensitive date1 = day SP month SP year ; e.g., 02 Jun 1982 day = 2DIGIT month = %x4A.61.6E ; "Jan", case-sensitive / %x46.65.62 ; "Feb", case-sensitive / %x4D.61.72 ; "Mar", case-sensitive / %x41.70.72 ; "Apr", case-sensitive / %x4D.61.79 ; "May", case-sensitive / %x4A.75.6E ; "Jun", case-sensitive / %x4A.75.6C ; "Jul", case-sensitive / %x41.75.67 ; "Aug", case-sensitive / %x53.65.70 ; "Sep", case-sensitive / %x4F.63.74 ; "Oct", case-sensitive / %x4E.6F.76 ; "Nov", case-sensitive / %x44.65.63 ; "Dec", case-sensitive year = 4DIGIT GMT = %x47.4D.54 ; "GMT", case-sensitive time-of-day = hour ":" minute ":" second ; 00:00:00 - 23:59:60 (leap second) hour = 2DIGIT minute = 2DIGIT second = 2DIGIT */ function toIMFDate (date) { if (typeof date === 'number') { date = new Date(date) } const days = [ 'Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat' ] const months = [ 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec' ] const dayName = days[date.getUTCDay()] const day = date.getUTCDate().toString().padStart(2, '0') const month = months[date.getUTCMonth()] const year = date.getUTCFullYear() const hour = date.getUTCHours().toString().padStart(2, '0') const minute = date.getUTCMinutes().toString().padStart(2, '0') const second = date.getUTCSeconds().toString().padStart(2, '0') return `${dayName}, ${day} ${month} ${year} ${hour}:${minute}:${second} GMT` } /** max-age-av = "Max-Age=" non-zero-digit *DIGIT ; In practice, both expires-av and max-age-av ; are limited to dates representable by the ; user agent. 
* @param {number} maxAge */ function validateCookieMaxAge (maxAge) { if (maxAge < 0) { throw new Error('Invalid cookie max-age') } } /** * @see https://www.rfc-editor.org/rfc/rfc6265#section-4.1.1 * @param {import('./index').Cookie} cookie */ function stringify (cookie) { if (cookie.name.length === 0) { return null } validateCookieName(cookie.name) validateCookieValue(cookie.value) const out = [`${cookie.name}=${cookie.value}`] // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.1 // https://datatracker.ietf.org/doc/html/draft-ietf-httpbis-cookie-prefixes-00#section-3.2 if (cookie.name.startsWith('__Secure-')) { cookie.secure = true } if (cookie.name.startsWith('__Host-')) { cookie.secure = true cookie.domain = null cookie.path = '/' } if (cookie.secure) { out.push('Secure') } if (cookie.httpOnly) { out.push('HttpOnly') } if (typeof cookie.maxAge === 'number') { validateCookieMaxAge(cookie.maxAge) out.push(`Max-Age=${cookie.maxAge}`) } if (cookie.domain) { validateCookieDomain(cookie.domain) out.push(`Domain=${cookie.domain}`) } if (cookie.path) { validateCookiePath(cookie.path) out.push(`Path=${cookie.path}`) } if (cookie.expires && cookie.expires.toString() !== 'Invalid Date') { out.push(`Expires=${toIMFDate(cookie.expires)}`) } if (cookie.sameSite) { out.push(`SameSite=${cookie.sameSite}`) } for (const part of cookie.unparsed) { if (!part.includes('=')) { throw new Error('Invalid unparsed') } const [key, ...value] = part.split('=') out.push(`${key.trim()}=${value.join('=')}`) } return out.join('; ') } module.exports = { isCTLExcludingHtab, validateCookieName, validateCookiePath, validateCookieValue, toIMFDate, stringify } /***/ }), /***/ 59136: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const net = __nccwpck_require__(69278) const assert = __nccwpck_require__(42613) const util = __nccwpck_require__(3440) const { InvalidArgumentError, ConnectTimeoutError } = __nccwpck_require__(68707) let tls // include tls conditionally since it is not always available // TODO: session re-use does not wait for the first // connection to resolve the session and might therefore // resolve the same servername multiple times even when // re-use is enabled. let SessionCache // FIXME: remove workaround when the Node bug is fixed // https://github.com/nodejs/node/issues/49344#issuecomment-1741776308 if (global.FinalizationRegistry && !process.env.NODE_V8_COVERAGE) { SessionCache = class WeakSessionCache { constructor (maxCachedSessions) { this._maxCachedSessions = maxCachedSessions this._sessionCache = new Map() this._sessionRegistry = new global.FinalizationRegistry((key) => { if (this._sessionCache.size < this._maxCachedSessions) { return } const ref = this._sessionCache.get(key) if (ref !== undefined && ref.deref() === undefined) { this._sessionCache.delete(key) } }) } get (sessionKey) { const ref = this._sessionCache.get(sessionKey) return ref ? 
ref.deref() : null } set (sessionKey, session) { if (this._maxCachedSessions === 0) { return } this._sessionCache.set(sessionKey, new WeakRef(session)) this._sessionRegistry.register(session, sessionKey) } } } else { SessionCache = class SimpleSessionCache { constructor (maxCachedSessions) { this._maxCachedSessions = maxCachedSessions this._sessionCache = new Map() } get (sessionKey) { return this._sessionCache.get(sessionKey) } set (sessionKey, session) { if (this._maxCachedSessions === 0) { return } if (this._sessionCache.size >= this._maxCachedSessions) { // remove the oldest session const { value: oldestKey } = this._sessionCache.keys().next() this._sessionCache.delete(oldestKey) } this._sessionCache.set(sessionKey, session) } } } function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, ...opts }) { if (maxCachedSessions != null && (!Number.isInteger(maxCachedSessions) || maxCachedSessions < 0)) { throw new InvalidArgumentError('maxCachedSessions must be a positive integer or zero') } const options = { path: socketPath, ...opts } const sessionCache = new SessionCache(maxCachedSessions == null ? 100 : maxCachedSessions) timeout = timeout == null ? 10e3 : timeout allowH2 = allowH2 != null ? allowH2 : false return function connect ({ hostname, host, protocol, port, servername, localAddress, httpSocket }, callback) { let socket if (protocol === 'https:') { if (!tls) { tls = __nccwpck_require__(64756) } servername = servername || options.servername || util.getServerName(host) || null const sessionKey = servername || hostname const session = sessionCache.get(sessionKey) || null assert(sessionKey) socket = tls.connect({ highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... ...options, servername, session, localAddress, // TODO(HTTP/2): Add support for h2c ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'], socket: httpSocket, // upgrade socket connection port: port || 443, host: hostname }) socket .on('session', function (session) { // TODO (fix): Can a session become invalid once established? Don't think so? sessionCache.set(sessionKey, session) }) } else { assert(!httpSocket, 'httpSocket can only be sent on TLS update') socket = net.connect({ highWaterMark: 64 * 1024, // Same as nodejs fs streams. ...options, localAddress, port: port || 80, host: hostname }) } // Set TCP keep alive options on the socket here instead of in connect() for the case of assigning the socket if (options.keepAlive == null || options.keepAlive) { const keepAliveInitialDelay = options.keepAliveInitialDelay === undefined ? 60e3 : options.keepAliveInitialDelay socket.setKeepAlive(true, keepAliveInitialDelay) } const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) socket .setNoDelay(true) .once(protocol === 'https:' ? 
'secureConnect' : 'connect', function () { cancelTimeout() if (callback) { const cb = callback callback = null cb(null, this) } }) .on('error', function (err) { cancelTimeout() if (callback) { const cb = callback callback = null cb(err) } }) return socket } } function setupTimeout (onConnectTimeout, timeout) { if (!timeout) { return () => {} } let s1 = null let s2 = null const timeoutId = setTimeout(() => { // setImmediate is added to make sure that we priotorise socket error events over timeouts s1 = setImmediate(() => { if (process.platform === 'win32') { // Windows needs an extra setImmediate probably due to implementation differences in the socket logic s2 = setImmediate(() => onConnectTimeout()) } else { onConnectTimeout() } }) }, timeout) return () => { clearTimeout(timeoutId) clearImmediate(s1) clearImmediate(s2) } } function onConnectTimeout (socket) { util.destroy(socket, new ConnectTimeoutError()) } module.exports = buildConnector /***/ }), /***/ 10735: /***/ ((module) => { "use strict"; /** @type {Record} */ const headerNameLowerCasedRecord = {} // https://developer.mozilla.org/docs/Web/HTTP/Headers const wellknownHeaderNames = [ 'Accept', 'Accept-Encoding', 'Accept-Language', 'Accept-Ranges', 'Access-Control-Allow-Credentials', 'Access-Control-Allow-Headers', 'Access-Control-Allow-Methods', 'Access-Control-Allow-Origin', 'Access-Control-Expose-Headers', 'Access-Control-Max-Age', 'Access-Control-Request-Headers', 'Access-Control-Request-Method', 'Age', 'Allow', 'Alt-Svc', 'Alt-Used', 'Authorization', 'Cache-Control', 'Clear-Site-Data', 'Connection', 'Content-Disposition', 'Content-Encoding', 'Content-Language', 'Content-Length', 'Content-Location', 'Content-Range', 'Content-Security-Policy', 'Content-Security-Policy-Report-Only', 'Content-Type', 'Cookie', 'Cross-Origin-Embedder-Policy', 'Cross-Origin-Opener-Policy', 'Cross-Origin-Resource-Policy', 'Date', 'Device-Memory', 'Downlink', 'ECT', 'ETag', 'Expect', 'Expect-CT', 'Expires', 'Forwarded', 'From', 'Host', 'If-Match', 'If-Modified-Since', 'If-None-Match', 'If-Range', 'If-Unmodified-Since', 'Keep-Alive', 'Last-Modified', 'Link', 'Location', 'Max-Forwards', 'Origin', 'Permissions-Policy', 'Pragma', 'Proxy-Authenticate', 'Proxy-Authorization', 'RTT', 'Range', 'Referer', 'Referrer-Policy', 'Refresh', 'Retry-After', 'Sec-WebSocket-Accept', 'Sec-WebSocket-Extensions', 'Sec-WebSocket-Key', 'Sec-WebSocket-Protocol', 'Sec-WebSocket-Version', 'Server', 'Server-Timing', 'Service-Worker-Allowed', 'Service-Worker-Navigation-Preload', 'Set-Cookie', 'SourceMap', 'Strict-Transport-Security', 'Supports-Loading-Mode', 'TE', 'Timing-Allow-Origin', 'Trailer', 'Transfer-Encoding', 'Upgrade', 'Upgrade-Insecure-Requests', 'User-Agent', 'Vary', 'Via', 'WWW-Authenticate', 'X-Content-Type-Options', 'X-DNS-Prefetch-Control', 'X-Frame-Options', 'X-Permitted-Cross-Domain-Policies', 'X-Powered-By', 'X-Requested-With', 'X-XSS-Protection' ] for (let i = 0; i < wellknownHeaderNames.length; ++i) { const key = wellknownHeaderNames[i] const lowerCasedKey = key.toLowerCase() headerNameLowerCasedRecord[key] = headerNameLowerCasedRecord[lowerCasedKey] = lowerCasedKey } // Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. 
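// Illustrative sketch, not part of undici: the record above is a lookup table so hot
// paths can skip String#toLowerCase() for well-known header names. Both the original
// casing and the lowercased form map to the lowercased form; unlisted names (e.g. the
// made-up 'X-Custom-Header') yield undefined and callers fall back to toLowerCase():
//   headerNameLowerCasedRecord['Content-Type']    // 'content-type'
//   headerNameLowerCasedRecord['content-type']    // 'content-type'
//   headerNameLowerCasedRecord['X-Custom-Header'] // undefined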
Object.setPrototypeOf(headerNameLowerCasedRecord, null) module.exports = { wellknownHeaderNames, headerNameLowerCasedRecord } /***/ }), /***/ 68707: /***/ ((module) => { "use strict"; class UndiciError extends Error { constructor (message) { super(message) this.name = 'UndiciError' this.code = 'UND_ERR' } } class ConnectTimeoutError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, ConnectTimeoutError) this.name = 'ConnectTimeoutError' this.message = message || 'Connect Timeout Error' this.code = 'UND_ERR_CONNECT_TIMEOUT' } } class HeadersTimeoutError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, HeadersTimeoutError) this.name = 'HeadersTimeoutError' this.message = message || 'Headers Timeout Error' this.code = 'UND_ERR_HEADERS_TIMEOUT' } } class HeadersOverflowError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, HeadersOverflowError) this.name = 'HeadersOverflowError' this.message = message || 'Headers Overflow Error' this.code = 'UND_ERR_HEADERS_OVERFLOW' } } class BodyTimeoutError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, BodyTimeoutError) this.name = 'BodyTimeoutError' this.message = message || 'Body Timeout Error' this.code = 'UND_ERR_BODY_TIMEOUT' } } class ResponseStatusCodeError extends UndiciError { constructor (message, statusCode, headers, body) { super(message) Error.captureStackTrace(this, ResponseStatusCodeError) this.name = 'ResponseStatusCodeError' this.message = message || 'Response Status Code Error' this.code = 'UND_ERR_RESPONSE_STATUS_CODE' this.body = body this.status = statusCode this.statusCode = statusCode this.headers = headers } } class InvalidArgumentError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, InvalidArgumentError) this.name = 'InvalidArgumentError' this.message = message || 'Invalid Argument Error' this.code = 'UND_ERR_INVALID_ARG' } } class InvalidReturnValueError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, InvalidReturnValueError) this.name = 'InvalidReturnValueError' this.message = message || 'Invalid Return Value Error' this.code = 'UND_ERR_INVALID_RETURN_VALUE' } } class RequestAbortedError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, RequestAbortedError) this.name = 'AbortError' this.message = message || 'Request aborted' this.code = 'UND_ERR_ABORTED' } } class InformationalError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, InformationalError) this.name = 'InformationalError' this.message = message || 'Request information' this.code = 'UND_ERR_INFO' } } class RequestContentLengthMismatchError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, RequestContentLengthMismatchError) this.name = 'RequestContentLengthMismatchError' this.message = message || 'Request body length does not match content-length header' this.code = 'UND_ERR_REQ_CONTENT_LENGTH_MISMATCH' } } class ResponseContentLengthMismatchError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, ResponseContentLengthMismatchError) this.name = 'ResponseContentLengthMismatchError' this.message = message || 'Response body length does not match content-length header' this.code = 'UND_ERR_RES_CONTENT_LENGTH_MISMATCH' } } class ClientDestroyedError extends 
UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, ClientDestroyedError) this.name = 'ClientDestroyedError' this.message = message || 'The client is destroyed' this.code = 'UND_ERR_DESTROYED' } } class ClientClosedError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, ClientClosedError) this.name = 'ClientClosedError' this.message = message || 'The client is closed' this.code = 'UND_ERR_CLOSED' } } class SocketError extends UndiciError { constructor (message, socket) { super(message) Error.captureStackTrace(this, SocketError) this.name = 'SocketError' this.message = message || 'Socket error' this.code = 'UND_ERR_SOCKET' this.socket = socket } } class NotSupportedError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, NotSupportedError) this.name = 'NotSupportedError' this.message = message || 'Not supported error' this.code = 'UND_ERR_NOT_SUPPORTED' } } class BalancedPoolMissingUpstreamError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, NotSupportedError) this.name = 'MissingUpstreamError' this.message = message || 'No upstream has been added to the BalancedPool' this.code = 'UND_ERR_BPL_MISSING_UPSTREAM' } } class HTTPParserError extends Error { constructor (message, code, data) { super(message) Error.captureStackTrace(this, HTTPParserError) this.name = 'HTTPParserError' this.code = code ? `HPE_${code}` : undefined this.data = data ? data.toString() : undefined } } class ResponseExceededMaxSizeError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, ResponseExceededMaxSizeError) this.name = 'ResponseExceededMaxSizeError' this.message = message || 'Response content exceeded max size' this.code = 'UND_ERR_RES_EXCEEDED_MAX_SIZE' } } class RequestRetryError extends UndiciError { constructor (message, code, { headers, data }) { super(message) Error.captureStackTrace(this, RequestRetryError) this.name = 'RequestRetryError' this.message = message || 'Request retry error' this.code = 'UND_ERR_REQ_RETRY' this.statusCode = code this.data = data this.headers = headers } } module.exports = { HTTPParserError, UndiciError, HeadersTimeoutError, HeadersOverflowError, BodyTimeoutError, RequestContentLengthMismatchError, ConnectTimeoutError, ResponseStatusCodeError, InvalidArgumentError, InvalidReturnValueError, RequestAbortedError, ClientDestroyedError, ClientClosedError, InformationalError, SocketError, NotSupportedError, ResponseContentLengthMismatchError, BalancedPoolMissingUpstreamError, ResponseExceededMaxSizeError, RequestRetryError } /***/ }), /***/ 44655: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { InvalidArgumentError, NotSupportedError } = __nccwpck_require__(68707) const assert = __nccwpck_require__(42613) const { kHTTP2BuildRequest, kHTTP2CopyHeaders, kHTTP1BuildRequest } = __nccwpck_require__(36443) const util = __nccwpck_require__(3440) // tokenRegExp and headerCharRegex have been lifted from // https://github.com/nodejs/node/blob/main/lib/_http_common.js /** * Verifies that the given val is a valid HTTP token * per the rules defined in RFC 7230 * See https://tools.ietf.org/html/rfc7230#section-3.2.6 */ const tokenRegExp = /^[\^_`a-zA-Z\-0-9!#$%&'*+.|~]+$/ /** * Matches if val contains an invalid field-vchar * field-value = *( field-content / obs-fold ) * field-content = field-vchar [ 1*( SP / HTAB ) field-vchar ] * field-vchar = VCHAR / obs-text */ 
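// In practice the regex below matches any value byte outside HTAB (0x09), the visible
// ASCII range 0x20-0x7E, or obs-text (0x80-0xFF); a match causes the header value to be
// rejected with an InvalidArgumentError in processHeader/processHeaderValue further down.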
const headerCharRegex = /[^\t\x20-\x7e\x80-\xff]/ // Verifies that a given path is valid does not contain control chars \x00 to \x20 const invalidPathRegex = /[^\u0021-\u00ff]/ const kHandler = Symbol('handler') const channels = {} let extractBody try { const diagnosticsChannel = __nccwpck_require__(31637) channels.create = diagnosticsChannel.channel('undici:request:create') channels.bodySent = diagnosticsChannel.channel('undici:request:bodySent') channels.headers = diagnosticsChannel.channel('undici:request:headers') channels.trailers = diagnosticsChannel.channel('undici:request:trailers') channels.error = diagnosticsChannel.channel('undici:request:error') } catch { channels.create = { hasSubscribers: false } channels.bodySent = { hasSubscribers: false } channels.headers = { hasSubscribers: false } channels.trailers = { hasSubscribers: false } channels.error = { hasSubscribers: false } } class Request { constructor (origin, { path, method, body, headers, query, idempotent, blocking, upgrade, headersTimeout, bodyTimeout, reset, throwOnError, expectContinue }, handler) { if (typeof path !== 'string') { throw new InvalidArgumentError('path must be a string') } else if ( path[0] !== '/' && !(path.startsWith('http://') || path.startsWith('https://')) && method !== 'CONNECT' ) { throw new InvalidArgumentError('path must be an absolute URL or start with a slash') } else if (invalidPathRegex.exec(path) !== null) { throw new InvalidArgumentError('invalid request path') } if (typeof method !== 'string') { throw new InvalidArgumentError('method must be a string') } else if (tokenRegExp.exec(method) === null) { throw new InvalidArgumentError('invalid request method') } if (upgrade && typeof upgrade !== 'string') { throw new InvalidArgumentError('upgrade must be a string') } if (headersTimeout != null && (!Number.isFinite(headersTimeout) || headersTimeout < 0)) { throw new InvalidArgumentError('invalid headersTimeout') } if (bodyTimeout != null && (!Number.isFinite(bodyTimeout) || bodyTimeout < 0)) { throw new InvalidArgumentError('invalid bodyTimeout') } if (reset != null && typeof reset !== 'boolean') { throw new InvalidArgumentError('invalid reset') } if (expectContinue != null && typeof expectContinue !== 'boolean') { throw new InvalidArgumentError('invalid expectContinue') } this.headersTimeout = headersTimeout this.bodyTimeout = bodyTimeout this.throwOnError = throwOnError === true this.method = method this.abort = null if (body == null) { this.body = null } else if (util.isStream(body)) { this.body = body const rState = this.body._readableState if (!rState || !rState.autoDestroy) { this.endHandler = function autoDestroy () { util.destroy(this) } this.body.on('end', this.endHandler) } this.errorHandler = err => { if (this.abort) { this.abort(err) } else { this.error = err } } this.body.on('error', this.errorHandler) } else if (util.isBuffer(body)) { this.body = body.byteLength ? body : null } else if (ArrayBuffer.isView(body)) { this.body = body.buffer.byteLength ? Buffer.from(body.buffer, body.byteOffset, body.byteLength) : null } else if (body instanceof ArrayBuffer) { this.body = body.byteLength ? Buffer.from(body) : null } else if (typeof body === 'string') { this.body = body.length ? 
Buffer.from(body) : null } else if (util.isFormDataLike(body) || util.isIterable(body) || util.isBlobLike(body)) { this.body = body } else { throw new InvalidArgumentError('body must be a string, a Buffer, a Readable stream, an iterable, or an async iterable') } this.completed = false this.aborted = false this.upgrade = upgrade || null this.path = query ? util.buildURL(path, query) : path this.origin = origin this.idempotent = idempotent == null ? method === 'HEAD' || method === 'GET' : idempotent this.blocking = blocking == null ? false : blocking this.reset = reset == null ? null : reset this.host = null this.contentLength = null this.contentType = null this.headers = '' // Only for H2 this.expectContinue = expectContinue != null ? expectContinue : false if (Array.isArray(headers)) { if (headers.length % 2 !== 0) { throw new InvalidArgumentError('headers array must be even') } for (let i = 0; i < headers.length; i += 2) { processHeader(this, headers[i], headers[i + 1]) } } else if (headers && typeof headers === 'object') { const keys = Object.keys(headers) for (let i = 0; i < keys.length; i++) { const key = keys[i] processHeader(this, key, headers[key]) } } else if (headers != null) { throw new InvalidArgumentError('headers must be an object or an array') } if (util.isFormDataLike(this.body)) { if (util.nodeMajor < 16 || (util.nodeMajor === 16 && util.nodeMinor < 8)) { throw new InvalidArgumentError('Form-Data bodies are only supported in node v16.8 and newer.') } if (!extractBody) { extractBody = (__nccwpck_require__(8923).extractBody) } const [bodyStream, contentType] = extractBody(body) if (this.contentType == null) { this.contentType = contentType this.headers += `content-type: ${contentType}\r\n` } this.body = bodyStream.stream this.contentLength = bodyStream.length } else if (util.isBlobLike(body) && this.contentType == null && body.type) { this.contentType = body.type this.headers += `content-type: ${body.type}\r\n` } util.validateHandler(handler, method, upgrade) this.servername = util.getServerName(this.host) this[kHandler] = handler if (channels.create.hasSubscribers) { channels.create.publish({ request: this }) } } onBodySent (chunk) { if (this[kHandler].onBodySent) { try { return this[kHandler].onBodySent(chunk) } catch (err) { this.abort(err) } } } onRequestSent () { if (channels.bodySent.hasSubscribers) { channels.bodySent.publish({ request: this }) } if (this[kHandler].onRequestSent) { try { return this[kHandler].onRequestSent() } catch (err) { this.abort(err) } } } onConnect (abort) { assert(!this.aborted) assert(!this.completed) if (this.error) { abort(this.error) } else { this.abort = abort return this[kHandler].onConnect(abort) } } onHeaders (statusCode, headers, resume, statusText) { assert(!this.aborted) assert(!this.completed) if (channels.headers.hasSubscribers) { channels.headers.publish({ request: this, response: { statusCode, headers, statusText } }) } try { return this[kHandler].onHeaders(statusCode, headers, resume, statusText) } catch (err) { this.abort(err) } } onData (chunk) { assert(!this.aborted) assert(!this.completed) try { return this[kHandler].onData(chunk) } catch (err) { this.abort(err) return false } } onUpgrade (statusCode, headers, socket) { assert(!this.aborted) assert(!this.completed) return this[kHandler].onUpgrade(statusCode, headers, socket) } onComplete (trailers) { this.onFinally() assert(!this.aborted) this.completed = true if (channels.trailers.hasSubscribers) { channels.trailers.publish({ request: this, trailers }) } try { return 
this[kHandler].onComplete(trailers) } catch (err) { // TODO (fix): This might be a bad idea? this.onError(err) } } onError (error) { this.onFinally() if (channels.error.hasSubscribers) { channels.error.publish({ request: this, error }) } if (this.aborted) { return } this.aborted = true return this[kHandler].onError(error) } onFinally () { if (this.errorHandler) { this.body.off('error', this.errorHandler) this.errorHandler = null } if (this.endHandler) { this.body.off('end', this.endHandler) this.endHandler = null } } // TODO: adjust to support H2 addHeader (key, value) { processHeader(this, key, value) return this } static [kHTTP1BuildRequest] (origin, opts, handler) { // TODO: Migrate header parsing here, to make Requests // HTTP agnostic return new Request(origin, opts, handler) } static [kHTTP2BuildRequest] (origin, opts, handler) { const headers = opts.headers opts = { ...opts, headers: null } const request = new Request(origin, opts, handler) request.headers = {} if (Array.isArray(headers)) { if (headers.length % 2 !== 0) { throw new InvalidArgumentError('headers array must be even') } for (let i = 0; i < headers.length; i += 2) { processHeader(request, headers[i], headers[i + 1], true) } } else if (headers && typeof headers === 'object') { const keys = Object.keys(headers) for (let i = 0; i < keys.length; i++) { const key = keys[i] processHeader(request, key, headers[key], true) } } else if (headers != null) { throw new InvalidArgumentError('headers must be an object or an array') } return request } static [kHTTP2CopyHeaders] (raw) { const rawHeaders = raw.split('\r\n') const headers = {} for (const header of rawHeaders) { const [key, value] = header.split(': ') if (value == null || value.length === 0) continue if (headers[key]) headers[key] += `,${value}` else headers[key] = value } return headers } } function processHeaderValue (key, val, skipAppend) { if (val && typeof val === 'object') { throw new InvalidArgumentError(`invalid ${key} header`) } val = val != null ? `${val}` : '' if (headerCharRegex.exec(val) !== null) { throw new InvalidArgumentError(`invalid ${key} header`) } return skipAppend ? val : `${key}: ${val}\r\n` } function processHeader (request, key, val, skipAppend = false) { if (val && (typeof val === 'object' && !Array.isArray(val))) { throw new InvalidArgumentError(`invalid ${key} header`) } else if (val === undefined) { return } if ( request.host === null && key.length === 4 && key.toLowerCase() === 'host' ) { if (headerCharRegex.exec(val) !== null) { throw new InvalidArgumentError(`invalid ${key} header`) } // Consumed by Client request.host = val } else if ( request.contentLength === null && key.length === 14 && key.toLowerCase() === 'content-length' ) { request.contentLength = parseInt(val, 10) if (!Number.isFinite(request.contentLength)) { throw new InvalidArgumentError('invalid content-length header') } } else if ( request.contentType === null && key.length === 12 && key.toLowerCase() === 'content-type' ) { request.contentType = val if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) else request.headers += processHeaderValue(key, val) } else if ( key.length === 17 && key.toLowerCase() === 'transfer-encoding' ) { throw new InvalidArgumentError('invalid transfer-encoding header') } else if ( key.length === 10 && key.toLowerCase() === 'connection' ) { const value = typeof val === 'string' ? 
val.toLowerCase() : null if (value !== 'close' && value !== 'keep-alive') { throw new InvalidArgumentError('invalid connection header') } else if (value === 'close') { request.reset = true } } else if ( key.length === 10 && key.toLowerCase() === 'keep-alive' ) { throw new InvalidArgumentError('invalid keep-alive header') } else if ( key.length === 7 && key.toLowerCase() === 'upgrade' ) { throw new InvalidArgumentError('invalid upgrade header') } else if ( key.length === 6 && key.toLowerCase() === 'expect' ) { throw new NotSupportedError('expect header not supported') } else if (tokenRegExp.exec(key) === null) { throw new InvalidArgumentError('invalid header key') } else { if (Array.isArray(val)) { for (let i = 0; i < val.length; i++) { if (skipAppend) { if (request.headers[key]) request.headers[key] += `,${processHeaderValue(key, val[i], skipAppend)}` else request.headers[key] = processHeaderValue(key, val[i], skipAppend) } else { request.headers += processHeaderValue(key, val[i]) } } } else { if (skipAppend) request.headers[key] = processHeaderValue(key, val, skipAppend) else request.headers += processHeaderValue(key, val) } } } module.exports = Request /***/ }), /***/ 36443: /***/ ((module) => { module.exports = { kClose: Symbol('close'), kDestroy: Symbol('destroy'), kDispatch: Symbol('dispatch'), kUrl: Symbol('url'), kWriting: Symbol('writing'), kResuming: Symbol('resuming'), kQueue: Symbol('queue'), kConnect: Symbol('connect'), kConnecting: Symbol('connecting'), kHeadersList: Symbol('headers list'), kKeepAliveDefaultTimeout: Symbol('default keep alive timeout'), kKeepAliveMaxTimeout: Symbol('max keep alive timeout'), kKeepAliveTimeoutThreshold: Symbol('keep alive timeout threshold'), kKeepAliveTimeoutValue: Symbol('keep alive timeout'), kKeepAlive: Symbol('keep alive'), kHeadersTimeout: Symbol('headers timeout'), kBodyTimeout: Symbol('body timeout'), kServerName: Symbol('server name'), kLocalAddress: Symbol('local address'), kHost: Symbol('host'), kNoRef: Symbol('no ref'), kBodyUsed: Symbol('used'), kRunning: Symbol('running'), kBlocking: Symbol('blocking'), kPending: Symbol('pending'), kSize: Symbol('size'), kBusy: Symbol('busy'), kQueued: Symbol('queued'), kFree: Symbol('free'), kConnected: Symbol('connected'), kClosed: Symbol('closed'), kNeedDrain: Symbol('need drain'), kReset: Symbol('reset'), kDestroyed: Symbol.for('nodejs.stream.destroyed'), kMaxHeadersSize: Symbol('max headers size'), kRunningIdx: Symbol('running index'), kPendingIdx: Symbol('pending index'), kError: Symbol('error'), kClients: Symbol('clients'), kClient: Symbol('client'), kParser: Symbol('parser'), kOnDestroyed: Symbol('destroy callbacks'), kPipelining: Symbol('pipelining'), kSocket: Symbol('socket'), kHostHeader: Symbol('host header'), kConnector: Symbol('connector'), kStrictContentLength: Symbol('strict content length'), kMaxRedirections: Symbol('maxRedirections'), kMaxRequests: Symbol('maxRequestsPerClient'), kProxy: Symbol('proxy agent options'), kCounter: Symbol('socket request counter'), kInterceptors: Symbol('dispatch interceptors'), kMaxResponseSize: Symbol('max response size'), kHTTP2Session: Symbol('http2Session'), kHTTP2SessionState: Symbol('http2Session state'), kHTTP2BuildRequest: Symbol('http2 build request'), kHTTP1BuildRequest: Symbol('http1 build request'), kHTTP2CopyHeaders: Symbol('http2 copy headers'), kHTTPConnVersion: Symbol('http connection version'), kRetryHandlerDefaultRetry: Symbol('retry agent default retry'), kConstruct: Symbol('constructable') } /***/ }), /***/ 3440: /***/ ((module, 
__unused_webpack_exports, __nccwpck_require__) => { "use strict"; const assert = __nccwpck_require__(42613) const { kDestroyed, kBodyUsed } = __nccwpck_require__(36443) const { IncomingMessage } = __nccwpck_require__(58611) const stream = __nccwpck_require__(2203) const net = __nccwpck_require__(69278) const { InvalidArgumentError } = __nccwpck_require__(68707) const { Blob } = __nccwpck_require__(20181) const nodeUtil = __nccwpck_require__(39023) const { stringify } = __nccwpck_require__(83480) const { headerNameLowerCasedRecord } = __nccwpck_require__(10735) const [nodeMajor, nodeMinor] = process.versions.node.split('.').map(v => Number(v)) function nop () {} function isStream (obj) { return obj && typeof obj === 'object' && typeof obj.pipe === 'function' && typeof obj.on === 'function' } // based on https://github.com/node-fetch/fetch-blob/blob/8ab587d34080de94140b54f07168451e7d0b655e/index.js#L229-L241 (MIT License) function isBlobLike (object) { return (Blob && object instanceof Blob) || ( object && typeof object === 'object' && (typeof object.stream === 'function' || typeof object.arrayBuffer === 'function') && /^(Blob|File)$/.test(object[Symbol.toStringTag]) ) } function buildURL (url, queryParams) { if (url.includes('?') || url.includes('#')) { throw new Error('Query params cannot be passed when url already contains "?" or "#".') } const stringified = stringify(queryParams) if (stringified) { url += '?' + stringified } return url } function parseURL (url) { if (typeof url === 'string') { url = new URL(url) if (!/^https?:/.test(url.origin || url.protocol)) { throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') } return url } if (!url || typeof url !== 'object') { throw new InvalidArgumentError('Invalid URL: The URL argument must be a non-null object.') } if (!/^https?:/.test(url.origin || url.protocol)) { throw new InvalidArgumentError('Invalid URL protocol: the URL must start with `http:` or `https:`.') } if (!(url instanceof URL)) { if (url.port != null && url.port !== '' && !Number.isFinite(parseInt(url.port))) { throw new InvalidArgumentError('Invalid URL: port must be a valid integer or a string representation of an integer.') } if (url.path != null && typeof url.path !== 'string') { throw new InvalidArgumentError('Invalid URL path: the path must be a string or null/undefined.') } if (url.pathname != null && typeof url.pathname !== 'string') { throw new InvalidArgumentError('Invalid URL pathname: the pathname must be a string or null/undefined.') } if (url.hostname != null && typeof url.hostname !== 'string') { throw new InvalidArgumentError('Invalid URL hostname: the hostname must be a string or null/undefined.') } if (url.origin != null && typeof url.origin !== 'string') { throw new InvalidArgumentError('Invalid URL origin: the origin must be a string or null/undefined.') } const port = url.port != null ? url.port : (url.protocol === 'https:' ? 443 : 80) let origin = url.origin != null ? url.origin : `${url.protocol}//${url.hostname}:${port}` let path = url.path != null ? url.path : `${url.pathname || ''}${url.search || ''}` if (origin.endsWith('/')) { origin = origin.substring(0, origin.length - 1) } if (path && !path.startsWith('/')) { path = `/${path}` } // new URL(path, origin) is unsafe when `path` contains an absolute URL // From https://developer.mozilla.org/en-US/docs/Web/API/URL/URL: // If first parameter is a relative URL, second param is required, and will be used as the base URL. 
// If first parameter is an absolute URL, a given second param will be ignored. url = new URL(origin + path) } return url } function parseOrigin (url) { url = parseURL(url) if (url.pathname !== '/' || url.search || url.hash) { throw new InvalidArgumentError('invalid url') } return url } function getHostname (host) { if (host[0] === '[') { const idx = host.indexOf(']') assert(idx !== -1) return host.substring(1, idx) } const idx = host.indexOf(':') if (idx === -1) return host return host.substring(0, idx) } // IP addresses are not valid server names per RFC6066 // > Currently, the only server names supported are DNS hostnames function getServerName (host) { if (!host) { return null } assert.strictEqual(typeof host, 'string') const servername = getHostname(host) if (net.isIP(servername)) { return '' } return servername } function deepClone (obj) { return JSON.parse(JSON.stringify(obj)) } function isAsyncIterable (obj) { return !!(obj != null && typeof obj[Symbol.asyncIterator] === 'function') } function isIterable (obj) { return !!(obj != null && (typeof obj[Symbol.iterator] === 'function' || typeof obj[Symbol.asyncIterator] === 'function')) } function bodyLength (body) { if (body == null) { return 0 } else if (isStream(body)) { const state = body._readableState return state && state.objectMode === false && state.ended === true && Number.isFinite(state.length) ? state.length : null } else if (isBlobLike(body)) { return body.size != null ? body.size : null } else if (isBuffer(body)) { return body.byteLength } return null } function isDestroyed (stream) { return !stream || !!(stream.destroyed || stream[kDestroyed]) } function isReadableAborted (stream) { const state = stream && stream._readableState return isDestroyed(stream) && state && !state.endEmitted } function destroy (stream, err) { if (stream == null || !isStream(stream) || isDestroyed(stream)) { return } if (typeof stream.destroy === 'function') { if (Object.getPrototypeOf(stream).constructor === IncomingMessage) { // See: https://github.com/nodejs/node/pull/38505/files stream.socket = null } stream.destroy(err) } else if (err) { process.nextTick((stream, err) => { stream.emit('error', err) }, stream, err) } if (stream.destroyed !== true) { stream[kDestroyed] = true } } const KEEPALIVE_TIMEOUT_EXPR = /timeout=(\d+)/ function parseKeepAliveTimeout (val) { const m = val.toString().match(KEEPALIVE_TIMEOUT_EXPR) return m ? parseInt(m[1], 10) * 1000 : null } /** * Retrieves a header name and returns its lowercase value. 
* @param {string | Buffer} value Header name * @returns {string} */ function headerNameToString (value) { return headerNameLowerCasedRecord[value] || value.toLowerCase() } function parseHeaders (headers, obj = {}) { // For H2 support if (!Array.isArray(headers)) return headers for (let i = 0; i < headers.length; i += 2) { const key = headers[i].toString().toLowerCase() let val = obj[key] if (!val) { if (Array.isArray(headers[i + 1])) { obj[key] = headers[i + 1].map(x => x.toString('utf8')) } else { obj[key] = headers[i + 1].toString('utf8') } } else { if (!Array.isArray(val)) { val = [val] obj[key] = val } val.push(headers[i + 1].toString('utf8')) } } // See https://github.com/nodejs/node/pull/46528 if ('content-length' in obj && 'content-disposition' in obj) { obj['content-disposition'] = Buffer.from(obj['content-disposition']).toString('latin1') } return obj } function parseRawHeaders (headers) { const ret = [] let hasContentLength = false let contentDispositionIdx = -1 for (let n = 0; n < headers.length; n += 2) { const key = headers[n + 0].toString() const val = headers[n + 1].toString('utf8') if (key.length === 14 && (key === 'content-length' || key.toLowerCase() === 'content-length')) { ret.push(key, val) hasContentLength = true } else if (key.length === 19 && (key === 'content-disposition' || key.toLowerCase() === 'content-disposition')) { contentDispositionIdx = ret.push(key, val) - 1 } else { ret.push(key, val) } } // See https://github.com/nodejs/node/pull/46528 if (hasContentLength && contentDispositionIdx !== -1) { ret[contentDispositionIdx] = Buffer.from(ret[contentDispositionIdx]).toString('latin1') } return ret } function isBuffer (buffer) { // See, https://github.com/mcollina/undici/pull/319 return buffer instanceof Uint8Array || Buffer.isBuffer(buffer) } function validateHandler (handler, method, upgrade) { if (!handler || typeof handler !== 'object') { throw new InvalidArgumentError('handler must be an object') } if (typeof handler.onConnect !== 'function') { throw new InvalidArgumentError('invalid onConnect method') } if (typeof handler.onError !== 'function') { throw new InvalidArgumentError('invalid onError method') } if (typeof handler.onBodySent !== 'function' && handler.onBodySent !== undefined) { throw new InvalidArgumentError('invalid onBodySent method') } if (upgrade || method === 'CONNECT') { if (typeof handler.onUpgrade !== 'function') { throw new InvalidArgumentError('invalid onUpgrade method') } } else { if (typeof handler.onHeaders !== 'function') { throw new InvalidArgumentError('invalid onHeaders method') } if (typeof handler.onData !== 'function') { throw new InvalidArgumentError('invalid onData method') } if (typeof handler.onComplete !== 'function') { throw new InvalidArgumentError('invalid onComplete method') } } } // A body is disturbed if it has been read from and it cannot // be re-used without losing state or data. function isDisturbed (body) { return !!(body && ( stream.isDisturbed ? stream.isDisturbed(body) || body[kBodyUsed] // TODO (fix): Why is body[kBodyUsed] needed? : body[kBodyUsed] || body.readableDidRead || (body._readableState && body._readableState.dataEmitted) || isReadableAborted(body) )) } function isErrored (body) { return !!(body && ( stream.isErrored ? stream.isErrored(body) : /state: 'errored'/.test(nodeUtil.inspect(body) ))) } function isReadable (body) { return !!(body && ( stream.isReadable ? 
stream.isReadable(body) : /state: 'readable'/.test(nodeUtil.inspect(body) ))) } function getSocketInfo (socket) { return { localAddress: socket.localAddress, localPort: socket.localPort, remoteAddress: socket.remoteAddress, remotePort: socket.remotePort, remoteFamily: socket.remoteFamily, timeout: socket.timeout, bytesWritten: socket.bytesWritten, bytesRead: socket.bytesRead } } async function * convertIterableToBuffer (iterable) { for await (const chunk of iterable) { yield Buffer.isBuffer(chunk) ? chunk : Buffer.from(chunk) } } let ReadableStream function ReadableStreamFrom (iterable) { if (!ReadableStream) { ReadableStream = (__nccwpck_require__(63774).ReadableStream) } if (ReadableStream.from) { return ReadableStream.from(convertIterableToBuffer(iterable)) } let iterator return new ReadableStream( { async start () { iterator = iterable[Symbol.asyncIterator]() }, async pull (controller) { const { done, value } = await iterator.next() if (done) { queueMicrotask(() => { controller.close() }) } else { const buf = Buffer.isBuffer(value) ? value : Buffer.from(value) controller.enqueue(new Uint8Array(buf)) } return controller.desiredSize > 0 }, async cancel (reason) { await iterator.return() } }, 0 ) } // The chunk should be a FormData instance and contains // all the required methods. function isFormDataLike (object) { return ( object && typeof object === 'object' && typeof object.append === 'function' && typeof object.delete === 'function' && typeof object.get === 'function' && typeof object.getAll === 'function' && typeof object.has === 'function' && typeof object.set === 'function' && object[Symbol.toStringTag] === 'FormData' ) } function throwIfAborted (signal) { if (!signal) { return } if (typeof signal.throwIfAborted === 'function') { signal.throwIfAborted() } else { if (signal.aborted) { // DOMException not available < v17.0.0 const err = new Error('The operation was aborted') err.name = 'AbortError' throw err } } } function addAbortListener (signal, listener) { if ('addEventListener' in signal) { signal.addEventListener('abort', listener, { once: true }) return () => signal.removeEventListener('abort', listener) } signal.addListener('abort', listener) return () => signal.removeListener('abort', listener) } const hasToWellFormed = !!String.prototype.toWellFormed /** * @param {string} val */ function toUSVString (val) { if (hasToWellFormed) { return `${val}`.toWellFormed() } else if (nodeUtil.toUSVString) { return nodeUtil.toUSVString(val) } return `${val}` } // Parsed accordingly to RFC 9110 // https://www.rfc-editor.org/rfc/rfc9110#field.content-range function parseRangeHeader (range) { if (range == null || range === '') return { start: 0, end: null, size: null } const m = range ? range.match(/^bytes (\d+)-(\d+)\/(\d+)?$/) : null return m ? { start: parseInt(m[1]), end: m[2] ? parseInt(m[2]) : null, size: m[3] ? 
parseInt(m[3]) : null } : null } const kEnumerableProperty = Object.create(null) kEnumerableProperty.enumerable = true module.exports = { kEnumerableProperty, nop, isDisturbed, isErrored, isReadable, toUSVString, isReadableAborted, isBlobLike, parseOrigin, parseURL, getServerName, isStream, isIterable, isAsyncIterable, isDestroyed, headerNameToString, parseRawHeaders, parseHeaders, parseKeepAliveTimeout, destroy, bodyLength, deepClone, ReadableStreamFrom, isBuffer, validateHandler, getSocketInfo, isFormDataLike, buildURL, throwIfAborted, addAbortListener, parseRangeHeader, nodeMajor, nodeMinor, nodeHasAutoSelectFamily: nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 13), safeHTTPMethods: ['GET', 'HEAD', 'OPTIONS', 'TRACE'] } /***/ }), /***/ 50001: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const Dispatcher = __nccwpck_require__(28611) const { ClientDestroyedError, ClientClosedError, InvalidArgumentError } = __nccwpck_require__(68707) const { kDestroy, kClose, kDispatch, kInterceptors } = __nccwpck_require__(36443) const kDestroyed = Symbol('destroyed') const kClosed = Symbol('closed') const kOnDestroyed = Symbol('onDestroyed') const kOnClosed = Symbol('onClosed') const kInterceptedDispatch = Symbol('Intercepted Dispatch') class DispatcherBase extends Dispatcher { constructor () { super() this[kDestroyed] = false this[kOnDestroyed] = null this[kClosed] = false this[kOnClosed] = [] } get destroyed () { return this[kDestroyed] } get closed () { return this[kClosed] } get interceptors () { return this[kInterceptors] } set interceptors (newInterceptors) { if (newInterceptors) { for (let i = newInterceptors.length - 1; i >= 0; i--) { const interceptor = this[kInterceptors][i] if (typeof interceptor !== 'function') { throw new InvalidArgumentError('interceptor must be an function') } } } this[kInterceptors] = newInterceptors } close (callback) { if (callback === undefined) { return new Promise((resolve, reject) => { this.close((err, data) => { return err ? reject(err) : resolve(data) }) }) } if (typeof callback !== 'function') { throw new InvalidArgumentError('invalid callback') } if (this[kDestroyed]) { queueMicrotask(() => callback(new ClientDestroyedError(), null)) return } if (this[kClosed]) { if (this[kOnClosed]) { this[kOnClosed].push(callback) } else { queueMicrotask(() => callback(null, null)) } return } this[kClosed] = true this[kOnClosed].push(callback) const onClosed = () => { const callbacks = this[kOnClosed] this[kOnClosed] = null for (let i = 0; i < callbacks.length; i++) { callbacks[i](null, null) } } // Should not error. this[kClose]() .then(() => this.destroy()) .then(() => { queueMicrotask(onClosed) }) } destroy (err, callback) { if (typeof err === 'function') { callback = err err = null } if (callback === undefined) { return new Promise((resolve, reject) => { this.destroy(err, (err, data) => { return err ? 
/* istanbul ignore next: should never error */ reject(err) : resolve(data) }) }) } if (typeof callback !== 'function') { throw new InvalidArgumentError('invalid callback') } if (this[kDestroyed]) { if (this[kOnDestroyed]) { this[kOnDestroyed].push(callback) } else { queueMicrotask(() => callback(null, null)) } return } if (!err) { err = new ClientDestroyedError() } this[kDestroyed] = true this[kOnDestroyed] = this[kOnDestroyed] || [] this[kOnDestroyed].push(callback) const onDestroyed = () => { const callbacks = this[kOnDestroyed] this[kOnDestroyed] = null for (let i = 0; i < callbacks.length; i++) { callbacks[i](null, null) } } // Should not error. this[kDestroy](err).then(() => { queueMicrotask(onDestroyed) }) } [kInterceptedDispatch] (opts, handler) { if (!this[kInterceptors] || this[kInterceptors].length === 0) { this[kInterceptedDispatch] = this[kDispatch] return this[kDispatch](opts, handler) } let dispatch = this[kDispatch].bind(this) for (let i = this[kInterceptors].length - 1; i >= 0; i--) { dispatch = this[kInterceptors][i](dispatch) } this[kInterceptedDispatch] = dispatch return dispatch(opts, handler) } dispatch (opts, handler) { if (!handler || typeof handler !== 'object') { throw new InvalidArgumentError('handler must be an object') } try { if (!opts || typeof opts !== 'object') { throw new InvalidArgumentError('opts must be an object.') } if (this[kDestroyed] || this[kOnDestroyed]) { throw new ClientDestroyedError() } if (this[kClosed]) { throw new ClientClosedError() } return this[kInterceptedDispatch](opts, handler) } catch (err) { if (typeof handler.onError !== 'function') { throw new InvalidArgumentError('invalid onError method') } handler.onError(err) return false } } } module.exports = DispatcherBase /***/ }), /***/ 28611: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const EventEmitter = __nccwpck_require__(24434) class Dispatcher extends EventEmitter { dispatch () { throw new Error('not implemented') } close () { throw new Error('not implemented') } destroy () { throw new Error('not implemented') } } module.exports = Dispatcher /***/ }), /***/ 8923: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const Busboy = __nccwpck_require__(89581) const util = __nccwpck_require__(3440) const { ReadableStreamFrom, isBlobLike, isReadableStreamLike, readableStreamClose, createDeferredPromise, fullyReadBody } = __nccwpck_require__(15523) const { FormData } = __nccwpck_require__(43073) const { kState } = __nccwpck_require__(89710) const { webidl } = __nccwpck_require__(74222) const { DOMException, structuredClone } = __nccwpck_require__(87326) const { Blob, File: NativeFile } = __nccwpck_require__(20181) const { kBodyUsed } = __nccwpck_require__(36443) const assert = __nccwpck_require__(42613) const { isErrored } = __nccwpck_require__(3440) const { isUint8Array, isArrayBuffer } = __nccwpck_require__(98253) const { File: UndiciFile } = __nccwpck_require__(63041) const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(94322) let random try { const crypto = __nccwpck_require__(77598) random = (max) => crypto.randomInt(0, max) } catch { random = (max) => Math.floor(Math.random(max)) } let ReadableStream = globalThis.ReadableStream /** @type {globalThis['File']} */ const File = NativeFile ?? 
UndiciFile const textEncoder = new TextEncoder() const textDecoder = new TextDecoder() // https://fetch.spec.whatwg.org/#concept-bodyinit-extract function extractBody (object, keepalive = false) { if (!ReadableStream) { ReadableStream = (__nccwpck_require__(63774).ReadableStream) } // 1. Let stream be null. let stream = null // 2. If object is a ReadableStream object, then set stream to object. if (object instanceof ReadableStream) { stream = object } else if (isBlobLike(object)) { // 3. Otherwise, if object is a Blob object, set stream to the // result of running object’s get stream. stream = object.stream() } else { // 4. Otherwise, set stream to a new ReadableStream object, and set // up stream. stream = new ReadableStream({ async pull (controller) { controller.enqueue( typeof source === 'string' ? textEncoder.encode(source) : source ) queueMicrotask(() => readableStreamClose(controller)) }, start () {}, type: undefined }) } // 5. Assert: stream is a ReadableStream object. assert(isReadableStreamLike(stream)) // 6. Let action be null. let action = null // 7. Let source be null. let source = null // 8. Let length be null. let length = null // 9. Let type be null. let type = null // 10. Switch on object: if (typeof object === 'string') { // Set source to the UTF-8 encoding of object. // Note: setting source to a Uint8Array here breaks some mocking assumptions. source = object // Set type to `text/plain;charset=UTF-8`. type = 'text/plain;charset=UTF-8' } else if (object instanceof URLSearchParams) { // URLSearchParams // spec says to run application/x-www-form-urlencoded on body.list // this is implemented in Node.js as apart of an URLSearchParams instance toString method // See: https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L490 // and https://github.com/nodejs/node/blob/e46c680bf2b211bbd52cf959ca17ee98c7f657f5/lib/internal/url.js#L1100 // Set source to the result of running the application/x-www-form-urlencoded serializer with object’s list. source = object.toString() // Set type to `application/x-www-form-urlencoded;charset=UTF-8`. type = 'application/x-www-form-urlencoded;charset=UTF-8' } else if (isArrayBuffer(object)) { // BufferSource/ArrayBuffer // Set source to a copy of the bytes held by object. source = new Uint8Array(object.slice()) } else if (ArrayBuffer.isView(object)) { // BufferSource/ArrayBufferView // Set source to a copy of the bytes held by object. source = new Uint8Array(object.buffer.slice(object.byteOffset, object.byteOffset + object.byteLength)) } else if (util.isFormDataLike(object)) { const boundary = `----formdata-undici-0${`${random(1e11)}`.padStart(11, '0')}` const prefix = `--${boundary}\r\nContent-Disposition: form-data` /*! formdata-polyfill. MIT License. Jimmy Wärting */ const escape = (str) => str.replace(/\n/g, '%0A').replace(/\r/g, '%0D').replace(/"/g, '%22') const normalizeLinefeeds = (value) => value.replace(/\r?\n|\r/g, '\r\n') // Set action to this step: run the multipart/form-data // encoding algorithm, with object’s entry list and UTF-8. // - This ensures that the body is immutable and can't be changed afterwords // - That the content-length is calculated in advance. // - And that all parts are pre-encoded and ready to be sent. 
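// Illustrative sketch only (the FormData instance and field names are made up): for
//   const fd = new FormData(); fd.append('field', 'value')
// the loop below pre-encodes a body equivalent to
//   --${boundary}\r\n
//   Content-Disposition: form-data; name="field"\r\n
//   \r\n
//   value\r\n
//   --${boundary}--
// and precomputes `length`, unless a value with an unknown size is appended, in which
// case `length` is reset to null.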
const blobParts = [] const rn = new Uint8Array([13, 10]) // '\r\n' length = 0 let hasUnknownSizeValue = false for (const [name, value] of object) { if (typeof value === 'string') { const chunk = textEncoder.encode(prefix + `; name="${escape(normalizeLinefeeds(name))}"` + `\r\n\r\n${normalizeLinefeeds(value)}\r\n`) blobParts.push(chunk) length += chunk.byteLength } else { const chunk = textEncoder.encode(`${prefix}; name="${escape(normalizeLinefeeds(name))}"` + (value.name ? `; filename="${escape(value.name)}"` : '') + '\r\n' + `Content-Type: ${ value.type || 'application/octet-stream' }\r\n\r\n`) blobParts.push(chunk, value, rn) if (typeof value.size === 'number') { length += chunk.byteLength + value.size + rn.byteLength } else { hasUnknownSizeValue = true } } } const chunk = textEncoder.encode(`--${boundary}--`) blobParts.push(chunk) length += chunk.byteLength if (hasUnknownSizeValue) { length = null } // Set source to object. source = object action = async function * () { for (const part of blobParts) { if (part.stream) { yield * part.stream() } else { yield part } } } // Set type to `multipart/form-data; boundary=`, // followed by the multipart/form-data boundary string generated // by the multipart/form-data encoding algorithm. type = 'multipart/form-data; boundary=' + boundary } else if (isBlobLike(object)) { // Blob // Set source to object. source = object // Set length to object’s size. length = object.size // If object’s type attribute is not the empty byte sequence, set // type to its value. if (object.type) { type = object.type } } else if (typeof object[Symbol.asyncIterator] === 'function') { // If keepalive is true, then throw a TypeError. if (keepalive) { throw new TypeError('keepalive') } // If object is disturbed or locked, then throw a TypeError. if (util.isDisturbed(object) || object.locked) { throw new TypeError( 'Response body object should not be disturbed or locked' ) } stream = object instanceof ReadableStream ? object : ReadableStreamFrom(object) } // 11. If source is a byte sequence, then set action to a // step that returns source and length to source’s length. if (typeof source === 'string' || util.isBuffer(source)) { length = Buffer.byteLength(source) } // 12. If action is non-null, then run these steps in in parallel: if (action != null) { // Run action. let iterator stream = new ReadableStream({ async start () { iterator = action(object)[Symbol.asyncIterator]() }, async pull (controller) { const { value, done } = await iterator.next() if (done) { // When running action is done, close stream. queueMicrotask(() => { controller.close() }) } else { // Whenever one or more bytes are available and stream is not errored, // enqueue a Uint8Array wrapping an ArrayBuffer containing the available // bytes into stream. if (!isErrored(stream)) { controller.enqueue(new Uint8Array(value)) } } return controller.desiredSize > 0 }, async cancel (reason) { await iterator.return() }, type: undefined }) } // 13. Let body be a body whose stream is stream, source is source, // and length is length. const body = { stream, source, length } // 14. Return (body, type). return [body, type] } // https://fetch.spec.whatwg.org/#bodyinit-safely-extract function safelyExtractBody (object, keepalive = false) { if (!ReadableStream) { // istanbul ignore next ReadableStream = (__nccwpck_require__(63774).ReadableStream) } // To safely extract a body and a `Content-Type` value from // a byte sequence or BodyInit object object, run these steps: // 1. 
If object is a ReadableStream object, then: if (object instanceof ReadableStream) { // Assert: object is neither disturbed nor locked. // istanbul ignore next assert(!util.isDisturbed(object), 'The body has already been consumed.') // istanbul ignore next assert(!object.locked, 'The stream is locked.') } // 2. Return the results of extracting object. return extractBody(object, keepalive) } function cloneBody (body) { // To clone a body body, run these steps: // https://fetch.spec.whatwg.org/#concept-body-clone // 1. Let « out1, out2 » be the result of teeing body’s stream. const [out1, out2] = body.stream.tee() const out2Clone = structuredClone(out2, { transfer: [out2] }) // This, for whatever reasons, unrefs out2Clone which allows // the process to exit by itself. const [, finalClone] = out2Clone.tee() // 2. Set body’s stream to out1. body.stream = out1 // 3. Return a body whose stream is out2 and other members are copied from body. return { stream: finalClone, length: body.length, source: body.source } } async function * consumeBody (body) { if (body) { if (isUint8Array(body)) { yield body } else { const stream = body.stream if (util.isDisturbed(stream)) { throw new TypeError('The body has already been consumed.') } if (stream.locked) { throw new TypeError('The stream is locked.') } // Compat. stream[kBodyUsed] = true yield * stream } } } function throwIfAborted (state) { if (state.aborted) { throw new DOMException('The operation was aborted.', 'AbortError') } } function bodyMixinMethods (instance) { const methods = { blob () { // The blob() method steps are to return the result of // running consume body with this and the following step // given a byte sequence bytes: return a Blob whose // contents are bytes and whose type attribute is this’s // MIME type. return specConsumeBody(this, (bytes) => { let mimeType = bodyMimeType(this) if (mimeType === 'failure') { mimeType = '' } else if (mimeType) { mimeType = serializeAMimeType(mimeType) } // Return a Blob whose contents are bytes and type attribute // is mimeType. return new Blob([bytes], { type: mimeType }) }, instance) }, arrayBuffer () { // The arrayBuffer() method steps are to return the result // of running consume body with this and the following step // given a byte sequence bytes: return a new ArrayBuffer // whose contents are bytes. return specConsumeBody(this, (bytes) => { return new Uint8Array(bytes).buffer }, instance) }, text () { // The text() method steps are to return the result of running // consume body with this and UTF-8 decode. return specConsumeBody(this, utf8DecodeBytes, instance) }, json () { // The json() method steps are to return the result of running // consume body with this and parse JSON from bytes. 
return specConsumeBody(this, parseJSONFromBytes, instance) }, async formData () { webidl.brandCheck(this, instance) throwIfAborted(this[kState]) const contentType = this.headers.get('Content-Type') // If mimeType’s essence is "multipart/form-data", then: if (/multipart\/form-data/.test(contentType)) { const headers = {} for (const [key, value] of this.headers) headers[key.toLowerCase()] = value const responseFormData = new FormData() let busboy try { busboy = new Busboy({ headers, preservePath: true }) } catch (err) { throw new DOMException(`${err}`, 'AbortError') } busboy.on('field', (name, value) => { responseFormData.append(name, value) }) busboy.on('file', (name, value, filename, encoding, mimeType) => { const chunks = [] if (encoding === 'base64' || encoding.toLowerCase() === 'base64') { let base64chunk = '' value.on('data', (chunk) => { base64chunk += chunk.toString().replace(/[\r\n]/gm, '') const end = base64chunk.length - base64chunk.length % 4 chunks.push(Buffer.from(base64chunk.slice(0, end), 'base64')) base64chunk = base64chunk.slice(end) }) value.on('end', () => { chunks.push(Buffer.from(base64chunk, 'base64')) responseFormData.append(name, new File(chunks, filename, { type: mimeType })) }) } else { value.on('data', (chunk) => { chunks.push(chunk) }) value.on('end', () => { responseFormData.append(name, new File(chunks, filename, { type: mimeType })) }) } }) const busboyResolve = new Promise((resolve, reject) => { busboy.on('finish', resolve) busboy.on('error', (err) => reject(new TypeError(err))) }) if (this.body !== null) for await (const chunk of consumeBody(this[kState].body)) busboy.write(chunk) busboy.end() await busboyResolve return responseFormData } else if (/application\/x-www-form-urlencoded/.test(contentType)) { // Otherwise, if mimeType’s essence is "application/x-www-form-urlencoded", then: // 1. Let entries be the result of parsing bytes. let entries try { let text = '' // application/x-www-form-urlencoded parser will keep the BOM. // https://url.spec.whatwg.org/#concept-urlencoded-parser // Note that streaming decoder is stateful and cannot be reused const streamingDecoder = new TextDecoder('utf-8', { ignoreBOM: true }) for await (const chunk of consumeBody(this[kState].body)) { if (!isUint8Array(chunk)) { throw new TypeError('Expected Uint8Array chunk') } text += streamingDecoder.decode(chunk, { stream: true }) } text += streamingDecoder.decode() entries = new URLSearchParams(text) } catch (err) { // istanbul ignore next: Unclear when new URLSearchParams can fail on a string. // 2. If entries is failure, then throw a TypeError. throw Object.assign(new TypeError(), { cause: err }) } // 3. Return a new FormData object whose entries are entries. const formData = new FormData() for (const [name, value] of entries) { formData.append(name, value) } return formData } else { // Wait a tick before checking if the request has been aborted. // Otherwise, a TypeError can be thrown when an AbortError should. await Promise.resolve() throwIfAborted(this[kState]) // Otherwise, throw a TypeError. throw webidl.errors.exception({ header: `${instance.name}.formData`, message: 'Could not parse content as FormData.' 
}) } } } return methods } function mixinBody (prototype) { Object.assign(prototype.prototype, bodyMixinMethods(prototype)) } /** * @see https://fetch.spec.whatwg.org/#concept-body-consume-body * @param {Response|Request} object * @param {(value: unknown) => unknown} convertBytesToJSValue * @param {Response|Request} instance */ async function specConsumeBody (object, convertBytesToJSValue, instance) { webidl.brandCheck(object, instance) throwIfAborted(object[kState]) // 1. If object is unusable, then return a promise rejected // with a TypeError. if (bodyUnusable(object[kState].body)) { throw new TypeError('Body is unusable') } // 2. Let promise be a new promise. const promise = createDeferredPromise() // 3. Let errorSteps given error be to reject promise with error. const errorSteps = (error) => promise.reject(error) // 4. Let successSteps given a byte sequence data be to resolve // promise with the result of running convertBytesToJSValue // with data. If that threw an exception, then run errorSteps // with that exception. const successSteps = (data) => { try { promise.resolve(convertBytesToJSValue(data)) } catch (e) { errorSteps(e) } } // 5. If object’s body is null, then run successSteps with an // empty byte sequence. if (object[kState].body == null) { successSteps(new Uint8Array()) return promise.promise } // 6. Otherwise, fully read object’s body given successSteps, // errorSteps, and object’s relevant global object. await fullyReadBody(object[kState].body, successSteps, errorSteps) // 7. Return promise. return promise.promise } // https://fetch.spec.whatwg.org/#body-unusable function bodyUnusable (body) { // An object including the Body interface mixin is // said to be unusable if its body is non-null and // its body’s stream is disturbed or locked. return body != null && (body.stream.locked || util.isDisturbed(body.stream)) } /** * @see https://encoding.spec.whatwg.org/#utf-8-decode * @param {Buffer} buffer */ function utf8DecodeBytes (buffer) { if (buffer.length === 0) { return '' } // 1. Let buffer be the result of peeking three bytes from // ioQueue, converted to a byte sequence. // 2. If buffer is 0xEF 0xBB 0xBF, then read three // bytes from ioQueue. (Do nothing with those bytes.) if (buffer[0] === 0xEF && buffer[1] === 0xBB && buffer[2] === 0xBF) { buffer = buffer.subarray(3) } // 3. Process a queue with an instance of UTF-8’s // decoder, ioQueue, output, and "replacement". const output = textDecoder.decode(buffer) // 4. Return output. 
return output } /** * @see https://infra.spec.whatwg.org/#parse-json-bytes-to-a-javascript-value * @param {Uint8Array} bytes */ function parseJSONFromBytes (bytes) { return JSON.parse(utf8DecodeBytes(bytes)) } /** * @see https://fetch.spec.whatwg.org/#concept-body-mime-type * @param {import('./response').Response|import('./request').Request} object */ function bodyMimeType (object) { const { headersList } = object[kState] const contentType = headersList.get('content-type') if (contentType === null) { return 'failure' } return parseMIMEType(contentType) } module.exports = { extractBody, safelyExtractBody, cloneBody, mixinBody } /***/ }), /***/ 87326: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { MessageChannel, receiveMessageOnPort } = __nccwpck_require__(28167) const corsSafeListedMethods = ['GET', 'HEAD', 'POST'] const corsSafeListedMethodsSet = new Set(corsSafeListedMethods) const nullBodyStatus = [101, 204, 205, 304] const redirectStatus = [301, 302, 303, 307, 308] const redirectStatusSet = new Set(redirectStatus) // https://fetch.spec.whatwg.org/#block-bad-port const badPorts = [ '1', '7', '9', '11', '13', '15', '17', '19', '20', '21', '22', '23', '25', '37', '42', '43', '53', '69', '77', '79', '87', '95', '101', '102', '103', '104', '109', '110', '111', '113', '115', '117', '119', '123', '135', '137', '139', '143', '161', '179', '389', '427', '465', '512', '513', '514', '515', '526', '530', '531', '532', '540', '548', '554', '556', '563', '587', '601', '636', '989', '990', '993', '995', '1719', '1720', '1723', '2049', '3659', '4045', '5060', '5061', '6000', '6566', '6665', '6666', '6667', '6668', '6669', '6697', '10080' ] const badPortsSet = new Set(badPorts) // https://w3c.github.io/webappsec-referrer-policy/#referrer-policies const referrerPolicy = [ '', 'no-referrer', 'no-referrer-when-downgrade', 'same-origin', 'origin', 'strict-origin', 'origin-when-cross-origin', 'strict-origin-when-cross-origin', 'unsafe-url' ] const referrerPolicySet = new Set(referrerPolicy) const requestRedirect = ['follow', 'manual', 'error'] const safeMethods = ['GET', 'HEAD', 'OPTIONS', 'TRACE'] const safeMethodsSet = new Set(safeMethods) const requestMode = ['navigate', 'same-origin', 'no-cors', 'cors'] const requestCredentials = ['omit', 'same-origin', 'include'] const requestCache = [ 'default', 'no-store', 'reload', 'no-cache', 'force-cache', 'only-if-cached' ] // https://fetch.spec.whatwg.org/#request-body-header-name const requestBodyHeader = [ 'content-encoding', 'content-language', 'content-location', 'content-type', // See https://github.com/nodejs/undici/issues/2021 // 'Content-Length' is a forbidden header name, which is typically // removed in the Headers implementation. However, undici doesn't // filter out headers, so we add it here. 'content-length' ] // https://fetch.spec.whatwg.org/#enumdef-requestduplex const requestDuplex = [ 'half' ] // http://fetch.spec.whatwg.org/#forbidden-method const forbiddenMethods = ['CONNECT', 'TRACE', 'TRACK'] const forbiddenMethodsSet = new Set(forbiddenMethods) const subresource = [ 'audio', 'audioworklet', 'font', 'image', 'manifest', 'paintworklet', 'script', 'style', 'track', 'video', 'xslt', '' ] const subresourceSet = new Set(subresource) /** @type {globalThis['DOMException']} */ const DOMException = globalThis.DOMException ?? (() => { // DOMException was only made a global in Node v17.0.0, // but fetch supports >= v16.8. 
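// Editor's illustrative sketch (not part of the bundle): constants such as
// badPortsSet above exist so checks like "block bad port" become O(1) Set
// lookups on the URL's string port. The helper name and shape below are the
// editor's; the behaviour mirrors https://fetch.spec.whatwg.org/#block-bad-port.
function isBlockedPortSketch (urlString, badPortsSet) {
  const url = new URL(urlString)
  const isHttp = url.protocol === 'http:' || url.protocol === 'https:'
  // An empty url.port means the scheme default (80/443), which is never blocked.
  return isHttp && url.port !== '' && badPortsSet.has(url.port)
}
// isBlockedPortSketch('http://example.com:22/', new Set(['22'])) === true
// isBlockedPortSketch('https://example.com/', new Set(['22'])) === false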
try { atob('~') } catch (err) { return Object.getPrototypeOf(err).constructor } })() let channel /** @type {globalThis['structuredClone']} */ const structuredClone = globalThis.structuredClone ?? // https://github.com/nodejs/node/blob/b27ae24dcc4251bad726d9d84baf678d1f707fed/lib/internal/structured_clone.js // structuredClone was added in v17.0.0, but fetch supports v16.8 function structuredClone (value, options = undefined) { if (arguments.length === 0) { throw new TypeError('missing argument') } if (!channel) { channel = new MessageChannel() } channel.port1.unref() channel.port2.unref() channel.port1.postMessage(value, options?.transfer) return receiveMessageOnPort(channel.port2).message } module.exports = { DOMException, structuredClone, subresource, forbiddenMethods, requestBodyHeader, referrerPolicy, requestRedirect, requestMode, requestCredentials, requestCache, redirectStatus, corsSafeListedMethods, nullBodyStatus, safeMethods, badPorts, requestDuplex, subresourceSet, badPortsSet, redirectStatusSet, corsSafeListedMethodsSet, safeMethodsSet, forbiddenMethodsSet, referrerPolicySet } /***/ }), /***/ 94322: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const assert = __nccwpck_require__(42613) const { atob } = __nccwpck_require__(20181) const { isomorphicDecode } = __nccwpck_require__(15523) const encoder = new TextEncoder() /** * @see https://mimesniff.spec.whatwg.org/#http-token-code-point */ const HTTP_TOKEN_CODEPOINTS = /^[!#$%&'*+-.^_|~A-Za-z0-9]+$/ const HTTP_WHITESPACE_REGEX = /(\u000A|\u000D|\u0009|\u0020)/ // eslint-disable-line /** * @see https://mimesniff.spec.whatwg.org/#http-quoted-string-token-code-point */ const HTTP_QUOTED_STRING_TOKENS = /[\u0009|\u0020-\u007E|\u0080-\u00FF]/ // eslint-disable-line // https://fetch.spec.whatwg.org/#data-url-processor /** @param {URL} dataURL */ function dataURLProcessor (dataURL) { // 1. Assert: dataURL’s scheme is "data". assert(dataURL.protocol === 'data:') // 2. Let input be the result of running the URL // serializer on dataURL with exclude fragment // set to true. let input = URLSerializer(dataURL, true) // 3. Remove the leading "data:" string from input. input = input.slice(5) // 4. Let position point at the start of input. const position = { position: 0 } // 5. Let mimeType be the result of collecting a // sequence of code points that are not equal // to U+002C (,), given position. let mimeType = collectASequenceOfCodePointsFast( ',', input, position ) // 6. Strip leading and trailing ASCII whitespace // from mimeType. // Undici implementation note: we need to store the // length because if the mimetype has spaces removed, // the wrong amount will be sliced from the input in // step #9 const mimeTypeLength = mimeType.length mimeType = removeASCIIWhitespace(mimeType, true, true) // 7. If position is past the end of input, then // return failure if (position.position >= input.length) { return 'failure' } // 8. Advance position by 1. position.position++ // 9. Let encodedBody be the remainder of input. const encodedBody = input.slice(mimeTypeLength + 1) // 10. Let body be the percent-decoding of encodedBody. let body = stringPercentDecode(encodedBody) // 11. If mimeType ends with U+003B (;), followed by // zero or more U+0020 SPACE, followed by an ASCII // case-insensitive match for "base64", then: if (/;(\u0020){0,}base64$/i.test(mimeType)) { // 1. Let stringBody be the isomorphic decode of body. const stringBody = isomorphicDecode(body) // 2. Set body to the forgiving-base64 decode of // stringBody. 
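// Editor's illustrative sketch (not part of the bundle): the structuredClone
// fallback above works by round-tripping the value through a MessageChannel
// port pair, which applies the structured clone algorithm, and reading the
// result back synchronously with receiveMessageOnPort. Standalone equivalent,
// with the require kept local:
function structuredCloneSketch (value) {
  const { MessageChannel, receiveMessageOnPort } = require('node:worker_threads')
  const { port1, port2 } = new MessageChannel()
  port1.unref() // don't keep the event loop alive for the ports
  port2.unref()
  port1.postMessage(value)
  return receiveMessageOnPort(port2).message
}
// const copy = structuredCloneSketch({ when: new Date(), tags: new Set(['a']) })
// copy is a deep copy: copy.tags is a new Set holding the same values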
body = forgivingBase64(stringBody) // 3. If body is failure, then return failure. if (body === 'failure') { return 'failure' } // 4. Remove the last 6 code points from mimeType. mimeType = mimeType.slice(0, -6) // 5. Remove trailing U+0020 SPACE code points from mimeType, // if any. mimeType = mimeType.replace(/(\u0020)+$/, '') // 6. Remove the last U+003B (;) code point from mimeType. mimeType = mimeType.slice(0, -1) } // 12. If mimeType starts with U+003B (;), then prepend // "text/plain" to mimeType. if (mimeType.startsWith(';')) { mimeType = 'text/plain' + mimeType } // 13. Let mimeTypeRecord be the result of parsing // mimeType. let mimeTypeRecord = parseMIMEType(mimeType) // 14. If mimeTypeRecord is failure, then set // mimeTypeRecord to text/plain;charset=US-ASCII. if (mimeTypeRecord === 'failure') { mimeTypeRecord = parseMIMEType('text/plain;charset=US-ASCII') } // 15. Return a new data: URL struct whose MIME // type is mimeTypeRecord and body is body. // https://fetch.spec.whatwg.org/#data-url-struct return { mimeType: mimeTypeRecord, body } } // https://url.spec.whatwg.org/#concept-url-serializer /** * @param {URL} url * @param {boolean} excludeFragment */ function URLSerializer (url, excludeFragment = false) { if (!excludeFragment) { return url.href } const href = url.href const hashLength = url.hash.length return hashLength === 0 ? href : href.substring(0, href.length - hashLength) } // https://infra.spec.whatwg.org/#collect-a-sequence-of-code-points /** * @param {(char: string) => boolean} condition * @param {string} input * @param {{ position: number }} position */ function collectASequenceOfCodePoints (condition, input, position) { // 1. Let result be the empty string. let result = '' // 2. While position doesn’t point past the end of input and the // code point at position within input meets the condition condition: while (position.position < input.length && condition(input[position.position])) { // 1. Append that code point to the end of result. result += input[position.position] // 2. Advance position by 1. position.position++ } // 3. Return result. return result } /** * A faster collectASequenceOfCodePoints that only works when comparing a single character. * @param {string} char * @param {string} input * @param {{ position: number }} position */ function collectASequenceOfCodePointsFast (char, input, position) { const idx = input.indexOf(char, position.position) const start = position.position if (idx === -1) { position.position = input.length return input.slice(start) } position.position = idx return input.slice(start, position.position) } // https://url.spec.whatwg.org/#string-percent-decode /** @param {string} input */ function stringPercentDecode (input) { // 1. Let bytes be the UTF-8 encoding of input. const bytes = encoder.encode(input) // 2. Return the percent-decoding of bytes. return percentDecode(bytes) } // https://url.spec.whatwg.org/#percent-decode /** @param {Uint8Array} input */ function percentDecode (input) { // 1. Let output be an empty byte sequence. /** @type {number[]} */ const output = [] // 2. For each byte byte in input: for (let i = 0; i < input.length; i++) { const byte = input[i] // 1. If byte is not 0x25 (%), then append byte to output. if (byte !== 0x25) { output.push(byte) // 2. Otherwise, if byte is 0x25 (%) and the next two bytes // after byte in input are not in the ranges // 0x30 (0) to 0x39 (9), 0x41 (A) to 0x46 (F), // and 0x61 (a) to 0x66 (f), all inclusive, append byte // to output. 
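// Editor's illustrative sketch: what dataURLProcessor() above returns for a
// typical base64 data: URL (the concrete URL is example data). Not invoked by
// the bundle; shown only to make the { mimeType, body } shape concrete.
function dataURLSketch () {
  const parsed = dataURLProcessor(new URL('data:text/plain;base64,aGVsbG8='))
  // parsed.mimeType is a MIME type record with essence 'text/plain' and an
  // empty parameters map; parsed.body is a Uint8Array of the decoded bytes.
  return Buffer.from(parsed.body).toString('utf8') // 'hello'
}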
} else if ( byte === 0x25 && !/^[0-9A-Fa-f]{2}$/i.test(String.fromCharCode(input[i + 1], input[i + 2])) ) { output.push(0x25) // 3. Otherwise: } else { // 1. Let bytePoint be the two bytes after byte in input, // decoded, and then interpreted as hexadecimal number. const nextTwoBytes = String.fromCharCode(input[i + 1], input[i + 2]) const bytePoint = Number.parseInt(nextTwoBytes, 16) // 2. Append a byte whose value is bytePoint to output. output.push(bytePoint) // 3. Skip the next two bytes in input. i += 2 } } // 3. Return output. return Uint8Array.from(output) } // https://mimesniff.spec.whatwg.org/#parse-a-mime-type /** @param {string} input */ function parseMIMEType (input) { // 1. Remove any leading and trailing HTTP whitespace // from input. input = removeHTTPWhitespace(input, true, true) // 2. Let position be a position variable for input, // initially pointing at the start of input. const position = { position: 0 } // 3. Let type be the result of collecting a sequence // of code points that are not U+002F (/) from // input, given position. const type = collectASequenceOfCodePointsFast( '/', input, position ) // 4. If type is the empty string or does not solely // contain HTTP token code points, then return failure. // https://mimesniff.spec.whatwg.org/#http-token-code-point if (type.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(type)) { return 'failure' } // 5. If position is past the end of input, then return // failure if (position.position > input.length) { return 'failure' } // 6. Advance position by 1. (This skips past U+002F (/).) position.position++ // 7. Let subtype be the result of collecting a sequence of // code points that are not U+003B (;) from input, given // position. let subtype = collectASequenceOfCodePointsFast( ';', input, position ) // 8. Remove any trailing HTTP whitespace from subtype. subtype = removeHTTPWhitespace(subtype, false, true) // 9. If subtype is the empty string or does not solely // contain HTTP token code points, then return failure. if (subtype.length === 0 || !HTTP_TOKEN_CODEPOINTS.test(subtype)) { return 'failure' } const typeLowercase = type.toLowerCase() const subtypeLowercase = subtype.toLowerCase() // 10. Let mimeType be a new MIME type record whose type // is type, in ASCII lowercase, and subtype is subtype, // in ASCII lowercase. // https://mimesniff.spec.whatwg.org/#mime-type const mimeType = { type: typeLowercase, subtype: subtypeLowercase, /** @type {Map} */ parameters: new Map(), // https://mimesniff.spec.whatwg.org/#mime-type-essence essence: `${typeLowercase}/${subtypeLowercase}` } // 11. While position is not past the end of input: while (position.position < input.length) { // 1. Advance position by 1. (This skips past U+003B (;).) position.position++ // 2. Collect a sequence of code points that are HTTP // whitespace from input given position. collectASequenceOfCodePoints( // https://fetch.spec.whatwg.org/#http-whitespace char => HTTP_WHITESPACE_REGEX.test(char), input, position ) // 3. Let parameterName be the result of collecting a // sequence of code points that are not U+003B (;) // or U+003D (=) from input, given position. let parameterName = collectASequenceOfCodePoints( (char) => char !== ';' && char !== '=', input, position ) // 4. Set parameterName to parameterName, in ASCII // lowercase. parameterName = parameterName.toLowerCase() // 5. If position is not past the end of input, then: if (position.position < input.length) { // 1. If the code point at position within input is // U+003B (;), then continue. 
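// Editor's illustrative sketch: percentDecode() above is byte-oriented, so
// multi-byte UTF-8 escapes survive, and a '%' that is not followed by two hex
// digits is passed through instead of throwing (unlike decodeURIComponent).
// Example data; not invoked by the bundle.
function percentDecodeSketch () {
  const bytes = stringPercentDecode('caf%C3%A9%20x%2')   // Uint8Array
  return Buffer.from(bytes).toString('utf8')             // 'café x%2'
}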
if (input[position.position] === ';') { continue } // 2. Advance position by 1. (This skips past U+003D (=).) position.position++ } // 6. If position is past the end of input, then break. if (position.position > input.length) { break } // 7. Let parameterValue be null. let parameterValue = null // 8. If the code point at position within input is // U+0022 ("), then: if (input[position.position] === '"') { // 1. Set parameterValue to the result of collecting // an HTTP quoted string from input, given position // and the extract-value flag. parameterValue = collectAnHTTPQuotedString(input, position, true) // 2. Collect a sequence of code points that are not // U+003B (;) from input, given position. collectASequenceOfCodePointsFast( ';', input, position ) // 9. Otherwise: } else { // 1. Set parameterValue to the result of collecting // a sequence of code points that are not U+003B (;) // from input, given position. parameterValue = collectASequenceOfCodePointsFast( ';', input, position ) // 2. Remove any trailing HTTP whitespace from parameterValue. parameterValue = removeHTTPWhitespace(parameterValue, false, true) // 3. If parameterValue is the empty string, then continue. if (parameterValue.length === 0) { continue } } // 10. If all of the following are true // - parameterName is not the empty string // - parameterName solely contains HTTP token code points // - parameterValue solely contains HTTP quoted-string token code points // - mimeType’s parameters[parameterName] does not exist // then set mimeType’s parameters[parameterName] to parameterValue. if ( parameterName.length !== 0 && HTTP_TOKEN_CODEPOINTS.test(parameterName) && (parameterValue.length === 0 || HTTP_QUOTED_STRING_TOKENS.test(parameterValue)) && !mimeType.parameters.has(parameterName) ) { mimeType.parameters.set(parameterName, parameterValue) } } // 12. Return mimeType. return mimeType } // https://infra.spec.whatwg.org/#forgiving-base64-decode /** @param {string} data */ function forgivingBase64 (data) { // 1. Remove all ASCII whitespace from data. data = data.replace(/[\u0009\u000A\u000C\u000D\u0020]/g, '') // eslint-disable-line // 2. If data’s code point length divides by 4 leaving // no remainder, then: if (data.length % 4 === 0) { // 1. If data ends with one or two U+003D (=) code points, // then remove them from data. data = data.replace(/=?=$/, '') } // 3. If data’s code point length divides by 4 leaving // a remainder of 1, then return failure. if (data.length % 4 === 1) { return 'failure' } // 4. If data contains a code point that is not one of // U+002B (+) // U+002F (/) // ASCII alphanumeric // then return failure. if (/[^+/0-9A-Za-z]/.test(data)) { return 'failure' } const binary = atob(data) const bytes = new Uint8Array(binary.length) for (let byte = 0; byte < binary.length; byte++) { bytes[byte] = binary.charCodeAt(byte) } return bytes } // https://fetch.spec.whatwg.org/#collect-an-http-quoted-string // tests: https://fetch.spec.whatwg.org/#example-http-quoted-string /** * @param {string} input * @param {{ position: number }} position * @param {boolean?} extractValue */ function collectAnHTTPQuotedString (input, position, extractValue) { // 1. Let positionStart be position. const positionStart = position.position // 2. Let value be the empty string. let value = '' // 3. Assert: the code point at position within input // is U+0022 ("). assert(input[position.position] === '"') // 4. Advance position by 1. position.position++ // 5. While true: while (true) { // 1. 
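// Editor's illustrative sketch: parseMIMEType() above lowercases the type,
// subtype and parameter names, strips the quotes from quoted parameter
// values, and serializeAMimeType() re-quotes only when needed. Example input;
// not invoked by the bundle.
function mimeTypeSketch () {
  const mt = parseMIMEType('Text/HTML;Charset="utf-8"')
  return {
    essence: mt.essence,                     // 'text/html'
    charset: mt.parameters.get('charset'),   // 'utf-8'
    roundTrip: serializeAMimeType(mt)        // 'text/html;charset=utf-8'
  }
}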
Append the result of collecting a sequence of code points // that are not U+0022 (") or U+005C (\) from input, given // position, to value. value += collectASequenceOfCodePoints( (char) => char !== '"' && char !== '\\', input, position ) // 2. If position is past the end of input, then break. if (position.position >= input.length) { break } // 3. Let quoteOrBackslash be the code point at position within // input. const quoteOrBackslash = input[position.position] // 4. Advance position by 1. position.position++ // 5. If quoteOrBackslash is U+005C (\), then: if (quoteOrBackslash === '\\') { // 1. If position is past the end of input, then append // U+005C (\) to value and break. if (position.position >= input.length) { value += '\\' break } // 2. Append the code point at position within input to value. value += input[position.position] // 3. Advance position by 1. position.position++ // 6. Otherwise: } else { // 1. Assert: quoteOrBackslash is U+0022 ("). assert(quoteOrBackslash === '"') // 2. Break. break } } // 6. If the extract-value flag is set, then return value. if (extractValue) { return value } // 7. Return the code points from positionStart to position, // inclusive, within input. return input.slice(positionStart, position.position) } /** * @see https://mimesniff.spec.whatwg.org/#serialize-a-mime-type */ function serializeAMimeType (mimeType) { assert(mimeType !== 'failure') const { parameters, essence } = mimeType // 1. Let serialization be the concatenation of mimeType’s // type, U+002F (/), and mimeType’s subtype. let serialization = essence // 2. For each name → value of mimeType’s parameters: for (let [name, value] of parameters.entries()) { // 1. Append U+003B (;) to serialization. serialization += ';' // 2. Append name to serialization. serialization += name // 3. Append U+003D (=) to serialization. serialization += '=' // 4. If value does not solely contain HTTP token code // points or value is the empty string, then: if (!HTTP_TOKEN_CODEPOINTS.test(value)) { // 1. Precede each occurence of U+0022 (") or // U+005C (\) in value with U+005C (\). value = value.replace(/(\\|")/g, '\\$1') // 2. Prepend U+0022 (") to value. value = '"' + value // 3. Append U+0022 (") to value. value += '"' } // 5. Append value to serialization. serialization += value } // 3. Return serialization. 
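// Editor's illustrative sketch: forgivingBase64() above strips ASCII
// whitespace, tolerates up to two trailing '=' signs, and signals bad input
// by returning the string 'failure' instead of throwing. Example data; not
// invoked by the bundle.
function forgivingBase64Sketch () {
  const ok = forgivingBase64(' aGVs\tbG8= ')   // whitespace is removed first
  const bad = forgivingBase64('abcde')         // length % 4 === 1 -> 'failure'
  return { text: Buffer.from(ok).toString('utf8'), bad } // { text: 'hello', bad: 'failure' }
}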
return serialization } /** * @see https://fetch.spec.whatwg.org/#http-whitespace * @param {string} char */ function isHTTPWhiteSpace (char) { return char === '\r' || char === '\n' || char === '\t' || char === ' ' } /** * @see https://fetch.spec.whatwg.org/#http-whitespace * @param {string} str */ function removeHTTPWhitespace (str, leading = true, trailing = true) { let lead = 0 let trail = str.length - 1 if (leading) { for (; lead < str.length && isHTTPWhiteSpace(str[lead]); lead++); } if (trailing) { for (; trail > 0 && isHTTPWhiteSpace(str[trail]); trail--); } return str.slice(lead, trail + 1) } /** * @see https://infra.spec.whatwg.org/#ascii-whitespace * @param {string} char */ function isASCIIWhitespace (char) { return char === '\r' || char === '\n' || char === '\t' || char === '\f' || char === ' ' } /** * @see https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace */ function removeASCIIWhitespace (str, leading = true, trailing = true) { let lead = 0 let trail = str.length - 1 if (leading) { for (; lead < str.length && isASCIIWhitespace(str[lead]); lead++); } if (trailing) { for (; trail > 0 && isASCIIWhitespace(str[trail]); trail--); } return str.slice(lead, trail + 1) } module.exports = { dataURLProcessor, URLSerializer, collectASequenceOfCodePoints, collectASequenceOfCodePointsFast, stringPercentDecode, parseMIMEType, collectAnHTTPQuotedString, serializeAMimeType } /***/ }), /***/ 63041: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { Blob, File: NativeFile } = __nccwpck_require__(20181) const { types } = __nccwpck_require__(39023) const { kState } = __nccwpck_require__(89710) const { isBlobLike } = __nccwpck_require__(15523) const { webidl } = __nccwpck_require__(74222) const { parseMIMEType, serializeAMimeType } = __nccwpck_require__(94322) const { kEnumerableProperty } = __nccwpck_require__(3440) const encoder = new TextEncoder() class File extends Blob { constructor (fileBits, fileName, options = {}) { // The File constructor is invoked with two or three parameters, depending // on whether the optional dictionary parameter is used. When the File() // constructor is invoked, user agents must run the following steps: webidl.argumentLengthCheck(arguments, 2, { header: 'File constructor' }) fileBits = webidl.converters['sequence'](fileBits) fileName = webidl.converters.USVString(fileName) options = webidl.converters.FilePropertyBag(options) // 1. Let bytes be the result of processing blob parts given fileBits and // options. // Note: Blob handles this for us // 2. Let n be the fileName argument to the constructor. const n = fileName // 3. Process FilePropertyBag dictionary argument by running the following // substeps: // 1. If the type member is provided and is not the empty string, let t // be set to the type dictionary member. If t contains any characters // outside the range U+0020 to U+007E, then set t to the empty string // and return from these substeps. // 2. Convert every character in t to ASCII lowercase. let t = options.type let d // eslint-disable-next-line no-labels substep: { if (t) { t = parseMIMEType(t) if (t === 'failure') { t = '' // eslint-disable-next-line no-labels break substep } t = serializeAMimeType(t).toLowerCase() } // 3. If the lastModified member is provided, let d be set to the // lastModified dictionary member. If it is not provided, set d to the // current date and time represented as the number of milliseconds since // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). 
d = options.lastModified } // 4. Return a new File object F such that: // F refers to the bytes byte sequence. // F.size is set to the number of total bytes in bytes. // F.name is set to n. // F.type is set to t. // F.lastModified is set to d. super(processBlobParts(fileBits, options), { type: t }) this[kState] = { name: n, lastModified: d, type: t } } get name () { webidl.brandCheck(this, File) return this[kState].name } get lastModified () { webidl.brandCheck(this, File) return this[kState].lastModified } get type () { webidl.brandCheck(this, File) return this[kState].type } } class FileLike { constructor (blobLike, fileName, options = {}) { // TODO: argument idl type check // The File constructor is invoked with two or three parameters, depending // on whether the optional dictionary parameter is used. When the File() // constructor is invoked, user agents must run the following steps: // 1. Let bytes be the result of processing blob parts given fileBits and // options. // 2. Let n be the fileName argument to the constructor. const n = fileName // 3. Process FilePropertyBag dictionary argument by running the following // substeps: // 1. If the type member is provided and is not the empty string, let t // be set to the type dictionary member. If t contains any characters // outside the range U+0020 to U+007E, then set t to the empty string // and return from these substeps. // TODO const t = options.type // 2. Convert every character in t to ASCII lowercase. // TODO // 3. If the lastModified member is provided, let d be set to the // lastModified dictionary member. If it is not provided, set d to the // current date and time represented as the number of milliseconds since // the Unix Epoch (which is the equivalent of Date.now() [ECMA-262]). const d = options.lastModified ?? Date.now() // 4. Return a new File object F such that: // F refers to the bytes byte sequence. // F.size is set to the number of total bytes in bytes. // F.name is set to n. // F.type is set to t. // F.lastModified is set to d. 
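// Editor's illustrative sketch: the File class above is a Blob plus
// name/type/lastModified bookkeeping, so inherited Blob methods such as
// text() and slice() keep working. Example data; not invoked by the bundle.
// (Node >= 20 also exposes an API-compatible global File.)
async function fileSketch () {
  const file = new File(['hello ', 'world'], 'greeting.txt', { type: 'text/plain' })
  return {
    name: file.name,          // 'greeting.txt'
    type: file.type,          // 'text/plain' (parsed and re-serialized MIME type)
    size: file.size,          // 11 bytes
    body: await file.text()   // 'hello world'
  }
}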
this[kState] = { blobLike, name: n, type: t, lastModified: d } } stream (...args) { webidl.brandCheck(this, FileLike) return this[kState].blobLike.stream(...args) } arrayBuffer (...args) { webidl.brandCheck(this, FileLike) return this[kState].blobLike.arrayBuffer(...args) } slice (...args) { webidl.brandCheck(this, FileLike) return this[kState].blobLike.slice(...args) } text (...args) { webidl.brandCheck(this, FileLike) return this[kState].blobLike.text(...args) } get size () { webidl.brandCheck(this, FileLike) return this[kState].blobLike.size } get type () { webidl.brandCheck(this, FileLike) return this[kState].blobLike.type } get name () { webidl.brandCheck(this, FileLike) return this[kState].name } get lastModified () { webidl.brandCheck(this, FileLike) return this[kState].lastModified } get [Symbol.toStringTag] () { return 'File' } } Object.defineProperties(File.prototype, { [Symbol.toStringTag]: { value: 'File', configurable: true }, name: kEnumerableProperty, lastModified: kEnumerableProperty }) webidl.converters.Blob = webidl.interfaceConverter(Blob) webidl.converters.BlobPart = function (V, opts) { if (webidl.util.Type(V) === 'Object') { if (isBlobLike(V)) { return webidl.converters.Blob(V, { strict: false }) } if ( ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V) ) { return webidl.converters.BufferSource(V, opts) } } return webidl.converters.USVString(V, opts) } webidl.converters['sequence'] = webidl.sequenceConverter( webidl.converters.BlobPart ) // https://www.w3.org/TR/FileAPI/#dfn-FilePropertyBag webidl.converters.FilePropertyBag = webidl.dictionaryConverter([ { key: 'lastModified', converter: webidl.converters['long long'], get defaultValue () { return Date.now() } }, { key: 'type', converter: webidl.converters.DOMString, defaultValue: '' }, { key: 'endings', converter: (value) => { value = webidl.converters.DOMString(value) value = value.toLowerCase() if (value !== 'native') { value = 'transparent' } return value }, defaultValue: 'transparent' } ]) /** * @see https://www.w3.org/TR/FileAPI/#process-blob-parts * @param {(NodeJS.TypedArray|Blob|string)[]} parts * @param {{ type: string, endings: string }} options */ function processBlobParts (parts, options) { // 1. Let bytes be an empty sequence of bytes. /** @type {NodeJS.TypedArray[]} */ const bytes = [] // 2. For each element in parts: for (const element of parts) { // 1. If element is a USVString, run the following substeps: if (typeof element === 'string') { // 1. Let s be element. let s = element // 2. If the endings member of options is "native", set s // to the result of converting line endings to native // of element. if (options.endings === 'native') { s = convertLineEndingsNative(s) } // 3. Append the result of UTF-8 encoding s to bytes. bytes.push(encoder.encode(s)) } else if ( types.isAnyArrayBuffer(element) || types.isTypedArray(element) ) { // 2. If element is a BufferSource, get a copy of the // bytes held by the buffer source, and append those // bytes to bytes. if (!element.buffer) { // ArrayBuffer bytes.push(new Uint8Array(element)) } else { bytes.push( new Uint8Array(element.buffer, element.byteOffset, element.byteLength) ) } } else if (isBlobLike(element)) { // 3. If element is a Blob, append the bytes it represents // to bytes. bytes.push(element) } } // 3. Return bytes. return bytes } /** * @see https://www.w3.org/TR/FileAPI/#convert-line-endings-to-native * @param {string} s */ function convertLineEndingsNative (s) { // 1. Let native line ending be be the code point U+000A LF. 
let nativeLineEnding = '\n' // 2. If the underlying platform’s conventions are to // represent newlines as a carriage return and line feed // sequence, set native line ending to the code point // U+000D CR followed by the code point U+000A LF. if (process.platform === 'win32') { nativeLineEnding = '\r\n' } return s.replace(/\r?\n/g, nativeLineEnding) } // If this function is moved to ./util.js, some tools (such as // rollup) will warn about circular dependencies. See: // https://github.com/nodejs/undici/issues/1629 function isFileLike (object) { return ( (NativeFile && object instanceof NativeFile) || object instanceof File || ( object && (typeof object.stream === 'function' || typeof object.arrayBuffer === 'function') && object[Symbol.toStringTag] === 'File' ) ) } module.exports = { File, FileLike, isFileLike } /***/ }), /***/ 43073: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { isBlobLike, toUSVString, makeIterator } = __nccwpck_require__(15523) const { kState } = __nccwpck_require__(89710) const { File: UndiciFile, FileLike, isFileLike } = __nccwpck_require__(63041) const { webidl } = __nccwpck_require__(74222) const { Blob, File: NativeFile } = __nccwpck_require__(20181) /** @type {globalThis['File']} */ const File = NativeFile ?? UndiciFile // https://xhr.spec.whatwg.org/#formdata class FormData { constructor (form) { if (form !== undefined) { throw webidl.errors.conversionFailed({ prefix: 'FormData constructor', argument: 'Argument 1', types: ['undefined'] }) } this[kState] = [] } append (name, value, filename = undefined) { webidl.brandCheck(this, FormData) webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.append' }) if (arguments.length === 3 && !isBlobLike(value)) { throw new TypeError( "Failed to execute 'append' on 'FormData': parameter 2 is not of type 'Blob'" ) } // 1. Let value be value if given; otherwise blobValue. name = webidl.converters.USVString(name) value = isBlobLike(value) ? webidl.converters.Blob(value, { strict: false }) : webidl.converters.USVString(value) filename = arguments.length === 3 ? webidl.converters.USVString(filename) : undefined // 2. Let entry be the result of creating an entry with // name, value, and filename if given. const entry = makeEntry(name, value, filename) // 3. Append entry to this’s entry list. this[kState].push(entry) } delete (name) { webidl.brandCheck(this, FormData) webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.delete' }) name = webidl.converters.USVString(name) // The delete(name) method steps are to remove all entries whose name // is name from this’s entry list. this[kState] = this[kState].filter(entry => entry.name !== name) } get (name) { webidl.brandCheck(this, FormData) webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.get' }) name = webidl.converters.USVString(name) // 1. If there is no entry whose name is name in this’s entry list, // then return null. const idx = this[kState].findIndex((entry) => entry.name === name) if (idx === -1) { return null } // 2. Return the value of the first entry whose name is name from // this’s entry list. return this[kState][idx].value } getAll (name) { webidl.brandCheck(this, FormData) webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.getAll' }) name = webidl.converters.USVString(name) // 1. If there is no entry whose name is name in this’s entry list, // then return the empty list. // 2. Return the values of all entries whose name is name, in order, // from this’s entry list. 
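// Editor's illustrative sketch: convertLineEndingsNative() above (used when a
// Blob/File is built with { endings: 'native' }) normalizes both '\n' and
// '\r\n' to the platform's line ending before the string part is UTF-8
// encoded. Standalone equivalent; not invoked by the bundle.
function convertLineEndingsSketch (s) {
  const nativeLineEnding = process.platform === 'win32' ? '\r\n' : '\n'
  return s.replace(/\r?\n/g, nativeLineEnding)
}
// On Linux/macOS: convertLineEndingsSketch('a\r\nb\nc') === 'a\nb\nc'
// On Windows:     convertLineEndingsSketch('a\r\nb\nc') === 'a\r\nb\r\nc'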
return this[kState] .filter((entry) => entry.name === name) .map((entry) => entry.value) } has (name) { webidl.brandCheck(this, FormData) webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.has' }) name = webidl.converters.USVString(name) // The has(name) method steps are to return true if there is an entry // whose name is name in this’s entry list; otherwise false. return this[kState].findIndex((entry) => entry.name === name) !== -1 } set (name, value, filename = undefined) { webidl.brandCheck(this, FormData) webidl.argumentLengthCheck(arguments, 2, { header: 'FormData.set' }) if (arguments.length === 3 && !isBlobLike(value)) { throw new TypeError( "Failed to execute 'set' on 'FormData': parameter 2 is not of type 'Blob'" ) } // The set(name, value) and set(name, blobValue, filename) method steps // are: // 1. Let value be value if given; otherwise blobValue. name = webidl.converters.USVString(name) value = isBlobLike(value) ? webidl.converters.Blob(value, { strict: false }) : webidl.converters.USVString(value) filename = arguments.length === 3 ? toUSVString(filename) : undefined // 2. Let entry be the result of creating an entry with name, value, and // filename if given. const entry = makeEntry(name, value, filename) // 3. If there are entries in this’s entry list whose name is name, then // replace the first such entry with entry and remove the others. const idx = this[kState].findIndex((entry) => entry.name === name) if (idx !== -1) { this[kState] = [ ...this[kState].slice(0, idx), entry, ...this[kState].slice(idx + 1).filter((entry) => entry.name !== name) ] } else { // 4. Otherwise, append entry to this’s entry list. this[kState].push(entry) } } entries () { webidl.brandCheck(this, FormData) return makeIterator( () => this[kState].map(pair => [pair.name, pair.value]), 'FormData', 'key+value' ) } keys () { webidl.brandCheck(this, FormData) return makeIterator( () => this[kState].map(pair => [pair.name, pair.value]), 'FormData', 'key' ) } values () { webidl.brandCheck(this, FormData) return makeIterator( () => this[kState].map(pair => [pair.name, pair.value]), 'FormData', 'value' ) } /** * @param {(value: string, key: string, self: FormData) => void} callbackFn * @param {unknown} thisArg */ forEach (callbackFn, thisArg = globalThis) { webidl.brandCheck(this, FormData) webidl.argumentLengthCheck(arguments, 1, { header: 'FormData.forEach' }) if (typeof callbackFn !== 'function') { throw new TypeError( "Failed to execute 'forEach' on 'FormData': parameter 1 is not of type 'Function'." ) } for (const [key, value] of this) { callbackFn.apply(thisArg, [value, key, this]) } } } FormData.prototype[Symbol.iterator] = FormData.prototype.entries Object.defineProperties(FormData.prototype, { [Symbol.toStringTag]: { value: 'FormData', configurable: true } }) /** * @see https://html.spec.whatwg.org/multipage/form-control-infrastructure.html#create-an-entry * @param {string} name * @param {string|Blob} value * @param {?string} filename * @returns */ function makeEntry (name, value, filename) { // 1. Set name to the result of converting name into a scalar value string. // "To convert a string into a scalar value string, replace any surrogates // with U+FFFD." // see: https://nodejs.org/dist/latest-v18.x/docs/api/buffer.html#buftostringencoding-start-end name = Buffer.from(name).toString('utf8') // 2. If value is a string, then set value to the result of converting // value into a scalar value string. if (typeof value === 'string') { value = Buffer.from(value).toString('utf8') } else { // 3. 
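// Editor's illustrative sketch of the entry-list semantics implemented by the
// FormData class above (also exposed as the Node >= 18 global): append()
// accumulates entries per name, set() replaces the first matching entry and
// drops the rest. Example data; not invoked by the bundle.
function formDataSketch () {
  const fd = new FormData()
  fd.append('tag', 'a')
  fd.append('tag', 'b')
  const appended = fd.getAll('tag')   // ['a', 'b']
  fd.set('tag', 'c')                  // replaces 'a', removes 'b'
  return { appended, afterSet: fd.getAll('tag') } // afterSet: ['c']
}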
Otherwise: // 1. If value is not a File object, then set value to a new File object, // representing the same bytes, whose name attribute value is "blob" if (!isFileLike(value)) { value = value instanceof Blob ? new File([value], 'blob', { type: value.type }) : new FileLike(value, 'blob', { type: value.type }) } // 2. If filename is given, then set value to a new File object, // representing the same bytes, whose name attribute is filename. if (filename !== undefined) { /** @type {FilePropertyBag} */ const options = { type: value.type, lastModified: value.lastModified } value = (NativeFile && value instanceof NativeFile) || value instanceof UndiciFile ? new File([value], filename, options) : new FileLike(value, filename, options) } } // 4. Return an entry whose name is name and whose value is value. return { name, value } } module.exports = { FormData } /***/ }), /***/ 75628: /***/ ((module) => { "use strict"; // In case of breaking changes, increase the version // number to avoid conflicts. const globalOrigin = Symbol.for('undici.globalOrigin.1') function getGlobalOrigin () { return globalThis[globalOrigin] } function setGlobalOrigin (newOrigin) { if (newOrigin === undefined) { Object.defineProperty(globalThis, globalOrigin, { value: undefined, writable: true, enumerable: false, configurable: false }) return } const parsedURL = new URL(newOrigin) if (parsedURL.protocol !== 'http:' && parsedURL.protocol !== 'https:') { throw new TypeError(`Only http & https urls are allowed, received ${parsedURL.protocol}`) } Object.defineProperty(globalThis, globalOrigin, { value: parsedURL, writable: true, enumerable: false, configurable: false }) } module.exports = { getGlobalOrigin, setGlobalOrigin } /***/ }), /***/ 26349: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // https://github.com/Ethan-Arrowood/undici-fetch const { kHeadersList, kConstruct } = __nccwpck_require__(36443) const { kGuard } = __nccwpck_require__(89710) const { kEnumerableProperty } = __nccwpck_require__(3440) const { makeIterator, isValidHeaderName, isValidHeaderValue } = __nccwpck_require__(15523) const util = __nccwpck_require__(39023) const { webidl } = __nccwpck_require__(74222) const assert = __nccwpck_require__(42613) const kHeadersMap = Symbol('headers map') const kHeadersSortedMap = Symbol('headers map sorted') /** * @param {number} code */ function isHTTPWhiteSpaceCharCode (code) { return code === 0x00a || code === 0x00d || code === 0x009 || code === 0x020 } /** * @see https://fetch.spec.whatwg.org/#concept-header-value-normalize * @param {string} potentialValue */ function headerValueNormalize (potentialValue) { // To normalize a byte sequence potentialValue, remove // any leading and trailing HTTP whitespace bytes from // potentialValue. let i = 0; let j = potentialValue.length while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(j - 1))) --j while (j > i && isHTTPWhiteSpaceCharCode(potentialValue.charCodeAt(i))) ++i return i === 0 && j === potentialValue.length ? potentialValue : potentialValue.substring(i, j) } function fill (headers, object) { // To fill a Headers object headers with a given object object, run these steps: // 1. If object is a sequence, then for each header in object: // Note: webidl conversion to array has already been done. if (Array.isArray(object)) { for (let i = 0; i < object.length; ++i) { const header = object[i] // 1. If header does not contain exactly two items, then throw a TypeError. 
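// Editor's illustrative sketch: headerValueNormalize() above strips only the
// four HTTP whitespace bytes (tab, LF, CR, space) from the ends of a header
// value and leaves interior whitespace alone. Example data; not invoked by
// the bundle.
function headerValueNormalizeSketch () {
  return headerValueNormalize('\t  max-age=60, stale-while-revalidate=30 \r\n')
  // => 'max-age=60, stale-while-revalidate=30'
}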
if (header.length !== 2) { throw webidl.errors.exception({ header: 'Headers constructor', message: `expected name/value pair to be length 2, found ${header.length}.` }) } // 2. Append (header’s first item, header’s second item) to headers. appendHeader(headers, header[0], header[1]) } } else if (typeof object === 'object' && object !== null) { // Note: null should throw // 2. Otherwise, object is a record, then for each key → value in object, // append (key, value) to headers const keys = Object.keys(object) for (let i = 0; i < keys.length; ++i) { appendHeader(headers, keys[i], object[keys[i]]) } } else { throw webidl.errors.conversionFailed({ prefix: 'Headers constructor', argument: 'Argument 1', types: ['sequence>', 'record'] }) } } /** * @see https://fetch.spec.whatwg.org/#concept-headers-append */ function appendHeader (headers, name, value) { // 1. Normalize value. value = headerValueNormalize(value) // 2. If name is not a header name or value is not a // header value, then throw a TypeError. if (!isValidHeaderName(name)) { throw webidl.errors.invalidArgument({ prefix: 'Headers.append', value: name, type: 'header name' }) } else if (!isValidHeaderValue(value)) { throw webidl.errors.invalidArgument({ prefix: 'Headers.append', value, type: 'header value' }) } // 3. If headers’s guard is "immutable", then throw a TypeError. // 4. Otherwise, if headers’s guard is "request" and name is a // forbidden header name, return. // Note: undici does not implement forbidden header names if (headers[kGuard] === 'immutable') { throw new TypeError('immutable') } else if (headers[kGuard] === 'request-no-cors') { // 5. Otherwise, if headers’s guard is "request-no-cors": // TODO } // 6. Otherwise, if headers’s guard is "response" and name is a // forbidden response-header name, return. // 7. Append (name, value) to headers’s header list. return headers[kHeadersList].append(name, value) // 8. If headers’s guard is "request-no-cors", then remove // privileged no-CORS request headers from headers } class HeadersList { /** @type {[string, string][]|null} */ cookies = null constructor (init) { if (init instanceof HeadersList) { this[kHeadersMap] = new Map(init[kHeadersMap]) this[kHeadersSortedMap] = init[kHeadersSortedMap] this.cookies = init.cookies === null ? null : [...init.cookies] } else { this[kHeadersMap] = new Map(init) this[kHeadersSortedMap] = null } } // https://fetch.spec.whatwg.org/#header-list-contains contains (name) { // A header list list contains a header name name if list // contains a header whose name is a byte-case-insensitive // match for name. name = name.toLowerCase() return this[kHeadersMap].has(name) } clear () { this[kHeadersMap].clear() this[kHeadersSortedMap] = null this.cookies = null } // https://fetch.spec.whatwg.org/#concept-header-list-append append (name, value) { this[kHeadersSortedMap] = null // 1. If list contains name, then set name to the first such // header’s name. const lowercaseName = name.toLowerCase() const exists = this[kHeadersMap].get(lowercaseName) // 2. Append (name, value) to list. if (exists) { const delimiter = lowercaseName === 'cookie' ? 
'; ' : ', ' this[kHeadersMap].set(lowercaseName, { name: exists.name, value: `${exists.value}${delimiter}${value}` }) } else { this[kHeadersMap].set(lowercaseName, { name, value }) } if (lowercaseName === 'set-cookie') { this.cookies ??= [] this.cookies.push(value) } } // https://fetch.spec.whatwg.org/#concept-header-list-set set (name, value) { this[kHeadersSortedMap] = null const lowercaseName = name.toLowerCase() if (lowercaseName === 'set-cookie') { this.cookies = [value] } // 1. If list contains name, then set the value of // the first such header to value and remove the // others. // 2. Otherwise, append header (name, value) to list. this[kHeadersMap].set(lowercaseName, { name, value }) } // https://fetch.spec.whatwg.org/#concept-header-list-delete delete (name) { this[kHeadersSortedMap] = null name = name.toLowerCase() if (name === 'set-cookie') { this.cookies = null } this[kHeadersMap].delete(name) } // https://fetch.spec.whatwg.org/#concept-header-list-get get (name) { const value = this[kHeadersMap].get(name.toLowerCase()) // 1. If list does not contain name, then return null. // 2. Return the values of all headers in list whose name // is a byte-case-insensitive match for name, // separated from each other by 0x2C 0x20, in order. return value === undefined ? null : value.value } * [Symbol.iterator] () { // use the lowercased name for (const [name, { value }] of this[kHeadersMap]) { yield [name, value] } } get entries () { const headers = {} if (this[kHeadersMap].size) { for (const { name, value } of this[kHeadersMap].values()) { headers[name] = value } } return headers } } // https://fetch.spec.whatwg.org/#headers-class class Headers { constructor (init = undefined) { if (init === kConstruct) { return } this[kHeadersList] = new HeadersList() // The new Headers(init) constructor steps are: // 1. Set this’s guard to "none". this[kGuard] = 'none' // 2. If init is given, then fill this with init. if (init !== undefined) { init = webidl.converters.HeadersInit(init) fill(this, init) } } // https://fetch.spec.whatwg.org/#dom-headers-append append (name, value) { webidl.brandCheck(this, Headers) webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.append' }) name = webidl.converters.ByteString(name) value = webidl.converters.ByteString(value) return appendHeader(this, name, value) } // https://fetch.spec.whatwg.org/#dom-headers-delete delete (name) { webidl.brandCheck(this, Headers) webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.delete' }) name = webidl.converters.ByteString(name) // 1. If name is not a header name, then throw a TypeError. if (!isValidHeaderName(name)) { throw webidl.errors.invalidArgument({ prefix: 'Headers.delete', value: name, type: 'header name' }) } // 2. If this’s guard is "immutable", then throw a TypeError. // 3. Otherwise, if this’s guard is "request" and name is a // forbidden header name, return. // 4. Otherwise, if this’s guard is "request-no-cors", name // is not a no-CORS-safelisted request-header name, and // name is not a privileged no-CORS request-header name, // return. // 5. Otherwise, if this’s guard is "response" and name is // a forbidden response-header name, return. // Note: undici does not implement forbidden header names if (this[kGuard] === 'immutable') { throw new TypeError('immutable') } else if (this[kGuard] === 'request-no-cors') { // TODO } // 6. If this’s header list does not contain name, then // return. if (!this[kHeadersList].contains(name)) { return } // 7. Delete name from this’s header list. // 8. 
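// Editor's illustrative sketch: HeadersList.append() above joins repeated
// headers with ', ' ('; ' for Cookie), but Set-Cookie values are also kept in
// the separate cookies array so they can be returned individually by
// getSetCookie() (defined just below). Example data; not invoked by the bundle.
function setCookieSketch () {
  const headers = new Headers()
  headers.append('Set-Cookie', 'a=1')
  headers.append('Set-Cookie', 'b=2')
  headers.append('Accept', 'text/html')
  headers.append('Accept', 'application/json')
  return {
    combined: headers.get('accept'),    // 'text/html, application/json'
    cookies: headers.getSetCookie()     // ['a=1', 'b=2'] — not comma-joined
  }
}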
If this’s guard is "request-no-cors", then remove // privileged no-CORS request headers from this. this[kHeadersList].delete(name) } // https://fetch.spec.whatwg.org/#dom-headers-get get (name) { webidl.brandCheck(this, Headers) webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.get' }) name = webidl.converters.ByteString(name) // 1. If name is not a header name, then throw a TypeError. if (!isValidHeaderName(name)) { throw webidl.errors.invalidArgument({ prefix: 'Headers.get', value: name, type: 'header name' }) } // 2. Return the result of getting name from this’s header // list. return this[kHeadersList].get(name) } // https://fetch.spec.whatwg.org/#dom-headers-has has (name) { webidl.brandCheck(this, Headers) webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.has' }) name = webidl.converters.ByteString(name) // 1. If name is not a header name, then throw a TypeError. if (!isValidHeaderName(name)) { throw webidl.errors.invalidArgument({ prefix: 'Headers.has', value: name, type: 'header name' }) } // 2. Return true if this’s header list contains name; // otherwise false. return this[kHeadersList].contains(name) } // https://fetch.spec.whatwg.org/#dom-headers-set set (name, value) { webidl.brandCheck(this, Headers) webidl.argumentLengthCheck(arguments, 2, { header: 'Headers.set' }) name = webidl.converters.ByteString(name) value = webidl.converters.ByteString(value) // 1. Normalize value. value = headerValueNormalize(value) // 2. If name is not a header name or value is not a // header value, then throw a TypeError. if (!isValidHeaderName(name)) { throw webidl.errors.invalidArgument({ prefix: 'Headers.set', value: name, type: 'header name' }) } else if (!isValidHeaderValue(value)) { throw webidl.errors.invalidArgument({ prefix: 'Headers.set', value, type: 'header value' }) } // 3. If this’s guard is "immutable", then throw a TypeError. // 4. Otherwise, if this’s guard is "request" and name is a // forbidden header name, return. // 5. Otherwise, if this’s guard is "request-no-cors" and // name/value is not a no-CORS-safelisted request-header, // return. // 6. Otherwise, if this’s guard is "response" and name is a // forbidden response-header name, return. // Note: undici does not implement forbidden header names if (this[kGuard] === 'immutable') { throw new TypeError('immutable') } else if (this[kGuard] === 'request-no-cors') { // TODO } // 7. Set (name, value) in this’s header list. // 8. If this’s guard is "request-no-cors", then remove // privileged no-CORS request headers from this this[kHeadersList].set(name, value) } // https://fetch.spec.whatwg.org/#dom-headers-getsetcookie getSetCookie () { webidl.brandCheck(this, Headers) // 1. If this’s header list does not contain `Set-Cookie`, then return « ». // 2. Return the values of all headers in this’s header list whose name is // a byte-case-insensitive match for `Set-Cookie`, in order. const list = this[kHeadersList].cookies if (list) { return [...list] } return [] } // https://fetch.spec.whatwg.org/#concept-header-list-sort-and-combine get [kHeadersSortedMap] () { if (this[kHeadersList][kHeadersSortedMap]) { return this[kHeadersList][kHeadersSortedMap] } // 1. Let headers be an empty list of headers with the key being the name // and value the value. const headers = [] // 2. Let names be the result of convert header names to a sorted-lowercase // set with all the names of the headers in list. const names = [...this[kHeadersList]].sort((a, b) => a[0] < b[0] ? -1 : 1) const cookies = this[kHeadersList].cookies // 3. 
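// Editor's illustrative sketch: iteration over Headers goes through the
// sort-and-combine list being built above — names are lowercased and sorted,
// and each Set-Cookie value becomes its own entry instead of being combined.
// Example data; not invoked by the bundle.
function sortAndCombineSketch () {
  const headers = new Headers()
  headers.append('Zulu', '1')
  headers.append('Set-Cookie', 'a=1')
  headers.append('Set-Cookie', 'b=2')
  headers.append('Alpha', '2')
  return [...headers]
  // [['alpha', '2'], ['set-cookie', 'a=1'], ['set-cookie', 'b=2'], ['zulu', '1']]
}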
For each name of names: for (let i = 0; i < names.length; ++i) { const [name, value] = names[i] // 1. If name is `set-cookie`, then: if (name === 'set-cookie') { // 1. Let values be a list of all values of headers in list whose name // is a byte-case-insensitive match for name, in order. // 2. For each value of values: // 1. Append (name, value) to headers. for (let j = 0; j < cookies.length; ++j) { headers.push([name, cookies[j]]) } } else { // 2. Otherwise: // 1. Let value be the result of getting name from list. // 2. Assert: value is non-null. assert(value !== null) // 3. Append (name, value) to headers. headers.push([name, value]) } } this[kHeadersList][kHeadersSortedMap] = headers // 4. Return headers. return headers } keys () { webidl.brandCheck(this, Headers) if (this[kGuard] === 'immutable') { const value = this[kHeadersSortedMap] return makeIterator(() => value, 'Headers', 'key') } return makeIterator( () => [...this[kHeadersSortedMap].values()], 'Headers', 'key' ) } values () { webidl.brandCheck(this, Headers) if (this[kGuard] === 'immutable') { const value = this[kHeadersSortedMap] return makeIterator(() => value, 'Headers', 'value') } return makeIterator( () => [...this[kHeadersSortedMap].values()], 'Headers', 'value' ) } entries () { webidl.brandCheck(this, Headers) if (this[kGuard] === 'immutable') { const value = this[kHeadersSortedMap] return makeIterator(() => value, 'Headers', 'key+value') } return makeIterator( () => [...this[kHeadersSortedMap].values()], 'Headers', 'key+value' ) } /** * @param {(value: string, key: string, self: Headers) => void} callbackFn * @param {unknown} thisArg */ forEach (callbackFn, thisArg = globalThis) { webidl.brandCheck(this, Headers) webidl.argumentLengthCheck(arguments, 1, { header: 'Headers.forEach' }) if (typeof callbackFn !== 'function') { throw new TypeError( "Failed to execute 'forEach' on 'Headers': parameter 1 is not of type 'Function'." 
) } for (const [key, value] of this) { callbackFn.apply(thisArg, [value, key, this]) } } [Symbol.for('nodejs.util.inspect.custom')] () { webidl.brandCheck(this, Headers) return this[kHeadersList] } } Headers.prototype[Symbol.iterator] = Headers.prototype.entries Object.defineProperties(Headers.prototype, { append: kEnumerableProperty, delete: kEnumerableProperty, get: kEnumerableProperty, has: kEnumerableProperty, set: kEnumerableProperty, getSetCookie: kEnumerableProperty, keys: kEnumerableProperty, values: kEnumerableProperty, entries: kEnumerableProperty, forEach: kEnumerableProperty, [Symbol.iterator]: { enumerable: false }, [Symbol.toStringTag]: { value: 'Headers', configurable: true }, [util.inspect.custom]: { enumerable: false } }) webidl.converters.HeadersInit = function (V) { if (webidl.util.Type(V) === 'Object') { if (V[Symbol.iterator]) { return webidl.converters['sequence>'](V) } return webidl.converters['record'](V) } throw webidl.errors.conversionFailed({ prefix: 'Headers constructor', argument: 'Argument 1', types: ['sequence>', 'record'] }) } module.exports = { fill, Headers, HeadersList } /***/ }), /***/ 12315: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // https://github.com/Ethan-Arrowood/undici-fetch const { Response, makeNetworkError, makeAppropriateNetworkError, filterResponse, makeResponse } = __nccwpck_require__(48676) const { Headers } = __nccwpck_require__(26349) const { Request, makeRequest } = __nccwpck_require__(25194) const zlib = __nccwpck_require__(43106) const { bytesMatch, makePolicyContainer, clonePolicyContainer, requestBadPort, TAOCheck, appendRequestOriginHeader, responseLocationURL, requestCurrentURL, setRequestReferrerPolicyOnRedirect, tryUpgradeRequestToAPotentiallyTrustworthyURL, createOpaqueTimingInfo, appendFetchMetadata, corsCheck, crossOriginResourcePolicyCheck, determineRequestsReferrer, coarsenedSharedCurrentTime, createDeferredPromise, isBlobLike, sameOrigin, isCancelled, isAborted, isErrorLike, fullyReadBody, readableStreamClose, isomorphicEncode, urlIsLocal, urlIsHttpHttpsScheme, urlHasHttpsScheme } = __nccwpck_require__(15523) const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(89710) const assert = __nccwpck_require__(42613) const { safelyExtractBody } = __nccwpck_require__(8923) const { redirectStatusSet, nullBodyStatus, safeMethodsSet, requestBodyHeader, subresourceSet, DOMException } = __nccwpck_require__(87326) const { kHeadersList } = __nccwpck_require__(36443) const EE = __nccwpck_require__(24434) const { Readable, pipeline } = __nccwpck_require__(2203) const { addAbortListener, isErrored, isReadable, nodeMajor, nodeMinor } = __nccwpck_require__(3440) const { dataURLProcessor, serializeAMimeType } = __nccwpck_require__(94322) const { TransformStream } = __nccwpck_require__(63774) const { getGlobalDispatcher } = __nccwpck_require__(32581) const { webidl } = __nccwpck_require__(74222) const { STATUS_CODES } = __nccwpck_require__(58611) const GET_OR_HEAD = ['GET', 'HEAD'] /** @type {import('buffer').resolveObjectURL} */ let resolveObjectURL let ReadableStream = globalThis.ReadableStream class Fetch extends EE { constructor (dispatcher) { super() this.dispatcher = dispatcher this.connection = null this.dump = false this.state = 'ongoing' // 2 terminated listeners get added per request, // but only 1 gets removed. If there are 20 redirects, // 21 listeners will be added. // See https://github.com/nodejs/undici/issues/1711 // TODO (fix): Find and fix root cause for leaked listener. 
this.setMaxListeners(21) } terminate (reason) { if (this.state !== 'ongoing') { return } this.state = 'terminated' this.connection?.destroy(reason) this.emit('terminated', reason) } // https://fetch.spec.whatwg.org/#fetch-controller-abort abort (error) { if (this.state !== 'ongoing') { return } // 1. Set controller’s state to "aborted". this.state = 'aborted' // 2. Let fallbackError be an "AbortError" DOMException. // 3. Set error to fallbackError if it is not given. if (!error) { error = new DOMException('The operation was aborted.', 'AbortError') } // 4. Let serializedError be StructuredSerialize(error). // If that threw an exception, catch it, and let // serializedError be StructuredSerialize(fallbackError). // 5. Set controller’s serialized abort reason to serializedError. this.serializedAbortReason = error this.connection?.destroy(error) this.emit('terminated', error) } } // https://fetch.spec.whatwg.org/#fetch-method function fetch (input, init = {}) { webidl.argumentLengthCheck(arguments, 1, { header: 'globalThis.fetch' }) // 1. Let p be a new promise. const p = createDeferredPromise() // 2. Let requestObject be the result of invoking the initial value of // Request as constructor with input and init as arguments. If this throws // an exception, reject p with it and return p. let requestObject try { requestObject = new Request(input, init) } catch (e) { p.reject(e) return p.promise } // 3. Let request be requestObject’s request. const request = requestObject[kState] // 4. If requestObject’s signal’s aborted flag is set, then: if (requestObject.signal.aborted) { // 1. Abort the fetch() call with p, request, null, and // requestObject’s signal’s abort reason. abortFetch(p, request, null, requestObject.signal.reason) // 2. Return p. return p.promise } // 5. Let globalObject be request’s client’s global object. const globalObject = request.client.globalObject // 6. If globalObject is a ServiceWorkerGlobalScope object, then set // request’s service-workers mode to "none". if (globalObject?.constructor?.name === 'ServiceWorkerGlobalScope') { request.serviceWorkers = 'none' } // 7. Let responseObject be null. let responseObject = null // 8. Let relevantRealm be this’s relevant Realm. const relevantRealm = null // 9. Let locallyAborted be false. let locallyAborted = false // 10. Let controller be null. let controller = null // 11. Add the following abort steps to requestObject’s signal: addAbortListener( requestObject.signal, () => { // 1. Set locallyAborted to true. locallyAborted = true // 2. Assert: controller is non-null. assert(controller != null) // 3. Abort controller with requestObject’s signal’s abort reason. controller.abort(requestObject.signal.reason) // 4. Abort the fetch() call with p, request, responseObject, // and requestObject’s signal’s abort reason. abortFetch(p, request, responseObject, requestObject.signal.reason) } ) // 12. Let handleFetchDone given response response be to finalize and // report timing with response, globalObject, and "fetch". const handleFetchDone = (response) => finalizeAndReportTiming(response, 'fetch') // 13. Set controller to the result of calling fetch given request, // with processResponseEndOfBody set to handleFetchDone, and processResponse // given response being these substeps: const processResponse = (response) => { // 1. If locallyAborted is true, terminate these substeps. if (locallyAborted) { return Promise.resolve() } // 2. If response’s aborted flag is set, then: if (response.aborted) { // 1. 
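// Editor's illustrative sketch: the controller/abort plumbing above is what
// callers see when they pass an AbortSignal to fetch(). With an already
// aborted signal the returned promise rejects before any connection is
// attempted. Uses the Node >= 18 global fetch; the URL is a placeholder.
async function abortedFetchSketch () {
  const controller = new AbortController()
  controller.abort()                     // signal is aborted up front
  try {
    await fetch('http://127.0.0.1:8080/', { signal: controller.signal })
  } catch (err) {
    return err.name                      // 'AbortError'
  }
}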
Let deserializedError be the result of deserialize a serialized // abort reason given controller’s serialized abort reason and // relevantRealm. // 2. Abort the fetch() call with p, request, responseObject, and // deserializedError. abortFetch(p, request, responseObject, controller.serializedAbortReason) return Promise.resolve() } // 3. If response is a network error, then reject p with a TypeError // and terminate these substeps. if (response.type === 'error') { p.reject( Object.assign(new TypeError('fetch failed'), { cause: response.error }) ) return Promise.resolve() } // 4. Set responseObject to the result of creating a Response object, // given response, "immutable", and relevantRealm. responseObject = new Response() responseObject[kState] = response responseObject[kRealm] = relevantRealm responseObject[kHeaders][kHeadersList] = response.headersList responseObject[kHeaders][kGuard] = 'immutable' responseObject[kHeaders][kRealm] = relevantRealm // 5. Resolve p with responseObject. p.resolve(responseObject) } controller = fetching({ request, processResponseEndOfBody: handleFetchDone, processResponse, dispatcher: init.dispatcher ?? getGlobalDispatcher() // undici }) // 14. Return p. return p.promise } // https://fetch.spec.whatwg.org/#finalize-and-report-timing function finalizeAndReportTiming (response, initiatorType = 'other') { // 1. If response is an aborted network error, then return. if (response.type === 'error' && response.aborted) { return } // 2. If response’s URL list is null or empty, then return. if (!response.urlList?.length) { return } // 3. Let originalURL be response’s URL list[0]. const originalURL = response.urlList[0] // 4. Let timingInfo be response’s timing info. let timingInfo = response.timingInfo // 5. Let cacheState be response’s cache state. let cacheState = response.cacheState // 6. If originalURL’s scheme is not an HTTP(S) scheme, then return. if (!urlIsHttpHttpsScheme(originalURL)) { return } // 7. If timingInfo is null, then return. if (timingInfo === null) { return } // 8. If response’s timing allow passed flag is not set, then: if (!response.timingAllowPassed) { // 1. Set timingInfo to a the result of creating an opaque timing info for timingInfo. timingInfo = createOpaqueTimingInfo({ startTime: timingInfo.startTime }) // 2. Set cacheState to the empty string. cacheState = '' } // 9. Set timingInfo’s end time to the coarsened shared current time // given global’s relevant settings object’s cross-origin isolated // capability. // TODO: given global’s relevant settings object’s cross-origin isolated // capability? timingInfo.endTime = coarsenedSharedCurrentTime() // 10. Set response’s timing info to timingInfo. response.timingInfo = timingInfo // 11. Mark resource timing for timingInfo, originalURL, initiatorType, // global, and cacheState. markResourceTiming( timingInfo, originalURL, initiatorType, globalThis, cacheState ) } // https://w3c.github.io/resource-timing/#dfn-mark-resource-timing function markResourceTiming (timingInfo, originalURL, initiatorType, globalThis, cacheState) { if (nodeMajor > 18 || (nodeMajor === 18 && nodeMinor >= 2)) { performance.markResourceTiming(timingInfo, originalURL.href, initiatorType, globalThis, cacheState) } } // https://fetch.spec.whatwg.org/#abort-fetch function abortFetch (p, request, responseObject, error) { // Note: AbortSignal.reason was added in node v17.2.0 // which would give us an undefined error to reject with. // Remove this once node v16 is no longer supported. 
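// Illustrative caller-side view of this abort path (a sketch, assuming the fetch()
// exported by this module):
//
//   const ac = new AbortController()
//   const pending = fetch('https://example.com', { signal: ac.signal })
//   ac.abort(new Error('stop'))                 // becomes the rejection reason below
//   pending.catch((err) => console.error(err))
//
// When abort() carries no reason (older Node versions), the fallback "AbortError"
// DOMException constructed below is used instead.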
if (!error) { error = new DOMException('The operation was aborted.', 'AbortError') } // 1. Reject promise with error. p.reject(error) // 2. If request’s body is not null and is readable, then cancel request’s // body with error. if (request.body != null && isReadable(request.body?.stream)) { request.body.stream.cancel(error).catch((err) => { if (err.code === 'ERR_INVALID_STATE') { // Node bug? return } throw err }) } // 3. If responseObject is null, then return. if (responseObject == null) { return } // 4. Let response be responseObject’s response. const response = responseObject[kState] // 5. If response’s body is not null and is readable, then error response’s // body with error. if (response.body != null && isReadable(response.body?.stream)) { response.body.stream.cancel(error).catch((err) => { if (err.code === 'ERR_INVALID_STATE') { // Node bug? return } throw err }) } } // https://fetch.spec.whatwg.org/#fetching function fetching ({ request, processRequestBodyChunkLength, processRequestEndOfBody, processResponse, processResponseEndOfBody, processResponseConsumeBody, useParallelQueue = false, dispatcher // undici }) { // 1. Let taskDestination be null. let taskDestination = null // 2. Let crossOriginIsolatedCapability be false. let crossOriginIsolatedCapability = false // 3. If request’s client is non-null, then: if (request.client != null) { // 1. Set taskDestination to request’s client’s global object. taskDestination = request.client.globalObject // 2. Set crossOriginIsolatedCapability to request’s client’s cross-origin // isolated capability. crossOriginIsolatedCapability = request.client.crossOriginIsolatedCapability } // 4. If useParallelQueue is true, then set taskDestination to the result of // starting a new parallel queue. // TODO // 5. Let timingInfo be a new fetch timing info whose start time and // post-redirect start time are the coarsened shared current time given // crossOriginIsolatedCapability. const currenTime = coarsenedSharedCurrentTime(crossOriginIsolatedCapability) const timingInfo = createOpaqueTimingInfo({ startTime: currenTime }) // 6. Let fetchParams be a new fetch params whose // request is request, // timing info is timingInfo, // process request body chunk length is processRequestBodyChunkLength, // process request end-of-body is processRequestEndOfBody, // process response is processResponse, // process response consume body is processResponseConsumeBody, // process response end-of-body is processResponseEndOfBody, // task destination is taskDestination, // and cross-origin isolated capability is crossOriginIsolatedCapability. const fetchParams = { controller: new Fetch(dispatcher), request, timingInfo, processRequestBodyChunkLength, processRequestEndOfBody, processResponse, processResponseConsumeBody, processResponseEndOfBody, taskDestination, crossOriginIsolatedCapability } // 7. If request’s body is a byte sequence, then set request’s body to // request’s body as a body. // NOTE: Since fetching is only called from fetch, body should already be // extracted. assert(!request.body || request.body.stream) // 8. If request’s window is "client", then set request’s window to request’s // client, if request’s client’s global object is a Window object; otherwise // "no-window". if (request.window === 'client') { // TODO: What if request.client is null? request.window = request.client?.globalObject?.constructor?.name === 'Window' ? request.client : 'no-window' } // 9. If request’s origin is "client", then set request’s origin to request’s // client’s origin. 
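// Note: request.client is the settings object attached by the Request constructor in
// module 25194 below, whose baseUrl comes from getGlobalOrigin(); so the "client" origin
// resolved here is the configured global origin, or undefined when none has been set.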
if (request.origin === 'client') { // TODO: What if request.client is null? request.origin = request.client?.origin } // 10. If all of the following conditions are true: // TODO // 11. If request’s policy container is "client", then: if (request.policyContainer === 'client') { // 1. If request’s client is non-null, then set request’s policy // container to a clone of request’s client’s policy container. [HTML] if (request.client != null) { request.policyContainer = clonePolicyContainer( request.client.policyContainer ) } else { // 2. Otherwise, set request’s policy container to a new policy // container. request.policyContainer = makePolicyContainer() } } // 12. If request’s header list does not contain `Accept`, then: if (!request.headersList.contains('accept')) { // 1. Let value be `*/*`. const value = '*/*' // 2. A user agent should set value to the first matching statement, if // any, switching on request’s destination: // "document" // "frame" // "iframe" // `text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8` // "image" // `image/png,image/svg+xml,image/*;q=0.8,*/*;q=0.5` // "style" // `text/css,*/*;q=0.1` // TODO // 3. Append `Accept`/value to request’s header list. request.headersList.append('accept', value) } // 13. If request’s header list does not contain `Accept-Language`, then // user agents should append `Accept-Language`/an appropriate value to // request’s header list. if (!request.headersList.contains('accept-language')) { request.headersList.append('accept-language', '*') } // 14. If request’s priority is null, then use request’s initiator and // destination appropriately in setting request’s priority to a // user-agent-defined object. if (request.priority === null) { // TODO } // 15. If request is a subresource request, then: if (subresourceSet.has(request.destination)) { // TODO } // 16. Run main fetch given fetchParams. mainFetch(fetchParams) .catch(err => { fetchParams.controller.terminate(err) }) // 17. Return fetchParam's controller return fetchParams.controller } // https://fetch.spec.whatwg.org/#concept-main-fetch async function mainFetch (fetchParams, recursive = false) { // 1. Let request be fetchParams’s request. const request = fetchParams.request // 2. Let response be null. let response = null // 3. If request’s local-URLs-only flag is set and request’s current URL is // not local, then set response to a network error. if (request.localURLsOnly && !urlIsLocal(requestCurrentURL(request))) { response = makeNetworkError('local URLs only') } // 4. Run report Content Security Policy violations for request. // TODO // 5. Upgrade request to a potentially trustworthy URL, if appropriate. tryUpgradeRequestToAPotentiallyTrustworthyURL(request) // 6. If should request be blocked due to a bad port, should fetching request // be blocked as mixed content, or should request be blocked by Content // Security Policy returns blocked, then set response to a network error. if (requestBadPort(request) === 'blocked') { response = makeNetworkError('bad port') } // TODO: should fetching request be blocked as mixed content? // TODO: should request be blocked by Content Security Policy? // 7. If request’s referrer policy is the empty string, then set request’s // referrer policy to request’s policy container’s referrer policy. if (request.referrerPolicy === '') { request.referrerPolicy = request.policyContainer.referrerPolicy } // 8. If request’s referrer is not "no-referrer", then set request’s // referrer to the result of invoking determine request’s referrer. 
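// A sketch of the referrer handling below (assuming the default referrer policy):
// determineRequestsReferrer() turns the request's referrer into either 'no-referrer' or
// a URL, and httpNetworkOrCacheFetch later serializes that URL into a `referer` header
// (see the `httpRequest.referrer instanceof URL` branch further down).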
if (request.referrer !== 'no-referrer') { request.referrer = determineRequestsReferrer(request) } // 9. Set request’s current URL’s scheme to "https" if all of the following // conditions are true: // - request’s current URL’s scheme is "http" // - request’s current URL’s host is a domain // - Matching request’s current URL’s host per Known HSTS Host Domain Name // Matching results in either a superdomain match with an asserted // includeSubDomains directive or a congruent match (with or without an // asserted includeSubDomains directive). [HSTS] // TODO // 10. If recursive is false, then run the remaining steps in parallel. // TODO // 11. If response is null, then set response to the result of running // the steps corresponding to the first matching statement: if (response === null) { response = await (async () => { const currentURL = requestCurrentURL(request) if ( // - request’s current URL’s origin is same origin with request’s origin, // and request’s response tainting is "basic" (sameOrigin(currentURL, request.url) && request.responseTainting === 'basic') || // request’s current URL’s scheme is "data" (currentURL.protocol === 'data:') || // - request’s mode is "navigate" or "websocket" (request.mode === 'navigate' || request.mode === 'websocket') ) { // 1. Set request’s response tainting to "basic". request.responseTainting = 'basic' // 2. Return the result of running scheme fetch given fetchParams. return await schemeFetch(fetchParams) } // request’s mode is "same-origin" if (request.mode === 'same-origin') { // 1. Return a network error. return makeNetworkError('request mode cannot be "same-origin"') } // request’s mode is "no-cors" if (request.mode === 'no-cors') { // 1. If request’s redirect mode is not "follow", then return a network // error. if (request.redirect !== 'follow') { return makeNetworkError( 'redirect mode cannot be "follow" for "no-cors" request' ) } // 2. Set request’s response tainting to "opaque". request.responseTainting = 'opaque' // 3. Return the result of running scheme fetch given fetchParams. return await schemeFetch(fetchParams) } // request’s current URL’s scheme is not an HTTP(S) scheme if (!urlIsHttpHttpsScheme(requestCurrentURL(request))) { // Return a network error. return makeNetworkError('URL scheme must be a HTTP(S) scheme') } // - request’s use-CORS-preflight flag is set // - request’s unsafe-request flag is set and either request’s method is // not a CORS-safelisted method or CORS-unsafe request-header names with // request’s header list is not empty // 1. Set request’s response tainting to "cors". // 2. Let corsWithPreflightResponse be the result of running HTTP fetch // given fetchParams and true. // 3. If corsWithPreflightResponse is a network error, then clear cache // entries using request. // 4. Return corsWithPreflightResponse. // TODO // Otherwise // 1. Set request’s response tainting to "cors". request.responseTainting = 'cors' // 2. Return the result of running HTTP fetch given fetchParams. return await httpFetch(fetchParams) })() } // 12. If recursive is true, then return response. if (recursive) { return response } // 13. If response is not a network error and response is not a filtered // response, then: if (response.status !== 0 && !response.internalResponse) { // If request’s response tainting is "cors", then: if (request.responseTainting === 'cors') { // 1. Let headerNames be the result of extracting header list values // given `Access-Control-Expose-Headers` and response’s header list. // TODO // 2. 
If request’s credentials mode is not "include" and headerNames // contains `*`, then set response’s CORS-exposed header-name list to // all unique header names in response’s header list. // TODO // 3. Otherwise, if headerNames is not null or failure, then set // response’s CORS-exposed header-name list to headerNames. // TODO } // Set response to the following filtered response with response as its // internal response, depending on request’s response tainting: if (request.responseTainting === 'basic') { response = filterResponse(response, 'basic') } else if (request.responseTainting === 'cors') { response = filterResponse(response, 'cors') } else if (request.responseTainting === 'opaque') { response = filterResponse(response, 'opaque') } else { assert(false) } } // 14. Let internalResponse be response, if response is a network error, // and response’s internal response otherwise. let internalResponse = response.status === 0 ? response : response.internalResponse // 15. If internalResponse’s URL list is empty, then set it to a clone of // request’s URL list. if (internalResponse.urlList.length === 0) { internalResponse.urlList.push(...request.urlList) } // 16. If request’s timing allow failed flag is unset, then set // internalResponse’s timing allow passed flag. if (!request.timingAllowFailed) { response.timingAllowPassed = true } // 17. If response is not a network error and any of the following returns // blocked // - should internalResponse to request be blocked as mixed content // - should internalResponse to request be blocked by Content Security Policy // - should internalResponse to request be blocked due to its MIME type // - should internalResponse to request be blocked due to nosniff // TODO // 18. If response’s type is "opaque", internalResponse’s status is 206, // internalResponse’s range-requested flag is set, and request’s header // list does not contain `Range`, then set response and internalResponse // to a network error. if ( response.type === 'opaque' && internalResponse.status === 206 && internalResponse.rangeRequested && !request.headers.contains('range') ) { response = internalResponse = makeNetworkError() } // 19. If response is not a network error and either request’s method is // `HEAD` or `CONNECT`, or internalResponse’s status is a null body status, // set internalResponse’s body to null and disregard any enqueuing toward // it (if any). if ( response.status !== 0 && (request.method === 'HEAD' || request.method === 'CONNECT' || nullBodyStatus.includes(internalResponse.status)) ) { internalResponse.body = null fetchParams.controller.dump = true } // 20. If request’s integrity metadata is not the empty string, then: if (request.integrity) { // 1. Let processBodyError be this step: run fetch finale given fetchParams // and a network error. const processBodyError = (reason) => fetchFinale(fetchParams, makeNetworkError(reason)) // 2. If request’s response tainting is "opaque", or response’s body is null, // then run processBodyError and abort these steps. if (request.responseTainting === 'opaque' || response.body == null) { processBodyError(response.error) return } // 3. Let processBody given bytes be these steps: const processBody = (bytes) => { // 1. If bytes do not match request’s integrity metadata, // then run processBodyError and abort these steps. [SRI] if (!bytesMatch(bytes, request.integrity)) { processBodyError('integrity mismatch') return } // 2. Set response’s body to bytes as a body. response.body = safelyExtractBody(bytes)[0] // 3. 
Run fetch finale given fetchParams and response. fetchFinale(fetchParams, response) } // 4. Fully read response’s body given processBody and processBodyError. await fullyReadBody(response.body, processBody, processBodyError) } else { // 21. Otherwise, run fetch finale given fetchParams and response. fetchFinale(fetchParams, response) } } // https://fetch.spec.whatwg.org/#concept-scheme-fetch // given a fetch params fetchParams function schemeFetch (fetchParams) { // Note: since the connection is destroyed on redirect, which sets fetchParams to a // cancelled state, we do not want this condition to trigger *unless* there have been // no redirects. See https://github.com/nodejs/undici/issues/1776 // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. if (isCancelled(fetchParams) && fetchParams.request.redirectCount === 0) { return Promise.resolve(makeAppropriateNetworkError(fetchParams)) } // 2. Let request be fetchParams’s request. const { request } = fetchParams const { protocol: scheme } = requestCurrentURL(request) // 3. Switch on request’s current URL’s scheme and run the associated steps: switch (scheme) { case 'about:': { // If request’s current URL’s path is the string "blank", then return a new response // whose status message is `OK`, header list is « (`Content-Type`, `text/html;charset=utf-8`) », // and body is the empty byte sequence as a body. // Otherwise, return a network error. return Promise.resolve(makeNetworkError('about scheme is not supported')) } case 'blob:': { if (!resolveObjectURL) { resolveObjectURL = (__nccwpck_require__(20181).resolveObjectURL) } // 1. Let blobURLEntry be request’s current URL’s blob URL entry. const blobURLEntry = requestCurrentURL(request) // https://github.com/web-platform-tests/wpt/blob/7b0ebaccc62b566a1965396e5be7bb2bc06f841f/FileAPI/url/resources/fetch-tests.js#L52-L56 // Buffer.resolveObjectURL does not ignore URL queries. if (blobURLEntry.search.length !== 0) { return Promise.resolve(makeNetworkError('NetworkError when attempting to fetch resource.')) } const blobURLEntryObject = resolveObjectURL(blobURLEntry.toString()) // 2. If request’s method is not `GET`, blobURLEntry is null, or blobURLEntry’s // object is not a Blob object, then return a network error. if (request.method !== 'GET' || !isBlobLike(blobURLEntryObject)) { return Promise.resolve(makeNetworkError('invalid method')) } // 3. Let bodyWithType be the result of safely extracting blobURLEntry’s object. const bodyWithType = safelyExtractBody(blobURLEntryObject) // 4. Let body be bodyWithType’s body. const body = bodyWithType[0] // 5. Let length be body’s length, serialized and isomorphic encoded. const length = isomorphicEncode(`${body.length}`) // 6. Let type be bodyWithType’s type if it is non-null; otherwise the empty byte sequence. const type = bodyWithType[1] ?? '' // 7. Return a new response whose status message is `OK`, header list is // « (`Content-Length`, length), (`Content-Type`, type) », and body is body. const response = makeResponse({ statusText: 'OK', headersList: [ ['content-length', { name: 'Content-Length', value: length }], ['content-type', { name: 'Content-Type', value: type }] ] }) response.body = body return Promise.resolve(response) } case 'data:': { // 1. Let dataURLStruct be the result of running the // data: URL processor on request’s current URL. const currentURL = requestCurrentURL(request) const dataURLStruct = dataURLProcessor(currentURL) // 2. If dataURLStruct is failure, then return a // network error. 
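// Illustrative data: URL fetch that is served entirely by this branch, with no network
// I/O (a sketch, assuming this module's fetch):
//
//   const res = await fetch('data:text/plain;base64,aGVsbG8=')
//   console.log(await res.text()) // 'hello'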
if (dataURLStruct === 'failure') { return Promise.resolve(makeNetworkError('failed to fetch the data URL')) } // 3. Let mimeType be dataURLStruct’s MIME type, serialized. const mimeType = serializeAMimeType(dataURLStruct.mimeType) // 4. Return a response whose status message is `OK`, // header list is « (`Content-Type`, mimeType) », // and body is dataURLStruct’s body as a body. return Promise.resolve(makeResponse({ statusText: 'OK', headersList: [ ['content-type', { name: 'Content-Type', value: mimeType }] ], body: safelyExtractBody(dataURLStruct.body)[0] })) } case 'file:': { // For now, unfortunate as it is, file URLs are left as an exercise for the reader. // When in doubt, return a network error. return Promise.resolve(makeNetworkError('not implemented... yet...')) } case 'http:': case 'https:': { // Return the result of running HTTP fetch given fetchParams. return httpFetch(fetchParams) .catch((err) => makeNetworkError(err)) } default: { return Promise.resolve(makeNetworkError('unknown scheme')) } } } // https://fetch.spec.whatwg.org/#finalize-response function finalizeResponse (fetchParams, response) { // 1. Set fetchParams’s request’s done flag. fetchParams.request.done = true // 2, If fetchParams’s process response done is not null, then queue a fetch // task to run fetchParams’s process response done given response, with // fetchParams’s task destination. if (fetchParams.processResponseDone != null) { queueMicrotask(() => fetchParams.processResponseDone(response)) } } // https://fetch.spec.whatwg.org/#fetch-finale function fetchFinale (fetchParams, response) { // 1. If response is a network error, then: if (response.type === 'error') { // 1. Set response’s URL list to « fetchParams’s request’s URL list[0] ». response.urlList = [fetchParams.request.urlList[0]] // 2. Set response’s timing info to the result of creating an opaque timing // info for fetchParams’s timing info. response.timingInfo = createOpaqueTimingInfo({ startTime: fetchParams.timingInfo.startTime }) } // 2. Let processResponseEndOfBody be the following steps: const processResponseEndOfBody = () => { // 1. Set fetchParams’s request’s done flag. fetchParams.request.done = true // If fetchParams’s process response end-of-body is not null, // then queue a fetch task to run fetchParams’s process response // end-of-body given response with fetchParams’s task destination. if (fetchParams.processResponseEndOfBody != null) { queueMicrotask(() => fetchParams.processResponseEndOfBody(response)) } } // 3. If fetchParams’s process response is non-null, then queue a fetch task // to run fetchParams’s process response given response, with fetchParams’s // task destination. if (fetchParams.processResponse != null) { queueMicrotask(() => fetchParams.processResponse(response)) } // 4. If response’s body is null, then run processResponseEndOfBody. if (response.body == null) { processResponseEndOfBody() } else { // 5. Otherwise: // 1. Let transformStream be a new a TransformStream. // 2. Let identityTransformAlgorithm be an algorithm which, given chunk, // enqueues chunk in transformStream. const identityTransformAlgorithm = (chunk, controller) => { controller.enqueue(chunk) } // 3. Set up transformStream with transformAlgorithm set to identityTransformAlgorithm // and flushAlgorithm set to processResponseEndOfBody. const transformStream = new TransformStream({ start () {}, transform: identityTransformAlgorithm, flush: processResponseEndOfBody }, { size () { return 1 } }, { size () { return 1 } }) // 4. 
Set response’s body to the result of piping response’s body through transformStream. response.body = { stream: response.body.stream.pipeThrough(transformStream) } } // 6. If fetchParams’s process response consume body is non-null, then: if (fetchParams.processResponseConsumeBody != null) { // 1. Let processBody given nullOrBytes be this step: run fetchParams’s // process response consume body given response and nullOrBytes. const processBody = (nullOrBytes) => fetchParams.processResponseConsumeBody(response, nullOrBytes) // 2. Let processBodyError be this step: run fetchParams’s process // response consume body given response and failure. const processBodyError = (failure) => fetchParams.processResponseConsumeBody(response, failure) // 3. If response’s body is null, then queue a fetch task to run processBody // given null, with fetchParams’s task destination. if (response.body == null) { queueMicrotask(() => processBody(null)) } else { // 4. Otherwise, fully read response’s body given processBody, processBodyError, // and fetchParams’s task destination. return fullyReadBody(response.body, processBody, processBodyError) } return Promise.resolve() } } // https://fetch.spec.whatwg.org/#http-fetch async function httpFetch (fetchParams) { // 1. Let request be fetchParams’s request. const request = fetchParams.request // 2. Let response be null. let response = null // 3. Let actualResponse be null. let actualResponse = null // 4. Let timingInfo be fetchParams’s timing info. const timingInfo = fetchParams.timingInfo // 5. If request’s service-workers mode is "all", then: if (request.serviceWorkers === 'all') { // TODO } // 6. If response is null, then: if (response === null) { // 1. If makeCORSPreflight is true and one of these conditions is true: // TODO // 2. If request’s redirect mode is "follow", then set request’s // service-workers mode to "none". if (request.redirect === 'follow') { request.serviceWorkers = 'none' } // 3. Set response and actualResponse to the result of running // HTTP-network-or-cache fetch given fetchParams. actualResponse = response = await httpNetworkOrCacheFetch(fetchParams) // 4. If request’s response tainting is "cors" and a CORS check // for request and response returns failure, then return a network error. if ( request.responseTainting === 'cors' && corsCheck(request, response) === 'failure' ) { return makeNetworkError('cors failure') } // 5. If the TAO check for request and response returns failure, then set // request’s timing allow failed flag. if (TAOCheck(request, response) === 'failure') { request.timingAllowFailed = true } } // 7. If either request’s response tainting or response’s type // is "opaque", and the cross-origin resource policy check with // request’s origin, request’s client, request’s destination, // and actualResponse returns blocked, then return a network error. if ( (request.responseTainting === 'opaque' || response.type === 'opaque') && crossOriginResourcePolicyCheck( request.origin, request.client, request.destination, actualResponse ) === 'blocked' ) { return makeNetworkError('blocked') } // 8. If actualResponse’s status is a redirect status, then: if (redirectStatusSet.has(actualResponse.status)) { // 1. If actualResponse’s status is not 303, request’s body is not null, // and the connection uses HTTP/2, then user agents may, and are even // encouraged to, transmit an RST_STREAM frame. // See, https://github.com/whatwg/fetch/issues/1288 if (request.redirect !== 'manual') { fetchParams.controller.connection.destroy() } // 2. 
Switch on request’s redirect mode: if (request.redirect === 'error') { // Set response to a network error. response = makeNetworkError('unexpected redirect') } else if (request.redirect === 'manual') { // Set response to an opaque-redirect filtered response whose internal // response is actualResponse. // NOTE(spec): On the web this would return an `opaqueredirect` response, // but that doesn't make sense server side. // See https://github.com/nodejs/undici/issues/1193. response = actualResponse } else if (request.redirect === 'follow') { // Set response to the result of running HTTP-redirect fetch given // fetchParams and response. response = await httpRedirectFetch(fetchParams, response) } else { assert(false) } } // 9. Set response’s timing info to timingInfo. response.timingInfo = timingInfo // 10. Return response. return response } // https://fetch.spec.whatwg.org/#http-redirect-fetch function httpRedirectFetch (fetchParams, response) { // 1. Let request be fetchParams’s request. const request = fetchParams.request // 2. Let actualResponse be response, if response is not a filtered response, // and response’s internal response otherwise. const actualResponse = response.internalResponse ? response.internalResponse : response // 3. Let locationURL be actualResponse’s location URL given request’s current // URL’s fragment. let locationURL try { locationURL = responseLocationURL( actualResponse, requestCurrentURL(request).hash ) // 4. If locationURL is null, then return response. if (locationURL == null) { return response } } catch (err) { // 5. If locationURL is failure, then return a network error. return Promise.resolve(makeNetworkError(err)) } // 6. If locationURL’s scheme is not an HTTP(S) scheme, then return a network // error. if (!urlIsHttpHttpsScheme(locationURL)) { return Promise.resolve(makeNetworkError('URL scheme must be a HTTP(S) scheme')) } // 7. If request’s redirect count is 20, then return a network error. if (request.redirectCount === 20) { return Promise.resolve(makeNetworkError('redirect count exceeded')) } // 8. Increase request’s redirect count by 1. request.redirectCount += 1 // 9. If request’s mode is "cors", locationURL includes credentials, and // request’s origin is not same origin with locationURL’s origin, then return // a network error. if ( request.mode === 'cors' && (locationURL.username || locationURL.password) && !sameOrigin(request, locationURL) ) { return Promise.resolve(makeNetworkError('cross origin not allowed for request mode "cors"')) } // 10. If request’s response tainting is "cors" and locationURL includes // credentials, then return a network error. if ( request.responseTainting === 'cors' && (locationURL.username || locationURL.password) ) { return Promise.resolve(makeNetworkError( 'URL cannot contain credentials for request mode "cors"' )) } // 11. If actualResponse’s status is not 303, request’s body is non-null, // and request’s body’s source is null, then return a network error. if ( actualResponse.status !== 303 && request.body != null && request.body.source == null ) { return Promise.resolve(makeNetworkError()) } // 12. If one of the following is true // - actualResponse’s status is 301 or 302 and request’s method is `POST` // - actualResponse’s status is 303 and request’s method is not `GET` or `HEAD` if ( ([301, 302].includes(actualResponse.status) && request.method === 'POST') || (actualResponse.status === 303 && !GET_OR_HEAD.includes(request.method)) ) { // then: // 1. Set request’s method to `GET` and request’s body to null. 
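// Illustrative effect of this rewrite (a sketch; assumes a server answering 303 to POST):
//
//   const res = await fetch('https://example.com/submit', { method: 'POST', body: 'a=1' })
//   // the follow-up request to the Location target is sent as GET with a null body,
//   // and the request-body headers are deleted in the loop below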
request.method = 'GET' request.body = null // 2. For each headerName of request-body-header name, delete headerName from // request’s header list. for (const headerName of requestBodyHeader) { request.headersList.delete(headerName) } } // 13. If request’s current URL’s origin is not same origin with locationURL’s // origin, then for each headerName of CORS non-wildcard request-header name, // delete headerName from request’s header list. if (!sameOrigin(requestCurrentURL(request), locationURL)) { // https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name request.headersList.delete('authorization') // https://fetch.spec.whatwg.org/#authentication-entries request.headersList.delete('proxy-authorization', true) // "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement. request.headersList.delete('cookie') request.headersList.delete('host') } // 14. If request’s body is non-null, then set request’s body to the first return // value of safely extracting request’s body’s source. if (request.body != null) { assert(request.body.source != null) request.body = safelyExtractBody(request.body.source)[0] } // 15. Let timingInfo be fetchParams’s timing info. const timingInfo = fetchParams.timingInfo // 16. Set timingInfo’s redirect end time and post-redirect start time to the // coarsened shared current time given fetchParams’s cross-origin isolated // capability. timingInfo.redirectEndTime = timingInfo.postRedirectStartTime = coarsenedSharedCurrentTime(fetchParams.crossOriginIsolatedCapability) // 17. If timingInfo’s redirect start time is 0, then set timingInfo’s // redirect start time to timingInfo’s start time. if (timingInfo.redirectStartTime === 0) { timingInfo.redirectStartTime = timingInfo.startTime } // 18. Append locationURL to request’s URL list. request.urlList.push(locationURL) // 19. Invoke set request’s referrer policy on redirect on request and // actualResponse. setRequestReferrerPolicyOnRedirect(request, actualResponse) // 20. Return the result of running main fetch given fetchParams and true. return mainFetch(fetchParams, true) } // https://fetch.spec.whatwg.org/#http-network-or-cache-fetch async function httpNetworkOrCacheFetch ( fetchParams, isAuthenticationFetch = false, isNewConnectionFetch = false ) { // 1. Let request be fetchParams’s request. const request = fetchParams.request // 2. Let httpFetchParams be null. let httpFetchParams = null // 3. Let httpRequest be null. let httpRequest = null // 4. Let response be null. let response = null // 5. Let storedResponse be null. // TODO: cache // 6. Let httpCache be null. const httpCache = null // 7. Let the revalidatingFlag be unset. const revalidatingFlag = false // 8. Run these steps, but abort when the ongoing fetch is terminated: // 1. If request’s window is "no-window" and request’s redirect mode is // "error", then set httpFetchParams to fetchParams and httpRequest to // request. if (request.window === 'no-window' && request.redirect === 'error') { httpFetchParams = fetchParams httpRequest = request } else { // Otherwise: // 1. Set httpRequest to a clone of request. httpRequest = makeRequest(request) // 2. Set httpFetchParams to a copy of fetchParams. httpFetchParams = { ...fetchParams } // 3. Set httpFetchParams’s request to httpRequest. httpFetchParams.request = httpRequest } // 3. Let includeCredentials be true if one of const includeCredentials = request.credentials === 'include' || (request.credentials === 'same-origin' && request.responseTainting === 'basic') // 4. 
Let contentLength be httpRequest’s body’s length, if httpRequest’s // body is non-null; otherwise null. const contentLength = httpRequest.body ? httpRequest.body.length : null // 5. Let contentLengthHeaderValue be null. let contentLengthHeaderValue = null // 6. If httpRequest’s body is null and httpRequest’s method is `POST` or // `PUT`, then set contentLengthHeaderValue to `0`. if ( httpRequest.body == null && ['POST', 'PUT'].includes(httpRequest.method) ) { contentLengthHeaderValue = '0' } // 7. If contentLength is non-null, then set contentLengthHeaderValue to // contentLength, serialized and isomorphic encoded. if (contentLength != null) { contentLengthHeaderValue = isomorphicEncode(`${contentLength}`) } // 8. If contentLengthHeaderValue is non-null, then append // `Content-Length`/contentLengthHeaderValue to httpRequest’s header // list. if (contentLengthHeaderValue != null) { httpRequest.headersList.append('content-length', contentLengthHeaderValue) } // 9. If contentLengthHeaderValue is non-null, then append (`Content-Length`, // contentLengthHeaderValue) to httpRequest’s header list. // 10. If contentLength is non-null and httpRequest’s keepalive is true, // then: if (contentLength != null && httpRequest.keepalive) { // NOTE: keepalive is a noop outside of browser context. } // 11. If httpRequest’s referrer is a URL, then append // `Referer`/httpRequest’s referrer, serialized and isomorphic encoded, // to httpRequest’s header list. if (httpRequest.referrer instanceof URL) { httpRequest.headersList.append('referer', isomorphicEncode(httpRequest.referrer.href)) } // 12. Append a request `Origin` header for httpRequest. appendRequestOriginHeader(httpRequest) // 13. Append the Fetch metadata headers for httpRequest. [FETCH-METADATA] appendFetchMetadata(httpRequest) // 14. If httpRequest’s header list does not contain `User-Agent`, then // user agents should append `User-Agent`/default `User-Agent` value to // httpRequest’s header list. if (!httpRequest.headersList.contains('user-agent')) { httpRequest.headersList.append('user-agent', typeof esbuildDetection === 'undefined' ? 'undici' : 'node') } // 15. If httpRequest’s cache mode is "default" and httpRequest’s header // list contains `If-Modified-Since`, `If-None-Match`, // `If-Unmodified-Since`, `If-Match`, or `If-Range`, then set // httpRequest’s cache mode to "no-store". if ( httpRequest.cache === 'default' && (httpRequest.headersList.contains('if-modified-since') || httpRequest.headersList.contains('if-none-match') || httpRequest.headersList.contains('if-unmodified-since') || httpRequest.headersList.contains('if-match') || httpRequest.headersList.contains('if-range')) ) { httpRequest.cache = 'no-store' } // 16. If httpRequest’s cache mode is "no-cache", httpRequest’s prevent // no-cache cache-control header modification flag is unset, and // httpRequest’s header list does not contain `Cache-Control`, then append // `Cache-Control`/`max-age=0` to httpRequest’s header list. if ( httpRequest.cache === 'no-cache' && !httpRequest.preventNoCacheCacheControlHeaderModification && !httpRequest.headersList.contains('cache-control') ) { httpRequest.headersList.append('cache-control', 'max-age=0') } // 17. If httpRequest’s cache mode is "no-store" or "reload", then: if (httpRequest.cache === 'no-store' || httpRequest.cache === 'reload') { // 1. If httpRequest’s header list does not contain `Pragma`, then append // `Pragma`/`no-cache` to httpRequest’s header list. 
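// Illustrative effect of this branch (a sketch, assuming this module's fetch):
//
//   await fetch('https://example.com', { cache: 'no-store' })
//   // the outgoing request carries `pragma: no-cache` and `cache-control: no-cache`
//   // unless the caller already supplied those headers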
if (!httpRequest.headersList.contains('pragma')) { httpRequest.headersList.append('pragma', 'no-cache') } // 2. If httpRequest’s header list does not contain `Cache-Control`, // then append `Cache-Control`/`no-cache` to httpRequest’s header list. if (!httpRequest.headersList.contains('cache-control')) { httpRequest.headersList.append('cache-control', 'no-cache') } } // 18. If httpRequest’s header list contains `Range`, then append // `Accept-Encoding`/`identity` to httpRequest’s header list. if (httpRequest.headersList.contains('range')) { httpRequest.headersList.append('accept-encoding', 'identity') } // 19. Modify httpRequest’s header list per HTTP. Do not append a given // header if httpRequest’s header list contains that header’s name. // TODO: https://github.com/whatwg/fetch/issues/1285#issuecomment-896560129 if (!httpRequest.headersList.contains('accept-encoding')) { if (urlHasHttpsScheme(requestCurrentURL(httpRequest))) { httpRequest.headersList.append('accept-encoding', 'br, gzip, deflate') } else { httpRequest.headersList.append('accept-encoding', 'gzip, deflate') } } httpRequest.headersList.delete('host') // 20. If includeCredentials is true, then: if (includeCredentials) { // 1. If the user agent is not configured to block cookies for httpRequest // (see section 7 of [COOKIES]), then: // TODO: credentials // 2. If httpRequest’s header list does not contain `Authorization`, then: // TODO: credentials } // 21. If there’s a proxy-authentication entry, use it as appropriate. // TODO: proxy-authentication // 22. Set httpCache to the result of determining the HTTP cache // partition, given httpRequest. // TODO: cache // 23. If httpCache is null, then set httpRequest’s cache mode to // "no-store". if (httpCache == null) { httpRequest.cache = 'no-store' } // 24. If httpRequest’s cache mode is neither "no-store" nor "reload", // then: if (httpRequest.mode !== 'no-store' && httpRequest.mode !== 'reload') { // TODO: cache } // 9. If aborted, then return the appropriate network error for fetchParams. // TODO // 10. If response is null, then: if (response == null) { // 1. If httpRequest’s cache mode is "only-if-cached", then return a // network error. if (httpRequest.mode === 'only-if-cached') { return makeNetworkError('only if cached') } // 2. Let forwardResponse be the result of running HTTP-network fetch // given httpFetchParams, includeCredentials, and isNewConnectionFetch. const forwardResponse = await httpNetworkFetch( httpFetchParams, includeCredentials, isNewConnectionFetch ) // 3. If httpRequest’s method is unsafe and forwardResponse’s status is // in the range 200 to 399, inclusive, invalidate appropriate stored // responses in httpCache, as per the "Invalidation" chapter of HTTP // Caching, and set storedResponse to null. [HTTP-CACHING] if ( !safeMethodsSet.has(httpRequest.method) && forwardResponse.status >= 200 && forwardResponse.status <= 399 ) { // TODO: cache } // 4. If the revalidatingFlag is set and forwardResponse’s status is 304, // then: if (revalidatingFlag && forwardResponse.status === 304) { // TODO: cache } // 5. If response is null, then: if (response == null) { // 1. Set response to forwardResponse. response = forwardResponse // 2. Store httpRequest and forwardResponse in httpCache, as per the // "Storing Responses in Caches" chapter of HTTP Caching. [HTTP-CACHING] // TODO: cache } } // 11. Set response’s URL list to a clone of httpRequest’s URL list. response.urlList = [...httpRequest.urlList] // 12. 
If httpRequest’s header list contains `Range`, then set response’s // range-requested flag. if (httpRequest.headersList.contains('range')) { response.rangeRequested = true } // 13. Set response’s request-includes-credentials to includeCredentials. response.requestIncludesCredentials = includeCredentials // 14. If response’s status is 401, httpRequest’s response tainting is not // "cors", includeCredentials is true, and request’s window is an environment // settings object, then: // TODO // 15. If response’s status is 407, then: if (response.status === 407) { // 1. If request’s window is "no-window", then return a network error. if (request.window === 'no-window') { return makeNetworkError() } // 2. ??? // 3. If fetchParams is canceled, then return the appropriate network error for fetchParams. if (isCancelled(fetchParams)) { return makeAppropriateNetworkError(fetchParams) } // 4. Prompt the end user as appropriate in request’s window and store // the result as a proxy-authentication entry. [HTTP-AUTH] // TODO: Invoke some kind of callback? // 5. Set response to the result of running HTTP-network-or-cache fetch given // fetchParams. // TODO return makeNetworkError('proxy authentication required') } // 16. If all of the following are true if ( // response’s status is 421 response.status === 421 && // isNewConnectionFetch is false !isNewConnectionFetch && // request’s body is null, or request’s body is non-null and request’s body’s source is non-null (request.body == null || request.body.source != null) ) { // then: // 1. If fetchParams is canceled, then return the appropriate network error for fetchParams. if (isCancelled(fetchParams)) { return makeAppropriateNetworkError(fetchParams) } // 2. Set response to the result of running HTTP-network-or-cache // fetch given fetchParams, isAuthenticationFetch, and true. // TODO (spec): The spec doesn't specify this but we need to cancel // the active response before we can start a new one. // https://github.com/whatwg/fetch/issues/1293 fetchParams.controller.connection.destroy() response = await httpNetworkOrCacheFetch( fetchParams, isAuthenticationFetch, true ) } // 17. If isAuthenticationFetch is true, then create an authentication entry if (isAuthenticationFetch) { // TODO } // 18. Return response. return response } // https://fetch.spec.whatwg.org/#http-network-fetch async function httpNetworkFetch ( fetchParams, includeCredentials = false, forceNewConnection = false ) { assert(!fetchParams.controller.connection || fetchParams.controller.connection.destroyed) fetchParams.controller.connection = { abort: null, destroyed: false, destroy (err) { if (!this.destroyed) { this.destroyed = true this.abort?.(err ?? new DOMException('The operation was aborted.', 'AbortError')) } } } // 1. Let request be fetchParams’s request. const request = fetchParams.request // 2. Let response be null. let response = null // 3. Let timingInfo be fetchParams’s timing info. const timingInfo = fetchParams.timingInfo // 4. Let httpCache be the result of determining the HTTP cache partition, // given request. // TODO: cache const httpCache = null // 5. If httpCache is null, then set request’s cache mode to "no-store". if (httpCache == null) { request.cache = 'no-store' } // 6. Let networkPartitionKey be the result of determining the network // partition key given request. // TODO // 7. Let newConnection be "yes" if forceNewConnection is true; otherwise // "no". const newConnection = forceNewConnection ? 'yes' : 'no' // eslint-disable-line no-unused-vars // 8. 
Switch on request’s mode: if (request.mode === 'websocket') { // Let connection be the result of obtaining a WebSocket connection, // given request’s current URL. // TODO } else { // Let connection be the result of obtaining a connection, given // networkPartitionKey, request’s current URL’s origin, // includeCredentials, and forceNewConnection. // TODO } // 9. Run these steps, but abort when the ongoing fetch is terminated: // 1. If connection is failure, then return a network error. // 2. Set timingInfo’s final connection timing info to the result of // calling clamp and coarsen connection timing info with connection’s // timing info, timingInfo’s post-redirect start time, and fetchParams’s // cross-origin isolated capability. // 3. If connection is not an HTTP/2 connection, request’s body is non-null, // and request’s body’s source is null, then append (`Transfer-Encoding`, // `chunked`) to request’s header list. // 4. Set timingInfo’s final network-request start time to the coarsened // shared current time given fetchParams’s cross-origin isolated // capability. // 5. Set response to the result of making an HTTP request over connection // using request with the following caveats: // - Follow the relevant requirements from HTTP. [HTTP] [HTTP-SEMANTICS] // [HTTP-COND] [HTTP-CACHING] [HTTP-AUTH] // - If request’s body is non-null, and request’s body’s source is null, // then the user agent may have a buffer of up to 64 kibibytes and store // a part of request’s body in that buffer. If the user agent reads from // request’s body beyond that buffer’s size and the user agent needs to // resend request, then instead return a network error. // - Set timingInfo’s final network-response start time to the coarsened // shared current time given fetchParams’s cross-origin isolated capability, // immediately after the user agent’s HTTP parser receives the first byte // of the response (e.g., frame header bytes for HTTP/2 or response status // line for HTTP/1.x). // - Wait until all the headers are transmitted. // - Any responses whose status is in the range 100 to 199, inclusive, // and is not 101, are to be ignored, except for the purposes of setting // timingInfo’s final network-response start time above. // - If request’s header list contains `Transfer-Encoding`/`chunked` and // response is transferred via HTTP/1.0 or older, then return a network // error. // - If the HTTP request results in a TLS client certificate dialog, then: // 1. If request’s window is an environment settings object, make the // dialog available in request’s window. // 2. Otherwise, return a network error. // To transmit request’s body body, run these steps: let requestBody = null // 1. If body is null and fetchParams’s process request end-of-body is // non-null, then queue a fetch task given fetchParams’s process request // end-of-body and fetchParams’s task destination. if (request.body == null && fetchParams.processRequestEndOfBody) { queueMicrotask(() => fetchParams.processRequestEndOfBody()) } else if (request.body != null) { // 2. Otherwise, if body is non-null: // 1. Let processBodyChunk given bytes be these steps: const processBodyChunk = async function * (bytes) { // 1. If the ongoing fetch is terminated, then abort these steps. if (isCancelled(fetchParams)) { return } // 2. Run this step in parallel: transmit bytes. yield bytes // 3. If fetchParams’s process request body is non-null, then run // fetchParams’s process request body given bytes’s length. 
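// Caller-side sketch of a streamed request body that flows through this generator
// (assumes this module's fetch; `duplex: 'half'` is what recent undici/Node versions
// expect for stream bodies):
//
//   await fetch('https://example.com/upload', {
//     method: 'POST',
//     body: new ReadableStream({
//       start (c) { c.enqueue(new TextEncoder().encode('hello')); c.close() }
//     }),
//     duplex: 'half'
//   })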
fetchParams.processRequestBodyChunkLength?.(bytes.byteLength) } // 2. Let processEndOfBody be these steps: const processEndOfBody = () => { // 1. If fetchParams is canceled, then abort these steps. if (isCancelled(fetchParams)) { return } // 2. If fetchParams’s process request end-of-body is non-null, // then run fetchParams’s process request end-of-body. if (fetchParams.processRequestEndOfBody) { fetchParams.processRequestEndOfBody() } } // 3. Let processBodyError given e be these steps: const processBodyError = (e) => { // 1. If fetchParams is canceled, then abort these steps. if (isCancelled(fetchParams)) { return } // 2. If e is an "AbortError" DOMException, then abort fetchParams’s controller. if (e.name === 'AbortError') { fetchParams.controller.abort() } else { fetchParams.controller.terminate(e) } } // 4. Incrementally read request’s body given processBodyChunk, processEndOfBody, // processBodyError, and fetchParams’s task destination. requestBody = (async function * () { try { for await (const bytes of request.body.stream) { yield * processBodyChunk(bytes) } processEndOfBody() } catch (err) { processBodyError(err) } })() } try { // socket is only provided for websockets const { body, status, statusText, headersList, socket } = await dispatch({ body: requestBody }) if (socket) { response = makeResponse({ status, statusText, headersList, socket }) } else { const iterator = body[Symbol.asyncIterator]() fetchParams.controller.next = () => iterator.next() response = makeResponse({ status, statusText, headersList }) } } catch (err) { // 10. If aborted, then: if (err.name === 'AbortError') { // 1. If connection uses HTTP/2, then transmit an RST_STREAM frame. fetchParams.controller.connection.destroy() // 2. Return the appropriate network error for fetchParams. return makeAppropriateNetworkError(fetchParams, err) } return makeNetworkError(err) } // 11. Let pullAlgorithm be an action that resumes the ongoing fetch // if it is suspended. const pullAlgorithm = () => { fetchParams.controller.resume() } // 12. Let cancelAlgorithm be an algorithm that aborts fetchParams’s // controller with reason, given reason. const cancelAlgorithm = (reason) => { fetchParams.controller.abort(reason) } // 13. Let highWaterMark be a non-negative, non-NaN number, chosen by // the user agent. // TODO // 14. Let sizeAlgorithm be an algorithm that accepts a chunk object // and returns a non-negative, non-NaN, non-infinite number, chosen by the user agent. // TODO // 15. Let stream be a new ReadableStream. // 16. Set up stream with pullAlgorithm set to pullAlgorithm, // cancelAlgorithm set to cancelAlgorithm, highWaterMark set to // highWaterMark, and sizeAlgorithm set to sizeAlgorithm. if (!ReadableStream) { ReadableStream = (__nccwpck_require__(63774).ReadableStream) } const stream = new ReadableStream( { async start (controller) { fetchParams.controller.controller = controller }, async pull (controller) { await pullAlgorithm(controller) }, async cancel (reason) { await cancelAlgorithm(reason) } }, { highWaterMark: 0, size () { return 1 } } ) // 17. Run these steps, but abort when the ongoing fetch is terminated: // 1. Set response’s body to a new body whose stream is stream. response.body = { stream } // 2. If response is not a network error and request’s cache mode is // not "no-store", then update response in httpCache for request. // TODO // 3. 
If includeCredentials is true and the user agent is not configured // to block cookies for request (see section 7 of [COOKIES]), then run the // "set-cookie-string" parsing algorithm (see section 5.2 of [COOKIES]) on // the value of each header whose name is a byte-case-insensitive match for // `Set-Cookie` in response’s header list, if any, and request’s current URL. // TODO // 18. If aborted, then: // TODO // 19. Run these steps in parallel: // 1. Run these steps, but abort when fetchParams is canceled: fetchParams.controller.on('terminated', onAborted) fetchParams.controller.resume = async () => { // 1. While true while (true) { // 1-3. See onData... // 4. Set bytes to the result of handling content codings given // codings and bytes. let bytes let isFailure try { const { done, value } = await fetchParams.controller.next() if (isAborted(fetchParams)) { break } bytes = done ? undefined : value } catch (err) { if (fetchParams.controller.ended && !timingInfo.encodedBodySize) { // zlib doesn't like empty streams. bytes = undefined } else { bytes = err // err may be propagated from the result of calling readablestream.cancel, // which might not be an error. https://github.com/nodejs/undici/issues/2009 isFailure = true } } if (bytes === undefined) { // 2. Otherwise, if the bytes transmission for response’s message // body is done normally and stream is readable, then close // stream, finalize response for fetchParams and response, and // abort these in-parallel steps. readableStreamClose(fetchParams.controller.controller) finalizeResponse(fetchParams, response) return } // 5. Increase timingInfo’s decoded body size by bytes’s length. timingInfo.decodedBodySize += bytes?.byteLength ?? 0 // 6. If bytes is failure, then terminate fetchParams’s controller. if (isFailure) { fetchParams.controller.terminate(bytes) return } // 7. Enqueue a Uint8Array wrapping an ArrayBuffer containing bytes // into stream. fetchParams.controller.controller.enqueue(new Uint8Array(bytes)) // 8. If stream is errored, then terminate the ongoing fetch. if (isErrored(stream)) { fetchParams.controller.terminate() return } // 9. If stream doesn’t need more data ask the user agent to suspend // the ongoing fetch. if (!fetchParams.controller.controller.desiredSize) { return } } } // 2. If aborted, then: function onAborted (reason) { // 2. If fetchParams is aborted, then: if (isAborted(fetchParams)) { // 1. Set response’s aborted flag. response.aborted = true // 2. If stream is readable, then error stream with the result of // deserialize a serialized abort reason given fetchParams’s // controller’s serialized abort reason and an // implementation-defined realm. if (isReadable(stream)) { fetchParams.controller.controller.error( fetchParams.controller.serializedAbortReason ) } } else { // 3. Otherwise, if stream is readable, error stream with a TypeError. if (isReadable(stream)) { fetchParams.controller.controller.error(new TypeError('terminated', { cause: isErrorLike(reason) ? reason : undefined })) } } // 4. If connection uses HTTP/2, then transmit an RST_STREAM frame. // 5. Otherwise, the user agent should close connection unless it would be bad for performance to do so. fetchParams.controller.connection.destroy() } // 20. Return response. 
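// dispatch() below bridges this fetch implementation to the undici Dispatcher API: it
// calls agent.dispatch() with onConnect/onHeaders/onData/onComplete/onError/onUpgrade
// handlers and feeds the resulting body into the stream above. Illustrative routing of
// a fetch through a specific dispatcher (a sketch; Agent is undici's standard dispatcher):
//
//   const { Agent } = require('undici')
//   await fetch('https://example.com', { dispatcher: new Agent({ connections: 1 }) })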
return response async function dispatch ({ body }) { const url = requestCurrentURL(request) /** @type {import('../..').Agent} */ const agent = fetchParams.controller.dispatcher return new Promise((resolve, reject) => agent.dispatch( { path: url.pathname + url.search, origin: url.origin, method: request.method, body: fetchParams.controller.dispatcher.isMockActive ? request.body && (request.body.source || request.body.stream) : body, headers: request.headersList.entries, maxRedirections: 0, upgrade: request.mode === 'websocket' ? 'websocket' : undefined }, { body: null, abort: null, onConnect (abort) { // TODO (fix): Do we need connection here? const { connection } = fetchParams.controller if (connection.destroyed) { abort(new DOMException('The operation was aborted.', 'AbortError')) } else { fetchParams.controller.on('terminated', abort) this.abort = connection.abort = abort } }, onHeaders (status, headersList, resume, statusText) { if (status < 200) { return } let codings = [] let location = '' const headers = new Headers() // For H2, the headers are a plain JS object // We distinguish between them and iterate accordingly if (Array.isArray(headersList)) { for (let n = 0; n < headersList.length; n += 2) { const key = headersList[n + 0].toString('latin1') const val = headersList[n + 1].toString('latin1') if (key.toLowerCase() === 'content-encoding') { // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 // "All content-coding values are case-insensitive..." codings = val.toLowerCase().split(',').map((x) => x.trim()) } else if (key.toLowerCase() === 'location') { location = val } headers[kHeadersList].append(key, val) } } else { const keys = Object.keys(headersList) for (const key of keys) { const val = headersList[key] if (key.toLowerCase() === 'content-encoding') { // https://www.rfc-editor.org/rfc/rfc7231#section-3.1.2.1 // "All content-coding values are case-insensitive..." codings = val.toLowerCase().split(',').map((x) => x.trim()).reverse() } else if (key.toLowerCase() === 'location') { location = val } headers[kHeadersList].append(key, val) } } this.body = new Readable({ read: resume }) const decoders = [] const willFollow = request.redirect === 'follow' && location && redirectStatusSet.has(status) // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Encoding if (request.method !== 'HEAD' && request.method !== 'CONNECT' && !nullBodyStatus.includes(status) && !willFollow) { for (const coding of codings) { // https://www.rfc-editor.org/rfc/rfc9112.html#section-7.2 if (coding === 'x-gzip' || coding === 'gzip') { decoders.push(zlib.createGunzip({ // Be less strict when decoding compressed responses, since sometimes // servers send slightly invalid responses that are still accepted // by common browsers. // Always using Z_SYNC_FLUSH is what cURL does. flush: zlib.constants.Z_SYNC_FLUSH, finishFlush: zlib.constants.Z_SYNC_FLUSH })) } else if (coding === 'deflate') { decoders.push(zlib.createInflate()) } else if (coding === 'br') { decoders.push(zlib.createBrotliDecompress()) } else { decoders.length = 0 break } } } resolve({ status, statusText, headersList: headers[kHeadersList], body: decoders.length ? pipeline(this.body, ...decoders, () => { }) : this.body.on('error', () => {}) }) return true }, onData (chunk) { if (fetchParams.controller.dump) { return } // 1. If one or more bytes have been transmitted from response’s // message body, then: // 1. Let bytes be the transmitted bytes. const bytes = chunk // 2. 
Let codings be the result of extracting header list values // given `Content-Encoding` and response’s header list. // See pullAlgorithm. // 3. Increase timingInfo’s encoded body size by bytes’s length. timingInfo.encodedBodySize += bytes.byteLength // 4. See pullAlgorithm... return this.body.push(bytes) }, onComplete () { if (this.abort) { fetchParams.controller.off('terminated', this.abort) } fetchParams.controller.ended = true this.body.push(null) }, onError (error) { if (this.abort) { fetchParams.controller.off('terminated', this.abort) } this.body?.destroy(error) fetchParams.controller.terminate(error) reject(error) }, onUpgrade (status, headersList, socket) { if (status !== 101) { return } const headers = new Headers() for (let n = 0; n < headersList.length; n += 2) { const key = headersList[n + 0].toString('latin1') const val = headersList[n + 1].toString('latin1') headers[kHeadersList].append(key, val) } resolve({ status, statusText: STATUS_CODES[status], headersList: headers[kHeadersList], socket }) return true } } )) } } module.exports = { fetch, Fetch, fetching, finalizeAndReportTiming } /***/ }), /***/ 25194: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /* globals AbortController */ const { extractBody, mixinBody, cloneBody } = __nccwpck_require__(8923) const { Headers, fill: fillHeaders, HeadersList } = __nccwpck_require__(26349) const { FinalizationRegistry } = __nccwpck_require__(13194)() const util = __nccwpck_require__(3440) const { isValidHTTPToken, sameOrigin, normalizeMethod, makePolicyContainer, normalizeMethodRecord } = __nccwpck_require__(15523) const { forbiddenMethodsSet, corsSafeListedMethodsSet, referrerPolicy, requestRedirect, requestMode, requestCredentials, requestCache, requestDuplex } = __nccwpck_require__(87326) const { kEnumerableProperty } = util const { kHeaders, kSignal, kState, kGuard, kRealm } = __nccwpck_require__(89710) const { webidl } = __nccwpck_require__(74222) const { getGlobalOrigin } = __nccwpck_require__(75628) const { URLSerializer } = __nccwpck_require__(94322) const { kHeadersList, kConstruct } = __nccwpck_require__(36443) const assert = __nccwpck_require__(42613) const { getMaxListeners, setMaxListeners, getEventListeners, defaultMaxListeners } = __nccwpck_require__(24434) let TransformStream = globalThis.TransformStream const kAbortController = Symbol('abortController') const requestFinalizer = new FinalizationRegistry(({ signal, abort }) => { signal.removeEventListener('abort', abort) }) // https://fetch.spec.whatwg.org/#request-class class Request { // https://fetch.spec.whatwg.org/#dom-request constructor (input, init = {}) { if (input === kConstruct) { return } webidl.argumentLengthCheck(arguments, 1, { header: 'Request constructor' }) input = webidl.converters.RequestInfo(input) init = webidl.converters.RequestInit(init) // https://html.spec.whatwg.org/multipage/webappapis.html#environment-settings-object this[kRealm] = { settingsObject: { baseUrl: getGlobalOrigin(), get origin () { return this.baseUrl?.origin }, policyContainer: makePolicyContainer() } } // 1. Let request be null. let request = null // 2. Let fallbackMode be null. let fallbackMode = null // 3. Let baseURL be this’s relevant settings object’s API base URL. const baseUrl = this[kRealm].settingsObject.baseUrl // 4. Let signal be null. let signal = null // 5. If input is a string, then: if (typeof input === 'string') { // 1. Let parsedURL be the result of parsing input with baseURL. // 2. 
If parsedURL is failure, then throw a TypeError. let parsedURL try { parsedURL = new URL(input, baseUrl) } catch (err) { throw new TypeError('Failed to parse URL from ' + input, { cause: err }) } // 3. If parsedURL includes credentials, then throw a TypeError. if (parsedURL.username || parsedURL.password) { throw new TypeError( 'Request cannot be constructed from a URL that includes credentials: ' + input ) } // 4. Set request to a new request whose URL is parsedURL. request = makeRequest({ urlList: [parsedURL] }) // 5. Set fallbackMode to "cors". fallbackMode = 'cors' } else { // 6. Otherwise: // 7. Assert: input is a Request object. assert(input instanceof Request) // 8. Set request to input’s request. request = input[kState] // 9. Set signal to input’s signal. signal = input[kSignal] } // 7. Let origin be this’s relevant settings object’s origin. const origin = this[kRealm].settingsObject.origin // 8. Let window be "client". let window = 'client' // 9. If request’s window is an environment settings object and its origin // is same origin with origin, then set window to request’s window. if ( request.window?.constructor?.name === 'EnvironmentSettingsObject' && sameOrigin(request.window, origin) ) { window = request.window } // 10. If init["window"] exists and is non-null, then throw a TypeError. if (init.window != null) { throw new TypeError(`'window' option '${window}' must be null`) } // 11. If init["window"] exists, then set window to "no-window". if ('window' in init) { window = 'no-window' } // 12. Set request to a new request with the following properties: request = makeRequest({ // URL request’s URL. // undici implementation note: this is set as the first item in request's urlList in makeRequest // method request’s method. method: request.method, // header list A copy of request’s header list. // undici implementation note: headersList is cloned in makeRequest headersList: request.headersList, // unsafe-request flag Set. unsafeRequest: request.unsafeRequest, // client This’s relevant settings object. client: this[kRealm].settingsObject, // window window. window, // priority request’s priority. priority: request.priority, // origin request’s origin. The propagation of the origin is only significant for navigation requests // being handled by a service worker. In this scenario a request can have an origin that is different // from the current client. origin: request.origin, // referrer request’s referrer. referrer: request.referrer, // referrer policy request’s referrer policy. referrerPolicy: request.referrerPolicy, // mode request’s mode. mode: request.mode, // credentials mode request’s credentials mode. credentials: request.credentials, // cache mode request’s cache mode. cache: request.cache, // redirect mode request’s redirect mode. redirect: request.redirect, // integrity metadata request’s integrity metadata. integrity: request.integrity, // keepalive request’s keepalive. keepalive: request.keepalive, // reload-navigation flag request’s reload-navigation flag. reloadNavigation: request.reloadNavigation, // history-navigation flag request’s history-navigation flag. historyNavigation: request.historyNavigation, // URL list A clone of request’s URL list. urlList: [...request.urlList] }) const initHasKey = Object.keys(init).length !== 0 // 13. If init is not empty, then: if (initHasKey) { // 1. If request’s mode is "navigate", then set it to "same-origin". if (request.mode === 'navigate') { request.mode = 'same-origin' } // 2. Unset request’s reload-navigation flag. 
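// Illustrative sketch (not from the undici source) of the string branch above:
// URLs that embed credentials are rejected, while an existing Request instance
// has its underlying request state copied. Assumes the global Request exported
// by this module (Node >= 18); URLs are made up.
/*
try {
  const bad = new Request('https://user:pass@example.com/data')
} catch (err) {
  // TypeError: Request cannot be constructed from a URL that includes credentials
}

const original = new Request('https://example.com/', { method: 'POST', body: 'x' })
const copy = new Request(original)
console.log(copy.method) // 'POST' (state copied from input's request)
*/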
request.reloadNavigation = false // 3. Unset request’s history-navigation flag. request.historyNavigation = false // 4. Set request’s origin to "client". request.origin = 'client' // 5. Set request’s referrer to "client" request.referrer = 'client' // 6. Set request’s referrer policy to the empty string. request.referrerPolicy = '' // 7. Set request’s URL to request’s current URL. request.url = request.urlList[request.urlList.length - 1] // 8. Set request’s URL list to « request’s URL ». request.urlList = [request.url] } // 14. If init["referrer"] exists, then: if (init.referrer !== undefined) { // 1. Let referrer be init["referrer"]. const referrer = init.referrer // 2. If referrer is the empty string, then set request’s referrer to "no-referrer". if (referrer === '') { request.referrer = 'no-referrer' } else { // 1. Let parsedReferrer be the result of parsing referrer with // baseURL. // 2. If parsedReferrer is failure, then throw a TypeError. let parsedReferrer try { parsedReferrer = new URL(referrer, baseUrl) } catch (err) { throw new TypeError(`Referrer "${referrer}" is not a valid URL.`, { cause: err }) } // 3. If one of the following is true // - parsedReferrer’s scheme is "about" and path is the string "client" // - parsedReferrer’s origin is not same origin with origin // then set request’s referrer to "client". if ( (parsedReferrer.protocol === 'about:' && parsedReferrer.hostname === 'client') || (origin && !sameOrigin(parsedReferrer, this[kRealm].settingsObject.baseUrl)) ) { request.referrer = 'client' } else { // 4. Otherwise, set request’s referrer to parsedReferrer. request.referrer = parsedReferrer } } } // 15. If init["referrerPolicy"] exists, then set request’s referrer policy // to it. if (init.referrerPolicy !== undefined) { request.referrerPolicy = init.referrerPolicy } // 16. Let mode be init["mode"] if it exists, and fallbackMode otherwise. let mode if (init.mode !== undefined) { mode = init.mode } else { mode = fallbackMode } // 17. If mode is "navigate", then throw a TypeError. if (mode === 'navigate') { throw webidl.errors.exception({ header: 'Request constructor', message: 'invalid request mode navigate.' }) } // 18. If mode is non-null, set request’s mode to mode. if (mode != null) { request.mode = mode } // 19. If init["credentials"] exists, then set request’s credentials mode // to it. if (init.credentials !== undefined) { request.credentials = init.credentials } // 18. If init["cache"] exists, then set request’s cache mode to it. if (init.cache !== undefined) { request.cache = init.cache } // 21. If request’s cache mode is "only-if-cached" and request’s mode is // not "same-origin", then throw a TypeError. if (request.cache === 'only-if-cached' && request.mode !== 'same-origin') { throw new TypeError( "'only-if-cached' can be set only with 'same-origin' mode" ) } // 22. If init["redirect"] exists, then set request’s redirect mode to it. if (init.redirect !== undefined) { request.redirect = init.redirect } // 23. If init["integrity"] exists, then set request’s integrity metadata to it. if (init.integrity != null) { request.integrity = String(init.integrity) } // 24. If init["keepalive"] exists, then set request’s keepalive to it. if (init.keepalive !== undefined) { request.keepalive = Boolean(init.keepalive) } // 25. If init["method"] exists, then: if (init.method !== undefined) { // 1. Let method be init["method"]. let method = init.method // 2. If method is not a method or method is a forbidden method, then // throw a TypeError. 
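// Illustrative sketch (not from the undici source) of the init["method"] checks
// that follow: forbidden methods throw, and only DELETE/GET/HEAD/OPTIONS/POST/PUT
// are byte-uppercased by normalization. Assumes the global Request (Node >= 18).
/*
console.log(new Request('https://example.com/', { method: 'post' }).method)  // 'POST'
console.log(new Request('https://example.com/', { method: 'patch' }).method) // 'patch' (not in the normalize set)
try {
  const r = new Request('https://example.com/', { method: 'TRACE' })
} catch (err) {
  // TypeError: 'TRACE' HTTP method is unsupported.
}
*/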
if (!isValidHTTPToken(method)) { throw new TypeError(`'${method}' is not a valid HTTP method.`) } if (forbiddenMethodsSet.has(method.toUpperCase())) { throw new TypeError(`'${method}' HTTP method is unsupported.`) } // 3. Normalize method. method = normalizeMethodRecord[method] ?? normalizeMethod(method) // 4. Set request’s method to method. request.method = method } // 26. If init["signal"] exists, then set signal to it. if (init.signal !== undefined) { signal = init.signal } // 27. Set this’s request to request. this[kState] = request // 28. Set this’s signal to a new AbortSignal object with this’s relevant // Realm. // TODO: could this be simplified with AbortSignal.any // (https://dom.spec.whatwg.org/#dom-abortsignal-any) const ac = new AbortController() this[kSignal] = ac.signal this[kSignal][kRealm] = this[kRealm] // 29. If signal is not null, then make this’s signal follow signal. if (signal != null) { if ( !signal || typeof signal.aborted !== 'boolean' || typeof signal.addEventListener !== 'function' ) { throw new TypeError( "Failed to construct 'Request': member signal is not of type AbortSignal." ) } if (signal.aborted) { ac.abort(signal.reason) } else { // Keep a strong ref to ac while request object // is alive. This is needed to prevent AbortController // from being prematurely garbage collected. // See, https://github.com/nodejs/undici/issues/1926. this[kAbortController] = ac const acRef = new WeakRef(ac) const abort = function () { const ac = acRef.deref() if (ac !== undefined) { ac.abort(this.reason) } } // Third-party AbortControllers may not work with these. // See, https://github.com/nodejs/undici/pull/1910#issuecomment-1464495619. try { // If the max amount of listeners is equal to the default, increase it // This is only available in node >= v19.9.0 if (typeof getMaxListeners === 'function' && getMaxListeners(signal) === defaultMaxListeners) { setMaxListeners(100, signal) } else if (getEventListeners(signal, 'abort').length >= defaultMaxListeners) { setMaxListeners(100, signal) } } catch {} util.addAbortListener(signal, abort) requestFinalizer.register(ac, { signal, abort }) } } // 30. Set this’s headers to a new Headers object with this’s relevant // Realm, whose header list is request’s header list and guard is // "request". this[kHeaders] = new Headers(kConstruct) this[kHeaders][kHeadersList] = request.headersList this[kHeaders][kGuard] = 'request' this[kHeaders][kRealm] = this[kRealm] // 31. If this’s request’s mode is "no-cors", then: if (mode === 'no-cors') { // 1. If this’s request’s method is not a CORS-safelisted method, // then throw a TypeError. if (!corsSafeListedMethodsSet.has(request.method)) { throw new TypeError( `'${request.method} is unsupported in no-cors mode.` ) } // 2. Set this’s headers’s guard to "request-no-cors". this[kHeaders][kGuard] = 'request-no-cors' } // 32. If init is not empty, then: if (initHasKey) { /** @type {HeadersList} */ const headersList = this[kHeaders][kHeadersList] // 1. Let headers be a copy of this’s headers and its associated header // list. // 2. If init["headers"] exists, then set headers to init["headers"]. const headers = init.headers !== undefined ? init.headers : new HeadersList(headersList) // 3. Empty this’s headers’s header list. headersList.clear() // 4. If headers is a Headers object, then for each header in its header // list, append header’s name/header’s value to this’s headers. 
if (headers instanceof HeadersList) { for (const [key, val] of headers) { headersList.append(key, val) } // Note: Copy the `set-cookie` meta-data. headersList.cookies = headers.cookies } else { // 5. Otherwise, fill this’s headers with headers. fillHeaders(this[kHeaders], headers) } } // 33. Let inputBody be input’s request’s body if input is a Request // object; otherwise null. const inputBody = input instanceof Request ? input[kState].body : null // 34. If either init["body"] exists and is non-null or inputBody is // non-null, and request’s method is `GET` or `HEAD`, then throw a // TypeError. if ( (init.body != null || inputBody != null) && (request.method === 'GET' || request.method === 'HEAD') ) { throw new TypeError('Request with GET/HEAD method cannot have body.') } // 35. Let initBody be null. let initBody = null // 36. If init["body"] exists and is non-null, then: if (init.body != null) { // 1. Let Content-Type be null. // 2. Set initBody and Content-Type to the result of extracting // init["body"], with keepalive set to request’s keepalive. const [extractedBody, contentType] = extractBody( init.body, request.keepalive ) initBody = extractedBody // 3, If Content-Type is non-null and this’s headers’s header list does // not contain `Content-Type`, then append `Content-Type`/Content-Type to // this’s headers. if (contentType && !this[kHeaders][kHeadersList].contains('content-type')) { this[kHeaders].append('content-type', contentType) } } // 37. Let inputOrInitBody be initBody if it is non-null; otherwise // inputBody. const inputOrInitBody = initBody ?? inputBody // 38. If inputOrInitBody is non-null and inputOrInitBody’s source is // null, then: if (inputOrInitBody != null && inputOrInitBody.source == null) { // 1. If initBody is non-null and init["duplex"] does not exist, // then throw a TypeError. if (initBody != null && init.duplex == null) { throw new TypeError('RequestInit: duplex option is required when sending a body.') } // 2. If this’s request’s mode is neither "same-origin" nor "cors", // then throw a TypeError. if (request.mode !== 'same-origin' && request.mode !== 'cors') { throw new TypeError( 'If request is made from ReadableStream, mode should be "same-origin" or "cors"' ) } // 3. Set this’s request’s use-CORS-preflight flag. request.useCORSPreflightFlag = true } // 39. Let finalBody be inputOrInitBody. let finalBody = inputOrInitBody // 40. If initBody is null and inputBody is non-null, then: if (initBody == null && inputBody != null) { // 1. If input is unusable, then throw a TypeError. if (util.isDisturbed(inputBody.stream) || inputBody.stream.locked) { throw new TypeError( 'Cannot construct a Request with a Request object that has already been used.' ) } // 2. Set finalBody to the result of creating a proxy for inputBody. if (!TransformStream) { TransformStream = (__nccwpck_require__(63774).TransformStream) } // https://streams.spec.whatwg.org/#readablestream-create-a-proxy const identityTransform = new TransformStream() inputBody.stream.pipeThrough(identityTransform) finalBody = { source: inputBody.source, length: inputBody.length, stream: identityTransform.readable } } // 41. Set this’s request’s body to finalBody. this[kState].body = finalBody } // Returns request’s HTTP method, which is "GET" by default. get method () { webidl.brandCheck(this, Request) // The method getter steps are to return this’s request’s method. return this[kState].method } // Returns the URL of request as a string. 
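// Illustrative sketch (not from the undici source) of the step 32 header
// handling above: when init["headers"] is given, this's header list is rebuilt
// from it rather than merged with input's headers. Assumes the global Request;
// header names are made up.
/*
const base = new Request('https://example.com/', { headers: { 'x-a': '1' } })

const replaced = new Request(base, { headers: { 'x-b': '2' } })
console.log(replaced.headers.get('x-a')) // null, init["headers"] replaces the copy
console.log(replaced.headers.get('x-b')) // '2'

const kept = new Request(base, { method: 'POST' })
console.log(kept.headers.get('x-a'))     // '1', no init["headers"], the copy is kept
*/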
get url () { webidl.brandCheck(this, Request) // The url getter steps are to return this’s request’s URL, serialized. return URLSerializer(this[kState].url) } // Returns a Headers object consisting of the headers associated with request. // Note that headers added in the network layer by the user agent will not // be accounted for in this object, e.g., the "Host" header. get headers () { webidl.brandCheck(this, Request) // The headers getter steps are to return this’s headers. return this[kHeaders] } // Returns the kind of resource requested by request, e.g., "document" // or "script". get destination () { webidl.brandCheck(this, Request) // The destination getter are to return this’s request’s destination. return this[kState].destination } // Returns the referrer of request. Its value can be a same-origin URL if // explicitly set in init, the empty string to indicate no referrer, and // "about:client" when defaulting to the global’s default. This is used // during fetching to determine the value of the `Referer` header of the // request being made. get referrer () { webidl.brandCheck(this, Request) // 1. If this’s request’s referrer is "no-referrer", then return the // empty string. if (this[kState].referrer === 'no-referrer') { return '' } // 2. If this’s request’s referrer is "client", then return // "about:client". if (this[kState].referrer === 'client') { return 'about:client' } // Return this’s request’s referrer, serialized. return this[kState].referrer.toString() } // Returns the referrer policy associated with request. // This is used during fetching to compute the value of the request’s // referrer. get referrerPolicy () { webidl.brandCheck(this, Request) // The referrerPolicy getter steps are to return this’s request’s referrer policy. return this[kState].referrerPolicy } // Returns the mode associated with request, which is a string indicating // whether the request will use CORS, or will be restricted to same-origin // URLs. get mode () { webidl.brandCheck(this, Request) // The mode getter steps are to return this’s request’s mode. return this[kState].mode } // Returns the credentials mode associated with request, // which is a string indicating whether credentials will be sent with the // request always, never, or only when sent to a same-origin URL. get credentials () { // The credentials getter steps are to return this’s request’s credentials mode. return this[kState].credentials } // Returns the cache mode associated with request, // which is a string indicating how the request will // interact with the browser’s cache when fetching. get cache () { webidl.brandCheck(this, Request) // The cache getter steps are to return this’s request’s cache mode. return this[kState].cache } // Returns the redirect mode associated with request, // which is a string indicating how redirects for the // request will be handled during fetching. A request // will follow redirects by default. get redirect () { webidl.brandCheck(this, Request) // The redirect getter steps are to return this’s request’s redirect mode. return this[kState].redirect } // Returns request’s subresource integrity metadata, which is a // cryptographic hash of the resource being fetched. Its value // consists of multiple hashes separated by whitespace. [SRI] get integrity () { webidl.brandCheck(this, Request) // The integrity getter steps are to return this’s request’s integrity // metadata. 
return this[kState].integrity } // Returns a boolean indicating whether or not request can outlive the // global in which it was created. get keepalive () { webidl.brandCheck(this, Request) // The keepalive getter steps are to return this’s request’s keepalive. return this[kState].keepalive } // Returns a boolean indicating whether or not request is for a reload // navigation. get isReloadNavigation () { webidl.brandCheck(this, Request) // The isReloadNavigation getter steps are to return true if this’s // request’s reload-navigation flag is set; otherwise false. return this[kState].reloadNavigation } // Returns a boolean indicating whether or not request is for a history // navigation (a.k.a. back-forward navigation). get isHistoryNavigation () { webidl.brandCheck(this, Request) // The isHistoryNavigation getter steps are to return true if this’s request’s // history-navigation flag is set; otherwise false. return this[kState].historyNavigation } // Returns the signal associated with request, which is an AbortSignal // object indicating whether or not request has been aborted, and its // abort event handler. get signal () { webidl.brandCheck(this, Request) // The signal getter steps are to return this’s signal. return this[kSignal] } get body () { webidl.brandCheck(this, Request) return this[kState].body ? this[kState].body.stream : null } get bodyUsed () { webidl.brandCheck(this, Request) return !!this[kState].body && util.isDisturbed(this[kState].body.stream) } get duplex () { webidl.brandCheck(this, Request) return 'half' } // Returns a clone of request. clone () { webidl.brandCheck(this, Request) // 1. If this is unusable, then throw a TypeError. if (this.bodyUsed || this.body?.locked) { throw new TypeError('unusable') } // 2. Let clonedRequest be the result of cloning this’s request. const clonedRequest = cloneRequest(this[kState]) // 3. Let clonedRequestObject be the result of creating a Request object, // given clonedRequest, this’s headers’s guard, and this’s relevant Realm. const clonedRequestObject = new Request(kConstruct) clonedRequestObject[kState] = clonedRequest clonedRequestObject[kRealm] = this[kRealm] clonedRequestObject[kHeaders] = new Headers(kConstruct) clonedRequestObject[kHeaders][kHeadersList] = clonedRequest.headersList clonedRequestObject[kHeaders][kGuard] = this[kHeaders][kGuard] clonedRequestObject[kHeaders][kRealm] = this[kHeaders][kRealm] // 4. Make clonedRequestObject’s signal follow this’s signal. const ac = new AbortController() if (this.signal.aborted) { ac.abort(this.signal.reason) } else { util.addAbortListener( this.signal, () => { ac.abort(this.signal.reason) } ) } clonedRequestObject[kSignal] = ac.signal // 5. Return clonedRequestObject.
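// Illustrative sketch (not from the undici source) of clone() above: a request
// whose body is used or locked is "unusable", so clone before consuming the
// body if it is needed twice. Assumes the global Request (Node >= 18).
/*
(async () => {
  const req = new Request('https://example.com/', { method: 'POST', body: 'payload' })
  const copy = req.clone()           // clone while the body is still unused
  console.log(await req.text())      // 'payload'
  console.log(req.bodyUsed)          // true
  console.log(await copy.text())     // 'payload', the clone carries its own body
  try {
    req.clone()
  } catch (err) {
    // TypeError: unusable
  }
})()
*/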
return clonedRequestObject } } mixinBody(Request) function makeRequest (init) { // https://fetch.spec.whatwg.org/#requests const request = { method: 'GET', localURLsOnly: false, unsafeRequest: false, body: null, client: null, reservedClient: null, replacesClientId: '', window: 'client', keepalive: false, serviceWorkers: 'all', initiator: '', destination: '', priority: null, origin: 'client', policyContainer: 'client', referrer: 'client', referrerPolicy: '', mode: 'no-cors', useCORSPreflightFlag: false, credentials: 'same-origin', useCredentials: false, cache: 'default', redirect: 'follow', integrity: '', cryptoGraphicsNonceMetadata: '', parserMetadata: '', reloadNavigation: false, historyNavigation: false, userActivation: false, taintedOrigin: false, redirectCount: 0, responseTainting: 'basic', preventNoCacheCacheControlHeaderModification: false, done: false, timingAllowFailed: false, ...init, headersList: init.headersList ? new HeadersList(init.headersList) : new HeadersList() } request.url = request.urlList[0] return request } // https://fetch.spec.whatwg.org/#concept-request-clone function cloneRequest (request) { // To clone a request request, run these steps: // 1. Let newRequest be a copy of request, except for its body. const newRequest = makeRequest({ ...request, body: null }) // 2. If request’s body is non-null, set newRequest’s body to the // result of cloning request’s body. if (request.body != null) { newRequest.body = cloneBody(request.body) } // 3. Return newRequest. return newRequest } Object.defineProperties(Request.prototype, { method: kEnumerableProperty, url: kEnumerableProperty, headers: kEnumerableProperty, redirect: kEnumerableProperty, clone: kEnumerableProperty, signal: kEnumerableProperty, duplex: kEnumerableProperty, destination: kEnumerableProperty, body: kEnumerableProperty, bodyUsed: kEnumerableProperty, isHistoryNavigation: kEnumerableProperty, isReloadNavigation: kEnumerableProperty, keepalive: kEnumerableProperty, integrity: kEnumerableProperty, cache: kEnumerableProperty, credentials: kEnumerableProperty, attribute: kEnumerableProperty, referrerPolicy: kEnumerableProperty, referrer: kEnumerableProperty, mode: kEnumerableProperty, [Symbol.toStringTag]: { value: 'Request', configurable: true } }) webidl.converters.Request = webidl.interfaceConverter( Request ) // https://fetch.spec.whatwg.org/#requestinfo webidl.converters.RequestInfo = function (V) { if (typeof V === 'string') { return webidl.converters.USVString(V) } if (V instanceof Request) { return webidl.converters.Request(V) } return webidl.converters.USVString(V) } webidl.converters.AbortSignal = webidl.interfaceConverter( AbortSignal ) // https://fetch.spec.whatwg.org/#requestinit webidl.converters.RequestInit = webidl.dictionaryConverter([ { key: 'method', converter: webidl.converters.ByteString }, { key: 'headers', converter: webidl.converters.HeadersInit }, { key: 'body', converter: webidl.nullableConverter( webidl.converters.BodyInit ) }, { key: 'referrer', converter: webidl.converters.USVString }, { key: 'referrerPolicy', converter: webidl.converters.DOMString, // https://w3c.github.io/webappsec-referrer-policy/#referrer-policy allowedValues: referrerPolicy }, { key: 'mode', converter: webidl.converters.DOMString, // https://fetch.spec.whatwg.org/#concept-request-mode allowedValues: requestMode }, { key: 'credentials', converter: webidl.converters.DOMString, // https://fetch.spec.whatwg.org/#requestcredentials allowedValues: requestCredentials }, { key: 'cache', converter: 
webidl.converters.DOMString, // https://fetch.spec.whatwg.org/#requestcache allowedValues: requestCache }, { key: 'redirect', converter: webidl.converters.DOMString, // https://fetch.spec.whatwg.org/#requestredirect allowedValues: requestRedirect }, { key: 'integrity', converter: webidl.converters.DOMString }, { key: 'keepalive', converter: webidl.converters.boolean }, { key: 'signal', converter: webidl.nullableConverter( (signal) => webidl.converters.AbortSignal( signal, { strict: false } ) ) }, { key: 'window', converter: webidl.converters.any }, { key: 'duplex', converter: webidl.converters.DOMString, allowedValues: requestDuplex } ]) module.exports = { Request, makeRequest } /***/ }), /***/ 48676: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { Headers, HeadersList, fill } = __nccwpck_require__(26349) const { extractBody, cloneBody, mixinBody } = __nccwpck_require__(8923) const util = __nccwpck_require__(3440) const { kEnumerableProperty } = util const { isValidReasonPhrase, isCancelled, isAborted, isBlobLike, serializeJavascriptValueToJSONString, isErrorLike, isomorphicEncode } = __nccwpck_require__(15523) const { redirectStatusSet, nullBodyStatus, DOMException } = __nccwpck_require__(87326) const { kState, kHeaders, kGuard, kRealm } = __nccwpck_require__(89710) const { webidl } = __nccwpck_require__(74222) const { FormData } = __nccwpck_require__(43073) const { getGlobalOrigin } = __nccwpck_require__(75628) const { URLSerializer } = __nccwpck_require__(94322) const { kHeadersList, kConstruct } = __nccwpck_require__(36443) const assert = __nccwpck_require__(42613) const { types } = __nccwpck_require__(39023) const ReadableStream = globalThis.ReadableStream || (__nccwpck_require__(63774).ReadableStream) const textEncoder = new TextEncoder('utf-8') // https://fetch.spec.whatwg.org/#response-class class Response { // Creates network error Response. static error () { // TODO const relevantRealm = { settingsObject: {} } // The static error() method steps are to return the result of creating a // Response object, given a new network error, "immutable", and this’s // relevant Realm. const responseObject = new Response() responseObject[kState] = makeNetworkError() responseObject[kRealm] = relevantRealm responseObject[kHeaders][kHeadersList] = responseObject[kState].headersList responseObject[kHeaders][kGuard] = 'immutable' responseObject[kHeaders][kRealm] = relevantRealm return responseObject } // https://fetch.spec.whatwg.org/#dom-response-json static json (data, init = {}) { webidl.argumentLengthCheck(arguments, 1, { header: 'Response.json' }) if (init !== null) { init = webidl.converters.ResponseInit(init) } // 1. Let bytes the result of running serialize a JavaScript value to JSON bytes on data. const bytes = textEncoder.encode( serializeJavascriptValueToJSONString(data) ) // 2. Let body be the result of extracting bytes. const body = extractBody(bytes) // 3. Let responseObject be the result of creating a Response object, given a new response, // "response", and this’s relevant Realm. const relevantRealm = { settingsObject: {} } const responseObject = new Response() responseObject[kRealm] = relevantRealm responseObject[kHeaders][kGuard] = 'response' responseObject[kHeaders][kRealm] = relevantRealm // 4. Perform initialize a response given responseObject, init, and (body, "application/json"). initializeResponse(responseObject, init, { body: body[0], type: 'application/json' }) // 5. Return responseObject. 
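// Illustrative sketch (not from the undici source) of Response.json() above:
// the body is the JSON serialization of data and Content-Type defaults to
// application/json. Assumes the global Response (Node >= 18); values are made up.
/*
(async () => {
  const res = Response.json({ ok: true }, { status: 201 })
  console.log(res.status)                      // 201
  console.log(res.headers.get('content-type')) // 'application/json'
  console.log(await res.json())                // { ok: true }
})()
*/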
return responseObject } // Creates a redirect Response that redirects to url with status status. static redirect (url, status = 302) { const relevantRealm = { settingsObject: {} } webidl.argumentLengthCheck(arguments, 1, { header: 'Response.redirect' }) url = webidl.converters.USVString(url) status = webidl.converters['unsigned short'](status) // 1. Let parsedURL be the result of parsing url with current settings // object’s API base URL. // 2. If parsedURL is failure, then throw a TypeError. // TODO: base-URL? let parsedURL try { parsedURL = new URL(url, getGlobalOrigin()) } catch (err) { throw Object.assign(new TypeError('Failed to parse URL from ' + url), { cause: err }) } // 3. If status is not a redirect status, then throw a RangeError. if (!redirectStatusSet.has(status)) { throw new RangeError('Invalid status code ' + status) } // 4. Let responseObject be the result of creating a Response object, // given a new response, "immutable", and this’s relevant Realm. const responseObject = new Response() responseObject[kRealm] = relevantRealm responseObject[kHeaders][kGuard] = 'immutable' responseObject[kHeaders][kRealm] = relevantRealm // 5. Set responseObject’s response’s status to status. responseObject[kState].status = status // 6. Let value be parsedURL, serialized and isomorphic encoded. const value = isomorphicEncode(URLSerializer(parsedURL)) // 7. Append `Location`/value to responseObject’s response’s header list. responseObject[kState].headersList.append('location', value) // 8. Return responseObject. return responseObject } // https://fetch.spec.whatwg.org/#dom-response constructor (body = null, init = {}) { if (body !== null) { body = webidl.converters.BodyInit(body) } init = webidl.converters.ResponseInit(init) // TODO this[kRealm] = { settingsObject: {} } // 1. Set this’s response to a new response. this[kState] = makeResponse({}) // 2. Set this’s headers to a new Headers object with this’s relevant // Realm, whose header list is this’s response’s header list and guard // is "response". this[kHeaders] = new Headers(kConstruct) this[kHeaders][kGuard] = 'response' this[kHeaders][kHeadersList] = this[kState].headersList this[kHeaders][kRealm] = this[kRealm] // 3. Let bodyWithType be null. let bodyWithType = null // 4. If body is non-null, then set bodyWithType to the result of extracting body. if (body != null) { const [extractedBody, type] = extractBody(body) bodyWithType = { body: extractedBody, type } } // 5. Perform initialize a response given this, init, and bodyWithType. initializeResponse(this, init, bodyWithType) } // Returns response’s type, e.g., "cors". get type () { webidl.brandCheck(this, Response) // The type getter steps are to return this’s response’s type. return this[kState].type } // Returns response’s URL, if it has one; otherwise the empty string. get url () { webidl.brandCheck(this, Response) const urlList = this[kState].urlList // The url getter steps are to return the empty string if this’s // response’s URL is null; otherwise this’s response’s URL, // serialized with exclude fragment set to true. const url = urlList[urlList.length - 1] ?? null if (url === null) { return '' } return URLSerializer(url, true) } // Returns whether response was obtained through a redirect. get redirected () { webidl.brandCheck(this, Response) // The redirected getter steps are to return true if this’s response’s URL // list has more than one item; otherwise false. return this[kState].urlList.length > 1 } // Returns response’s status. 
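// Illustrative sketch (not from the undici source) of Response.redirect() above:
// the status must be a redirect status and Location is set to the serialized
// URL. Assumes the global Response; URLs are made up.
/*
const res = Response.redirect('https://example.com/next', 307)
console.log(res.status)                  // 307
console.log(res.headers.get('location')) // 'https://example.com/next'
try {
  Response.redirect('https://example.com/next', 200)
} catch (err) {
  // RangeError: Invalid status code 200
}
*/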
get status () { webidl.brandCheck(this, Response) // The status getter steps are to return this’s response’s status. return this[kState].status } // Returns whether response’s status is an ok status. get ok () { webidl.brandCheck(this, Response) // The ok getter steps are to return true if this’s response’s status is an // ok status; otherwise false. return this[kState].status >= 200 && this[kState].status <= 299 } // Returns response’s status message. get statusText () { webidl.brandCheck(this, Response) // The statusText getter steps are to return this’s response’s status // message. return this[kState].statusText } // Returns response’s headers as Headers. get headers () { webidl.brandCheck(this, Response) // The headers getter steps are to return this’s headers. return this[kHeaders] } get body () { webidl.brandCheck(this, Response) return this[kState].body ? this[kState].body.stream : null } get bodyUsed () { webidl.brandCheck(this, Response) return !!this[kState].body && util.isDisturbed(this[kState].body.stream) } // Returns a clone of response. clone () { webidl.brandCheck(this, Response) // 1. If this is unusable, then throw a TypeError. if (this.bodyUsed || (this.body && this.body.locked)) { throw webidl.errors.exception({ header: 'Response.clone', message: 'Body has already been consumed.' }) } // 2. Let clonedResponse be the result of cloning this’s response. const clonedResponse = cloneResponse(this[kState]) // 3. Return the result of creating a Response object, given // clonedResponse, this’s headers’s guard, and this’s relevant Realm. const clonedResponseObject = new Response() clonedResponseObject[kState] = clonedResponse clonedResponseObject[kRealm] = this[kRealm] clonedResponseObject[kHeaders][kHeadersList] = clonedResponse.headersList clonedResponseObject[kHeaders][kGuard] = this[kHeaders][kGuard] clonedResponseObject[kHeaders][kRealm] = this[kHeaders][kRealm] return clonedResponseObject } } mixinBody(Response) Object.defineProperties(Response.prototype, { type: kEnumerableProperty, url: kEnumerableProperty, status: kEnumerableProperty, ok: kEnumerableProperty, redirected: kEnumerableProperty, statusText: kEnumerableProperty, headers: kEnumerableProperty, clone: kEnumerableProperty, body: kEnumerableProperty, bodyUsed: kEnumerableProperty, [Symbol.toStringTag]: { value: 'Response', configurable: true } }) Object.defineProperties(Response, { json: kEnumerableProperty, redirect: kEnumerableProperty, error: kEnumerableProperty }) // https://fetch.spec.whatwg.org/#concept-response-clone function cloneResponse (response) { // To clone a response response, run these steps: // 1. If response is a filtered response, then return a new identical // filtered response whose internal response is a clone of response’s // internal response. if (response.internalResponse) { return filterResponse( cloneResponse(response.internalResponse), response.type ) } // 2. Let newResponse be a copy of response, except for its body. const newResponse = makeResponse({ ...response, body: null }) // 3. If response’s body is non-null, then set newResponse’s body to the // result of cloning response’s body. if (response.body != null) { newResponse.body = cloneBody(response.body) } // 4. Return newResponse. return newResponse } function makeResponse (init) { return { aborted: false, rangeRequested: false, timingAllowPassed: false, requestIncludesCredentials: false, type: 'default', status: 200, timingInfo: null, cacheState: '', statusText: '', ...init, headersList: init.headersList ? 
new HeadersList(init.headersList) : new HeadersList(), urlList: init.urlList ? [...init.urlList] : [] } } function makeNetworkError (reason) { const isError = isErrorLike(reason) return makeResponse({ type: 'error', status: 0, error: isError ? reason : new Error(reason ? String(reason) : reason), aborted: reason && reason.name === 'AbortError' }) } function makeFilteredResponse (response, state) { state = { internalResponse: response, ...state } return new Proxy(response, { get (target, p) { return p in state ? state[p] : target[p] }, set (target, p, value) { assert(!(p in state)) target[p] = value return true } }) } // https://fetch.spec.whatwg.org/#concept-filtered-response function filterResponse (response, type) { // Set response to the following filtered response with response as its // internal response, depending on request’s response tainting: if (type === 'basic') { // A basic filtered response is a filtered response whose type is "basic" // and header list excludes any headers in internal response’s header list // whose name is a forbidden response-header name. // Note: undici does not implement forbidden response-header names return makeFilteredResponse(response, { type: 'basic', headersList: response.headersList }) } else if (type === 'cors') { // A CORS filtered response is a filtered response whose type is "cors" // and header list excludes any headers in internal response’s header // list whose name is not a CORS-safelisted response-header name, given // internal response’s CORS-exposed header-name list. // Note: undici does not implement CORS-safelisted response-header names return makeFilteredResponse(response, { type: 'cors', headersList: response.headersList }) } else if (type === 'opaque') { // An opaque filtered response is a filtered response whose type is // "opaque", URL list is the empty list, status is 0, status message // is the empty byte sequence, header list is empty, and body is null. return makeFilteredResponse(response, { type: 'opaque', urlList: Object.freeze([]), status: 0, statusText: '', body: null }) } else if (type === 'opaqueredirect') { // An opaque-redirect filtered response is a filtered response whose type // is "opaqueredirect", status is 0, status message is the empty byte // sequence, header list is empty, and body is null. return makeFilteredResponse(response, { type: 'opaqueredirect', status: 0, statusText: '', headersList: [], body: null }) } else { assert(false) } } // https://fetch.spec.whatwg.org/#appropriate-network-error function makeAppropriateNetworkError (fetchParams, err = null) { // 1. Assert: fetchParams is canceled. assert(isCancelled(fetchParams)) // 2. Return an aborted network error if fetchParams is aborted; // otherwise return a network error. return isAborted(fetchParams) ? makeNetworkError(Object.assign(new DOMException('The operation was aborted.', 'AbortError'), { cause: err })) : makeNetworkError(Object.assign(new DOMException('Request was cancelled.'), { cause: err })) } // https://whatpr.org/fetch/1392.html#initialize-a-response function initializeResponse (response, init, body) { // 1. If init["status"] is not in the range 200 to 599, inclusive, then // throw a RangeError. if (init.status !== null && (init.status < 200 || init.status > 599)) { throw new RangeError('init["status"] must be in the range of 200 to 599, inclusive.') } // 2. If init["statusText"] does not match the reason-phrase token production, // then throw a TypeError. 
if ('statusText' in init && init.statusText != null) { // See, https://datatracker.ietf.org/doc/html/rfc7230#section-3.1.2: // reason-phrase = *( HTAB / SP / VCHAR / obs-text ) if (!isValidReasonPhrase(String(init.statusText))) { throw new TypeError('Invalid statusText') } } // 3. Set response’s response’s status to init["status"]. if ('status' in init && init.status != null) { response[kState].status = init.status } // 4. Set response’s response’s status message to init["statusText"]. if ('statusText' in init && init.statusText != null) { response[kState].statusText = init.statusText } // 5. If init["headers"] exists, then fill response’s headers with init["headers"]. if ('headers' in init && init.headers != null) { fill(response[kHeaders], init.headers) } // 6. If body was given, then: if (body) { // 1. If response's status is a null body status, then throw a TypeError. if (nullBodyStatus.includes(response.status)) { throw webidl.errors.exception({ header: 'Response constructor', message: 'Invalid response status code ' + response.status }) } // 2. Set response's body to body's body. response[kState].body = body.body // 3. If body's type is non-null and response's header list does not contain // `Content-Type`, then append (`Content-Type`, body's type) to response's header list. if (body.type != null && !response[kState].headersList.contains('Content-Type')) { response[kState].headersList.append('content-type', body.type) } } } webidl.converters.ReadableStream = webidl.interfaceConverter( ReadableStream ) webidl.converters.FormData = webidl.interfaceConverter( FormData ) webidl.converters.URLSearchParams = webidl.interfaceConverter( URLSearchParams ) // https://fetch.spec.whatwg.org/#typedefdef-xmlhttprequestbodyinit webidl.converters.XMLHttpRequestBodyInit = function (V) { if (typeof V === 'string') { return webidl.converters.USVString(V) } if (isBlobLike(V)) { return webidl.converters.Blob(V, { strict: false }) } if (types.isArrayBuffer(V) || types.isTypedArray(V) || types.isDataView(V)) { return webidl.converters.BufferSource(V) } if (util.isFormDataLike(V)) { return webidl.converters.FormData(V, { strict: false }) } if (V instanceof URLSearchParams) { return webidl.converters.URLSearchParams(V) } return webidl.converters.DOMString(V) } // https://fetch.spec.whatwg.org/#bodyinit webidl.converters.BodyInit = function (V) { if (V instanceof ReadableStream) { return webidl.converters.ReadableStream(V) } // Note: the spec doesn't include async iterables, // this is an undici extension. 
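// Illustrative sketch (not from the undici source) of the async-iterable
// extension noted above: any object with Symbol.asyncIterator is accepted as a
// BodyInit by the converter that follows. Assumes the global Response from this
// build (Node >= 18) and that chunks are yielded as Uint8Array/Buffer; contents
// are made up.
/*
(async () => {
  async function * chunks () {
    yield Buffer.from('hello ')
    yield Buffer.from('world')
  }
  const res = new Response(chunks())
  console.log(await res.text()) // 'hello world'
})()
*/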
if (V?.[Symbol.asyncIterator]) { return V } return webidl.converters.XMLHttpRequestBodyInit(V) } webidl.converters.ResponseInit = webidl.dictionaryConverter([ { key: 'status', converter: webidl.converters['unsigned short'], defaultValue: 200 }, { key: 'statusText', converter: webidl.converters.ByteString, defaultValue: '' }, { key: 'headers', converter: webidl.converters.HeadersInit } ]) module.exports = { makeNetworkError, makeResponse, makeAppropriateNetworkError, filterResponse, Response, cloneResponse } /***/ }), /***/ 89710: /***/ ((module) => { "use strict"; module.exports = { kUrl: Symbol('url'), kHeaders: Symbol('headers'), kSignal: Symbol('signal'), kState: Symbol('state'), kGuard: Symbol('guard'), kRealm: Symbol('realm') } /***/ }), /***/ 15523: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { redirectStatusSet, referrerPolicySet: referrerPolicyTokens, badPortsSet } = __nccwpck_require__(87326) const { getGlobalOrigin } = __nccwpck_require__(75628) const { performance } = __nccwpck_require__(82987) const { isBlobLike, toUSVString, ReadableStreamFrom } = __nccwpck_require__(3440) const assert = __nccwpck_require__(42613) const { isUint8Array } = __nccwpck_require__(98253) let supportedHashes = [] // https://nodejs.org/api/crypto.html#determining-if-crypto-support-is-unavailable /** @type {import('crypto')|undefined} */ let crypto try { crypto = __nccwpck_require__(76982) const possibleRelevantHashes = ['sha256', 'sha384', 'sha512'] supportedHashes = crypto.getHashes().filter((hash) => possibleRelevantHashes.includes(hash)) /* c8 ignore next 3 */ } catch { } function responseURL (response) { // https://fetch.spec.whatwg.org/#responses // A response has an associated URL. It is a pointer to the last URL // in response’s URL list and null if response’s URL list is empty. const urlList = response.urlList const length = urlList.length return length === 0 ? null : urlList[length - 1].toString() } // https://fetch.spec.whatwg.org/#concept-response-location-url function responseLocationURL (response, requestFragment) { // 1. If response’s status is not a redirect status, then return null. if (!redirectStatusSet.has(response.status)) { return null } // 2. Let location be the result of extracting header list values given // `Location` and response’s header list. let location = response.headersList.get('location') // 3. If location is a header value, then set location to the result of // parsing location with response’s URL. if (location !== null && isValidHeaderValue(location)) { location = new URL(location, responseURL(response)) } // 4. If location is a URL whose fragment is null, then set location’s // fragment to requestFragment. if (location && !location.hash) { location.hash = requestFragment } // 5. Return location. return location } /** @returns {URL} */ function requestCurrentURL (request) { return request.urlList[request.urlList.length - 1] } function requestBadPort (request) { // 1. Let url be request’s current URL. const url = requestCurrentURL(request) // 2. If url’s scheme is an HTTP(S) scheme and url’s port is a bad port, // then return blocked. if (urlIsHttpHttpsScheme(url) && badPortsSet.has(url.port)) { return 'blocked' } // 3. Return allowed. return 'allowed' } function isErrorLike (object) { return object instanceof Error || ( object?.constructor?.name === 'Error' || object?.constructor?.name === 'DOMException' ) } // Check whether |statusText| is a ByteString and // matches the Reason-Phrase token production. 
// RFC 2616: https://tools.ietf.org/html/rfc2616 // RFC 7230: https://tools.ietf.org/html/rfc7230 // "reason-phrase = *( HTAB / SP / VCHAR / obs-text )" // https://github.com/chromium/chromium/blob/94.0.4604.1/third_party/blink/renderer/core/fetch/response.cc#L116 function isValidReasonPhrase (statusText) { for (let i = 0; i < statusText.length; ++i) { const c = statusText.charCodeAt(i) if ( !( ( c === 0x09 || // HTAB (c >= 0x20 && c <= 0x7e) || // SP / VCHAR (c >= 0x80 && c <= 0xff) ) // obs-text ) ) { return false } } return true } /** * @see https://tools.ietf.org/html/rfc7230#section-3.2.6 * @param {number} c */ function isTokenCharCode (c) { switch (c) { case 0x22: case 0x28: case 0x29: case 0x2c: case 0x2f: case 0x3a: case 0x3b: case 0x3c: case 0x3d: case 0x3e: case 0x3f: case 0x40: case 0x5b: case 0x5c: case 0x5d: case 0x7b: case 0x7d: // DQUOTE and "(),/:;<=>?@[\]{}" return false default: // VCHAR %x21-7E return c >= 0x21 && c <= 0x7e } } /** * @param {string} characters */ function isValidHTTPToken (characters) { if (characters.length === 0) { return false } for (let i = 0; i < characters.length; ++i) { if (!isTokenCharCode(characters.charCodeAt(i))) { return false } } return true } /** * @see https://fetch.spec.whatwg.org/#header-name * @param {string} potentialValue */ function isValidHeaderName (potentialValue) { return isValidHTTPToken(potentialValue) } /** * @see https://fetch.spec.whatwg.org/#header-value * @param {string} potentialValue */ function isValidHeaderValue (potentialValue) { // - Has no leading or trailing HTTP tab or space bytes. // - Contains no 0x00 (NUL) or HTTP newline bytes. if ( potentialValue.startsWith('\t') || potentialValue.startsWith(' ') || potentialValue.endsWith('\t') || potentialValue.endsWith(' ') ) { return false } if ( potentialValue.includes('\0') || potentialValue.includes('\r') || potentialValue.includes('\n') ) { return false } return true } // https://w3c.github.io/webappsec-referrer-policy/#set-requests-referrer-policy-on-redirect function setRequestReferrerPolicyOnRedirect (request, actualResponse) { // Given a request request and a response actualResponse, this algorithm // updates request’s referrer policy according to the Referrer-Policy // header (if any) in actualResponse. // 1. Let policy be the result of executing § 8.1 Parse a referrer policy // from a Referrer-Policy header on actualResponse. // 8.1 Parse a referrer policy from a Referrer-Policy header // 1. Let policy-tokens be the result of extracting header list values given `Referrer-Policy` and response’s header list. const { headersList } = actualResponse // 2. Let policy be the empty string. // 3. For each token in policy-tokens, if token is a referrer policy and token is not the empty string, then set policy to token. // 4. Return policy. const policyHeader = (headersList.get('referrer-policy') ?? '').split(',') // Note: As the referrer-policy can contain multiple policies // separated by comma, we need to loop through all of them // and pick the first valid one. // Ref: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Referrer-Policy#specify_a_fallback_policy let policy = '' if (policyHeader.length > 0) { // The right-most policy takes precedence. // The left-most policy is the fallback. for (let i = policyHeader.length; i !== 0; i--) { const token = policyHeader[i - 1].trim() if (referrerPolicyTokens.has(token)) { policy = token break } } } // 2. If policy is not the empty string, then set request’s referrer policy to policy. 
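// Illustrative sketch (not from the undici source) of the Referrer-Policy
// parsing above: the header may carry several comma-separated tokens and the
// right-most recognized policy wins. The policy set below is a small subset
// used only for illustration.
/*
const recognized = new Set(['no-referrer', 'origin', 'strict-origin', 'unsafe-url'])
const header = 'unsafe-url, not-a-policy, strict-origin'
const tokens = header.split(',')
let policy = ''
for (let i = tokens.length; i !== 0; i--) {
  const token = tokens[i - 1].trim()
  if (recognized.has(token)) {
    policy = token
    break
  }
}
console.log(policy) // 'strict-origin'
*/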
if (policy !== '') { request.referrerPolicy = policy } } // https://fetch.spec.whatwg.org/#cross-origin-resource-policy-check function crossOriginResourcePolicyCheck () { // TODO return 'allowed' } // https://fetch.spec.whatwg.org/#concept-cors-check function corsCheck () { // TODO return 'success' } // https://fetch.spec.whatwg.org/#concept-tao-check function TAOCheck () { // TODO return 'success' } function appendFetchMetadata (httpRequest) { // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-dest-header // TODO // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-mode-header // 1. Assert: r’s url is a potentially trustworthy URL. // TODO // 2. Let header be a Structured Header whose value is a token. let header = null // 3. Set header’s value to r’s mode. header = httpRequest.mode // 4. Set a structured field value `Sec-Fetch-Mode`/header in r’s header list. httpRequest.headersList.set('sec-fetch-mode', header) // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-site-header // TODO // https://w3c.github.io/webappsec-fetch-metadata/#sec-fetch-user-header // TODO } // https://fetch.spec.whatwg.org/#append-a-request-origin-header function appendRequestOriginHeader (request) { // 1. Let serializedOrigin be the result of byte-serializing a request origin with request. let serializedOrigin = request.origin // 2. If request’s response tainting is "cors" or request’s mode is "websocket", then append (`Origin`, serializedOrigin) to request’s header list. if (request.responseTainting === 'cors' || request.mode === 'websocket') { if (serializedOrigin) { request.headersList.append('origin', serializedOrigin) } // 3. Otherwise, if request’s method is neither `GET` nor `HEAD`, then: } else if (request.method !== 'GET' && request.method !== 'HEAD') { // 1. Switch on request’s referrer policy: switch (request.referrerPolicy) { case 'no-referrer': // Set serializedOrigin to `null`. serializedOrigin = null break case 'no-referrer-when-downgrade': case 'strict-origin': case 'strict-origin-when-cross-origin': // If request’s origin is a tuple origin, its scheme is "https", and request’s current URL’s scheme is not "https", then set serializedOrigin to `null`. if (request.origin && urlHasHttpsScheme(request.origin) && !urlHasHttpsScheme(requestCurrentURL(request))) { serializedOrigin = null } break case 'same-origin': // If request’s origin is not same origin with request’s current URL’s origin, then set serializedOrigin to `null`. if (!sameOrigin(request, requestCurrentURL(request))) { serializedOrigin = null } break default: // Do nothing. } if (serializedOrigin) { // 2. Append (`Origin`, serializedOrigin) to request’s header list. request.headersList.append('origin', serializedOrigin) } } } function coarsenedSharedCurrentTime (crossOriginIsolatedCapability) { // TODO return performance.now() } // https://fetch.spec.whatwg.org/#create-an-opaque-timing-info function createOpaqueTimingInfo (timingInfo) { return { startTime: timingInfo.startTime ?? 0, redirectStartTime: 0, redirectEndTime: 0, postRedirectStartTime: timingInfo.startTime ?? 
0, finalServiceWorkerStartTime: 0, finalNetworkResponseStartTime: 0, finalNetworkRequestStartTime: 0, endTime: 0, encodedBodySize: 0, decodedBodySize: 0, finalConnectionTimingInfo: null } } // https://html.spec.whatwg.org/multipage/origin.html#policy-container function makePolicyContainer () { // Note: the fetch spec doesn't make use of embedder policy or CSP list return { referrerPolicy: 'strict-origin-when-cross-origin' } } // https://html.spec.whatwg.org/multipage/origin.html#clone-a-policy-container function clonePolicyContainer (policyContainer) { return { referrerPolicy: policyContainer.referrerPolicy } } // https://w3c.github.io/webappsec-referrer-policy/#determine-requests-referrer function determineRequestsReferrer (request) { // 1. Let policy be request's referrer policy. const policy = request.referrerPolicy // Note: policy cannot (shouldn't) be null or an empty string. assert(policy) // 2. Let environment be request’s client. let referrerSource = null // 3. Switch on request’s referrer: if (request.referrer === 'client') { // Note: node isn't a browser and doesn't implement document/iframes, // so we bypass this step and replace it with our own. const globalOrigin = getGlobalOrigin() if (!globalOrigin || globalOrigin.origin === 'null') { return 'no-referrer' } // note: we need to clone it as it's mutated referrerSource = new URL(globalOrigin) } else if (request.referrer instanceof URL) { // Let referrerSource be request’s referrer. referrerSource = request.referrer } // 4. Let request’s referrerURL be the result of stripping referrerSource for // use as a referrer. let referrerURL = stripURLForReferrer(referrerSource) // 5. Let referrerOrigin be the result of stripping referrerSource for use as // a referrer, with the origin-only flag set to true. const referrerOrigin = stripURLForReferrer(referrerSource, true) // 6. If the result of serializing referrerURL is a string whose length is // greater than 4096, set referrerURL to referrerOrigin. if (referrerURL.toString().length > 4096) { referrerURL = referrerOrigin } const areSameOrigin = sameOrigin(request, referrerURL) const isNonPotentiallyTrustWorthy = isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(request.url) // 8. Execute the switch statements corresponding to the value of policy: switch (policy) { case 'origin': return referrerOrigin != null ? referrerOrigin : stripURLForReferrer(referrerSource, true) case 'unsafe-url': return referrerURL case 'same-origin': return areSameOrigin ? referrerOrigin : 'no-referrer' case 'origin-when-cross-origin': return areSameOrigin ? referrerURL : referrerOrigin case 'strict-origin-when-cross-origin': { const currentURL = requestCurrentURL(request) // 1. If the origin of referrerURL and the origin of request’s current // URL are the same, then return referrerURL. if (sameOrigin(referrerURL, currentURL)) { return referrerURL } // 2. If referrerURL is a potentially trustworthy URL and request’s // current URL is not a potentially trustworthy URL, then return no // referrer. if (isURLPotentiallyTrustworthy(referrerURL) && !isURLPotentiallyTrustworthy(currentURL)) { return 'no-referrer' } // 3. Return referrerOrigin. return referrerOrigin } case 'strict-origin': // eslint-disable-line /** * 1. If referrerURL is a potentially trustworthy URL and * request’s current URL is not a potentially trustworthy URL, * then return no referrer. * 2. Return referrerOrigin */ case 'no-referrer-when-downgrade': // eslint-disable-line /** * 1. 
If referrerURL is a potentially trustworthy URL and * request’s current URL is not a potentially trustworthy URL, * then return no referrer. * 2. Return referrerOrigin */ default: // eslint-disable-line return isNonPotentiallyTrustWorthy ? 'no-referrer' : referrerOrigin } } /** * @see https://w3c.github.io/webappsec-referrer-policy/#strip-url * @param {URL} url * @param {boolean|undefined} originOnly */ function stripURLForReferrer (url, originOnly) { // 1. Assert: url is a URL. assert(url instanceof URL) // 2. If url’s scheme is a local scheme, then return no referrer. if (url.protocol === 'file:' || url.protocol === 'about:' || url.protocol === 'blank:') { return 'no-referrer' } // 3. Set url’s username to the empty string. url.username = '' // 4. Set url’s password to the empty string. url.password = '' // 5. Set url’s fragment to null. url.hash = '' // 6. If the origin-only flag is true, then: if (originOnly) { // 1. Set url’s path to « the empty string ». url.pathname = '' // 2. Set url’s query to null. url.search = '' } // 7. Return url. return url } function isURLPotentiallyTrustworthy (url) { if (!(url instanceof URL)) { return false } // If child of about, return true if (url.href === 'about:blank' || url.href === 'about:srcdoc') { return true } // If scheme is data, return true if (url.protocol === 'data:') return true // If file, return true if (url.protocol === 'file:') return true return isOriginPotentiallyTrustworthy(url.origin) function isOriginPotentiallyTrustworthy (origin) { // If origin is explicitly null, return false if (origin == null || origin === 'null') return false const originAsURL = new URL(origin) // If secure, return true if (originAsURL.protocol === 'https:' || originAsURL.protocol === 'wss:') { return true } // If localhost or variants, return true if (/^127(?:\.[0-9]+){0,2}\.[0-9]+$|^\[(?:0*:)*?:?0*1\]$/.test(originAsURL.hostname) || (originAsURL.hostname === 'localhost' || originAsURL.hostname.includes('localhost.')) || (originAsURL.hostname.endsWith('.localhost'))) { return true } // If any other, return false return false } } /** * @see https://w3c.github.io/webappsec-subresource-integrity/#does-response-match-metadatalist * @param {Uint8Array} bytes * @param {string} metadataList */ function bytesMatch (bytes, metadataList) { // If node is not built with OpenSSL support, we cannot check // a request's integrity, so allow it by default (the spec will // allow requests if an invalid hash is given, as precedence). /* istanbul ignore if: only if node is built with --without-ssl */ if (crypto === undefined) { return true } // 1. Let parsedMetadata be the result of parsing metadataList. const parsedMetadata = parseMetadata(metadataList) // 2. If parsedMetadata is no metadata, return true. if (parsedMetadata === 'no metadata') { return true } // 3. If response is not eligible for integrity validation, return false. // TODO // 4. If parsedMetadata is the empty set, return true. if (parsedMetadata.length === 0) { return true } // 5. Let metadata be the result of getting the strongest // metadata from parsedMetadata. const strongest = getStrongestMetadata(parsedMetadata) const metadata = filterMetadataListByAlgorithm(parsedMetadata, strongest) // 6. For each item in metadata: for (const item of metadata) { // 1. Let algorithm be the alg component of item. const algorithm = item.algo // 2. Let expectedValue be the val component of item. 
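/*
 * Usage sketch for the referrer helpers defined above (illustrative values;
 * both functions expect WHATWG URL instances):
 *
 *   stripURLForReferrer(new URL('https://user:pw@example.com/a/b?q=1#frag')).toString()
 *   // -> 'https://example.com/a/b?q=1'  (credentials and fragment removed)
 *   stripURLForReferrer(new URL('https://example.com/a/b?q=1'), true).toString()
 *   // -> 'https://example.com/'         (origin-only form)
 *   isURLPotentiallyTrustworthy(new URL('http://localhost:8080'))  // -> true
 *   isURLPotentiallyTrustworthy(new URL('http://example.com'))     // -> false
 */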
const expectedValue = item.hash // See https://github.com/web-platform-tests/wpt/commit/e4c5cc7a5e48093220528dfdd1c4012dc3837a0e // "be liberal with padding". This is annoying, and it's not even in the spec. // 3. Let actualValue be the result of applying algorithm to bytes. let actualValue = crypto.createHash(algorithm).update(bytes).digest('base64') if (actualValue[actualValue.length - 1] === '=') { if (actualValue[actualValue.length - 2] === '=') { actualValue = actualValue.slice(0, -2) } else { actualValue = actualValue.slice(0, -1) } } // 4. If actualValue is a case-sensitive match for expectedValue, // return true. if (compareBase64Mixed(actualValue, expectedValue)) { return true } } // 7. Return false. return false } // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-hash-with-options // https://www.w3.org/TR/CSP2/#source-list-syntax // https://www.rfc-editor.org/rfc/rfc5234#appendix-B.1 const parseHashWithOptions = /(?<algo>sha256|sha384|sha512)-((?<hash>[A-Za-z0-9+/]+|[A-Za-z0-9_-]+)={0,2}(?:\s|$)( +[!-~]*)?)?/i /** * @see https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata * @param {string} metadata */ function parseMetadata (metadata) { // 1. Let result be the empty set. /** @type {{ algo: string, hash: string }[]} */ const result = [] // 2. Let empty be equal to true. let empty = true // 3. For each token returned by splitting metadata on spaces: for (const token of metadata.split(' ')) { // 1. Set empty to false. empty = false // 2. Parse token as a hash-with-options. const parsedToken = parseHashWithOptions.exec(token) // 3. If token does not parse, continue to the next token. if ( parsedToken === null || parsedToken.groups === undefined || parsedToken.groups.algo === undefined ) { // Note: Chromium blocks the request at this point, but Firefox // gives a warning that an invalid integrity was given. The // correct behavior is to ignore these, and subsequently not // check the integrity of the resource. continue } // 4. Let algorithm be the hash-algo component of token. const algorithm = parsedToken.groups.algo.toLowerCase() // 5. If algorithm is a hash function recognized by the user // agent, add the parsed token to result. if (supportedHashes.includes(algorithm)) { result.push(parsedToken.groups) } } // 4. Return no metadata if empty is true, otherwise return result. if (empty === true) { return 'no metadata' } return result } /** * @param {{ algo: 'sha256' | 'sha384' | 'sha512' }[]} metadataList */ function getStrongestMetadata (metadataList) { // Let algorithm be the algo component of the first item in metadataList. 
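/*
 * Subresource-integrity sketch (illustrative; assumes the supportedHashes list
 * defined earlier in this file includes 'sha256' and that node was built with
 * crypto support). The digest below is the well-known SHA-256 of the empty
 * byte sequence:
 *
 *   parseMetadata('sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=')
 *   // -> [{ algo: 'sha256', hash: '47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU' }]
 *   //    (the hash capture group stops before the '=' padding)
 *   bytesMatch(Buffer.alloc(0), 'sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU=')
 *   // -> true
 */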
// Can be sha256 let algorithm = metadataList[0].algo // If the algorithm is sha512, then it is the strongest // and we can return immediately if (algorithm[3] === '5') { return algorithm } for (let i = 1; i < metadataList.length; ++i) { const metadata = metadataList[i] // If the algorithm is sha512, then it is the strongest // and we can break the loop immediately if (metadata.algo[3] === '5') { algorithm = 'sha512' break // If the algorithm is sha384, then a potential sha256 or sha384 is ignored } else if (algorithm[3] === '3') { continue // algorithm is sha256, check if algorithm is sha384 and if so, set it as // the strongest } else if (metadata.algo[3] === '3') { algorithm = 'sha384' } } return algorithm } function filterMetadataListByAlgorithm (metadataList, algorithm) { if (metadataList.length === 1) { return metadataList } let pos = 0 for (let i = 0; i < metadataList.length; ++i) { if (metadataList[i].algo === algorithm) { metadataList[pos++] = metadataList[i] } } metadataList.length = pos return metadataList } /** * Compares two base64 strings, allowing for base64url * in the second string. * * @param {string} actualValue always base64 * @param {string} expectedValue base64 or base64url * @returns {boolean} */ function compareBase64Mixed (actualValue, expectedValue) { if (actualValue.length !== expectedValue.length) { return false } for (let i = 0; i < actualValue.length; ++i) { if (actualValue[i] !== expectedValue[i]) { if ( (actualValue[i] === '+' && expectedValue[i] === '-') || (actualValue[i] === '/' && expectedValue[i] === '_') ) { continue } return false } } return true } // https://w3c.github.io/webappsec-upgrade-insecure-requests/#upgrade-request function tryUpgradeRequestToAPotentiallyTrustworthyURL (request) { // TODO } /** * @link {https://html.spec.whatwg.org/multipage/origin.html#same-origin} * @param {URL} A * @param {URL} B */ function sameOrigin (A, B) { // 1. If A and B are the same opaque origin, then return true. if (A.origin === B.origin && A.origin === 'null') { return true } // 2. If A and B are both tuple origins and their schemes, // hosts, and port are identical, then return true. if (A.protocol === B.protocol && A.hostname === B.hostname && A.port === B.port) { return true } // 3. Return false. return false } function createDeferredPromise () { let res let rej const promise = new Promise((resolve, reject) => { res = resolve rej = reject }) return { promise, resolve: res, reject: rej } } function isAborted (fetchParams) { return fetchParams.controller.state === 'aborted' } function isCancelled (fetchParams) { return fetchParams.controller.state === 'aborted' || fetchParams.controller.state === 'terminated' } const normalizeMethodRecord = { delete: 'DELETE', DELETE: 'DELETE', get: 'GET', GET: 'GET', head: 'HEAD', HEAD: 'HEAD', options: 'OPTIONS', OPTIONS: 'OPTIONS', post: 'POST', POST: 'POST', put: 'PUT', PUT: 'PUT' } // Note: object prototypes should not be able to be referenced. e.g. `Object#hasOwnProperty`. Object.setPrototypeOf(normalizeMethodRecord, null) /** * @see https://fetch.spec.whatwg.org/#concept-method-normalize * @param {string} method */ function normalizeMethod (method) { return normalizeMethodRecord[method.toLowerCase()] ?? method } // https://infra.spec.whatwg.org/#serialize-a-javascript-value-to-a-json-string function serializeJavascriptValueToJSONString (value) { // 1. Let result be ? Call(%JSON.stringify%, undefined, « value »). const result = JSON.stringify(value) // 2. If result is undefined, then throw a TypeError. 
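/*
 * Quick sketch of the request helpers above (illustrative values):
 *
 *   normalizeMethod('get')    // -> 'GET'   (known method, normalized via the lookup table)
 *   normalizeMethod('pAtCh')  // -> 'pAtCh' (not in normalizeMethodRecord, returned as-is)
 *   sameOrigin(new URL('https://a.example/x'), new URL('https://a.example/y'))  // -> true
 *   sameOrigin(new URL('http://a.example/'), new URL('https://a.example/'))     // -> false
 *   serializeJavascriptValueToJSONString({ ok: 1 })  // -> '{"ok":1}'
 *   serializeJavascriptValueToJSONString(undefined)  // throws TypeError
 */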
if (result === undefined) { throw new TypeError('Value is not JSON serializable') } // 3. Assert: result is a string. assert(typeof result === 'string') // 4. Return result. return result } // https://tc39.es/ecma262/#sec-%25iteratorprototype%25-object const esIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())) /** * @see https://webidl.spec.whatwg.org/#dfn-iterator-prototype-object * @param {() => unknown[]} iterator * @param {string} name name of the instance * @param {'key'|'value'|'key+value'} kind */ function makeIterator (iterator, name, kind) { const object = { index: 0, kind, target: iterator } const i = { next () { // 1. Let interface be the interface for which the iterator prototype object exists. // 2. Let thisValue be the this value. // 3. Let object be ? ToObject(thisValue). // 4. If object is a platform object, then perform a security // check, passing: // 5. If object is not a default iterator object for interface, // then throw a TypeError. if (Object.getPrototypeOf(this) !== i) { throw new TypeError( `'next' called on an object that does not implement interface ${name} Iterator.` ) } // 6. Let index be object’s index. // 7. Let kind be object’s kind. // 8. Let values be object’s target's value pairs to iterate over. const { index, kind, target } = object const values = target() // 9. Let len be the length of values. const len = values.length // 10. If index is greater than or equal to len, then return // CreateIterResultObject(undefined, true). if (index >= len) { return { value: undefined, done: true } } // 11. Let pair be the entry in values at index index. const pair = values[index] // 12. Set object’s index to index + 1. object.index = index + 1 // 13. Return the iterator result for pair and kind. return iteratorResult(pair, kind) }, // The class string of an iterator prototype object for a given interface is the // result of concatenating the identifier of the interface and the string " Iterator". [Symbol.toStringTag]: `${name} Iterator` } // The [[Prototype]] internal slot of an iterator prototype object must be %IteratorPrototype%. Object.setPrototypeOf(i, esIteratorPrototype) // esIteratorPrototype needs to be the prototype of i // which is the prototype of an empty object. Yes, it's confusing. return Object.setPrototypeOf({}, i) } // https://webidl.spec.whatwg.org/#iterator-result function iteratorResult (pair, kind) { let result // 1. Let result be a value determined by the value of kind: switch (kind) { case 'key': { // 1. Let idlKey be pair’s key. // 2. Let key be the result of converting idlKey to an // ECMAScript value. // 3. result is key. result = pair[0] break } case 'value': { // 1. Let idlValue be pair’s value. // 2. Let value be the result of converting idlValue to // an ECMAScript value. // 3. result is value. result = pair[1] break } case 'key+value': { // 1. Let idlKey be pair’s key. // 2. Let idlValue be pair’s value. // 3. Let key be the result of converting idlKey to an // ECMAScript value. // 4. Let value be the result of converting idlValue to // an ECMAScript value. // 5. Let array be ! ArrayCreate(2). // 6. Call ! CreateDataProperty(array, "0", key). // 7. Call ! CreateDataProperty(array, "1", value). // 8. result is array. result = pair break } } // 2. Return CreateIterResultObject(result, false). return { value: result, done: false } } /** * @see https://fetch.spec.whatwg.org/#body-fully-read */ async function fullyReadBody (body, processBody, processBodyError) { // 1. 
If taskDestination is null, then set taskDestination to // the result of starting a new parallel queue. // 2. Let successSteps given a byte sequence bytes be to queue a // fetch task to run processBody given bytes, with taskDestination. const successSteps = processBody // 3. Let errorSteps be to queue a fetch task to run processBodyError, // with taskDestination. const errorSteps = processBodyError // 4. Let reader be the result of getting a reader for body’s stream. // If that threw an exception, then run errorSteps with that // exception and return. let reader try { reader = body.stream.getReader() } catch (e) { errorSteps(e) return } // 5. Read all bytes from reader, given successSteps and errorSteps. try { const result = await readAllBytes(reader) successSteps(result) } catch (e) { errorSteps(e) } } /** @type {ReadableStream} */ let ReadableStream = globalThis.ReadableStream function isReadableStreamLike (stream) { if (!ReadableStream) { ReadableStream = (__nccwpck_require__(63774).ReadableStream) } return stream instanceof ReadableStream || ( stream[Symbol.toStringTag] === 'ReadableStream' && typeof stream.tee === 'function' ) } const MAXIMUM_ARGUMENT_LENGTH = 65535 /** * @see https://infra.spec.whatwg.org/#isomorphic-decode * @param {number[]|Uint8Array} input */ function isomorphicDecode (input) { // 1. To isomorphic decode a byte sequence input, return a string whose code point // length is equal to input’s length and whose code points have the same values // as the values of input’s bytes, in the same order. if (input.length < MAXIMUM_ARGUMENT_LENGTH) { return String.fromCharCode(...input) } return input.reduce((previous, current) => previous + String.fromCharCode(current), '') } /** * @param {ReadableStreamController} controller */ function readableStreamClose (controller) { try { controller.close() } catch (err) { // TODO: add comment explaining why this error occurs. if (!err.message.includes('Controller is already closed')) { throw err } } } /** * @see https://infra.spec.whatwg.org/#isomorphic-encode * @param {string} input */ function isomorphicEncode (input) { // 1. Assert: input contains no code points greater than U+00FF. for (let i = 0; i < input.length; i++) { assert(input.charCodeAt(i) <= 0xFF) } // 2. Return a byte sequence whose length is equal to input’s code // point length and whose bytes have the same values as the // values of input’s code points, in the same order return input } /** * @see https://streams.spec.whatwg.org/#readablestreamdefaultreader-read-all-bytes * @see https://streams.spec.whatwg.org/#read-loop * @param {ReadableStreamDefaultReader} reader */ async function readAllBytes (reader) { const bytes = [] let byteLength = 0 while (true) { const { done, value: chunk } = await reader.read() if (done) { // 1. Call successSteps with bytes. return Buffer.concat(bytes, byteLength) } // 1. If chunk is not a Uint8Array object, call failureSteps // with a TypeError and abort these steps. if (!isUint8Array(chunk)) { throw new TypeError('Received non-Uint8Array chunk') } // 2. Append the bytes represented by chunk to bytes. bytes.push(chunk) byteLength += chunk.length // 3. Read-loop given reader, bytes, successSteps, and failureSteps. 
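/*
 * Isomorphic encode/decode sketch (illustrative byte values):
 *
 *   isomorphicDecode(new Uint8Array([0x68, 0x74, 0x74, 0x70]))  // -> 'http'
 *   isomorphicDecode(new Uint8Array([0xE9]))                    // -> '\u00E9'
 *   isomorphicEncode('abc')  // -> 'abc' (asserts every code point is <= 0xFF)
 */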
} } /** * @see https://fetch.spec.whatwg.org/#is-local * @param {URL} url */ function urlIsLocal (url) { assert('protocol' in url) // ensure it's a url object const protocol = url.protocol return protocol === 'about:' || protocol === 'blob:' || protocol === 'data:' } /** * @param {string|URL} url */ function urlHasHttpsScheme (url) { if (typeof url === 'string') { return url.startsWith('https:') } return url.protocol === 'https:' } /** * @see https://fetch.spec.whatwg.org/#http-scheme * @param {URL} url */ function urlIsHttpHttpsScheme (url) { assert('protocol' in url) // ensure it's a url object const protocol = url.protocol return protocol === 'http:' || protocol === 'https:' } /** * Fetch supports node >= 16.8.0, but Object.hasOwn was added in v16.9.0. */ const hasOwn = Object.hasOwn || ((dict, key) => Object.prototype.hasOwnProperty.call(dict, key)) module.exports = { isAborted, isCancelled, createDeferredPromise, ReadableStreamFrom, toUSVString, tryUpgradeRequestToAPotentiallyTrustworthyURL, coarsenedSharedCurrentTime, determineRequestsReferrer, makePolicyContainer, clonePolicyContainer, appendFetchMetadata, appendRequestOriginHeader, TAOCheck, corsCheck, crossOriginResourcePolicyCheck, createOpaqueTimingInfo, setRequestReferrerPolicyOnRedirect, isValidHTTPToken, requestBadPort, requestCurrentURL, responseURL, responseLocationURL, isBlobLike, isURLPotentiallyTrustworthy, isValidReasonPhrase, sameOrigin, normalizeMethod, serializeJavascriptValueToJSONString, makeIterator, isValidHeaderName, isValidHeaderValue, hasOwn, isErrorLike, fullyReadBody, bytesMatch, isReadableStreamLike, readableStreamClose, isomorphicEncode, isomorphicDecode, urlIsLocal, urlHasHttpsScheme, urlIsHttpHttpsScheme, readAllBytes, normalizeMethodRecord, parseMetadata } /***/ }), /***/ 74222: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { types } = __nccwpck_require__(39023) const { hasOwn, toUSVString } = __nccwpck_require__(15523) /** @type {import('../../types/webidl').Webidl} */ const webidl = {} webidl.converters = {} webidl.util = {} webidl.errors = {} webidl.errors.exception = function (message) { return new TypeError(`${message.header}: ${message.message}`) } webidl.errors.conversionFailed = function (context) { const plural = context.types.length === 1 ? '' : ' one of' const message = `${context.argument} could not be converted to` + `${plural}: ${context.types.join(', ')}.` return webidl.errors.exception({ header: context.prefix, message }) } webidl.errors.invalidArgument = function (context) { return webidl.errors.exception({ header: context.prefix, message: `"${context.value}" is an invalid ${context.type}.` }) } // https://webidl.spec.whatwg.org/#implements webidl.brandCheck = function (V, I, opts = undefined) { if (opts?.strict !== false && !(V instanceof I)) { throw new TypeError('Illegal invocation') } else { return V?.[Symbol.toStringTag] === I.prototype[Symbol.toStringTag] } } webidl.argumentLengthCheck = function ({ length }, min, ctx) { if (length < min) { throw webidl.errors.exception({ message: `${min} argument${min !== 1 ? 's' : ''} required, ` + `but${length ? 
' only' : ''} ${length} found.`, ...ctx }) } } webidl.illegalConstructor = function () { throw webidl.errors.exception({ header: 'TypeError', message: 'Illegal constructor' }) } // https://tc39.es/ecma262/#sec-ecmascript-data-types-and-values webidl.util.Type = function (V) { switch (typeof V) { case 'undefined': return 'Undefined' case 'boolean': return 'Boolean' case 'string': return 'String' case 'symbol': return 'Symbol' case 'number': return 'Number' case 'bigint': return 'BigInt' case 'function': case 'object': { if (V === null) { return 'Null' } return 'Object' } } } // https://webidl.spec.whatwg.org/#abstract-opdef-converttoint webidl.util.ConvertToInt = function (V, bitLength, signedness, opts = {}) { let upperBound let lowerBound // 1. If bitLength is 64, then: if (bitLength === 64) { // 1. Let upperBound be 2^53 − 1. upperBound = Math.pow(2, 53) - 1 // 2. If signedness is "unsigned", then let lowerBound be 0. if (signedness === 'unsigned') { lowerBound = 0 } else { // 3. Otherwise let lowerBound be −2^53 + 1. lowerBound = Math.pow(-2, 53) + 1 } } else if (signedness === 'unsigned') { // 2. Otherwise, if signedness is "unsigned", then: // 1. Let lowerBound be 0. lowerBound = 0 // 2. Let upperBound be 2^bitLength − 1. upperBound = Math.pow(2, bitLength) - 1 } else { // 3. Otherwise: // 1. Let lowerBound be -2^bitLength − 1. lowerBound = Math.pow(-2, bitLength) - 1 // 2. Let upperBound be 2^bitLength − 1 − 1. upperBound = Math.pow(2, bitLength - 1) - 1 } // 4. Let x be ? ToNumber(V). let x = Number(V) // 5. If x is −0, then set x to +0. if (x === 0) { x = 0 } // 6. If the conversion is to an IDL type associated // with the [EnforceRange] extended attribute, then: if (opts.enforceRange === true) { // 1. If x is NaN, +∞, or −∞, then throw a TypeError. if ( Number.isNaN(x) || x === Number.POSITIVE_INFINITY || x === Number.NEGATIVE_INFINITY ) { throw webidl.errors.exception({ header: 'Integer conversion', message: `Could not convert ${V} to an integer.` }) } // 2. Set x to IntegerPart(x). x = webidl.util.IntegerPart(x) // 3. If x < lowerBound or x > upperBound, then // throw a TypeError. if (x < lowerBound || x > upperBound) { throw webidl.errors.exception({ header: 'Integer conversion', message: `Value must be between ${lowerBound}-${upperBound}, got ${x}.` }) } // 4. Return x. return x } // 7. If x is not NaN and the conversion is to an IDL // type associated with the [Clamp] extended // attribute, then: if (!Number.isNaN(x) && opts.clamp === true) { // 1. Set x to min(max(x, lowerBound), upperBound). x = Math.min(Math.max(x, lowerBound), upperBound) // 2. Round x to the nearest integer, choosing the // even integer if it lies halfway between two, // and choosing +0 rather than −0. if (Math.floor(x) % 2 === 0) { x = Math.floor(x) } else { x = Math.ceil(x) } // 3. Return x. return x } // 8. If x is NaN, +0, +∞, or −∞, then return +0. if ( Number.isNaN(x) || (x === 0 && Object.is(0, x)) || x === Number.POSITIVE_INFINITY || x === Number.NEGATIVE_INFINITY ) { return 0 } // 9. Set x to IntegerPart(x). x = webidl.util.IntegerPart(x) // 10. Set x to x modulo 2^bitLength. x = x % Math.pow(2, bitLength) // 11. If signedness is "signed" and x ≥ 2^bitLength − 1, // then return x − 2^bitLength. if (signedness === 'signed' && x >= Math.pow(2, bitLength) - 1) { return x - Math.pow(2, bitLength) } // 12. Otherwise, return x. return x } // https://webidl.spec.whatwg.org/#abstract-opdef-integerpart webidl.util.IntegerPart = function (n) { // 1. Let r be floor(abs(n)). 
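/*
 * ConvertToInt sketch (illustrative inputs), following the branches above:
 *
 *   webidl.util.ConvertToInt(300, 8, 'unsigned')                   // -> 44  (300 modulo 2^8)
 *   webidl.util.ConvertToInt(300, 8, 'unsigned', { clamp: true })  // -> 255 (clamped to the upper bound)
 *   webidl.util.ConvertToInt(Infinity, 8, 'unsigned', { enforceRange: true })  // throws TypeError
 */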
const r = Math.floor(Math.abs(n)) // 2. If n < 0, then return -1 × r. if (n < 0) { return -1 * r } // 3. Otherwise, return r. return r } // https://webidl.spec.whatwg.org/#es-sequence webidl.sequenceConverter = function (converter) { return (V) => { // 1. If Type(V) is not Object, throw a TypeError. if (webidl.util.Type(V) !== 'Object') { throw webidl.errors.exception({ header: 'Sequence', message: `Value of type ${webidl.util.Type(V)} is not an Object.` }) } // 2. Let method be ? GetMethod(V, @@iterator). /** @type {Generator} */ const method = V?.[Symbol.iterator]?.() const seq = [] // 3. If method is undefined, throw a TypeError. if ( method === undefined || typeof method.next !== 'function' ) { throw webidl.errors.exception({ header: 'Sequence', message: 'Object is not an iterator.' }) } // https://webidl.spec.whatwg.org/#create-sequence-from-iterable while (true) { const { done, value } = method.next() if (done) { break } seq.push(converter(value)) } return seq } } // https://webidl.spec.whatwg.org/#es-to-record webidl.recordConverter = function (keyConverter, valueConverter) { return (O) => { // 1. If Type(O) is not Object, throw a TypeError. if (webidl.util.Type(O) !== 'Object') { throw webidl.errors.exception({ header: 'Record', message: `Value of type ${webidl.util.Type(O)} is not an Object.` }) } // 2. Let result be a new empty instance of record. const result = {} if (!types.isProxy(O)) { // Object.keys only returns enumerable properties const keys = Object.keys(O) for (const key of keys) { // 1. Let typedKey be key converted to an IDL value of type K. const typedKey = keyConverter(key) // 2. Let value be ? Get(O, key). // 3. Let typedValue be value converted to an IDL value of type V. const typedValue = valueConverter(O[key]) // 4. Set result[typedKey] to typedValue. result[typedKey] = typedValue } // 5. Return result. return result } // 3. Let keys be ? O.[[OwnPropertyKeys]](). const keys = Reflect.ownKeys(O) // 4. For each key of keys. for (const key of keys) { // 1. Let desc be ? O.[[GetOwnProperty]](key). const desc = Reflect.getOwnPropertyDescriptor(O, key) // 2. If desc is not undefined and desc.[[Enumerable]] is true: if (desc?.enumerable) { // 1. Let typedKey be key converted to an IDL value of type K. const typedKey = keyConverter(key) // 2. Let value be ? Get(O, key). // 3. Let typedValue be value converted to an IDL value of type V. const typedValue = valueConverter(O[key]) // 4. Set result[typedKey] to typedValue. result[typedKey] = typedValue } } // 5. Return result. 
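/*
 * Converter-factory sketch (illustrative; DOMString and ByteString are defined
 * further down in this module):
 *
 *   const toStringSequence = webidl.sequenceConverter(webidl.converters.DOMString)
 *   toStringSequence(['a', 1, true])  // -> ['a', '1', 'true']
 *   toStringSequence('abc')           // throws TypeError (a String is not an Object)
 *
 *   const toRecord = webidl.recordConverter(webidl.converters.ByteString, webidl.converters.DOMString)
 *   toRecord({ accept: 'text/html', 'x-count': 3 })  // -> { accept: 'text/html', 'x-count': '3' }
 */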
return result } } webidl.interfaceConverter = function (i) { return (V, opts = {}) => { if (opts.strict !== false && !(V instanceof i)) { throw webidl.errors.exception({ header: i.name, message: `Expected ${V} to be an instance of ${i.name}.` }) } return V } } webidl.dictionaryConverter = function (converters) { return (dictionary) => { const type = webidl.util.Type(dictionary) const dict = {} if (type === 'Null' || type === 'Undefined') { return dict } else if (type !== 'Object') { throw webidl.errors.exception({ header: 'Dictionary', message: `Expected ${dictionary} to be one of: Null, Undefined, Object.` }) } for (const options of converters) { const { key, defaultValue, required, converter } = options if (required === true) { if (!hasOwn(dictionary, key)) { throw webidl.errors.exception({ header: 'Dictionary', message: `Missing required key "${key}".` }) } } let value = dictionary[key] const hasDefault = hasOwn(options, 'defaultValue') // Only use defaultValue if value is undefined and // a defaultValue options was provided. if (hasDefault && value !== null) { value = value ?? defaultValue } // A key can be optional and have no default value. // When this happens, do not perform a conversion, // and do not assign the key a value. if (required || hasDefault || value !== undefined) { value = converter(value) if ( options.allowedValues && !options.allowedValues.includes(value) ) { throw webidl.errors.exception({ header: 'Dictionary', message: `${value} is not an accepted type. Expected one of ${options.allowedValues.join(', ')}.` }) } dict[key] = value } } return dict } } webidl.nullableConverter = function (converter) { return (V) => { if (V === null) { return V } return converter(V) } } // https://webidl.spec.whatwg.org/#es-DOMString webidl.converters.DOMString = function (V, opts = {}) { // 1. If V is null and the conversion is to an IDL type // associated with the [LegacyNullToEmptyString] // extended attribute, then return the DOMString value // that represents the empty string. if (V === null && opts.legacyNullToEmptyString) { return '' } // 2. Let x be ? ToString(V). if (typeof V === 'symbol') { throw new TypeError('Could not convert argument of type symbol to string.') } // 3. Return the IDL DOMString value that represents the // same sequence of code units as the one the // ECMAScript String value x represents. return String(V) } // https://webidl.spec.whatwg.org/#es-ByteString webidl.converters.ByteString = function (V) { // 1. Let x be ? ToString(V). // Note: DOMString converter perform ? ToString(V) const x = webidl.converters.DOMString(V) // 2. If the value of any element of x is greater than // 255, then throw a TypeError. for (let index = 0; index < x.length; index++) { if (x.charCodeAt(index) > 255) { throw new TypeError( 'Cannot convert argument to a ByteString because the character at ' + `index ${index} has a value of ${x.charCodeAt(index)} which is greater than 255.` ) } } // 3. Return an IDL ByteString value whose length is the // length of x, and where the value of each element is // the value of the corresponding element of x. return x } // https://webidl.spec.whatwg.org/#es-USVString webidl.converters.USVString = toUSVString // https://webidl.spec.whatwg.org/#es-boolean webidl.converters.boolean = function (V) { // 1. Let x be the result of computing ToBoolean(V). const x = Boolean(V) // 2. Return the IDL boolean value that is the one that represents // the same truth value as the ECMAScript Boolean value x. 
return x } // https://webidl.spec.whatwg.org/#es-any webidl.converters.any = function (V) { return V } // https://webidl.spec.whatwg.org/#es-long-long webidl.converters['long long'] = function (V) { // 1. Let x be ? ConvertToInt(V, 64, "signed"). const x = webidl.util.ConvertToInt(V, 64, 'signed') // 2. Return the IDL long long value that represents // the same numeric value as x. return x } // https://webidl.spec.whatwg.org/#es-unsigned-long-long webidl.converters['unsigned long long'] = function (V) { // 1. Let x be ? ConvertToInt(V, 64, "unsigned"). const x = webidl.util.ConvertToInt(V, 64, 'unsigned') // 2. Return the IDL unsigned long long value that // represents the same numeric value as x. return x } // https://webidl.spec.whatwg.org/#es-unsigned-long webidl.converters['unsigned long'] = function (V) { // 1. Let x be ? ConvertToInt(V, 32, "unsigned"). const x = webidl.util.ConvertToInt(V, 32, 'unsigned') // 2. Return the IDL unsigned long value that // represents the same numeric value as x. return x } // https://webidl.spec.whatwg.org/#es-unsigned-short webidl.converters['unsigned short'] = function (V, opts) { // 1. Let x be ? ConvertToInt(V, 16, "unsigned"). const x = webidl.util.ConvertToInt(V, 16, 'unsigned', opts) // 2. Return the IDL unsigned short value that represents // the same numeric value as x. return x } // https://webidl.spec.whatwg.org/#idl-ArrayBuffer webidl.converters.ArrayBuffer = function (V, opts = {}) { // 1. If Type(V) is not Object, or V does not have an // [[ArrayBufferData]] internal slot, then throw a // TypeError. // see: https://tc39.es/ecma262/#sec-properties-of-the-arraybuffer-instances // see: https://tc39.es/ecma262/#sec-properties-of-the-sharedarraybuffer-instances if ( webidl.util.Type(V) !== 'Object' || !types.isAnyArrayBuffer(V) ) { throw webidl.errors.conversionFailed({ prefix: `${V}`, argument: `${V}`, types: ['ArrayBuffer'] }) } // 2. If the conversion is not to an IDL type associated // with the [AllowShared] extended attribute, and // IsSharedArrayBuffer(V) is true, then throw a // TypeError. if (opts.allowShared === false && types.isSharedArrayBuffer(V)) { throw webidl.errors.exception({ header: 'ArrayBuffer', message: 'SharedArrayBuffer is not allowed.' }) } // 3. If the conversion is not to an IDL type associated // with the [AllowResizable] extended attribute, and // IsResizableArrayBuffer(V) is true, then throw a // TypeError. // Note: resizable ArrayBuffers are currently a proposal. // 4. Return the IDL ArrayBuffer value that is a // reference to the same object as V. return V } webidl.converters.TypedArray = function (V, T, opts = {}) { // 1. Let T be the IDL type V is being converted to. // 2. If Type(V) is not Object, or V does not have a // [[TypedArrayName]] internal slot with a value // equal to T’s name, then throw a TypeError. if ( webidl.util.Type(V) !== 'Object' || !types.isTypedArray(V) || V.constructor.name !== T.name ) { throw webidl.errors.conversionFailed({ prefix: `${T.name}`, argument: `${V}`, types: [T.name] }) } // 3. If the conversion is not to an IDL type associated // with the [AllowShared] extended attribute, and // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is // true, then throw a TypeError. if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { throw webidl.errors.exception({ header: 'ArrayBuffer', message: 'SharedArrayBuffer is not allowed.' }) } // 4. 
If the conversion is not to an IDL type associated // with the [AllowResizable] extended attribute, and // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is // true, then throw a TypeError. // Note: resizable array buffers are currently a proposal // 5. Return the IDL value of type T that is a reference // to the same object as V. return V } webidl.converters.DataView = function (V, opts = {}) { // 1. If Type(V) is not Object, or V does not have a // [[DataView]] internal slot, then throw a TypeError. if (webidl.util.Type(V) !== 'Object' || !types.isDataView(V)) { throw webidl.errors.exception({ header: 'DataView', message: 'Object is not a DataView.' }) } // 2. If the conversion is not to an IDL type associated // with the [AllowShared] extended attribute, and // IsSharedArrayBuffer(V.[[ViewedArrayBuffer]]) is true, // then throw a TypeError. if (opts.allowShared === false && types.isSharedArrayBuffer(V.buffer)) { throw webidl.errors.exception({ header: 'ArrayBuffer', message: 'SharedArrayBuffer is not allowed.' }) } // 3. If the conversion is not to an IDL type associated // with the [AllowResizable] extended attribute, and // IsResizableArrayBuffer(V.[[ViewedArrayBuffer]]) is // true, then throw a TypeError. // Note: resizable ArrayBuffers are currently a proposal // 4. Return the IDL DataView value that is a reference // to the same object as V. return V } // https://webidl.spec.whatwg.org/#BufferSource webidl.converters.BufferSource = function (V, opts = {}) { if (types.isAnyArrayBuffer(V)) { return webidl.converters.ArrayBuffer(V, opts) } if (types.isTypedArray(V)) { return webidl.converters.TypedArray(V, V.constructor) } if (types.isDataView(V)) { return webidl.converters.DataView(V, opts) } throw new TypeError(`Could not convert ${V} to a BufferSource.`) } webidl.converters['sequence<ByteString>'] = webidl.sequenceConverter( webidl.converters.ByteString ) webidl.converters['sequence<sequence<ByteString>>'] = webidl.sequenceConverter( webidl.converters['sequence<ByteString>'] ) webidl.converters['record<ByteString, ByteString>'] = webidl.recordConverter( webidl.converters.ByteString, webidl.converters.ByteString ) module.exports = { webidl } /***/ }), /***/ 40396: /***/ ((module) => { "use strict"; /** * @see https://encoding.spec.whatwg.org/#concept-encoding-get * @param {string|undefined} label */ function getEncoding (label) { if (!label) { return 'failure' } // 1. Remove any leading and trailing ASCII whitespace from label. // 2. If label is an ASCII case-insensitive match for any of the // labels listed in the table below, then return the // corresponding encoding; otherwise return failure. 
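/*
 * Encoding-label sketch (illustrative labels; resolution happens in the
 * switch below):
 *
 *   getEncoding(' UTF8 ')   // -> 'UTF-8'        (trimmed, case-insensitive)
 *   getEncoding('latin1')   // -> 'windows-1252' (legacy alias)
 *   getEncoding('utf-7')    // -> 'failure'      (not a registered label)
 *   getEncoding(undefined)  // -> 'failure'
 */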
switch (label.trim().toLowerCase()) { case 'unicode-1-1-utf-8': case 'unicode11utf8': case 'unicode20utf8': case 'utf-8': case 'utf8': case 'x-unicode20utf8': return 'UTF-8' case '866': case 'cp866': case 'csibm866': case 'ibm866': return 'IBM866' case 'csisolatin2': case 'iso-8859-2': case 'iso-ir-101': case 'iso8859-2': case 'iso88592': case 'iso_8859-2': case 'iso_8859-2:1987': case 'l2': case 'latin2': return 'ISO-8859-2' case 'csisolatin3': case 'iso-8859-3': case 'iso-ir-109': case 'iso8859-3': case 'iso88593': case 'iso_8859-3': case 'iso_8859-3:1988': case 'l3': case 'latin3': return 'ISO-8859-3' case 'csisolatin4': case 'iso-8859-4': case 'iso-ir-110': case 'iso8859-4': case 'iso88594': case 'iso_8859-4': case 'iso_8859-4:1988': case 'l4': case 'latin4': return 'ISO-8859-4' case 'csisolatincyrillic': case 'cyrillic': case 'iso-8859-5': case 'iso-ir-144': case 'iso8859-5': case 'iso88595': case 'iso_8859-5': case 'iso_8859-5:1988': return 'ISO-8859-5' case 'arabic': case 'asmo-708': case 'csiso88596e': case 'csiso88596i': case 'csisolatinarabic': case 'ecma-114': case 'iso-8859-6': case 'iso-8859-6-e': case 'iso-8859-6-i': case 'iso-ir-127': case 'iso8859-6': case 'iso88596': case 'iso_8859-6': case 'iso_8859-6:1987': return 'ISO-8859-6' case 'csisolatingreek': case 'ecma-118': case 'elot_928': case 'greek': case 'greek8': case 'iso-8859-7': case 'iso-ir-126': case 'iso8859-7': case 'iso88597': case 'iso_8859-7': case 'iso_8859-7:1987': case 'sun_eu_greek': return 'ISO-8859-7' case 'csiso88598e': case 'csisolatinhebrew': case 'hebrew': case 'iso-8859-8': case 'iso-8859-8-e': case 'iso-ir-138': case 'iso8859-8': case 'iso88598': case 'iso_8859-8': case 'iso_8859-8:1988': case 'visual': return 'ISO-8859-8' case 'csiso88598i': case 'iso-8859-8-i': case 'logical': return 'ISO-8859-8-I' case 'csisolatin6': case 'iso-8859-10': case 'iso-ir-157': case 'iso8859-10': case 'iso885910': case 'l6': case 'latin6': return 'ISO-8859-10' case 'iso-8859-13': case 'iso8859-13': case 'iso885913': return 'ISO-8859-13' case 'iso-8859-14': case 'iso8859-14': case 'iso885914': return 'ISO-8859-14' case 'csisolatin9': case 'iso-8859-15': case 'iso8859-15': case 'iso885915': case 'iso_8859-15': case 'l9': return 'ISO-8859-15' case 'iso-8859-16': return 'ISO-8859-16' case 'cskoi8r': case 'koi': case 'koi8': case 'koi8-r': case 'koi8_r': return 'KOI8-R' case 'koi8-ru': case 'koi8-u': return 'KOI8-U' case 'csmacintosh': case 'mac': case 'macintosh': case 'x-mac-roman': return 'macintosh' case 'iso-8859-11': case 'iso8859-11': case 'iso885911': case 'tis-620': case 'windows-874': return 'windows-874' case 'cp1250': case 'windows-1250': case 'x-cp1250': return 'windows-1250' case 'cp1251': case 'windows-1251': case 'x-cp1251': return 'windows-1251' case 'ansi_x3.4-1968': case 'ascii': case 'cp1252': case 'cp819': case 'csisolatin1': case 'ibm819': case 'iso-8859-1': case 'iso-ir-100': case 'iso8859-1': case 'iso88591': case 'iso_8859-1': case 'iso_8859-1:1987': case 'l1': case 'latin1': case 'us-ascii': case 'windows-1252': case 'x-cp1252': return 'windows-1252' case 'cp1253': case 'windows-1253': case 'x-cp1253': return 'windows-1253' case 'cp1254': case 'csisolatin5': case 'iso-8859-9': case 'iso-ir-148': case 'iso8859-9': case 'iso88599': case 'iso_8859-9': case 'iso_8859-9:1989': case 'l5': case 'latin5': case 'windows-1254': case 'x-cp1254': return 'windows-1254' case 'cp1255': case 'windows-1255': case 'x-cp1255': return 'windows-1255' case 'cp1256': case 'windows-1256': case 'x-cp1256': return 
'windows-1256' case 'cp1257': case 'windows-1257': case 'x-cp1257': return 'windows-1257' case 'cp1258': case 'windows-1258': case 'x-cp1258': return 'windows-1258' case 'x-mac-cyrillic': case 'x-mac-ukrainian': return 'x-mac-cyrillic' case 'chinese': case 'csgb2312': case 'csiso58gb231280': case 'gb2312': case 'gb_2312': case 'gb_2312-80': case 'gbk': case 'iso-ir-58': case 'x-gbk': return 'GBK' case 'gb18030': return 'gb18030' case 'big5': case 'big5-hkscs': case 'cn-big5': case 'csbig5': case 'x-x-big5': return 'Big5' case 'cseucpkdfmtjapanese': case 'euc-jp': case 'x-euc-jp': return 'EUC-JP' case 'csiso2022jp': case 'iso-2022-jp': return 'ISO-2022-JP' case 'csshiftjis': case 'ms932': case 'ms_kanji': case 'shift-jis': case 'shift_jis': case 'sjis': case 'windows-31j': case 'x-sjis': return 'Shift_JIS' case 'cseuckr': case 'csksc56011987': case 'euc-kr': case 'iso-ir-149': case 'korean': case 'ks_c_5601-1987': case 'ks_c_5601-1989': case 'ksc5601': case 'ksc_5601': case 'windows-949': return 'EUC-KR' case 'csiso2022kr': case 'hz-gb-2312': case 'iso-2022-cn': case 'iso-2022-cn-ext': case 'iso-2022-kr': case 'replacement': return 'replacement' case 'unicodefffe': case 'utf-16be': return 'UTF-16BE' case 'csunicode': case 'iso-10646-ucs-2': case 'ucs-2': case 'unicode': case 'unicodefeff': case 'utf-16': case 'utf-16le': return 'UTF-16LE' case 'x-user-defined': return 'x-user-defined' default: return 'failure' } } module.exports = { getEncoding } /***/ }), /***/ 82160: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { staticPropertyDescriptors, readOperation, fireAProgressEvent } = __nccwpck_require__(10165) const { kState, kError, kResult, kEvents, kAborted } = __nccwpck_require__(86812) const { webidl } = __nccwpck_require__(74222) const { kEnumerableProperty } = __nccwpck_require__(3440) class FileReader extends EventTarget { constructor () { super() this[kState] = 'empty' this[kResult] = null this[kError] = null this[kEvents] = { loadend: null, error: null, abort: null, load: null, progress: null, loadstart: null } } /** * @see https://w3c.github.io/FileAPI/#dfn-readAsArrayBuffer * @param {import('buffer').Blob} blob */ readAsArrayBuffer (blob) { webidl.brandCheck(this, FileReader) webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsArrayBuffer' }) blob = webidl.converters.Blob(blob, { strict: false }) // The readAsArrayBuffer(blob) method, when invoked, // must initiate a read operation for blob with ArrayBuffer. readOperation(this, blob, 'ArrayBuffer') } /** * @see https://w3c.github.io/FileAPI/#readAsBinaryString * @param {import('buffer').Blob} blob */ readAsBinaryString (blob) { webidl.brandCheck(this, FileReader) webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsBinaryString' }) blob = webidl.converters.Blob(blob, { strict: false }) // The readAsBinaryString(blob) method, when invoked, // must initiate a read operation for blob with BinaryString. 
readOperation(this, blob, 'BinaryString') } /** * @see https://w3c.github.io/FileAPI/#readAsDataText * @param {import('buffer').Blob} blob * @param {string?} encoding */ readAsText (blob, encoding = undefined) { webidl.brandCheck(this, FileReader) webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsText' }) blob = webidl.converters.Blob(blob, { strict: false }) if (encoding !== undefined) { encoding = webidl.converters.DOMString(encoding) } // The readAsText(blob, encoding) method, when invoked, // must initiate a read operation for blob with Text and encoding. readOperation(this, blob, 'Text', encoding) } /** * @see https://w3c.github.io/FileAPI/#dfn-readAsDataURL * @param {import('buffer').Blob} blob */ readAsDataURL (blob) { webidl.brandCheck(this, FileReader) webidl.argumentLengthCheck(arguments, 1, { header: 'FileReader.readAsDataURL' }) blob = webidl.converters.Blob(blob, { strict: false }) // The readAsDataURL(blob) method, when invoked, must // initiate a read operation for blob with DataURL. readOperation(this, blob, 'DataURL') } /** * @see https://w3c.github.io/FileAPI/#dfn-abort */ abort () { // 1. If this's state is "empty" or if this's state is // "done" set this's result to null and terminate // this algorithm. if (this[kState] === 'empty' || this[kState] === 'done') { this[kResult] = null return } // 2. If this's state is "loading" set this's state to // "done" and set this's result to null. if (this[kState] === 'loading') { this[kState] = 'done' this[kResult] = null } // 3. If there are any tasks from this on the file reading // task source in an affiliated task queue, then remove // those tasks from that task queue. this[kAborted] = true // 4. Terminate the algorithm for the read method being processed. // TODO // 5. Fire a progress event called abort at this. fireAProgressEvent('abort', this) // 6. If this's state is not "loading", fire a progress // event called loadend at this. if (this[kState] !== 'loading') { fireAProgressEvent('loadend', this) } } /** * @see https://w3c.github.io/FileAPI/#dom-filereader-readystate */ get readyState () { webidl.brandCheck(this, FileReader) switch (this[kState]) { case 'empty': return this.EMPTY case 'loading': return this.LOADING case 'done': return this.DONE } } /** * @see https://w3c.github.io/FileAPI/#dom-filereader-result */ get result () { webidl.brandCheck(this, FileReader) // The result attribute’s getter, when invoked, must return // this's result. return this[kResult] } /** * @see https://w3c.github.io/FileAPI/#dom-filereader-error */ get error () { webidl.brandCheck(this, FileReader) // The error attribute’s getter, when invoked, must return // this's error. 
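/*
 * FileReader usage sketch (illustrative; assumes a Blob implementation such as
 * the global Blob available in Node.js >= 18):
 *
 *   const fr = new FileReader()
 *   fr.onload = () => { console.log(fr.result) }   // logs 'hello' once the read completes
 *   fr.onerror = () => { console.error(fr.error) }
 *   fr.readAsText(new Blob(['hello']), 'utf-8')
 */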
return this[kError] } get onloadend () { webidl.brandCheck(this, FileReader) return this[kEvents].loadend } set onloadend (fn) { webidl.brandCheck(this, FileReader) if (this[kEvents].loadend) { this.removeEventListener('loadend', this[kEvents].loadend) } if (typeof fn === 'function') { this[kEvents].loadend = fn this.addEventListener('loadend', fn) } else { this[kEvents].loadend = null } } get onerror () { webidl.brandCheck(this, FileReader) return this[kEvents].error } set onerror (fn) { webidl.brandCheck(this, FileReader) if (this[kEvents].error) { this.removeEventListener('error', this[kEvents].error) } if (typeof fn === 'function') { this[kEvents].error = fn this.addEventListener('error', fn) } else { this[kEvents].error = null } } get onloadstart () { webidl.brandCheck(this, FileReader) return this[kEvents].loadstart } set onloadstart (fn) { webidl.brandCheck(this, FileReader) if (this[kEvents].loadstart) { this.removeEventListener('loadstart', this[kEvents].loadstart) } if (typeof fn === 'function') { this[kEvents].loadstart = fn this.addEventListener('loadstart', fn) } else { this[kEvents].loadstart = null } } get onprogress () { webidl.brandCheck(this, FileReader) return this[kEvents].progress } set onprogress (fn) { webidl.brandCheck(this, FileReader) if (this[kEvents].progress) { this.removeEventListener('progress', this[kEvents].progress) } if (typeof fn === 'function') { this[kEvents].progress = fn this.addEventListener('progress', fn) } else { this[kEvents].progress = null } } get onload () { webidl.brandCheck(this, FileReader) return this[kEvents].load } set onload (fn) { webidl.brandCheck(this, FileReader) if (this[kEvents].load) { this.removeEventListener('load', this[kEvents].load) } if (typeof fn === 'function') { this[kEvents].load = fn this.addEventListener('load', fn) } else { this[kEvents].load = null } } get onabort () { webidl.brandCheck(this, FileReader) return this[kEvents].abort } set onabort (fn) { webidl.brandCheck(this, FileReader) if (this[kEvents].abort) { this.removeEventListener('abort', this[kEvents].abort) } if (typeof fn === 'function') { this[kEvents].abort = fn this.addEventListener('abort', fn) } else { this[kEvents].abort = null } } } // https://w3c.github.io/FileAPI/#dom-filereader-empty FileReader.EMPTY = FileReader.prototype.EMPTY = 0 // https://w3c.github.io/FileAPI/#dom-filereader-loading FileReader.LOADING = FileReader.prototype.LOADING = 1 // https://w3c.github.io/FileAPI/#dom-filereader-done FileReader.DONE = FileReader.prototype.DONE = 2 Object.defineProperties(FileReader.prototype, { EMPTY: staticPropertyDescriptors, LOADING: staticPropertyDescriptors, DONE: staticPropertyDescriptors, readAsArrayBuffer: kEnumerableProperty, readAsBinaryString: kEnumerableProperty, readAsText: kEnumerableProperty, readAsDataURL: kEnumerableProperty, abort: kEnumerableProperty, readyState: kEnumerableProperty, result: kEnumerableProperty, error: kEnumerableProperty, onloadstart: kEnumerableProperty, onprogress: kEnumerableProperty, onload: kEnumerableProperty, onabort: kEnumerableProperty, onerror: kEnumerableProperty, onloadend: kEnumerableProperty, [Symbol.toStringTag]: { value: 'FileReader', writable: false, enumerable: false, configurable: true } }) Object.defineProperties(FileReader, { EMPTY: staticPropertyDescriptors, LOADING: staticPropertyDescriptors, DONE: staticPropertyDescriptors }) module.exports = { FileReader } /***/ }), /***/ 15976: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { webidl } = 
__nccwpck_require__(74222) const kState = Symbol('ProgressEvent state') /** * @see https://xhr.spec.whatwg.org/#progressevent */ class ProgressEvent extends Event { constructor (type, eventInitDict = {}) { type = webidl.converters.DOMString(type) eventInitDict = webidl.converters.ProgressEventInit(eventInitDict ?? {}) super(type, eventInitDict) this[kState] = { lengthComputable: eventInitDict.lengthComputable, loaded: eventInitDict.loaded, total: eventInitDict.total } } get lengthComputable () { webidl.brandCheck(this, ProgressEvent) return this[kState].lengthComputable } get loaded () { webidl.brandCheck(this, ProgressEvent) return this[kState].loaded } get total () { webidl.brandCheck(this, ProgressEvent) return this[kState].total } } webidl.converters.ProgressEventInit = webidl.dictionaryConverter([ { key: 'lengthComputable', converter: webidl.converters.boolean, defaultValue: false }, { key: 'loaded', converter: webidl.converters['unsigned long long'], defaultValue: 0 }, { key: 'total', converter: webidl.converters['unsigned long long'], defaultValue: 0 }, { key: 'bubbles', converter: webidl.converters.boolean, defaultValue: false }, { key: 'cancelable', converter: webidl.converters.boolean, defaultValue: false }, { key: 'composed', converter: webidl.converters.boolean, defaultValue: false } ]) module.exports = { ProgressEvent } /***/ }), /***/ 86812: /***/ ((module) => { "use strict"; module.exports = { kState: Symbol('FileReader state'), kResult: Symbol('FileReader result'), kError: Symbol('FileReader error'), kLastProgressEventFired: Symbol('FileReader last progress event fired timestamp'), kEvents: Symbol('FileReader events'), kAborted: Symbol('FileReader aborted') } /***/ }), /***/ 10165: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { kState, kError, kResult, kAborted, kLastProgressEventFired } = __nccwpck_require__(86812) const { ProgressEvent } = __nccwpck_require__(15976) const { getEncoding } = __nccwpck_require__(40396) const { DOMException } = __nccwpck_require__(87326) const { serializeAMimeType, parseMIMEType } = __nccwpck_require__(94322) const { types } = __nccwpck_require__(39023) const { StringDecoder } = __nccwpck_require__(13193) const { btoa } = __nccwpck_require__(20181) /** @type {PropertyDescriptor} */ const staticPropertyDescriptors = { enumerable: true, writable: false, configurable: false } /** * @see https://w3c.github.io/FileAPI/#readOperation * @param {import('./filereader').FileReader} fr * @param {import('buffer').Blob} blob * @param {string} type * @param {string?} encodingName */ function readOperation (fr, blob, type, encodingName) { // 1. If fr’s state is "loading", throw an InvalidStateError // DOMException. if (fr[kState] === 'loading') { throw new DOMException('Invalid state', 'InvalidStateError') } // 2. Set fr’s state to "loading". fr[kState] = 'loading' // 3. Set fr’s result to null. fr[kResult] = null // 4. Set fr’s error to null. fr[kError] = null // 5. Let stream be the result of calling get stream on blob. /** @type {import('stream/web').ReadableStream} */ const stream = blob.stream() // 6. Let reader be the result of getting a reader from stream. const reader = stream.getReader() // 7. Let bytes be an empty byte sequence. /** @type {Uint8Array[]} */ const bytes = [] // 8. Let chunkPromise be the result of reading a chunk from // stream with reader. let chunkPromise = reader.read() // 9. Let isFirstChunk be true. let isFirstChunk = true // 10. 
In parallel, while true: // Note: "In parallel" just means non-blocking // Note 2: readOperation itself cannot be async as double // reading the body would then reject the promise, instead // of throwing an error. ;(async () => { while (!fr[kAborted]) { // 1. Wait for chunkPromise to be fulfilled or rejected. try { const { done, value } = await chunkPromise // 2. If chunkPromise is fulfilled, and isFirstChunk is // true, queue a task to fire a progress event called // loadstart at fr. if (isFirstChunk && !fr[kAborted]) { queueMicrotask(() => { fireAProgressEvent('loadstart', fr) }) } // 3. Set isFirstChunk to false. isFirstChunk = false // 4. If chunkPromise is fulfilled with an object whose // done property is false and whose value property is // a Uint8Array object, run these steps: if (!done && types.isUint8Array(value)) { // 1. Let bs be the byte sequence represented by the // Uint8Array object. // 2. Append bs to bytes. bytes.push(value) // 3. If roughly 50ms have passed since these steps // were last invoked, queue a task to fire a // progress event called progress at fr. if ( ( fr[kLastProgressEventFired] === undefined || Date.now() - fr[kLastProgressEventFired] >= 50 ) && !fr[kAborted] ) { fr[kLastProgressEventFired] = Date.now() queueMicrotask(() => { fireAProgressEvent('progress', fr) }) } // 4. Set chunkPromise to the result of reading a // chunk from stream with reader. chunkPromise = reader.read() } else if (done) { // 5. Otherwise, if chunkPromise is fulfilled with an // object whose done property is true, queue a task // to run the following steps and abort this algorithm: queueMicrotask(() => { // 1. Set fr’s state to "done". fr[kState] = 'done' // 2. Let result be the result of package data given // bytes, type, blob’s type, and encodingName. try { const result = packageData(bytes, type, blob.type, encodingName) // 4. Else: if (fr[kAborted]) { return } // 1. Set fr’s result to result. fr[kResult] = result // 2. Fire a progress event called load at the fr. fireAProgressEvent('load', fr) } catch (error) { // 3. If package data threw an exception error: // 1. Set fr’s error to error. fr[kError] = error // 2. Fire a progress event called error at fr. fireAProgressEvent('error', fr) } // 5. If fr’s state is not "loading", fire a progress // event called loadend at the fr. if (fr[kState] !== 'loading') { fireAProgressEvent('loadend', fr) } }) break } } catch (error) { if (fr[kAborted]) { return } // 6. Otherwise, if chunkPromise is rejected with an // error error, queue a task to run the following // steps and abort this algorithm: queueMicrotask(() => { // 1. Set fr’s state to "done". fr[kState] = 'done' // 2. Set fr’s error to error. fr[kError] = error // 3. Fire a progress event called error at fr. fireAProgressEvent('error', fr) // 4. If fr’s state is not "loading", fire a progress // event called loadend at fr. if (fr[kState] !== 'loading') { fireAProgressEvent('loadend', fr) } }) break } } })() } /** * @see https://w3c.github.io/FileAPI/#fire-a-progress-event * @see https://dom.spec.whatwg.org/#concept-event-fire * @param {string} e The name of the event * @param {import('./filereader').FileReader} reader */ function fireAProgressEvent (e, reader) { // The progress event e does not bubble. e.bubbles must be false // The progress event e is NOT cancelable. 
e.cancelable must be false const event = new ProgressEvent(e, { bubbles: false, cancelable: false }) reader.dispatchEvent(event) } /** * @see https://w3c.github.io/FileAPI/#blob-package-data * @param {Uint8Array[]} bytes * @param {string} type * @param {string?} mimeType * @param {string?} encodingName */ function packageData (bytes, type, mimeType, encodingName) { // 1. A Blob has an associated package data algorithm, given // bytes, a type, a optional mimeType, and a optional // encodingName, which switches on type and runs the // associated steps: switch (type) { case 'DataURL': { // 1. Return bytes as a DataURL [RFC2397] subject to // the considerations below: // * Use mimeType as part of the Data URL if it is // available in keeping with the Data URL // specification [RFC2397]. // * If mimeType is not available return a Data URL // without a media-type. [RFC2397]. // https://datatracker.ietf.org/doc/html/rfc2397#section-3 // dataurl := "data:" [ mediatype ] [ ";base64" ] "," data // mediatype := [ type "/" subtype ] *( ";" parameter ) // data := *urlchar // parameter := attribute "=" value let dataURL = 'data:' const parsed = parseMIMEType(mimeType || 'application/octet-stream') if (parsed !== 'failure') { dataURL += serializeAMimeType(parsed) } dataURL += ';base64,' const decoder = new StringDecoder('latin1') for (const chunk of bytes) { dataURL += btoa(decoder.write(chunk)) } dataURL += btoa(decoder.end()) return dataURL } case 'Text': { // 1. Let encoding be failure let encoding = 'failure' // 2. If the encodingName is present, set encoding to the // result of getting an encoding from encodingName. if (encodingName) { encoding = getEncoding(encodingName) } // 3. If encoding is failure, and mimeType is present: if (encoding === 'failure' && mimeType) { // 1. Let type be the result of parse a MIME type // given mimeType. const type = parseMIMEType(mimeType) // 2. If type is not failure, set encoding to the result // of getting an encoding from type’s parameters["charset"]. if (type !== 'failure') { encoding = getEncoding(type.parameters.get('charset')) } } // 4. If encoding is failure, then set encoding to UTF-8. if (encoding === 'failure') { encoding = 'UTF-8' } // 5. Decode bytes using fallback encoding encoding, and // return the result. return decode(bytes, encoding) } case 'ArrayBuffer': { // Return a new ArrayBuffer whose contents are bytes. const sequence = combineByteSequences(bytes) return sequence.buffer } case 'BinaryString': { // Return bytes as a binary string, in which every byte // is represented by a code unit of equal value [0..255]. let binaryString = '' const decoder = new StringDecoder('latin1') for (const chunk of bytes) { binaryString += decoder.write(chunk) } binaryString += decoder.end() return binaryString } } } /** * @see https://encoding.spec.whatwg.org/#decode * @param {Uint8Array[]} ioQueue * @param {string} encoding */ function decode (ioQueue, encoding) { const bytes = combineByteSequences(ioQueue) // 1. Let BOMEncoding be the result of BOM sniffing ioQueue. const BOMEncoding = BOMSniffing(bytes) let slice = 0 // 2. If BOMEncoding is non-null: if (BOMEncoding !== null) { // 1. Set encoding to BOMEncoding. encoding = BOMEncoding // 2. Read three bytes from ioQueue, if BOMEncoding is // UTF-8; otherwise read two bytes. // (Do nothing with those bytes.) slice = BOMEncoding === 'UTF-8' ? 3 : 2 } // 3. Process a queue with an instance of encoding’s // decoder, ioQueue, output, and "replacement". // 4. Return output. 
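/*
 * BOM-handling sketch for decode() and BOMSniffing() below (illustrative
 * bytes; a UTF-8 BOM of EF BB BF overrides the caller-supplied fallback
 * encoding):
 *
 *   decode([Uint8Array.of(0xEF, 0xBB, 0xBF, 0x68, 0x69)], 'windows-1252')  // -> 'hi'
 *   BOMSniffing(Uint8Array.of(0xFE, 0xFF))  // -> 'UTF-16BE'
 *   BOMSniffing(Uint8Array.of(0x68, 0x69))  // -> null (no BOM)
 */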
const sliced = bytes.slice(slice) return new TextDecoder(encoding).decode(sliced) } /** * @see https://encoding.spec.whatwg.org/#bom-sniff * @param {Uint8Array} ioQueue */ function BOMSniffing (ioQueue) { // 1. Let BOM be the result of peeking 3 bytes from ioQueue, // converted to a byte sequence. const [a, b, c] = ioQueue // 2. For each of the rows in the table below, starting with // the first one and going down, if BOM starts with the // bytes given in the first column, then return the // encoding given in the cell in the second column of that // row. Otherwise, return null. if (a === 0xEF && b === 0xBB && c === 0xBF) { return 'UTF-8' } else if (a === 0xFE && b === 0xFF) { return 'UTF-16BE' } else if (a === 0xFF && b === 0xFE) { return 'UTF-16LE' } return null } /** * @param {Uint8Array[]} sequences */ function combineByteSequences (sequences) { const size = sequences.reduce((a, b) => { return a + b.byteLength }, 0) let offset = 0 return sequences.reduce((a, b) => { a.set(b, offset) offset += b.byteLength return a }, new Uint8Array(size)) } module.exports = { staticPropertyDescriptors, readOperation, fireAProgressEvent } /***/ }), /***/ 32581: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // We include a version number for the Dispatcher API. In case of breaking changes, // this version number must be increased to avoid conflicts. const globalDispatcher = Symbol.for('undici.globalDispatcher.1') const { InvalidArgumentError } = __nccwpck_require__(68707) const Agent = __nccwpck_require__(59965) if (getGlobalDispatcher() === undefined) { setGlobalDispatcher(new Agent()) } function setGlobalDispatcher (agent) { if (!agent || typeof agent.dispatch !== 'function') { throw new InvalidArgumentError('Argument agent must implement Agent') } Object.defineProperty(globalThis, globalDispatcher, { value: agent, writable: true, enumerable: false, configurable: false }) } function getGlobalDispatcher () { return globalThis[globalDispatcher] } module.exports = { setGlobalDispatcher, getGlobalDispatcher } /***/ }), /***/ 78840: /***/ ((module) => { "use strict"; module.exports = class DecoratorHandler { constructor (handler) { this.handler = handler } onConnect (...args) { return this.handler.onConnect(...args) } onError (...args) { return this.handler.onError(...args) } onUpgrade (...args) { return this.handler.onUpgrade(...args) } onHeaders (...args) { return this.handler.onHeaders(...args) } onData (...args) { return this.handler.onData(...args) } onComplete (...args) { return this.handler.onComplete(...args) } onBodySent (...args) { return this.handler.onBodySent(...args) } } /***/ }), /***/ 48299: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const util = __nccwpck_require__(3440) const { kBodyUsed } = __nccwpck_require__(36443) const assert = __nccwpck_require__(42613) const { InvalidArgumentError } = __nccwpck_require__(68707) const EE = __nccwpck_require__(24434) const redirectableStatusCodes = [300, 301, 302, 303, 307, 308] const kBody = Symbol('body') class BodyAsyncIterable { constructor (body) { this[kBody] = body this[kBodyUsed] = false } async * [Symbol.asyncIterator] () { assert(!this[kBodyUsed], 'disturbed') this[kBodyUsed] = true yield * this[kBody] } } class RedirectHandler { constructor (dispatch, maxRedirections, opts, handler) { if (maxRedirections != null && (!Number.isInteger(maxRedirections) || maxRedirections < 0)) { throw new InvalidArgumentError('maxRedirections must be a positive number') } 
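/*
 * Illustrative note (comment only, not executed): BodyAsyncIterable above is
 * needed because a request body may have to be sent again when this handler
 * follows a redirect, while a stream or iterable body can normally be
 * consumed only once. The wrapper records the first consumption and asserts
 * on a second one instead of silently re-dispatching an empty body.
 * Hypothetical usage:
 *
 *   const body = new BodyAsyncIterable(['he', 'llo'])
 *   const seen = []
 *   for await (const chunk of body) seen.push(chunk) // first dispatch drains it
 *   for await (const chunk of body) seen.push(chunk) // a later re-dispatch throws AssertionError: 'disturbed'
 */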
util.validateHandler(handler, opts.method, opts.upgrade) this.dispatch = dispatch this.location = null this.abort = null this.opts = { ...opts, maxRedirections: 0 } // opts must be a copy this.maxRedirections = maxRedirections this.handler = handler this.history = [] if (util.isStream(this.opts.body)) { // TODO (fix): Provide some way for the user to cache the file to e.g. /tmp // so that it can be dispatched again? // TODO (fix): Do we need 100-expect support to provide a way to do this properly? if (util.bodyLength(this.opts.body) === 0) { this.opts.body .on('data', function () { assert(false) }) } if (typeof this.opts.body.readableDidRead !== 'boolean') { this.opts.body[kBodyUsed] = false EE.prototype.on.call(this.opts.body, 'data', function () { this[kBodyUsed] = true }) } } else if (this.opts.body && typeof this.opts.body.pipeTo === 'function') { // TODO (fix): We can't access ReadableStream internal state // to determine whether or not it has been disturbed. This is just // a workaround. this.opts.body = new BodyAsyncIterable(this.opts.body) } else if ( this.opts.body && typeof this.opts.body !== 'string' && !ArrayBuffer.isView(this.opts.body) && util.isIterable(this.opts.body) ) { // TODO: Should we allow re-using iterable if !this.opts.idempotent // or through some other flag? this.opts.body = new BodyAsyncIterable(this.opts.body) } } onConnect (abort) { this.abort = abort this.handler.onConnect(abort, { history: this.history }) } onUpgrade (statusCode, headers, socket) { this.handler.onUpgrade(statusCode, headers, socket) } onError (error) { this.handler.onError(error) } onHeaders (statusCode, headers, resume, statusText) { this.location = this.history.length >= this.maxRedirections || util.isDisturbed(this.opts.body) ? null : parseLocation(statusCode, headers) if (this.opts.origin) { this.history.push(new URL(this.opts.path, this.opts.origin)) } if (!this.location) { return this.handler.onHeaders(statusCode, headers, resume, statusText) } const { origin, pathname, search } = util.parseURL(new URL(this.location, this.opts.origin && new URL(this.opts.path, this.opts.origin))) const path = search ? `${pathname}${search}` : pathname // Remove headers referring to the original URL. // By default it is Host only, unless it's a 303 (see below), which removes also all Content-* headers. // https://tools.ietf.org/html/rfc7231#section-6.4 this.opts.headers = cleanRequestHeaders(this.opts.headers, statusCode === 303, this.opts.origin !== origin) this.opts.path = path this.opts.origin = origin this.opts.maxRedirections = 0 this.opts.query = null // https://tools.ietf.org/html/rfc7231#section-6.4.4 // In case of HTTP 303, always replace method to be either HEAD or GET if (statusCode === 303 && this.opts.method !== 'HEAD') { this.opts.method = 'GET' this.opts.body = null } } onData (chunk) { if (this.location) { /* https://tools.ietf.org/html/rfc7231#section-6.4 TLDR: undici always ignores 3xx response bodies. Redirection is used to serve the requested resource from another URL, so it is assumes that no body is generated (and thus can be ignored). Even though generating a body is not prohibited. For status 301, 302, 303, 307 and 308 (the latter from RFC 7238), the specs mention that the body usually (which means it's optional and not mandated) contain just an hyperlink to the value of the Location response header, so the body can be ignored safely. 
For status 300, which is "Multiple Choices", the spec mentions both generating a Location response header AND a response body with the other possible location to follow. Since the spec explicitily chooses not to specify a format for such body and leave it to servers and browsers implementors, we ignore the body as there is no specified way to eventually parse it. */ } else { return this.handler.onData(chunk) } } onComplete (trailers) { if (this.location) { /* https://tools.ietf.org/html/rfc7231#section-6.4 TLDR: undici always ignores 3xx response trailers as they are not expected in case of redirections and neither are useful if present. See comment on onData method above for more detailed informations. */ this.location = null this.abort = null this.dispatch(this.opts, this) } else { this.handler.onComplete(trailers) } } onBodySent (chunk) { if (this.handler.onBodySent) { this.handler.onBodySent(chunk) } } } function parseLocation (statusCode, headers) { if (redirectableStatusCodes.indexOf(statusCode) === -1) { return null } for (let i = 0; i < headers.length; i += 2) { if (headers[i].toString().toLowerCase() === 'location') { return headers[i + 1] } } } // https://tools.ietf.org/html/rfc7231#section-6.4.4 function shouldRemoveHeader (header, removeContent, unknownOrigin) { if (header.length === 4) { return util.headerNameToString(header) === 'host' } if (removeContent && util.headerNameToString(header).startsWith('content-')) { return true } if (unknownOrigin && (header.length === 13 || header.length === 6 || header.length === 19)) { const name = util.headerNameToString(header) return name === 'authorization' || name === 'cookie' || name === 'proxy-authorization' } return false } // https://tools.ietf.org/html/rfc7231#section-6.4 function cleanRequestHeaders (headers, removeContent, unknownOrigin) { const ret = [] if (Array.isArray(headers)) { for (let i = 0; i < headers.length; i += 2) { if (!shouldRemoveHeader(headers[i], removeContent, unknownOrigin)) { ret.push(headers[i], headers[i + 1]) } } } else if (headers && typeof headers === 'object') { for (const key of Object.keys(headers)) { if (!shouldRemoveHeader(key, removeContent, unknownOrigin)) { ret.push(key, headers[key]) } } } else { assert(headers == null, 'headers must be an object or an array') } return ret } module.exports = RedirectHandler /***/ }), /***/ 53573: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const assert = __nccwpck_require__(42613) const { kRetryHandlerDefaultRetry } = __nccwpck_require__(36443) const { RequestRetryError } = __nccwpck_require__(68707) const { isDisturbed, parseHeaders, parseRangeHeader } = __nccwpck_require__(3440) function calculateRetryAfterHeader (retryAfter) { const current = Date.now() const diff = new Date(retryAfter).getTime() - current return diff } class RetryHandler { constructor (opts, handlers) { const { retryOptions, ...dispatchOpts } = opts const { // Retry scoped retry: retryFn, maxRetries, maxTimeout, minTimeout, timeoutFactor, // Response scoped methods, errorCodes, retryAfter, statusCodes } = retryOptions ?? {} this.dispatch = handlers.dispatch this.handler = handlers.handler this.opts = dispatchOpts this.abort = null this.aborted = false this.retryOpts = { retry: retryFn ?? RetryHandler[kRetryHandlerDefaultRetry], retryAfter: retryAfter ?? true, maxTimeout: maxTimeout ?? 30 * 1000, // 30s, timeout: minTimeout ?? 500, // .5s timeoutFactor: timeoutFactor ?? 2, maxRetries: maxRetries ?? 5, // What errors we should retry methods: methods ?? 
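/*
 * Illustrative note (comment only): the `??` fallbacks in this options block
 * are the built-in retry policy consulted by the default retry callback
 * further down. Under these defaults, hypothetical outcomes would be:
 *
 *   GET 503                        -> retried (method and status code are both listed)
 *   POST 503                       -> not retried (POST is not in the default method list)
 *   GET ECONNRESET (no response)   -> retried (listed error code)
 *   GET 404                        -> not retried (404 is not in [500, 502, 503, 504, 429])
 */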
['GET', 'HEAD', 'OPTIONS', 'PUT', 'DELETE', 'TRACE'], // Indicates which errors to retry statusCodes: statusCodes ?? [500, 502, 503, 504, 429], // List of errors to retry errorCodes: errorCodes ?? [ 'ECONNRESET', 'ECONNREFUSED', 'ENOTFOUND', 'ENETDOWN', 'ENETUNREACH', 'EHOSTDOWN', 'EHOSTUNREACH', 'EPIPE' ] } this.retryCount = 0 this.start = 0 this.end = null this.etag = null this.resume = null // Handle possible onConnect duplication this.handler.onConnect(reason => { this.aborted = true if (this.abort) { this.abort(reason) } else { this.reason = reason } }) } onRequestSent () { if (this.handler.onRequestSent) { this.handler.onRequestSent() } } onUpgrade (statusCode, headers, socket) { if (this.handler.onUpgrade) { this.handler.onUpgrade(statusCode, headers, socket) } } onConnect (abort) { if (this.aborted) { abort(this.reason) } else { this.abort = abort } } onBodySent (chunk) { if (this.handler.onBodySent) return this.handler.onBodySent(chunk) } static [kRetryHandlerDefaultRetry] (err, { state, opts }, cb) { const { statusCode, code, headers } = err const { method, retryOptions } = opts const { maxRetries, timeout, maxTimeout, timeoutFactor, statusCodes, errorCodes, methods } = retryOptions let { counter, currentTimeout } = state currentTimeout = currentTimeout != null && currentTimeout > 0 ? currentTimeout : timeout // Any code that is not a Undici's originated and allowed to retry if ( code && code !== 'UND_ERR_REQ_RETRY' && code !== 'UND_ERR_SOCKET' && !errorCodes.includes(code) ) { cb(err) return } // If a set of method are provided and the current method is not in the list if (Array.isArray(methods) && !methods.includes(method)) { cb(err) return } // If a set of status code are provided and the current status code is not in the list if ( statusCode != null && Array.isArray(statusCodes) && !statusCodes.includes(statusCode) ) { cb(err) return } // If we reached the max number of retries if (counter > maxRetries) { cb(err) return } let retryAfterHeader = headers != null && headers['retry-after'] if (retryAfterHeader) { retryAfterHeader = Number(retryAfterHeader) retryAfterHeader = isNaN(retryAfterHeader) ? calculateRetryAfterHeader(retryAfterHeader) : retryAfterHeader * 1e3 // Retry-After is in seconds } const retryTimeout = retryAfterHeader > 0 ? 
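/*
 * Illustrative note (comment only): the surrounding ternary picks the wait
 * before the next attempt. A usable Retry-After header wins (a numeric value
 * is taken as seconds, an HTTP-date becomes a millisecond delta), otherwise
 * exponential backoff applies; either way the result is capped at maxTimeout.
 * Hypothetical numbers with the defaults (timeout 500 ms, timeoutFactor 2,
 * maxTimeout 30000 ms):
 *
 *   Retry-After: 2                               -> 2 * 1e3 = 2000 ms
 *   no header, counter 0, currentTimeout 500     -> min(500 * 2 ** 0, 30000) = 500 ms
 *   no header, counter 3, currentTimeout still 500 -> min(500 * 2 ** 3, 30000) = 4000 ms
 */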
Math.min(retryAfterHeader, maxTimeout) : Math.min(currentTimeout * timeoutFactor ** counter, maxTimeout) state.currentTimeout = retryTimeout setTimeout(() => cb(null), retryTimeout) } onHeaders (statusCode, rawHeaders, resume, statusMessage) { const headers = parseHeaders(rawHeaders) this.retryCount += 1 if (statusCode >= 300) { this.abort( new RequestRetryError('Request failed', statusCode, { headers, count: this.retryCount }) ) return false } // Checkpoint for resume from where we left it if (this.resume != null) { this.resume = null if (statusCode !== 206) { return true } const contentRange = parseRangeHeader(headers['content-range']) // If no content range if (!contentRange) { this.abort( new RequestRetryError('Content-Range mismatch', statusCode, { headers, count: this.retryCount }) ) return false } // Let's start with a weak etag check if (this.etag != null && this.etag !== headers.etag) { this.abort( new RequestRetryError('ETag mismatch', statusCode, { headers, count: this.retryCount }) ) return false } const { start, size, end = size } = contentRange assert(this.start === start, 'content-range mismatch') assert(this.end == null || this.end === end, 'content-range mismatch') this.resume = resume return true } if (this.end == null) { if (statusCode === 206) { // First time we receive 206 const range = parseRangeHeader(headers['content-range']) if (range == null) { return this.handler.onHeaders( statusCode, rawHeaders, resume, statusMessage ) } const { start, size, end = size } = range assert( start != null && Number.isFinite(start) && this.start !== start, 'content-range mismatch' ) assert(Number.isFinite(start)) assert( end != null && Number.isFinite(end) && this.end !== end, 'invalid content-length' ) this.start = start this.end = end } // We make our best to checkpoint the body for further range headers if (this.end == null) { const contentLength = headers['content-length'] this.end = contentLength != null ? Number(contentLength) : null } assert(Number.isFinite(this.start)) assert( this.end == null || Number.isFinite(this.end), 'invalid content-length' ) this.resume = resume this.etag = headers.etag != null ? headers.etag : null return this.handler.onHeaders( statusCode, rawHeaders, resume, statusMessage ) } const err = new RequestRetryError('Request failed', statusCode, { headers, count: this.retryCount }) this.abort(err) return false } onData (chunk) { this.start += chunk.length return this.handler.onData(chunk) } onComplete (rawTrailers) { this.retryCount = 0 return this.handler.onComplete(rawTrailers) } onError (err) { if (this.aborted || isDisturbed(this.opts.body)) { return this.handler.onError(err) } this.retryOpts.retry( err, { state: { counter: this.retryCount++, currentTimeout: this.retryAfter }, opts: { retryOptions: this.retryOpts, ...this.opts } }, onRetry.bind(this) ) function onRetry (err) { if (err != null || this.aborted || isDisturbed(this.opts.body)) { return this.handler.onError(err) } if (this.start !== 0) { this.opts = { ...this.opts, headers: { ...this.opts.headers, range: `bytes=${this.start}-${this.end ?? 
''}` } } } try { this.dispatch(this.opts, this) } catch (err) { this.handler.onError(err) } } } } module.exports = RetryHandler /***/ }), /***/ 64415: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const RedirectHandler = __nccwpck_require__(48299) function createRedirectInterceptor ({ maxRedirections: defaultMaxRedirections }) { return (dispatch) => { return function Intercept (opts, handler) { const { maxRedirections = defaultMaxRedirections } = opts if (!maxRedirections) { return dispatch(opts, handler) } const redirectHandler = new RedirectHandler(dispatch, maxRedirections, opts, handler) opts = { ...opts, maxRedirections: 0 } // Stop sub dispatcher from also redirecting. return dispatch(opts, redirectHandler) } } } module.exports = createRedirectInterceptor /***/ }), /***/ 52824: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.SPECIAL_HEADERS = exports.HEADER_STATE = exports.MINOR = exports.MAJOR = exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS = exports.TOKEN = exports.STRICT_TOKEN = exports.HEX = exports.URL_CHAR = exports.STRICT_URL_CHAR = exports.USERINFO_CHARS = exports.MARK = exports.ALPHANUM = exports.NUM = exports.HEX_MAP = exports.NUM_MAP = exports.ALPHA = exports.FINISH = exports.H_METHOD_MAP = exports.METHOD_MAP = exports.METHODS_RTSP = exports.METHODS_ICE = exports.METHODS_HTTP = exports.METHODS = exports.LENIENT_FLAGS = exports.FLAGS = exports.TYPE = exports.ERROR = void 0; const utils_1 = __nccwpck_require__(50172); // C headers var ERROR; (function (ERROR) { ERROR[ERROR["OK"] = 0] = "OK"; ERROR[ERROR["INTERNAL"] = 1] = "INTERNAL"; ERROR[ERROR["STRICT"] = 2] = "STRICT"; ERROR[ERROR["LF_EXPECTED"] = 3] = "LF_EXPECTED"; ERROR[ERROR["UNEXPECTED_CONTENT_LENGTH"] = 4] = "UNEXPECTED_CONTENT_LENGTH"; ERROR[ERROR["CLOSED_CONNECTION"] = 5] = "CLOSED_CONNECTION"; ERROR[ERROR["INVALID_METHOD"] = 6] = "INVALID_METHOD"; ERROR[ERROR["INVALID_URL"] = 7] = "INVALID_URL"; ERROR[ERROR["INVALID_CONSTANT"] = 8] = "INVALID_CONSTANT"; ERROR[ERROR["INVALID_VERSION"] = 9] = "INVALID_VERSION"; ERROR[ERROR["INVALID_HEADER_TOKEN"] = 10] = "INVALID_HEADER_TOKEN"; ERROR[ERROR["INVALID_CONTENT_LENGTH"] = 11] = "INVALID_CONTENT_LENGTH"; ERROR[ERROR["INVALID_CHUNK_SIZE"] = 12] = "INVALID_CHUNK_SIZE"; ERROR[ERROR["INVALID_STATUS"] = 13] = "INVALID_STATUS"; ERROR[ERROR["INVALID_EOF_STATE"] = 14] = "INVALID_EOF_STATE"; ERROR[ERROR["INVALID_TRANSFER_ENCODING"] = 15] = "INVALID_TRANSFER_ENCODING"; ERROR[ERROR["CB_MESSAGE_BEGIN"] = 16] = "CB_MESSAGE_BEGIN"; ERROR[ERROR["CB_HEADERS_COMPLETE"] = 17] = "CB_HEADERS_COMPLETE"; ERROR[ERROR["CB_MESSAGE_COMPLETE"] = 18] = "CB_MESSAGE_COMPLETE"; ERROR[ERROR["CB_CHUNK_HEADER"] = 19] = "CB_CHUNK_HEADER"; ERROR[ERROR["CB_CHUNK_COMPLETE"] = 20] = "CB_CHUNK_COMPLETE"; ERROR[ERROR["PAUSED"] = 21] = "PAUSED"; ERROR[ERROR["PAUSED_UPGRADE"] = 22] = "PAUSED_UPGRADE"; ERROR[ERROR["PAUSED_H2_UPGRADE"] = 23] = "PAUSED_H2_UPGRADE"; ERROR[ERROR["USER"] = 24] = "USER"; })(ERROR = exports.ERROR || (exports.ERROR = {})); var TYPE; (function (TYPE) { TYPE[TYPE["BOTH"] = 0] = "BOTH"; TYPE[TYPE["REQUEST"] = 1] = "REQUEST"; TYPE[TYPE["RESPONSE"] = 2] = "RESPONSE"; })(TYPE = exports.TYPE || (exports.TYPE = {})); var FLAGS; (function (FLAGS) { FLAGS[FLAGS["CONNECTION_KEEP_ALIVE"] = 1] = "CONNECTION_KEEP_ALIVE"; FLAGS[FLAGS["CONNECTION_CLOSE"] = 2] = "CONNECTION_CLOSE"; FLAGS[FLAGS["CONNECTION_UPGRADE"] = 4] = "CONNECTION_UPGRADE"; 
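/*
 * Illustrative note (comment only): these IIFE blocks are TypeScript numeric
 * enums compiled to plain JS. The `E[E["NAME"] = n] = "NAME"` idiom writes the
 * forward and the reverse mapping in one statement, and the FLAGS values are
 * powers of two so they can be combined and tested as a bitmask. Hypothetical
 * checks:
 *
 *   ERROR.PAUSED     // => 21
 *   ERROR[21]        // => 'PAUSED'
 *   const f = FLAGS.CONNECTION_KEEP_ALIVE | FLAGS.CHUNKED  // => 1 | 8 = 9
 *   (f & FLAGS.CHUNKED) !== 0                              // => true
 */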
FLAGS[FLAGS["CHUNKED"] = 8] = "CHUNKED"; FLAGS[FLAGS["UPGRADE"] = 16] = "UPGRADE"; FLAGS[FLAGS["CONTENT_LENGTH"] = 32] = "CONTENT_LENGTH"; FLAGS[FLAGS["SKIPBODY"] = 64] = "SKIPBODY"; FLAGS[FLAGS["TRAILING"] = 128] = "TRAILING"; // 1 << 8 is unused FLAGS[FLAGS["TRANSFER_ENCODING"] = 512] = "TRANSFER_ENCODING"; })(FLAGS = exports.FLAGS || (exports.FLAGS = {})); var LENIENT_FLAGS; (function (LENIENT_FLAGS) { LENIENT_FLAGS[LENIENT_FLAGS["HEADERS"] = 1] = "HEADERS"; LENIENT_FLAGS[LENIENT_FLAGS["CHUNKED_LENGTH"] = 2] = "CHUNKED_LENGTH"; LENIENT_FLAGS[LENIENT_FLAGS["KEEP_ALIVE"] = 4] = "KEEP_ALIVE"; })(LENIENT_FLAGS = exports.LENIENT_FLAGS || (exports.LENIENT_FLAGS = {})); var METHODS; (function (METHODS) { METHODS[METHODS["DELETE"] = 0] = "DELETE"; METHODS[METHODS["GET"] = 1] = "GET"; METHODS[METHODS["HEAD"] = 2] = "HEAD"; METHODS[METHODS["POST"] = 3] = "POST"; METHODS[METHODS["PUT"] = 4] = "PUT"; /* pathological */ METHODS[METHODS["CONNECT"] = 5] = "CONNECT"; METHODS[METHODS["OPTIONS"] = 6] = "OPTIONS"; METHODS[METHODS["TRACE"] = 7] = "TRACE"; /* WebDAV */ METHODS[METHODS["COPY"] = 8] = "COPY"; METHODS[METHODS["LOCK"] = 9] = "LOCK"; METHODS[METHODS["MKCOL"] = 10] = "MKCOL"; METHODS[METHODS["MOVE"] = 11] = "MOVE"; METHODS[METHODS["PROPFIND"] = 12] = "PROPFIND"; METHODS[METHODS["PROPPATCH"] = 13] = "PROPPATCH"; METHODS[METHODS["SEARCH"] = 14] = "SEARCH"; METHODS[METHODS["UNLOCK"] = 15] = "UNLOCK"; METHODS[METHODS["BIND"] = 16] = "BIND"; METHODS[METHODS["REBIND"] = 17] = "REBIND"; METHODS[METHODS["UNBIND"] = 18] = "UNBIND"; METHODS[METHODS["ACL"] = 19] = "ACL"; /* subversion */ METHODS[METHODS["REPORT"] = 20] = "REPORT"; METHODS[METHODS["MKACTIVITY"] = 21] = "MKACTIVITY"; METHODS[METHODS["CHECKOUT"] = 22] = "CHECKOUT"; METHODS[METHODS["MERGE"] = 23] = "MERGE"; /* upnp */ METHODS[METHODS["M-SEARCH"] = 24] = "M-SEARCH"; METHODS[METHODS["NOTIFY"] = 25] = "NOTIFY"; METHODS[METHODS["SUBSCRIBE"] = 26] = "SUBSCRIBE"; METHODS[METHODS["UNSUBSCRIBE"] = 27] = "UNSUBSCRIBE"; /* RFC-5789 */ METHODS[METHODS["PATCH"] = 28] = "PATCH"; METHODS[METHODS["PURGE"] = 29] = "PURGE"; /* CalDAV */ METHODS[METHODS["MKCALENDAR"] = 30] = "MKCALENDAR"; /* RFC-2068, section 19.6.1.2 */ METHODS[METHODS["LINK"] = 31] = "LINK"; METHODS[METHODS["UNLINK"] = 32] = "UNLINK"; /* icecast */ METHODS[METHODS["SOURCE"] = 33] = "SOURCE"; /* RFC-7540, section 11.6 */ METHODS[METHODS["PRI"] = 34] = "PRI"; /* RFC-2326 RTSP */ METHODS[METHODS["DESCRIBE"] = 35] = "DESCRIBE"; METHODS[METHODS["ANNOUNCE"] = 36] = "ANNOUNCE"; METHODS[METHODS["SETUP"] = 37] = "SETUP"; METHODS[METHODS["PLAY"] = 38] = "PLAY"; METHODS[METHODS["PAUSE"] = 39] = "PAUSE"; METHODS[METHODS["TEARDOWN"] = 40] = "TEARDOWN"; METHODS[METHODS["GET_PARAMETER"] = 41] = "GET_PARAMETER"; METHODS[METHODS["SET_PARAMETER"] = 42] = "SET_PARAMETER"; METHODS[METHODS["REDIRECT"] = 43] = "REDIRECT"; METHODS[METHODS["RECORD"] = 44] = "RECORD"; /* RAOP */ METHODS[METHODS["FLUSH"] = 45] = "FLUSH"; })(METHODS = exports.METHODS || (exports.METHODS = {})); exports.METHODS_HTTP = [ METHODS.DELETE, METHODS.GET, METHODS.HEAD, METHODS.POST, METHODS.PUT, METHODS.CONNECT, METHODS.OPTIONS, METHODS.TRACE, METHODS.COPY, METHODS.LOCK, METHODS.MKCOL, METHODS.MOVE, METHODS.PROPFIND, METHODS.PROPPATCH, METHODS.SEARCH, METHODS.UNLOCK, METHODS.BIND, METHODS.REBIND, METHODS.UNBIND, METHODS.ACL, METHODS.REPORT, METHODS.MKACTIVITY, METHODS.CHECKOUT, METHODS.MERGE, METHODS['M-SEARCH'], METHODS.NOTIFY, METHODS.SUBSCRIBE, METHODS.UNSUBSCRIBE, METHODS.PATCH, METHODS.PURGE, METHODS.MKCALENDAR, METHODS.LINK, 
METHODS.UNLINK, METHODS.PRI, // TODO(indutny): should we allow it with HTTP? METHODS.SOURCE, ]; exports.METHODS_ICE = [ METHODS.SOURCE, ]; exports.METHODS_RTSP = [ METHODS.OPTIONS, METHODS.DESCRIBE, METHODS.ANNOUNCE, METHODS.SETUP, METHODS.PLAY, METHODS.PAUSE, METHODS.TEARDOWN, METHODS.GET_PARAMETER, METHODS.SET_PARAMETER, METHODS.REDIRECT, METHODS.RECORD, METHODS.FLUSH, // For AirPlay METHODS.GET, METHODS.POST, ]; exports.METHOD_MAP = utils_1.enumToMap(METHODS); exports.H_METHOD_MAP = {}; Object.keys(exports.METHOD_MAP).forEach((key) => { if (/^H/.test(key)) { exports.H_METHOD_MAP[key] = exports.METHOD_MAP[key]; } }); var FINISH; (function (FINISH) { FINISH[FINISH["SAFE"] = 0] = "SAFE"; FINISH[FINISH["SAFE_WITH_CB"] = 1] = "SAFE_WITH_CB"; FINISH[FINISH["UNSAFE"] = 2] = "UNSAFE"; })(FINISH = exports.FINISH || (exports.FINISH = {})); exports.ALPHA = []; for (let i = 'A'.charCodeAt(0); i <= 'Z'.charCodeAt(0); i++) { // Upper case exports.ALPHA.push(String.fromCharCode(i)); // Lower case exports.ALPHA.push(String.fromCharCode(i + 0x20)); } exports.NUM_MAP = { 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, }; exports.HEX_MAP = { 0: 0, 1: 1, 2: 2, 3: 3, 4: 4, 5: 5, 6: 6, 7: 7, 8: 8, 9: 9, A: 0XA, B: 0XB, C: 0XC, D: 0XD, E: 0XE, F: 0XF, a: 0xa, b: 0xb, c: 0xc, d: 0xd, e: 0xe, f: 0xf, }; exports.NUM = [ '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ]; exports.ALPHANUM = exports.ALPHA.concat(exports.NUM); exports.MARK = ['-', '_', '.', '!', '~', '*', '\'', '(', ')']; exports.USERINFO_CHARS = exports.ALPHANUM .concat(exports.MARK) .concat(['%', ';', ':', '&', '=', '+', '$', ',']); // TODO(indutny): use RFC exports.STRICT_URL_CHAR = [ '!', '"', '$', '%', '&', '\'', '(', ')', '*', '+', ',', '-', '.', '/', ':', ';', '<', '=', '>', '@', '[', '\\', ']', '^', '_', '`', '{', '|', '}', '~', ].concat(exports.ALPHANUM); exports.URL_CHAR = exports.STRICT_URL_CHAR .concat(['\t', '\f']); // All characters with 0x80 bit set to 1 for (let i = 0x80; i <= 0xff; i++) { exports.URL_CHAR.push(i); } exports.HEX = exports.NUM.concat(['a', 'b', 'c', 'd', 'e', 'f', 'A', 'B', 'C', 'D', 'E', 'F']); /* Tokens as defined by rfc 2616. Also lowercases them. * token = 1* * separators = "(" | ")" | "<" | ">" | "@" * | "," | ";" | ":" | "\" | <"> * | "/" | "[" | "]" | "?" 
| "=" * | "{" | "}" | SP | HT */ exports.STRICT_TOKEN = [ '!', '#', '$', '%', '&', '\'', '*', '+', '-', '.', '^', '_', '`', '|', '~', ].concat(exports.ALPHANUM); exports.TOKEN = exports.STRICT_TOKEN.concat([' ']); /* * Verify that a char is a valid visible (printable) US-ASCII * character or %x80-FF */ exports.HEADER_CHARS = ['\t']; for (let i = 32; i <= 255; i++) { if (i !== 127) { exports.HEADER_CHARS.push(i); } } // ',' = \x44 exports.CONNECTION_TOKEN_CHARS = exports.HEADER_CHARS.filter((c) => c !== 44); exports.MAJOR = exports.NUM_MAP; exports.MINOR = exports.MAJOR; var HEADER_STATE; (function (HEADER_STATE) { HEADER_STATE[HEADER_STATE["GENERAL"] = 0] = "GENERAL"; HEADER_STATE[HEADER_STATE["CONNECTION"] = 1] = "CONNECTION"; HEADER_STATE[HEADER_STATE["CONTENT_LENGTH"] = 2] = "CONTENT_LENGTH"; HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING"] = 3] = "TRANSFER_ENCODING"; HEADER_STATE[HEADER_STATE["UPGRADE"] = 4] = "UPGRADE"; HEADER_STATE[HEADER_STATE["CONNECTION_KEEP_ALIVE"] = 5] = "CONNECTION_KEEP_ALIVE"; HEADER_STATE[HEADER_STATE["CONNECTION_CLOSE"] = 6] = "CONNECTION_CLOSE"; HEADER_STATE[HEADER_STATE["CONNECTION_UPGRADE"] = 7] = "CONNECTION_UPGRADE"; HEADER_STATE[HEADER_STATE["TRANSFER_ENCODING_CHUNKED"] = 8] = "TRANSFER_ENCODING_CHUNKED"; })(HEADER_STATE = exports.HEADER_STATE || (exports.HEADER_STATE = {})); exports.SPECIAL_HEADERS = { 'connection': HEADER_STATE.CONNECTION, 'content-length': HEADER_STATE.CONTENT_LENGTH, 'proxy-connection': HEADER_STATE.CONNECTION, 'transfer-encoding': HEADER_STATE.TRANSFER_ENCODING, 'upgrade': HEADER_STATE.UPGRADE, }; //# sourceMappingURL=constants.js.map /***/ }), /***/ 63870: /***/ ((module) => { module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCsLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQA
JAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAI
QMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC1kAIABBGGpCADcDACAAQgA3AwAgAEE4akIANwMAIABBMGpCADcDACAAQShqQgA3AwAgAEEgakIANwMAIABBEGpCADcDACAAQQhqQgA3AwAgAEHdATYCHEEAC3sBAX8CQCAAKAIMIgMNAAJAIAAoAgRFDQAgACABNgIECwJAIAAgASACEMSAgIAAIgMNACAAKAIMDwsgACADNgIcQQAhAyAAKAIEIgFFDQAgACABIAIgACgCCBGBgICAAAAiAUUNACAAIAI2AhQgACABNgIMIAEhAwsgAwvk8wEDDn8DfgR/I4CAgIAAQRBrIgMkgICAgAAgASEEIAEhBSABIQYgASEHIAEhCCABIQkgASEKIAEhCyABIQwgASENIAEhDiABIQ8CQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgACgCHCIQQX9qDt0B2gEB2QECAwQFBgcICQoLDA0O2AEPENcBERLWARMUFRYXGBkaG+AB3wEcHR7VAR8gISIjJCXUASYnKCkqKyzTAdIBLS7RAdABLzAxMjM0NTY3ODk6Ozw9Pj9AQUJDREVG2wFHSElKzwHOAUvNAUzMAU1OT1BRUlNUVVZXWFlaW1xdXl9gYWJjZGVmZ2hpamtsbW5vcHFyc3R1dnd4eXp7fH1+f4ABgQGCAYMBhAGFAYYBhwGIAYkBigGLAYwBjQGOAY8BkAGRAZIBkwGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwHLAcoBuAHJAbkByAG6AbsBvAG9Ab4BvwHAAcEBwgHDAcQBxQHGAQDcAQtBACEQDMYBC0EOIRAMxQELQQ0hEAzEAQtBDyEQDMMBC0EQIRAMwgELQRMhEAzBAQtBFCEQDMABC0EVIRAMvwELQRYhEAy+AQtBFyEQDL0BC0EYIRAMvAELQRkhEAy7AQtBGiEQDLoBC0EbIRAMuQELQRwhEAy4AQtBCCEQDLcBC0EdIRAMtgELQSAhEAy1AQtBHyEQDLQBC0EHIRAMswELQSEhEAyyAQtBIiEQDLEBC0EeIRAMsAELQSMhEAyvAQtBEiEQDK4BC0ERIRAMrQELQSQhEAysAQtBJSEQDKsBC0EmIRAMqgELQSchEAypAQtBwwEhEAyoAQtBKSEQDKcBC0ErIRAMpgELQSwhEAylAQtBLSEQDKQBC0EuIRAMowELQS8hEAyiAQtBxAEhEAyh
AQtBMCEQDKABC0E0IRAMnwELQQwhEAyeAQtBMSEQDJ0BC0EyIRAMnAELQTMhEAybAQtBOSEQDJoBC0E1IRAMmQELQcUBIRAMmAELQQshEAyXAQtBOiEQDJYBC0E2IRAMlQELQQohEAyUAQtBNyEQDJMBC0E4IRAMkgELQTwhEAyRAQtBOyEQDJABC0E9IRAMjwELQQkhEAyOAQtBKCEQDI0BC0E+IRAMjAELQT8hEAyLAQtBwAAhEAyKAQtBwQAhEAyJAQtBwgAhEAyIAQtBwwAhEAyHAQtBxAAhEAyGAQtBxQAhEAyFAQtBxgAhEAyEAQtBKiEQDIMBC0HHACEQDIIBC0HIACEQDIEBC0HJACEQDIABC0HKACEQDH8LQcsAIRAMfgtBzQAhEAx9C0HMACEQDHwLQc4AIRAMewtBzwAhEAx6C0HQACEQDHkLQdEAIRAMeAtB0gAhEAx3C0HTACEQDHYLQdQAIRAMdQtB1gAhEAx0C0HVACEQDHMLQQYhEAxyC0HXACEQDHELQQUhEAxwC0HYACEQDG8LQQQhEAxuC0HZACEQDG0LQdoAIRAMbAtB2wAhEAxrC0HcACEQDGoLQQMhEAxpC0HdACEQDGgLQd4AIRAMZwtB3wAhEAxmC0HhACEQDGULQeAAIRAMZAtB4gAhEAxjC0HjACEQDGILQQIhEAxhC0HkACEQDGALQeUAIRAMXwtB5gAhEAxeC0HnACEQDF0LQegAIRAMXAtB6QAhEAxbC0HqACEQDFoLQesAIRAMWQtB7AAhEAxYC0HtACEQDFcLQe4AIRAMVgtB7wAhEAxVC0HwACEQDFQLQfEAIRAMUwtB8gAhEAxSC0HzACEQDFELQfQAIRAMUAtB9QAhEAxPC0H2ACEQDE4LQfcAIRAMTQtB+AAhEAxMC0H5ACEQDEsLQfoAIRAMSgtB+wAhEAxJC0H8ACEQDEgLQf0AIRAMRwtB/gAhEAxGC0H/ACEQDEULQYABIRAMRAtBgQEhEAxDC0GCASEQDEILQYMBIRAMQQtBhAEhEAxAC0GFASEQDD8LQYYBIRAMPgtBhwEhEAw9C0GIASEQDDwLQYkBIRAMOwtBigEhEAw6C0GLASEQDDkLQYwBIRAMOAtBjQEhEAw3C0GOASEQDDYLQY8BIRAMNQtBkAEhEAw0C0GRASEQDDMLQZIBIRAMMgtBkwEhEAwxC0GUASEQDDALQZUBIRAMLwtBlgEhEAwuC0GXASEQDC0LQZgBIRAMLAtBmQEhEAwrC0GaASEQDCoLQZsBIRAMKQtBnAEhEAwoC0GdASEQDCcLQZ4BIRAMJgtBnwEhEAwlC0GgASEQDCQLQaEBIRAMIwtBogEhEAwiC0GjASEQDCELQaQBIRAMIAtBpQEhEAwfC0GmASEQDB4LQacBIRAMHQtBqAEhEAwcC0GpASEQDBsLQaoBIRAMGgtBqwEhEAwZC0GsASEQDBgLQa0BIRAMFwtBrgEhEAwWC0EBIRAMFQtBrwEhEAwUC0GwASEQDBMLQbEBIRAMEgtBswEhEAwRC0GyASEQDBALQbQBIRAMDwtBtQEhEAwOC0G2ASEQDA0LQbcBIRAMDAtBuAEhEAwLC0G5ASEQDAoLQboBIRAMCQtBuwEhEAwIC0HGASEQDAcLQbwBIRAMBgtBvQEhEAwFC0G+ASEQDAQLQb8BIRAMAwtBwAEhEAwCC0HCASEQDAELQcEBIRALA0ACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQDscBAAECAwQFBgcICQoLDA0ODxAREhMUFRYXGBkaGxweHyAhIyUoP0BBREVGR0hJSktMTU9QUVJT3gNXWVtcXWBiZWZnaGlqa2xtb3BxcnN0dXZ3eHl6e3x9foABggGFAYYBhwGJAYsBjAGNAY4BjwGQAZEBlAGVAZYBlwGYAZkBmgGbAZwBnQGeAZ8BoAGhAaIBowGkAaUBpgGnAagBqQGqAasBrAGtAa4BrwGwAbEBsgGzAbQBtQG2AbcBuAG5AboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBxwHIAckBygHLAcwBzQHOAc8B0AHRAdIB0wHUAdUB1gHXAdgB2QHaAdsB3AHdAd4B4AHhAeIB4wHkAeUB5gHnAegB6QHqAesB7AHtAe4B7wHwAfEB8gHzAZkCpAKwAv4C/gILIAEiBCACRw3zAUHdASEQDP8DCyABIhAgAkcN3QFBwwEhEAz+AwsgASIBIAJHDZABQfcAIRAM/QMLIAEiASACRw2GAUHvACEQDPwDCyA
BIgEgAkcNf0HqACEQDPsDCyABIgEgAkcNe0HoACEQDPoDCyABIgEgAkcNeEHmACEQDPkDCyABIgEgAkcNGkEYIRAM+AMLIAEiASACRw0UQRIhEAz3AwsgASIBIAJHDVlBxQAhEAz2AwsgASIBIAJHDUpBPyEQDPUDCyABIgEgAkcNSEE8IRAM9AMLIAEiASACRw1BQTEhEAzzAwsgAC0ALkEBRg3rAwyHAgsgACABIgEgAhDAgICAAEEBRw3mASAAQgA3AyAM5wELIAAgASIBIAIQtICAgAAiEA3nASABIQEM9QILAkAgASIBIAJHDQBBBiEQDPADCyAAIAFBAWoiASACELuAgIAAIhAN6AEgASEBDDELIABCADcDIEESIRAM1QMLIAEiECACRw0rQR0hEAztAwsCQCABIgEgAkYNACABQQFqIQFBECEQDNQDC0EHIRAM7AMLIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN5QFBCCEQDOsDCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEUIRAM0gMLQQkhEAzqAwsgASEBIAApAyBQDeQBIAEhAQzyAgsCQCABIgEgAkcNAEELIRAM6QMLIAAgAUEBaiIBIAIQtoCAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3lASABIQEM8gILIAAgASIBIAIQuICAgAAiEA3mASABIQEMDQsgACABIgEgAhC6gICAACIQDecBIAEhAQzwAgsCQCABIgEgAkcNAEEPIRAM5QMLIAEtAAAiEEE7Rg0IIBBBDUcN6AEgAUEBaiEBDO8CCyAAIAEiASACELqAgIAAIhAN6AEgASEBDPICCwNAAkAgAS0AAEHwtYCAAGotAAAiEEEBRg0AIBBBAkcN6wEgACgCBCEQIABBADYCBCAAIBAgAUEBaiIBELmAgIAAIhAN6gEgASEBDPQCCyABQQFqIgEgAkcNAAtBEiEQDOIDCyAAIAEiASACELqAgIAAIhAN6QEgASEBDAoLIAEiASACRw0GQRshEAzgAwsCQCABIgEgAkcNAEEWIRAM4AMLIABBioCAgAA2AgggACABNgIEIAAgASACELiAgIAAIhAN6gEgASEBQSAhEAzGAwsCQCABIgEgAkYNAANAAkAgAS0AAEHwt4CAAGotAAAiEEECRg0AAkAgEEF/ag4E5QHsAQDrAewBCyABQQFqIQFBCCEQDMgDCyABQQFqIgEgAkcNAAtBFSEQDN8DC0EVIRAM3gMLA0ACQCABLQAAQfC5gIAAai0AACIQQQJGDQAgEEF/ag4E3gHsAeAB6wHsAQsgAUEBaiIBIAJHDQALQRghEAzdAwsCQCABIgEgAkYNACAAQYuAgIAANgIIIAAgATYCBCABIQFBByEQDMQDC0EZIRAM3AMLIAFBAWohAQwCCwJAIAEiFCACRw0AQRohEAzbAwsgFCEBAkAgFC0AAEFzag4U3QLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gIA7gILQQAhECAAQQA2AhwgAEGvi4CAADYCECAAQQI2AgwgACAUQQFqNgIUDNoDCwJAIAEtAAAiEEE7Rg0AIBBBDUcN6AEgAUEBaiEBDOUCCyABQQFqIQELQSIhEAy/AwsCQCABIhAgAkcNAEEcIRAM2AMLQgAhESAQIQEgEC0AAEFQag435wHmAQECAwQFBgcIAAAAAAAAAAkKCwwNDgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAADxAREhMUAAtBHiEQDL0DC0ICIREM5QELQgMhEQzkAQtCBCERDOMBC0IFIREM4gELQgYhEQzhAQtCByERDOABC0IIIREM3wELQgkhEQzeAQtCCiERDN0BC0ILIREM3AELQgwhEQzbAQtCDSERDNoBC0IOIREM2QELQg8hEQzYAQtCCiERDNcBC0ILIREM1gELQgwhEQzVAQtCDSERDNQBC0IOIREM0wELQg8hEQzSAQtCACERAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAQLQAAQVBqDjflAeQBAAECAwQFBgfmAeYB5gHmAeYB5gHmAQgJCgsMDeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gEODxAREhPmAQtCAiERDOQBC0IDIREM4wELQgQhEQziAQtCBSERDOEBC0IGIREM4AELQgchEQzfAQtCCCERDN4BC0IJIREM3QELQgohEQzcAQtCCyERDNsBC0IMIREM2gELQg0hEQzZAQtCDiERDNgBC0IPIREM1wELQgohEQzWAQtCCyERDNUBC0IMIREM1AELQg0hEQzTAQtCDiERDNIBC0IPIREM0QELIABCACAAKQMgIhEgAiABIhBrrSISfSITIBMgEVYbNwMgIBEgElYiFEUN0gFBHyEQDMADCwJAIAEiASACRg0AIABBiYCAgAA2AgggACABNgIEIAEhAUEkIRAMpwMLQSAhEAy/AwsgACABIhAgAhC+gICAAEF/ag4FtgEAxQIB0QHSAQtBESEQDKQDCyAAQQE6AC8gECEBDLsDCyABIgEgAkcN0gFBJCEQDLsDCyABIg0gAkcNHkHGACEQDLoDCyAAIAEiASACELKAgIAAIhAN1AEgASEBDLUBCyABIhAgAkcNJkHQACEQDLgDCwJAIAEiASACRw0AQSghEAy4AwsgAEEANgIEIABBjICAgAA2AgggACABIAEQsYCAgAAiEA3TASABIQEM2AELAkAgASIQIAJHDQBBKSEQDLcDCyAQLQAAIgFBIEYNFCABQQlHDdMBIBBBAWohAQwVCwJAIAEiASACRg0AIAFBAWohAQwXC0EqIRAMtQMLAkAgASIQIAJHDQBBKyEQDLUDCwJAIBAtAAAiAUEJRg0AIAFBIEcN1QELIAAtACxBCEYN0wEgECEBDJEDCwJAIAEiASACRw0AQSwhEAy0AwsgAS0AAEEKRw3VASABQQFqIQEMyQILIAEiDiACRw3VAUEvIRAMsgMLA0ACQCABLQAAIhBBIEYNAAJAIBBBdmoOBADcAdwBANoBCyABIQEM4AELIAFBAWoiASACRw0AC0ExIRAMsQMLQTIhECABIhQgAkYNsAMgAiAUayAAKAIAIgFqIRUgFCABa0EDaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfC7gIAAai0AAEcNAQJAIAFBA0cNAEEGIQEMlgMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLEDCyAAQQA2AgAgFCEBDNkBC0EzIRAgASIUIAJGDa8DIAIgFGsgACgCACIBaiEVIBQgAWtBCGohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUH0u4CAAGotAABHDQECQCABQQhHDQBBBSEBDJUDCyABQQFqIQEgFE
EBaiIUIAJHDQALIAAgFTYCAAywAwsgAEEANgIAIBQhAQzYAQtBNCEQIAEiFCACRg2uAyACIBRrIAAoAgAiAWohFSAUIAFrQQVqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw0BAkAgAUEFRw0AQQchAQyUAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMrwMLIABBADYCACAUIQEM1wELAkAgASIBIAJGDQADQAJAIAEtAABBgL6AgABqLQAAIhBBAUYNACAQQQJGDQogASEBDN0BCyABQQFqIgEgAkcNAAtBMCEQDK4DC0EwIRAMrQMLAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AIBBBdmoOBNkB2gHaAdkB2gELIAFBAWoiASACRw0AC0E4IRAMrQMLQTghEAysAwsDQAJAIAEtAAAiEEEgRg0AIBBBCUcNAwsgAUEBaiIBIAJHDQALQTwhEAyrAwsDQAJAIAEtAAAiEEEgRg0AAkACQCAQQXZqDgTaAQEB2gEACyAQQSxGDdsBCyABIQEMBAsgAUEBaiIBIAJHDQALQT8hEAyqAwsgASEBDNsBC0HAACEQIAEiFCACRg2oAyACIBRrIAAoAgAiAWohFiAUIAFrQQZqIRcCQANAIBQtAABBIHIgAUGAwICAAGotAABHDQEgAUEGRg2OAyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAypAwsgAEEANgIAIBQhAQtBNiEQDI4DCwJAIAEiDyACRw0AQcEAIRAMpwMLIABBjICAgAA2AgggACAPNgIEIA8hASAALQAsQX9qDgTNAdUB1wHZAYcDCyABQQFqIQEMzAELAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgciAQIBBBv39qQf8BcUEaSRtB/wFxIhBBCUYNACAQQSBGDQACQAJAAkACQCAQQZ1/ag4TAAMDAwMDAwMBAwMDAwMDAwMDAgMLIAFBAWohAUExIRAMkQMLIAFBAWohAUEyIRAMkAMLIAFBAWohAUEzIRAMjwMLIAEhAQzQAQsgAUEBaiIBIAJHDQALQTUhEAylAwtBNSEQDKQDCwJAIAEiASACRg0AA0ACQCABLQAAQYC8gIAAai0AAEEBRg0AIAEhAQzTAQsgAUEBaiIBIAJHDQALQT0hEAykAwtBPSEQDKMDCyAAIAEiASACELCAgIAAIhAN1gEgASEBDAELIBBBAWohAQtBPCEQDIcDCwJAIAEiASACRw0AQcIAIRAMoAMLAkADQAJAIAEtAABBd2oOGAAC/gL+AoQD/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4CAP4CCyABQQFqIgEgAkcNAAtBwgAhEAygAwsgAUEBaiEBIAAtAC1BAXFFDb0BIAEhAQtBLCEQDIUDCyABIgEgAkcN0wFBxAAhEAydAwsDQAJAIAEtAABBkMCAgABqLQAAQQFGDQAgASEBDLcCCyABQQFqIgEgAkcNAAtBxQAhEAycAwsgDS0AACIQQSBGDbMBIBBBOkcNgQMgACgCBCEBIABBADYCBCAAIAEgDRCvgICAACIBDdABIA1BAWohAQyzAgtBxwAhECABIg0gAkYNmgMgAiANayAAKAIAIgFqIRYgDSABa0EFaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGQwoCAAGotAABHDYADIAFBBUYN9AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmgMLQcgAIRAgASINIAJGDZkDIAIgDWsgACgCACIBaiEWIA0gAWtBCWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBlsKAgABqLQAARw3/AgJAIAFBCUcNAEECIQEM9QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJkDCwJAIAEiDSACRw0AQckAIRAMmQMLAkACQCANLQAAIgFBIHIgASABQb9/akH/AXFBGkkbQf8BcUGSf2oOBwCAA4ADgAOAA4ADAYADCyANQQFqIQFBPiEQDIADCyANQQFqIQFBPyEQDP8CC0HKACEQIAEiDSACRg2XAyACIA1rIAAoAgAiAWohFiANIAFrQQFqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaDCgIAAai0AAEcN/QIgAUEBRg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyXAwtBywAhECABIg0gAkYNlgMgAiANayAAKAIAIgFqIRYgDSABa0EOaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGiwoCAAGotAABHDfwCIAFBDkYN8AIgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlgMLQcwAIRAgASINIAJGDZUDIAIgDWsgACgCACIBaiEWIA0gAWtBD2ohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBwMKAgABqLQAARw37AgJAIAFBD0cNAEEDIQEM8QILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJUDC0HNACEQIAEiDSACRg2UAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQdDCgIAAai0AAEcN+gICQCABQQVHDQBBBCEBDPACCyABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyUAwsCQCABIg0gAkcNAEHOACEQDJQDCwJAAkACQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZ1/ag4TAP0C/QL9Av0C/QL9Av0C/QL9Av0C/QL9AgH9Av0C/QICA/0CCyANQQFqIQFBwQAhEAz9AgsgDUEBaiEBQcIAIRAM/AILIA1BAWohAUHDACEQDPsCCyANQQFqIQFBxAAhEAz6AgsCQCABIgEgAkYNACAAQY2AgIAANgIIIAAgATYCBCABIQFBxQAhEAz6AgtBzwAhEAySAwsgECEBAkACQCAQLQAAQXZqDgQBqAKoAgCoAgsgEEEBaiEBC0EnIRAM+AILAkAgASIBIAJHDQBB0QAhEAyRAwsCQCABLQAAQSBGDQAgASEBDI0BCyABQQFqIQEgAC0ALUEBcUUNxwEgASEBDIwBCyABIhcgAkcNyAFB0gAhEAyPAwtB0wAhECABIhQgAkYNjgMgAiAUayAAKAIAIgFqIRYgFCABa0EBaiEXA0AgFC0AACABQdbCgIAAai0AAEcNzAEgAUEBRg3HASABQQFqIQEgFEEBaiIUIAJHDQALIAAgFjYCAAyOAwsCQCABIgEgAkcNAEHVACEQDI4DCyABLQAAQQpHDcwBIAFBAWohAQzHAQsCQCABIgEgAkcNAEHWACEQDI0DCwJAAkAgAS0AAEF2ag4EAM0BzQEBzQELIAFBAWohA
QzHAQsgAUEBaiEBQcoAIRAM8wILIAAgASIBIAIQroCAgAAiEA3LASABIQFBzQAhEAzyAgsgAC0AKUEiRg2FAwymAgsCQCABIgEgAkcNAEHbACEQDIoDC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgAS0AAEFQag4K1AHTAQABAgMEBQYI1QELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMzAELQQkhEEEBIRRBACEXQQAhFgzLAQsCQCABIgEgAkcNAEHdACEQDIkDCyABLQAAQS5HDcwBIAFBAWohAQymAgsgASIBIAJHDcwBQd8AIRAMhwMLAkAgASIBIAJGDQAgAEGOgICAADYCCCAAIAE2AgQgASEBQdAAIRAM7gILQeAAIRAMhgMLQeEAIRAgASIBIAJGDYUDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHiwoCAAGotAABHDc0BIBRBA0YNzAEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhQMLQeIAIRAgASIBIAJGDYQDIAIgAWsgACgCACIUaiEWIAEgFGtBAmohFwNAIAEtAAAgFEHmwoCAAGotAABHDcwBIBRBAkYNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMhAMLQeMAIRAgASIBIAJGDYMDIAIgAWsgACgCACIUaiEWIAEgFGtBA2ohFwNAIAEtAAAgFEHpwoCAAGotAABHDcsBIBRBA0YNzgEgFEEBaiEUIAFBAWoiASACRw0ACyAAIBY2AgAMgwMLAkAgASIBIAJHDQBB5QAhEAyDAwsgACABQQFqIgEgAhCogICAACIQDc0BIAEhAUHWACEQDOkCCwJAIAEiASACRg0AA0ACQCABLQAAIhBBIEYNAAJAAkACQCAQQbh/ag4LAAHPAc8BzwHPAc8BzwHPAc8BAs8BCyABQQFqIQFB0gAhEAztAgsgAUEBaiEBQdMAIRAM7AILIAFBAWohAUHUACEQDOsCCyABQQFqIgEgAkcNAAtB5AAhEAyCAwtB5AAhEAyBAwsDQAJAIAEtAABB8MKAgABqLQAAIhBBAUYNACAQQX5qDgPPAdAB0QHSAQsgAUEBaiIBIAJHDQALQeYAIRAMgAMLAkAgASIBIAJGDQAgAUEBaiEBDAMLQecAIRAM/wILA0ACQCABLQAAQfDEgIAAai0AACIQQQFGDQACQCAQQX5qDgTSAdMB1AEA1QELIAEhAUHXACEQDOcCCyABQQFqIgEgAkcNAAtB6AAhEAz+AgsCQCABIgEgAkcNAEHpACEQDP4CCwJAIAEtAAAiEEF2ag4augHVAdUBvAHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHKAdUB1QEA0wELIAFBAWohAQtBBiEQDOMCCwNAAkAgAS0AAEHwxoCAAGotAABBAUYNACABIQEMngILIAFBAWoiASACRw0AC0HqACEQDPsCCwJAIAEiASACRg0AIAFBAWohAQwDC0HrACEQDPoCCwJAIAEiASACRw0AQewAIRAM+gILIAFBAWohAQwBCwJAIAEiASACRw0AQe0AIRAM+QILIAFBAWohAQtBBCEQDN4CCwJAIAEiFCACRw0AQe4AIRAM9wILIBQhAQJAAkACQCAULQAAQfDIgIAAai0AAEF/ag4H1AHVAdYBAJwCAQLXAQsgFEEBaiEBDAoLIBRBAWohAQzNAQtBACEQIABBADYCHCAAQZuSgIAANgIQIABBBzYCDCAAIBRBAWo2AhQM9gILAkADQAJAIAEtAABB8MiAgABqLQAAIhBBBEYNAAJAAkAgEEF/ag4H0gHTAdQB2QEABAHZAQsgASEBQdoAIRAM4AILIAFBAWohAUHcACEQDN8CCyABQQFqIgEgAkcNAAtB7wAhEAz2AgsgAUEBaiEBDMsBCwJAIAEiFCACRw0AQfAAIRAM9QILIBQtAABBL0cN1AEgFEEBaiEBDAYLAkAgASIUIAJHDQBB8QAhEAz0AgsCQCAULQAAIgFBL0cNACAUQQFqIQFB3QAhEAzbAgsgAUF2aiIEQRZLDdMBQQEgBHRBiYCAAnFFDdMBDMoCCwJAIAEiASACRg0AIAFBAWohAUHeACEQDNoCC0HyACEQDPICCwJAIAEiFCACRw0AQfQAIRAM8gILIBQhAQJAIBQtAABB8MyAgABqLQAAQX9qDgPJApQCANQBC0HhACEQDNgCCwJAIAEiFCACRg0AA0ACQCAULQAAQfDKgIAAai0AACIBQQNGDQACQCABQX9qDgLLAgDVAQsgFCEBQd8AIRAM2gILIBRBAWoiFCACRw0AC0HzACEQDPECC0HzACEQDPACCwJAIAEiASACRg0AIABBj4CAgAA2AgggACABNgIEIAEhAUHgACEQDNcCC0H1ACEQDO8CCwJAIAEiASACRw0AQfYAIRAM7wILIABBj4CAgAA2AgggACABNgIEIAEhAQtBAyEQDNQCCwNAIAEtAABBIEcNwwIgAUEBaiIBIAJHDQALQfcAIRAM7AILAkAgASIBIAJHDQBB+AAhEAzsAgsgAS0AAEEgRw3OASABQQFqIQEM7wELIAAgASIBIAIQrICAgAAiEA3OASABIQEMjgILAkAgASIEIAJHDQBB+gAhEAzqAgsgBC0AAEHMAEcN0QEgBEEBaiEBQRMhEAzPAQsCQCABIgQgAkcNAEH7ACEQDOkCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRADQCAELQAAIAFB8M6AgABqLQAARw3QASABQQVGDc4BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQfsAIRAM6AILAkAgASIEIAJHDQBB/AAhEAzoAgsCQAJAIAQtAABBvX9qDgwA0QHRAdEB0QHRAdEB0QHRAdEB0QEB0QELIARBAWohAUHmACEQDM8CCyAEQQFqIQFB5wAhEAzOAgsCQCABIgQgAkcNAEH9ACEQDOcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDc8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH9ACEQDOcCCyAAQQA2AgAgEEEBaiEBQRAhEAzMAQsCQCABIgQgAkcNAEH+ACEQDOYCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUH2zoCAAGotAABHDc4BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH+ACEQDOYCCyAAQQA2AgAgEEEBaiEBQRYhEAzLAQsCQCABIgQgAkcNAEH/ACEQDOUCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUH8zoCAAGotAABHDc0BIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEH/
ACEQDOUCCyAAQQA2AgAgEEEBaiEBQQUhEAzKAQsCQCABIgQgAkcNAEGAASEQDOQCCyAELQAAQdkARw3LASAEQQFqIQFBCCEQDMkBCwJAIAEiBCACRw0AQYEBIRAM4wILAkACQCAELQAAQbJ/ag4DAMwBAcwBCyAEQQFqIQFB6wAhEAzKAgsgBEEBaiEBQewAIRAMyQILAkAgASIEIAJHDQBBggEhEAziAgsCQAJAIAQtAABBuH9qDggAywHLAcsBywHLAcsBAcsBCyAEQQFqIQFB6gAhEAzJAgsgBEEBaiEBQe0AIRAMyAILAkAgASIEIAJHDQBBgwEhEAzhAgsgAiAEayAAKAIAIgFqIRAgBCABa0ECaiEUAkADQCAELQAAIAFBgM+AgABqLQAARw3JASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBA2AgBBgwEhEAzhAgtBACEQIABBADYCACAUQQFqIQEMxgELAkAgASIEIAJHDQBBhAEhEAzgAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBg8+AgABqLQAARw3IASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhAEhEAzgAgsgAEEANgIAIBBBAWohAUEjIRAMxQELAkAgASIEIAJHDQBBhQEhEAzfAgsCQAJAIAQtAABBtH9qDggAyAHIAcgByAHIAcgBAcgBCyAEQQFqIQFB7wAhEAzGAgsgBEEBaiEBQfAAIRAMxQILAkAgASIEIAJHDQBBhgEhEAzeAgsgBC0AAEHFAEcNxQEgBEEBaiEBDIMCCwJAIAEiBCACRw0AQYcBIRAM3QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQYjPgIAAai0AAEcNxQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYcBIRAM3QILIABBADYCACAQQQFqIQFBLSEQDMIBCwJAIAEiBCACRw0AQYgBIRAM3AILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNxAEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYgBIRAM3AILIABBADYCACAQQQFqIQFBKSEQDMEBCwJAIAEiASACRw0AQYkBIRAM2wILQQEhECABLQAAQd8ARw3AASABQQFqIQEMgQILAkAgASIEIAJHDQBBigEhEAzaAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQA0AgBC0AACABQYzPgIAAai0AAEcNwQEgAUEBRg2vAiABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGKASEQDNkCCwJAIAEiBCACRw0AQYsBIRAM2QILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQY7PgIAAai0AAEcNwQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYsBIRAM2QILIABBADYCACAQQQFqIQFBAiEQDL4BCwJAIAEiBCACRw0AQYwBIRAM2AILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNwAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYwBIRAM2AILIABBADYCACAQQQFqIQFBHyEQDL0BCwJAIAEiBCACRw0AQY0BIRAM1wILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNvwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY0BIRAM1wILIABBADYCACAQQQFqIQFBCSEQDLwBCwJAIAEiBCACRw0AQY4BIRAM1gILAkACQCAELQAAQbd/ag4HAL8BvwG/Ab8BvwEBvwELIARBAWohAUH4ACEQDL0CCyAEQQFqIQFB+QAhEAy8AgsCQCABIgQgAkcNAEGPASEQDNUCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGRz4CAAGotAABHDb0BIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGPASEQDNUCCyAAQQA2AgAgEEEBaiEBQRghEAy6AQsCQCABIgQgAkcNAEGQASEQDNQCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUGXz4CAAGotAABHDbwBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGQASEQDNQCCyAAQQA2AgAgEEEBaiEBQRchEAy5AQsCQCABIgQgAkcNAEGRASEQDNMCCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUGaz4CAAGotAABHDbsBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGRASEQDNMCCyAAQQA2AgAgEEEBaiEBQRUhEAy4AQsCQCABIgQgAkcNAEGSASEQDNICCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGhz4CAAGotAABHDboBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGSASEQDNICCyAAQQA2AgAgEEEBaiEBQR4hEAy3AQsCQCABIgQgAkcNAEGTASEQDNECCyAELQAAQcwARw24ASAEQQFqIQFBCiEQDLYBCwJAIAQgAkcNAEGUASEQDNACCwJAAkAgBC0AAEG/f2oODwC5AbkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AQG5AQsgBEEBaiEBQf4AIRAMtwILIARBAWohAUH/ACEQDLYCCwJAIAQgAkcNAEGVASEQDM8CCwJAAkAgBC0AAEG/f2oOAwC4AQG4AQsgBEEBaiEBQf0AIRAMtgILIARBAWohBEGAASEQDLUCCwJAIAQgAkcNAEGWASEQDM4CCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUGnz4CAAGotAABHDbYBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGWASEQDM4CCyAAQQA2AgAgEEEBaiEBQQshEAyzAQsCQCAEIAJHDQBBlwEhEAzNAgsCQAJAAkACQCAELQAAQVNqDiMAuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AQG4AbgBuAG4AbgBArgBuAG4AQO4AQsgBEEBaiEBQfsAIRAMtgILIARBAWohAUH8ACEQDLUCCyAEQQFqIQRBgQEhEAy0AgsgBEEBaiEEQYIBIRAMswILAkAgBCACRw0AQZgBIRAMzAILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQanPgIAAai0
AAEcNtAEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZgBIRAMzAILIABBADYCACAQQQFqIQFBGSEQDLEBCwJAIAQgAkcNAEGZASEQDMsCCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUGuz4CAAGotAABHDbMBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGZASEQDMsCCyAAQQA2AgAgEEEBaiEBQQYhEAywAQsCQCAEIAJHDQBBmgEhEAzKAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBtM+AgABqLQAARw2yASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmgEhEAzKAgsgAEEANgIAIBBBAWohAUEcIRAMrwELAkAgBCACRw0AQZsBIRAMyQILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbbPgIAAai0AAEcNsQEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZsBIRAMyQILIABBADYCACAQQQFqIQFBJyEQDK4BCwJAIAQgAkcNAEGcASEQDMgCCwJAAkAgBC0AAEGsf2oOAgABsQELIARBAWohBEGGASEQDK8CCyAEQQFqIQRBhwEhEAyuAgsCQCAEIAJHDQBBnQEhEAzHAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBuM+AgABqLQAARw2vASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBnQEhEAzHAgsgAEEANgIAIBBBAWohAUEmIRAMrAELAkAgBCACRw0AQZ4BIRAMxgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQbrPgIAAai0AAEcNrgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ4BIRAMxgILIABBADYCACAQQQFqIQFBAyEQDKsBCwJAIAQgAkcNAEGfASEQDMUCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDa0BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGfASEQDMUCCyAAQQA2AgAgEEEBaiEBQQwhEAyqAQsCQCAEIAJHDQBBoAEhEAzEAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBvM+AgABqLQAARw2sASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBoAEhEAzEAgsgAEEANgIAIBBBAWohAUENIRAMqQELAkAgBCACRw0AQaEBIRAMwwILAkACQCAELQAAQbp/ag4LAKwBrAGsAawBrAGsAawBrAGsAQGsAQsgBEEBaiEEQYsBIRAMqgILIARBAWohBEGMASEQDKkCCwJAIAQgAkcNAEGiASEQDMICCyAELQAAQdAARw2pASAEQQFqIQQM6QELAkAgBCACRw0AQaMBIRAMwQILAkACQCAELQAAQbd/ag4HAaoBqgGqAaoBqgEAqgELIARBAWohBEGOASEQDKgCCyAEQQFqIQFBIiEQDKYBCwJAIAQgAkcNAEGkASEQDMACCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHAz4CAAGotAABHDagBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGkASEQDMACCyAAQQA2AgAgEEEBaiEBQR0hEAylAQsCQCAEIAJHDQBBpQEhEAy/AgsCQAJAIAQtAABBrn9qDgMAqAEBqAELIARBAWohBEGQASEQDKYCCyAEQQFqIQFBBCEQDKQBCwJAIAQgAkcNAEGmASEQDL4CCwJAAkACQAJAAkAgBC0AAEG/f2oOFQCqAaoBqgGqAaoBqgGqAaoBqgGqAQGqAaoBAqoBqgEDqgGqAQSqAQsgBEEBaiEEQYgBIRAMqAILIARBAWohBEGJASEQDKcCCyAEQQFqIQRBigEhEAymAgsgBEEBaiEEQY8BIRAMpQILIARBAWohBEGRASEQDKQCCwJAIAQgAkcNAEGnASEQDL0CCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDaUBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGnASEQDL0CCyAAQQA2AgAgEEEBaiEBQREhEAyiAQsCQCAEIAJHDQBBqAEhEAy8AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBws+AgABqLQAARw2kASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqAEhEAy8AgsgAEEANgIAIBBBAWohAUEsIRAMoQELAkAgBCACRw0AQakBIRAMuwILIAIgBGsgACgCACIBaiEUIAQgAWtBBGohEAJAA0AgBC0AACABQcXPgIAAai0AAEcNowEgAUEERg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQakBIRAMuwILIABBADYCACAQQQFqIQFBKyEQDKABCwJAIAQgAkcNAEGqASEQDLoCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHKz4CAAGotAABHDaIBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGqASEQDLoCCyAAQQA2AgAgEEEBaiEBQRQhEAyfAQsCQCAEIAJHDQBBqwEhEAy5AgsCQAJAAkACQCAELQAAQb5/ag4PAAECpAGkAaQBpAGkAaQBpAGkAaQBpAGkAQOkAQsgBEEBaiEEQZMBIRAMogILIARBAWohBEGUASEQDKECCyAEQQFqIQRBlQEhEAygAgsgBEEBaiEEQZYBIRAMnwILAkAgBCACRw0AQawBIRAMuAILIAQtAABBxQBHDZ8BIARBAWohBAzgAQsCQCAEIAJHDQBBrQEhEAy3AgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBzc+AgABqLQAARw2fASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrQEhEAy3AgsgAEEANgIAIBBBAWohAUEOIRAMnAELAkAgBCACRw0AQa4BIRAMtgILIAQtAABB0ABHDZ0BIARBAWohAUElIRAMmwELAkAgBCACRw0AQa8BIRAMtQILIAIgBGsgACgCACIBaiEUIAQgAWtBCGohEAJAA0AgBC0AACABQdDPgIAAai0AAEcNnQEgAUEIRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQa8BIRAMtQILIABBADYCACAQQQFqIQFBKiEQDJoBCwJAIAQgAkcNAE
GwASEQDLQCCwJAAkAgBC0AAEGrf2oOCwCdAZ0BnQGdAZ0BnQGdAZ0BnQEBnQELIARBAWohBEGaASEQDJsCCyAEQQFqIQRBmwEhEAyaAgsCQCAEIAJHDQBBsQEhEAyzAgsCQAJAIAQtAABBv39qDhQAnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBAZwBCyAEQQFqIQRBmQEhEAyaAgsgBEEBaiEEQZwBIRAMmQILAkAgBCACRw0AQbIBIRAMsgILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQdnPgIAAai0AAEcNmgEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbIBIRAMsgILIABBADYCACAQQQFqIQFBISEQDJcBCwJAIAQgAkcNAEGzASEQDLECCyACIARrIAAoAgAiAWohFCAEIAFrQQZqIRACQANAIAQtAAAgAUHdz4CAAGotAABHDZkBIAFBBkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGzASEQDLECCyAAQQA2AgAgEEEBaiEBQRohEAyWAQsCQCAEIAJHDQBBtAEhEAywAgsCQAJAAkAgBC0AAEG7f2oOEQCaAZoBmgGaAZoBmgGaAZoBmgEBmgGaAZoBmgGaAQKaAQsgBEEBaiEEQZ0BIRAMmAILIARBAWohBEGeASEQDJcCCyAEQQFqIQRBnwEhEAyWAgsCQCAEIAJHDQBBtQEhEAyvAgsgAiAEayAAKAIAIgFqIRQgBCABa0EFaiEQAkADQCAELQAAIAFB5M+AgABqLQAARw2XASABQQVGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtQEhEAyvAgsgAEEANgIAIBBBAWohAUEoIRAMlAELAkAgBCACRw0AQbYBIRAMrgILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQerPgIAAai0AAEcNlgEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbYBIRAMrgILIABBADYCACAQQQFqIQFBByEQDJMBCwJAIAQgAkcNAEG3ASEQDK0CCwJAAkAgBC0AAEG7f2oODgCWAZYBlgGWAZYBlgGWAZYBlgGWAZYBlgEBlgELIARBAWohBEGhASEQDJQCCyAEQQFqIQRBogEhEAyTAgsCQCAEIAJHDQBBuAEhEAysAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB7c+AgABqLQAARw2UASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuAEhEAysAgsgAEEANgIAIBBBAWohAUESIRAMkQELAkAgBCACRw0AQbkBIRAMqwILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfDPgIAAai0AAEcNkwEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbkBIRAMqwILIABBADYCACAQQQFqIQFBICEQDJABCwJAIAQgAkcNAEG6ASEQDKoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUHyz4CAAGotAABHDZIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG6ASEQDKoCCyAAQQA2AgAgEEEBaiEBQQ8hEAyPAQsCQCAEIAJHDQBBuwEhEAypAgsCQAJAIAQtAABBt39qDgcAkgGSAZIBkgGSAQGSAQsgBEEBaiEEQaUBIRAMkAILIARBAWohBEGmASEQDI8CCwJAIAQgAkcNAEG8ASEQDKgCCyACIARrIAAoAgAiAWohFCAEIAFrQQdqIRACQANAIAQtAAAgAUH0z4CAAGotAABHDZABIAFBB0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG8ASEQDKgCCyAAQQA2AgAgEEEBaiEBQRshEAyNAQsCQCAEIAJHDQBBvQEhEAynAgsCQAJAAkAgBC0AAEG+f2oOEgCRAZEBkQGRAZEBkQGRAZEBkQEBkQGRAZEBkQGRAZEBApEBCyAEQQFqIQRBpAEhEAyPAgsgBEEBaiEEQacBIRAMjgILIARBAWohBEGoASEQDI0CCwJAIAQgAkcNAEG+ASEQDKYCCyAELQAAQc4ARw2NASAEQQFqIQQMzwELAkAgBCACRw0AQb8BIRAMpQILAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkAgBC0AAEG/f2oOFQABAgOcAQQFBpwBnAGcAQcICQoLnAEMDQ4PnAELIARBAWohAUHoACEQDJoCCyAEQQFqIQFB6QAhEAyZAgsgBEEBaiEBQe4AIRAMmAILIARBAWohAUHyACEQDJcCCyAEQQFqIQFB8wAhEAyWAgsgBEEBaiEBQfYAIRAMlQILIARBAWohAUH3ACEQDJQCCyAEQQFqIQFB+gAhEAyTAgsgBEEBaiEEQYMBIRAMkgILIARBAWohBEGEASEQDJECCyAEQQFqIQRBhQEhEAyQAgsgBEEBaiEEQZIBIRAMjwILIARBAWohBEGYASEQDI4CCyAEQQFqIQRBoAEhEAyNAgsgBEEBaiEEQaMBIRAMjAILIARBAWohBEGqASEQDIsCCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEGrASEQDIsCC0HAASEQDKMCCyAAIAUgAhCqgICAACIBDYsBIAUhAQxcCwJAIAYgAkYNACAGQQFqIQUMjQELQcIBIRAMoQILA0ACQCAQLQAAQXZqDgSMAQAAjwEACyAQQQFqIhAgAkcNAAtBwwEhEAygAgsCQCAHIAJGDQAgAEGRgICAADYCCCAAIAc2AgQgByEBQQEhEAyHAgtBxAEhEAyfAgsCQCAHIAJHDQBBxQEhEAyfAgsCQAJAIActAABBdmoOBAHOAc4BAM4BCyAHQQFqIQYMjQELIAdBAWohBQyJAQsCQCAHIAJHDQBBxgEhEAyeAgsCQAJAIActAABBdmoOFwGPAY8BAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAQCPAQsgB0EBaiEHC0GwASEQDIQCCwJAIAggAkcNAEHIASEQDJ0CCyAILQAAQSBHDY0BIABBADsBMiAIQQFqIQFBswEhEAyDAgsgASEXAkADQCAXIgcgAkYNASAHLQAAQVBqQf8BcSIQQQpPDcwBAkAgAC8BMiIUQZkzSw0AIAAgFEEKbCIUOwEyIBBB//8DcyAUQf7/A3FJDQAgB0EBaiEXIAAgFCAQaiIQOwEyIBBB//8DcUHoB0kNAQsLQQAhECAAQQA2AhwgAEHBiYCAADYCECAAQQ02AgwgACAHQQFqNgIUDJwCC0HHASEQDJsCCyAAIAggAhCugICAACIQRQ3KASAQQRVHDYwBIABByAE2AhwgACAINgIUIABByZeAgAA2AhAgA
EEVNgIMQQAhEAyaAgsCQCAJIAJHDQBBzAEhEAyaAgtBACEUQQEhF0EBIRZBACEQAkACQAJAAkACQAJAAkACQAJAIAktAABBUGoOCpYBlQEAAQIDBAUGCJcBC0ECIRAMBgtBAyEQDAULQQQhEAwEC0EFIRAMAwtBBiEQDAILQQchEAwBC0EIIRALQQAhF0EAIRZBACEUDI4BC0EJIRBBASEUQQAhF0EAIRYMjQELAkAgCiACRw0AQc4BIRAMmQILIAotAABBLkcNjgEgCkEBaiEJDMoBCyALIAJHDY4BQdABIRAMlwILAkAgCyACRg0AIABBjoCAgAA2AgggACALNgIEQbcBIRAM/gELQdEBIRAMlgILAkAgBCACRw0AQdIBIRAMlgILIAIgBGsgACgCACIQaiEUIAQgEGtBBGohCwNAIAQtAAAgEEH8z4CAAGotAABHDY4BIBBBBEYN6QEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB0gEhEAyVAgsgACAMIAIQrICAgAAiAQ2NASAMIQEMuAELAkAgBCACRw0AQdQBIRAMlAILIAIgBGsgACgCACIQaiEUIAQgEGtBAWohDANAIAQtAAAgEEGB0ICAAGotAABHDY8BIBBBAUYNjgEgEEEBaiEQIARBAWoiBCACRw0ACyAAIBQ2AgBB1AEhEAyTAgsCQCAEIAJHDQBB1gEhEAyTAgsgAiAEayAAKAIAIhBqIRQgBCAQa0ECaiELA0AgBC0AACAQQYPQgIAAai0AAEcNjgEgEEECRg2QASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHWASEQDJICCwJAIAQgAkcNAEHXASEQDJICCwJAAkAgBC0AAEG7f2oOEACPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAY8BCyAEQQFqIQRBuwEhEAz5AQsgBEEBaiEEQbwBIRAM+AELAkAgBCACRw0AQdgBIRAMkQILIAQtAABByABHDYwBIARBAWohBAzEAQsCQCAEIAJGDQAgAEGQgICAADYCCCAAIAQ2AgRBvgEhEAz3AQtB2QEhEAyPAgsCQCAEIAJHDQBB2gEhEAyPAgsgBC0AAEHIAEYNwwEgAEEBOgAoDLkBCyAAQQI6AC8gACAEIAIQpoCAgAAiEA2NAUHCASEQDPQBCyAALQAoQX9qDgK3AbkBuAELA0ACQCAELQAAQXZqDgQAjgGOAQCOAQsgBEEBaiIEIAJHDQALQd0BIRAMiwILIABBADoALyAALQAtQQRxRQ2EAgsgAEEAOgAvIABBAToANCABIQEMjAELIBBBFUYN2gEgAEEANgIcIAAgATYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMiAILAkAgACAQIAIQtICAgAAiBA0AIBAhAQyBAgsCQCAEQRVHDQAgAEEDNgIcIAAgEDYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMiAILIABBADYCHCAAIBA2AhQgAEGnjoCAADYCECAAQRI2AgxBACEQDIcCCyAQQRVGDdYBIABBADYCHCAAIAE2AhQgAEHajYCAADYCECAAQRQ2AgxBACEQDIYCCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNjQEgAEEHNgIcIAAgEDYCFCAAIBQ2AgxBACEQDIUCCyAAIAAvATBBgAFyOwEwIAEhAQtBKiEQDOoBCyAQQRVGDdEBIABBADYCHCAAIAE2AhQgAEGDjICAADYCECAAQRM2AgxBACEQDIICCyAQQRVGDc8BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDIECCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyNAQsgAEEMNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDIACCyAQQRVGDcwBIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDP8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyMAQsgAEENNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDP4BCyAQQRVGDckBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDP0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyLAQsgAEEONgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPwBCyAAQQA2AhwgACABNgIUIABBwJWAgAA2AhAgAEECNgIMQQAhEAz7AQsgEEEVRg3FASAAQQA2AhwgACABNgIUIABBxoyAgAA2AhAgAEEjNgIMQQAhEAz6AQsgAEEQNgIcIAAgATYCFCAAIBA2AgxBACEQDPkBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQzxAQsgAEERNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPgBCyAQQRVGDcEBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPcBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQuYCAgAAiEA0AIAFBAWohAQyIAQsgAEETNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPYBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQuYCAgAAiBA0AIAFBAWohAQztAQsgAEEUNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPUBCyAQQRVGDb0BIABBADYCHCAAIAE2AhQgAEGaj4CAADYCECAAQSI2AgxBACEQDPQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQt4CAgAAiEA0AIAFBAWohAQyGAQsgAEEWNgIcIAAgEDYCDCAAIAFBAWo2AhRBACEQDPMBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQt4CAgAAiBA0AIAFBAWohAQzpAQsgAEEXNgIcIAAgBDYCDCAAIAFBAWo2AhRBACEQDPIBCyAAQQA2AhwgACABNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzxAQtCASERCyAQQQFqIQECQCAAKQMgIhJC//////////8PVg0AIAAgEkIEhiARhDcDICABIQEMhAELIABBADYCHCAAIAE2AhQgAEGtiYCAADYCECAAQQw2AgxBACEQDO8BCyAAQQA2AhwgACAQNgIUIABBzZOAgAA2AhAgAEEMNgIMQQAhEAzuAQsgACgCBCEXIABBADYCBCAQIBGnaiIWIQEgACAXIBAgFiAUGyIQELWAgIAAIhRFDXMgAEEFNgIcIAAgEDYCFCAAIBQ2AgxBACEQDO0BCyAAQQA2AhwgACAQNgIUIABBqpyAgAA2AhAgAEEPNgIMQQAhEAzsAQsgACAQIAIQtICAgAAiAQ0BIBAhAQtBDiEQDNEBCwJAIAFBFUcNACAAQQI2AhwgACAQNgIUIABBsJiAgAA2
AhAgAEEVNgIMQQAhEAzqAQsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAM6QELIAFBAWohEAJAIAAvATAiAUGAAXFFDQACQCAAIBAgAhC7gICAACIBDQAgECEBDHALIAFBFUcNugEgAEEFNgIcIAAgEDYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAM6QELAkAgAUGgBHFBoARHDQAgAC0ALUECcQ0AIABBADYCHCAAIBA2AhQgAEGWk4CAADYCECAAQQQ2AgxBACEQDOkBCyAAIBAgAhC9gICAABogECEBAkACQAJAAkACQCAAIBAgAhCzgICAAA4WAgEABAQEBAQEBAQEBAQEBAQEBAQEAwQLIABBAToALgsgACAALwEwQcAAcjsBMCAQIQELQSYhEAzRAQsgAEEjNgIcIAAgEDYCFCAAQaWWgIAANgIQIABBFTYCDEEAIRAM6QELIABBADYCHCAAIBA2AhQgAEHVi4CAADYCECAAQRE2AgxBACEQDOgBCyAALQAtQQFxRQ0BQcMBIRAMzgELAkAgDSACRg0AA0ACQCANLQAAQSBGDQAgDSEBDMQBCyANQQFqIg0gAkcNAAtBJSEQDOcBC0ElIRAM5gELIAAoAgQhBCAAQQA2AgQgACAEIA0Qr4CAgAAiBEUNrQEgAEEmNgIcIAAgBDYCDCAAIA1BAWo2AhRBACEQDOUBCyAQQRVGDasBIABBADYCHCAAIAE2AhQgAEH9jYCAADYCECAAQR02AgxBACEQDOQBCyAAQSc2AhwgACABNgIUIAAgEDYCDEEAIRAM4wELIBAhAUEBIRQCQAJAAkACQAJAAkACQCAALQAsQX5qDgcGBQUDAQIABQsgACAALwEwQQhyOwEwDAMLQQIhFAwBC0EEIRQLIABBAToALCAAIAAvATAgFHI7ATALIBAhAQtBKyEQDMoBCyAAQQA2AhwgACAQNgIUIABBq5KAgAA2AhAgAEELNgIMQQAhEAziAQsgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDEEAIRAM4QELIABBADoALCAQIQEMvQELIBAhAUEBIRQCQAJAAkACQAJAIAAtACxBe2oOBAMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0EpIRAMxQELIABBADYCHCAAIAE2AhQgAEHwlICAADYCECAAQQM2AgxBACEQDN0BCwJAIA4tAABBDUcNACAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA5BAWohAQx1CyAAQSw2AhwgACABNgIMIAAgDkEBajYCFEEAIRAM3QELIAAtAC1BAXFFDQFBxAEhEAzDAQsCQCAOIAJHDQBBLSEQDNwBCwJAAkADQAJAIA4tAABBdmoOBAIAAAMACyAOQQFqIg4gAkcNAAtBLSEQDN0BCyAAKAIEIQEgAEEANgIEAkAgACABIA4QsYCAgAAiAQ0AIA4hAQx0CyAAQSw2AhwgACAONgIUIAAgATYCDEEAIRAM3AELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHMLIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzbAQsgACgCBCEEIABBADYCBCAAIAQgDhCxgICAACIEDaABIA4hAQzOAQsgEEEsRw0BIAFBAWohEEEBIQECQAJAAkACQAJAIAAtACxBe2oOBAMBAgQACyAQIQEMBAtBAiEBDAELQQQhAQsgAEEBOgAsIAAgAC8BMCABcjsBMCAQIQEMAQsgACAALwEwQQhyOwEwIBAhAQtBOSEQDL8BCyAAQQA6ACwgASEBC0E0IRAMvQELIAAgAC8BMEEgcjsBMCABIQEMAgsgACgCBCEEIABBADYCBAJAIAAgBCABELGAgIAAIgQNACABIQEMxwELIABBNzYCHCAAIAE2AhQgACAENgIMQQAhEAzUAQsgAEEIOgAsIAEhAQtBMCEQDLkBCwJAIAAtAChBAUYNACABIQEMBAsgAC0ALUEIcUUNkwEgASEBDAMLIAAtADBBIHENlAFBxQEhEAy3AQsCQCAPIAJGDQACQANAAkAgDy0AAEFQaiIBQf8BcUEKSQ0AIA8hAUE1IRAMugELIAApAyAiEUKZs+bMmbPmzBlWDQEgACARQgp+IhE3AyAgESABrUL/AYMiEkJ/hVYNASAAIBEgEnw3AyAgD0EBaiIPIAJHDQALQTkhEAzRAQsgACgCBCECIABBADYCBCAAIAIgD0EBaiIEELGAgIAAIgINlQEgBCEBDMMBC0E5IRAMzwELAkAgAC8BMCIBQQhxRQ0AIAAtAChBAUcNACAALQAtQQhxRQ2QAQsgACABQff7A3FBgARyOwEwIA8hAQtBNyEQDLQBCyAAIAAvATBBEHI7ATAMqwELIBBBFUYNiwEgAEEANgIcIAAgATYCFCAAQfCOgIAANgIQIABBHDYCDEEAIRAMywELIABBwwA2AhwgACABNgIMIAAgDUEBajYCFEEAIRAMygELAkAgAS0AAEE6Rw0AIAAoAgQhECAAQQA2AgQCQCAAIBAgARCvgICAACIQDQAgAUEBaiEBDGMLIABBwwA2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMygELIABBADYCHCAAIAE2AhQgAEGxkYCAADYCECAAQQo2AgxBACEQDMkBCyAAQQA2AhwgACABNgIUIABBoJmAgAA2AhAgAEEeNgIMQQAhEAzIAQsgAEEANgIACyAAQYASOwEqIAAgF0EBaiIBIAIQqICAgAAiEA0BIAEhAQtBxwAhEAysAQsgEEEVRw2DASAAQdEANgIcIAAgATYCFCAAQeOXgIAANgIQIABBFTYCDEEAIRAMxAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDF4LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMwwELIABBADYCHCAAIBQ2AhQgAEHBqICAADYCECAAQQc2AgwgAEEANgIAQQAhEAzCAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAzBAQtBACEQIABBADYCHCAAIAE2AhQgAEGAkYCAADYCECAAQQk2AgwMwAELIBBBFUYNfSAAQQA2AhwgACABNgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAy/AQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgAUEBaiEBAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBAJAIAAgECABEK2AgIAAIhANACABIQEMXAsgAEHYADYCHCAAIAE2AhQgACAQNgIMQQAhEAy+AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIA
AIgQNACABIQEMrQELIABB2QA2AhwgACABNgIUIAAgBDYCDEEAIRAMvQELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKsBCyAAQdoANgIcIAAgATYCFCAAIAQ2AgxBACEQDLwBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQypAQsgAEHcADYCHCAAIAE2AhQgACAENgIMQQAhEAy7AQsCQCABLQAAQVBqIhBB/wFxQQpPDQAgACAQOgAqIAFBAWohAUHPACEQDKIBCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQynAQsgAEHeADYCHCAAIAE2AhQgACAENgIMQQAhEAy6AQsgAEEANgIAIBdBAWohAQJAIAAtAClBI08NACABIQEMWQsgAEEANgIcIAAgATYCFCAAQdOJgIAANgIQIABBCDYCDEEAIRAMuQELIABBADYCAAtBACEQIABBADYCHCAAIAE2AhQgAEGQs4CAADYCECAAQQg2AgwMtwELIABBADYCACAXQQFqIQECQCAALQApQSFHDQAgASEBDFYLIABBADYCHCAAIAE2AhQgAEGbioCAADYCECAAQQg2AgxBACEQDLYBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKSIQQV1qQQtPDQAgASEBDFULAkAgEEEGSw0AQQEgEHRBygBxRQ0AIAEhAQxVC0EAIRAgAEEANgIcIAAgATYCFCAAQfeJgIAANgIQIABBCDYCDAy1AQsgEEEVRg1xIABBADYCHCAAIAE2AhQgAEG5jYCAADYCECAAQRo2AgxBACEQDLQBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxUCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLMBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDLIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDLEBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxRCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDLABCyAAQQA2AhwgACABNgIUIABBxoqAgAA2AhAgAEEHNgIMQQAhEAyvAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAyuAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMSQsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAytAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMTQsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAysAQsgAEEANgIcIAAgATYCFCAAQdyIgIAANgIQIABBBzYCDEEAIRAMqwELIBBBP0cNASABQQFqIQELQQUhEAyQAQtBACEQIABBADYCHCAAIAE2AhQgAEH9koCAADYCECAAQQc2AgwMqAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMpwELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEILIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMpgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDEYLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMpQELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0gA2AhwgACAUNgIUIAAgATYCDEEAIRAMpAELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDD8LIABB0wA2AhwgACAUNgIUIAAgATYCDEEAIRAMowELIAAoAgQhASAAQQA2AgQCQCAAIAEgFBCngICAACIBDQAgFCEBDEMLIABB5QA2AhwgACAUNgIUIAAgATYCDEEAIRAMogELIABBADYCHCAAIBQ2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKEBCyAAQQA2AhwgACABNgIUIABBw4+AgAA2AhAgAEEHNgIMQQAhEAygAQtBACEQIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgwMnwELIABBADYCHCAAIBQ2AhQgAEGMnICAADYCECAAQQc2AgxBACEQDJ4BCyAAQQA2AhwgACAUNgIUIABB/pGAgAA2AhAgAEEHNgIMQQAhEAydAQsgAEEANgIcIAAgATYCFCAAQY6bgIAANgIQIABBBjYCDEEAIRAMnAELIBBBFUYNVyAAQQA2AhwgACABNgIUIABBzI6AgAA2AhAgAEEgNgIMQQAhEAybAQsgAEEANgIAIBBBAWohAUEkIRALIAAgEDoAKSAAKAIEIRAgAEEANgIEIAAgECABEKuAgIAAIhANVCABIQEMPgsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQfGbgIAANgIQIABBBjYCDAyXAQsgAUEVRg1QIABBADYCHCAAIAU2AhQgAEHwjICAADYCECAAQRs2AgxBACEQDJYBCyAAKAIEIQUgAEEANgIEIAAgBSAQEKmAgIAAIgUNASAQQQFqIQULQa0BIRAMewsgAEHBATYCHCAAIAU2AgwgACAQQQFqNgIUQQAhEAyTAQsgACgCBCEGIABBADYCBCAAIAYgEBCpgICAACIGDQEgEEEBaiEGC0GuASEQDHgLIABBwgE2AhwgACAGNgIMIAAgEEEBajYCFEEAIRAMkAELIABBADYCHCAAIAc2AhQgAEGXi4CAADYCECAAQQ02AgxBACEQDI8BCyAAQQA2AhwgACAINgIUIABB45CAgAA2AhAgAEEJNgIMQQAhEAyOAQsgAEEANgIcIAAgCDYCFCAAQZSNgIAANgIQIABBITYCDEEAIRAMjQELQQEhFkEAIRdBACEUQQEhEAsgACAQOgArIAlBAWohCAJAAkAgAC0ALUEQcQ0AAkACQAJAIAAtACoOAwEAAgQLIBZFDQMMAgsgFA0BDAILIBdFDQELIAAoAgQhECAAQQA2AgQgACAQIAgQrYCAgAAiEEUNPSAAQckBNgIcIAAgCDYCFCAAIBA2AgxBACEQDIwBCyAAKAIEIQQgAEEANgIEIAAgBCAIEK2AgIAAIgRFDXYgAEHKATYCHCAAIAg2AhQgACAENgIMQQAhEAyLAQsgACgCBCEEIABBADYCBCAAIAQgCRCtgICAACIERQ10IABBywE2AhwgACAJNgIUIAAgBDYCDE
EAIRAMigELIAAoAgQhBCAAQQA2AgQgACAEIAoQrYCAgAAiBEUNciAAQc0BNgIcIAAgCjYCFCAAIAQ2AgxBACEQDIkBCwJAIAstAABBUGoiEEH/AXFBCk8NACAAIBA6ACogC0EBaiEKQbYBIRAMcAsgACgCBCEEIABBADYCBCAAIAQgCxCtgICAACIERQ1wIABBzwE2AhwgACALNgIUIAAgBDYCDEEAIRAMiAELIABBADYCHCAAIAQ2AhQgAEGQs4CAADYCECAAQQg2AgwgAEEANgIAQQAhEAyHAQsgAUEVRg0/IABBADYCHCAAIAw2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDIYBCyAAQYEEOwEoIAAoAgQhECAAQgA3AwAgACAQIAxBAWoiDBCrgICAACIQRQ04IABB0wE2AhwgACAMNgIUIAAgEDYCDEEAIRAMhQELIABBADYCAAtBACEQIABBADYCHCAAIAQ2AhQgAEHYm4CAADYCECAAQQg2AgwMgwELIAAoAgQhECAAQgA3AwAgACAQIAtBAWoiCxCrgICAACIQDQFBxgEhEAxpCyAAQQI6ACgMVQsgAEHVATYCHCAAIAs2AhQgACAQNgIMQQAhEAyAAQsgEEEVRg03IABBADYCHCAAIAQ2AhQgAEGkjICAADYCECAAQRA2AgxBACEQDH8LIAAtADRBAUcNNCAAIAQgAhC8gICAACIQRQ00IBBBFUcNNSAAQdwBNgIcIAAgBDYCFCAAQdWWgIAANgIQIABBFTYCDEEAIRAMfgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQMfQtBACEQDGMLQQIhEAxiC0ENIRAMYQtBDyEQDGALQSUhEAxfC0ETIRAMXgtBFSEQDF0LQRYhEAxcC0EXIRAMWwtBGCEQDFoLQRkhEAxZC0EaIRAMWAtBGyEQDFcLQRwhEAxWC0EdIRAMVQtBHyEQDFQLQSEhEAxTC0EjIRAMUgtBxgAhEAxRC0EuIRAMUAtBLyEQDE8LQTshEAxOC0E9IRAMTQtByAAhEAxMC0HJACEQDEsLQcsAIRAMSgtBzAAhEAxJC0HOACEQDEgLQdEAIRAMRwtB1QAhEAxGC0HYACEQDEULQdkAIRAMRAtB2wAhEAxDC0HkACEQDEILQeUAIRAMQQtB8QAhEAxAC0H0ACEQDD8LQY0BIRAMPgtBlwEhEAw9C0GpASEQDDwLQawBIRAMOwtBwAEhEAw6C0G5ASEQDDkLQa8BIRAMOAtBsQEhEAw3C0GyASEQDDYLQbQBIRAMNQtBtQEhEAw0C0G6ASEQDDMLQb0BIRAMMgtBvwEhEAwxC0HBASEQDDALIABBADYCHCAAIAQ2AhQgAEHpi4CAADYCECAAQR82AgxBACEQDEgLIABB2wE2AhwgACAENgIUIABB+paAgAA2AhAgAEEVNgIMQQAhEAxHCyAAQfgANgIcIAAgDDYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMRgsgAEHRADYCHCAAIAU2AhQgAEGwl4CAADYCECAAQRU2AgxBACEQDEULIABB+QA2AhwgACABNgIUIAAgEDYCDEEAIRAMRAsgAEH4ADYCHCAAIAE2AhQgAEHKmICAADYCECAAQRU2AgxBACEQDEMLIABB5AA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAxCCyAAQdcANgIcIAAgATYCFCAAQcmXgIAANgIQIABBFTYCDEEAIRAMQQsgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMQAsgAEHCADYCHCAAIAE2AhQgAEHjmICAADYCECAAQRU2AgxBACEQDD8LIABBADYCBCAAIA8gDxCxgICAACIERQ0BIABBOjYCHCAAIAQ2AgwgACAPQQFqNgIUQQAhEAw+CyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBEUNACAAQTs2AhwgACAENgIMIAAgAUEBajYCFEEAIRAMPgsgAUEBaiEBDC0LIA9BAWohAQwtCyAAQQA2AhwgACAPNgIUIABB5JKAgAA2AhAgAEEENgIMQQAhEAw7CyAAQTY2AhwgACAENgIUIAAgAjYCDEEAIRAMOgsgAEEuNgIcIAAgDjYCFCAAIAQ2AgxBACEQDDkLIABB0AA2AhwgACABNgIUIABBkZiAgAA2AhAgAEEVNgIMQQAhEAw4CyANQQFqIQEMLAsgAEEVNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMNgsgAEEbNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNQsgAEEPNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMNAsgAEELNgIcIAAgATYCFCAAQZGXgIAANgIQIABBFTYCDEEAIRAMMwsgAEEaNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMgsgAEELNgIcIAAgATYCFCAAQYKZgIAANgIQIABBFTYCDEEAIRAMMQsgAEEKNgIcIAAgATYCFCAAQeSWgIAANgIQIABBFTYCDEEAIRAMMAsgAEEeNgIcIAAgATYCFCAAQfmXgIAANgIQIABBFTYCDEEAIRAMLwsgAEEANgIcIAAgEDYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMLgsgAEEENgIcIAAgATYCFCAAQbCYgIAANgIQIABBFTYCDEEAIRAMLQsgAEEANgIAIAtBAWohCwtBuAEhEAwSCyAAQQA2AgAgEEEBaiEBQfUAIRAMEQsgASEBAkAgAC0AKUEFRw0AQeMAIRAMEQtB4gAhEAwQC0EAIRAgAEEANgIcIABB5JGAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAwoCyAAQQA2AgAgF0EBaiEBQcAAIRAMDgtBASEBCyAAIAE6ACwgAEEANgIAIBdBAWohAQtBKCEQDAsLIAEhAQtBOCEQDAkLAkAgASIPIAJGDQADQAJAIA8tAABBgL6AgABqLQAAIgFBAUYNACABQQJHDQMgD0EBaiEBDAQLIA9BAWoiDyACRw0AC0E+IRAMIgtBPiEQDCELIABBADoALCAPIQEMAQtBCyEQDAYLQTohEAwFCyABQQFqIQFBLSEQDAQLIAAgAToALCAAQQA2AgAgFkEBaiEBQQwhEAwDCyAAQQA2AgAgF0EBaiEBQQohEAwCCyAAQQA2AgALIABBADoALCANIQFBCSEQDAALC0EAIRAgAEEANgIcIAAgCzYCFCAAQc2QgIAANgIQIABBCTYCDAwXC0EAIRAgAEEANgIcIAAgCjYCFCAAQemKgIAANgIQIABBCTYCDAwWC0EAIRAgAEEANgIcIAAgCTYCFCAAQbeQgIAANgIQIABBCTYCDAwVC0EAIRAgAEEANgIcIAAgCDYCFCAAQZyRgIAANgIQIABBCTYCDAwUC0EAIRAgAEEANgIcIAAgATYCFCAAQc2QgIAANgIQIABBCTYCD
AwTC0EAIRAgAEEANgIcIAAgATYCFCAAQemKgIAANgIQIABBCTYCDAwSC0EAIRAgAEEANgIcIAAgATYCFCAAQbeQgIAANgIQIABBCTYCDAwRC0EAIRAgAEEANgIcIAAgATYCFCAAQZyRgIAANgIQIABBCTYCDAwQC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwPC0EAIRAgAEEANgIcIAAgATYCFCAAQZeVgIAANgIQIABBDzYCDAwOC0EAIRAgAEEANgIcIAAgATYCFCAAQcCSgIAANgIQIABBCzYCDAwNC0EAIRAgAEEANgIcIAAgATYCFCAAQZWJgIAANgIQIABBCzYCDAwMC0EAIRAgAEEANgIcIAAgATYCFCAAQeGPgIAANgIQIABBCjYCDAwLC0EAIRAgAEEANgIcIAAgATYCFCAAQfuPgIAANgIQIABBCjYCDAwKC0EAIRAgAEEANgIcIAAgATYCFCAAQfGZgIAANgIQIABBAjYCDAwJC0EAIRAgAEEANgIcIAAgATYCFCAAQcSUgIAANgIQIABBAjYCDAwIC0EAIRAgAEEANgIcIAAgATYCFCAAQfKVgIAANgIQIABBAjYCDAwHCyAAQQI2AhwgACABNgIUIABBnJqAgAA2AhAgAEEWNgIMQQAhEAwGC0EBIRAMBQtB1AAhECABIgQgAkYNBCADQQhqIAAgBCACQdjCgIAAQQoQxYCAgAAgAygCDCEEIAMoAggOAwEEAgALEMqAgIAAAAsgAEEANgIcIABBtZqAgAA2AhAgAEEXNgIMIAAgBEEBajYCFEEAIRAMAgsgAEEANgIcIAAgBDYCFCAAQcqagIAANgIQIABBCTYCDEEAIRAMAQsCQCABIgQgAkcNAEEiIRAMAQsgAEGJgICAADYCCCAAIAQ2AgRBISEQCyADQRBqJICAgIAAIBALrwEBAn8gASgCACEGAkACQCACIANGDQAgBCAGaiEEIAYgA2ogAmshByACIAZBf3MgBWoiBmohBQNAAkAgAi0AACAELQAARg0AQQIhBAwDCwJAIAYNAEEAIQQgBSECDAMLIAZBf2ohBiAEQQFqIQQgAkEBaiICIANHDQALIAchBiADIQILIABBATYCACABIAY2AgAgACACNgIEDwsgAUEANgIAIAAgBDYCACAAIAI2AgQLCgAgABDHgICAAAvyNgELfyOAgICAAEEQayIBJICAgIAAAkBBACgCoNCAgAANAEEAEMuAgIAAQYDUhIAAayICQdkASQ0AQQAhAwJAQQAoAuDTgIAAIgQNAEEAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEIakFwcUHYqtWqBXMiBDYC4NOAgABBAEEANgL004CAAEEAQQA2AsTTgIAAC0EAIAI2AszTgIAAQQBBgNSEgAA2AsjTgIAAQQBBgNSEgAA2ApjQgIAAQQAgBDYCrNCAgABBAEF/NgKo0ICAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALQYDUhIAAQXhBgNSEgABrQQ9xQQBBgNSEgABBCGpBD3EbIgNqIgRBBGogAkFIaiIFIANrIgNBAXI2AgBBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAQYDUhIAAIAVqQTg2AgQLAkACQAJAAkACQAJAAkACQAJAAkACQAJAIABB7AFLDQACQEEAKAKI0ICAACIGQRAgAEETakFwcSAAQQtJGyICQQN2IgR2IgNBA3FFDQACQAJAIANBAXEgBHJBAXMiBUEDdCIEQbDQgIAAaiIDIARBuNCAgABqKAIAIgQoAggiAkcNAEEAIAZBfiAFd3E2AojQgIAADAELIAMgAjYCCCACIAM2AgwLIARBCGohAyAEIAVBA3QiBUEDcjYCBCAEIAVqIgQgBCgCBEEBcjYCBAwMCyACQQAoApDQgIAAIgdNDQECQCADRQ0AAkACQCADIAR0QQIgBHQiA0EAIANrcnEiA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqIgRBA3QiA0Gw0ICAAGoiBSADQbjQgIAAaigCACIDKAIIIgBHDQBBACAGQX4gBHdxIgY2AojQgIAADAELIAUgADYCCCAAIAU2AgwLIAMgAkEDcjYCBCADIARBA3QiBGogBCACayIFNgIAIAMgAmoiACAFQQFyNgIEAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQQCQAJAIAZBASAHQQN2dCIIcQ0AQQAgBiAIcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCAENgIMIAIgBDYCCCAEIAI2AgwgBCAINgIICyADQQhqIQNBACAANgKc0ICAAEEAIAU2ApDQgIAADAwLQQAoAozQgIAAIglFDQEgCUEAIAlrcUF/aiIDIANBDHZBEHEiA3YiBEEFdkEIcSIFIANyIAQgBXYiA0ECdkEEcSIEciADIAR2IgNBAXZBAnEiBHIgAyAEdiIDQQF2QQFxIgRyIAMgBHZqQQJ0QbjSgIAAaigCACIAKAIEQXhxIAJrIQQgACEFAkADQAJAIAUoAhAiAw0AIAVBFGooAgAiA0UNAgsgAygCBEF4cSACayIFIAQgBSAESSIFGyEEIAMgACAFGyEAIAMhBQwACwsgACgCGCEKAkAgACgCDCIIIABGDQAgACgCCCIDQQAoApjQgIAASRogCCADNgIIIAMgCDYCDAwLCwJAIABBFGoiBSgCACIDDQAgACgCECIDRQ0DIABBEGohBQsDQCAFIQsgAyIIQRRqIgUoAgAiAw0AIAhBEGohBSAIKAIQIgMNAAsgC0EANgIADAoLQX8hAiAAQb9/Sw0AIABBE2oiA0FwcSECQQAoAozQgIAAIgdFDQBBACELAkAgAkGAAkkNAEEfIQsgAkH///8HSw0AIANBCHYiAyADQYD+P2pBEHZBCHEiA3QiBCAEQYDgH2pBEHZBBHEiBHQiBSAFQYCAD2pBEHZBAnEiBXRBD3YgAyAEciAFcmsiA0EBdCACIANBFWp2QQFxckEcaiELC0EAIAJrIQQCQAJAAkACQCALQQJ0QbjSgIAAaigCACIFDQBBACEDQQAhCAwBC0EAIQMgAkEAQRkgC0EBdmsgC0EfRht0IQBBACEIA0ACQCAFKAIEQXhxIAJrIgYgBE8NACAGIQQgBSEIIAYNAEEAIQQgBSEIIAUhAwwDCyADIAVBFGooAgAiBiAGIAUgAEEddkEEcWpBEGooAgAiBUYbIAMgBhshAyAAQQF0IQAg
BQ0ACwsCQCADIAhyDQBBACEIQQIgC3QiA0EAIANrciAHcSIDRQ0DIANBACADa3FBf2oiAyADQQx2QRBxIgN2IgVBBXZBCHEiACADciAFIAB2IgNBAnZBBHEiBXIgAyAFdiIDQQF2QQJxIgVyIAMgBXYiA0EBdkEBcSIFciADIAV2akECdEG40oCAAGooAgAhAwsgA0UNAQsDQCADKAIEQXhxIAJrIgYgBEkhAAJAIAMoAhAiBQ0AIANBFGooAgAhBQsgBiAEIAAbIQQgAyAIIAAbIQggBSEDIAUNAAsLIAhFDQAgBEEAKAKQ0ICAACACa08NACAIKAIYIQsCQCAIKAIMIgAgCEYNACAIKAIIIgNBACgCmNCAgABJGiAAIAM2AgggAyAANgIMDAkLAkAgCEEUaiIFKAIAIgMNACAIKAIQIgNFDQMgCEEQaiEFCwNAIAUhBiADIgBBFGoiBSgCACIDDQAgAEEQaiEFIAAoAhAiAw0ACyAGQQA2AgAMCAsCQEEAKAKQ0ICAACIDIAJJDQBBACgCnNCAgAAhBAJAAkAgAyACayIFQRBJDQAgBCACaiIAIAVBAXI2AgRBACAFNgKQ0ICAAEEAIAA2ApzQgIAAIAQgA2ogBTYCACAEIAJBA3I2AgQMAQsgBCADQQNyNgIEIAQgA2oiAyADKAIEQQFyNgIEQQBBADYCnNCAgABBAEEANgKQ0ICAAAsgBEEIaiEDDAoLAkBBACgClNCAgAAiACACTQ0AQQAoAqDQgIAAIgMgAmoiBCAAIAJrIgVBAXI2AgRBACAFNgKU0ICAAEEAIAQ2AqDQgIAAIAMgAkEDcjYCBCADQQhqIQMMCgsCQAJAQQAoAuDTgIAARQ0AQQAoAujTgIAAIQQMAQtBAEJ/NwLs04CAAEEAQoCAhICAgMAANwLk04CAAEEAIAFBDGpBcHFB2KrVqgVzNgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgABBgIAEIQQLQQAhAwJAIAQgAkHHAGoiB2oiBkEAIARrIgtxIgggAksNAEEAQTA2AvjTgIAADAoLAkBBACgCwNOAgAAiA0UNAAJAQQAoArjTgIAAIgQgCGoiBSAETQ0AIAUgA00NAQtBACEDQQBBMDYC+NOAgAAMCgtBAC0AxNOAgABBBHENBAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQAJAIAMoAgAiBSAESw0AIAUgAygCBGogBEsNAwsgAygCCCIDDQALC0EAEMuAgIAAIgBBf0YNBSAIIQYCQEEAKALk04CAACIDQX9qIgQgAHFFDQAgCCAAayAEIABqQQAgA2txaiEGCyAGIAJNDQUgBkH+////B0sNBQJAQQAoAsDTgIAAIgNFDQBBACgCuNOAgAAiBCAGaiIFIARNDQYgBSADSw0GCyAGEMuAgIAAIgMgAEcNAQwHCyAGIABrIAtxIgZB/v///wdLDQQgBhDLgICAACIAIAMoAgAgAygCBGpGDQMgACEDCwJAIANBf0YNACACQcgAaiAGTQ0AAkAgByAGa0EAKALo04CAACIEakEAIARrcSIEQf7///8HTQ0AIAMhAAwHCwJAIAQQy4CAgABBf0YNACAEIAZqIQYgAyEADAcLQQAgBmsQy4CAgAAaDAQLIAMhACADQX9HDQUMAwtBACEIDAcLQQAhAAwFCyAAQX9HDQILQQBBACgCxNOAgABBBHI2AsTTgIAACyAIQf7///8HSw0BIAgQy4CAgAAhAEEAEMuAgIAAIQMgAEF/Rg0BIANBf0YNASAAIANPDQEgAyAAayIGIAJBOGpNDQELQQBBACgCuNOAgAAgBmoiAzYCuNOAgAACQCADQQAoArzTgIAATQ0AQQAgAzYCvNOAgAALAkACQAJAAkBBACgCoNCAgAAiBEUNAEHI04CAACEDA0AgACADKAIAIgUgAygCBCIIakYNAiADKAIIIgMNAAwDCwsCQAJAQQAoApjQgIAAIgNFDQAgACADTw0BC0EAIAA2ApjQgIAAC0EAIQNBACAGNgLM04CAAEEAIAA2AsjTgIAAQQBBfzYCqNCAgABBAEEAKALg04CAADYCrNCAgABBAEEANgLU04CAAANAIANBxNCAgABqIANBuNCAgABqIgQ2AgAgBCADQbDQgIAAaiIFNgIAIANBvNCAgABqIAU2AgAgA0HM0ICAAGogA0HA0ICAAGoiBTYCACAFIAQ2AgAgA0HU0ICAAGogA0HI0ICAAGoiBDYCACAEIAU2AgAgA0HQ0ICAAGogBDYCACADQSBqIgNBgAJHDQALIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgQgBkFIaiIFIANrIgNBAXI2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAQ2AqDQgIAAIAAgBWpBODYCBAwCCyADLQAMQQhxDQAgBCAFSQ0AIAQgAE8NACAEQXggBGtBD3FBACAEQQhqQQ9xGyIFaiIAQQAoApTQgIAAIAZqIgsgBWsiBUEBcjYCBCADIAggBmo2AgRBAEEAKALw04CAADYCpNCAgABBACAFNgKU0ICAAEEAIAA2AqDQgIAAIAQgC2pBODYCBAwBCwJAIABBACgCmNCAgAAiCE8NAEEAIAA2ApjQgIAAIAAhCAsgACAGaiEFQcjTgIAAIQMCQAJAAkACQAJAAkACQANAIAMoAgAgBUYNASADKAIIIgMNAAwCCwsgAy0ADEEIcUUNAQtByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiIFIARLDQMLIAMoAgghAwwACwsgAyAANgIAIAMgAygCBCAGajYCBCAAQXggAGtBD3FBACAAQQhqQQ9xG2oiCyACQQNyNgIEIAVBeCAFa0EPcUEAIAVBCGpBD3EbaiIGIAsgAmoiAmshAwJAIAYgBEcNAEEAIAI2AqDQgIAAQQBBACgClNCAgAAgA2oiAzYClNCAgAAgAiADQQFyNgIEDAMLAkAgBkEAKAKc0ICAAEcNAEEAIAI2ApzQgIAAQQBBACgCkNCAgAAgA2oiAzYCkNCAgAAgAiADQQFyNgIEIAIgA2ogAzYCAAwDCwJAIAYoAgQiBEEDcUEBRw0AIARBeHEhBwJAAkAgBEH/AUsNACAGKAIIIgUgBEEDdiIIQQN0QbDQgIAAaiIARhoCQCAGKAIMIgQgBUcNAEEAQQAoAojQgIAAQX4gCHdxNgKI0ICAAAwCCyAEIABGGiAEIAU2AgggBSAENgIMDAELIAYoAhghCQJAAkAgBigCDCIAIAZGDQAgBigCCCIEIAhJGiAAIAQ2AgggBCAANgIMDAELAkAgBkEUaiIEKAIAIgUNACAGQRBqIgQoAgAiBQ0AQQAhAAwBCwNAIAQhCCAFIgBBFGoiBCgCACIFDQAgAEEQaiEEIAAoAhAiBQ0ACyAIQQA2AgALIAlFDQACQAJAIAYgBigCHCIFQQJ0QbjSgIAAaiIEKAIARw0AIAQgADYCACAADQFBAEEAKAKM0ICAAEF+IAV3cTYCjNC
AgAAMAgsgCUEQQRQgCSgCECAGRhtqIAA2AgAgAEUNAQsgACAJNgIYAkAgBigCECIERQ0AIAAgBDYCECAEIAA2AhgLIAYoAhQiBEUNACAAQRRqIAQ2AgAgBCAANgIYCyAHIANqIQMgBiAHaiIGKAIEIQQLIAYgBEF+cTYCBCACIANqIAM2AgAgAiADQQFyNgIEAkAgA0H/AUsNACADQXhxQbDQgIAAaiEEAkACQEEAKAKI0ICAACIFQQEgA0EDdnQiA3ENAEEAIAUgA3I2AojQgIAAIAQhAwwBCyAEKAIIIQMLIAMgAjYCDCAEIAI2AgggAiAENgIMIAIgAzYCCAwDC0EfIQQCQCADQf///wdLDQAgA0EIdiIEIARBgP4/akEQdkEIcSIEdCIFIAVBgOAfakEQdkEEcSIFdCIAIABBgIAPakEQdkECcSIAdEEPdiAEIAVyIAByayIEQQF0IAMgBEEVanZBAXFyQRxqIQQLIAIgBDYCHCACQgA3AhAgBEECdEG40oCAAGohBQJAQQAoAozQgIAAIgBBASAEdCIIcQ0AIAUgAjYCAEEAIAAgCHI2AozQgIAAIAIgBTYCGCACIAI2AgggAiACNgIMDAMLIANBAEEZIARBAXZrIARBH0YbdCEEIAUoAgAhAANAIAAiBSgCBEF4cSADRg0CIARBHXYhACAEQQF0IQQgBSAAQQRxakEQaiIIKAIAIgANAAsgCCACNgIAIAIgBTYCGCACIAI2AgwgAiACNgIIDAILIABBeCAAa0EPcUEAIABBCGpBD3EbIgNqIgsgBkFIaiIIIANrIgNBAXI2AgQgACAIakE4NgIEIAQgBUE3IAVrQQ9xQQAgBUFJakEPcRtqQUFqIgggCCAEQRBqSRsiCEEjNgIEQQBBACgC8NOAgAA2AqTQgIAAQQAgAzYClNCAgABBACALNgKg0ICAACAIQRBqQQApAtDTgIAANwIAIAhBACkCyNOAgAA3AghBACAIQQhqNgLQ04CAAEEAIAY2AszTgIAAQQAgADYCyNOAgABBAEEANgLU04CAACAIQSRqIQMDQCADQQc2AgAgA0EEaiIDIAVJDQALIAggBEYNAyAIIAgoAgRBfnE2AgQgCCAIIARrIgA2AgAgBCAAQQFyNgIEAkAgAEH/AUsNACAAQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgAEEDdnQiAHENAEEAIAUgAHI2AojQgIAAIAMhBQwBCyADKAIIIQULIAUgBDYCDCADIAQ2AgggBCADNgIMIAQgBTYCCAwEC0EfIQMCQCAAQf///wdLDQAgAEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCIIIAhBgIAPakEQdkECcSIIdEEPdiADIAVyIAhyayIDQQF0IAAgA0EVanZBAXFyQRxqIQMLIAQgAzYCHCAEQgA3AhAgA0ECdEG40oCAAGohBQJAQQAoAozQgIAAIghBASADdCIGcQ0AIAUgBDYCAEEAIAggBnI2AozQgIAAIAQgBTYCGCAEIAQ2AgggBCAENgIMDAQLIABBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhCANAIAgiBSgCBEF4cSAARg0DIANBHXYhCCADQQF0IQMgBSAIQQRxakEQaiIGKAIAIggNAAsgBiAENgIAIAQgBTYCGCAEIAQ2AgwgBCAENgIIDAMLIAUoAggiAyACNgIMIAUgAjYCCCACQQA2AhggAiAFNgIMIAIgAzYCCAsgC0EIaiEDDAULIAUoAggiAyAENgIMIAUgBDYCCCAEQQA2AhggBCAFNgIMIAQgAzYCCAtBACgClNCAgAAiAyACTQ0AQQAoAqDQgIAAIgQgAmoiBSADIAJrIgNBAXI2AgRBACADNgKU0ICAAEEAIAU2AqDQgIAAIAQgAkEDcjYCBCAEQQhqIQMMAwtBACEDQQBBMDYC+NOAgAAMAgsCQCALRQ0AAkACQCAIIAgoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAA2AgAgAA0BQQAgB0F+IAV3cSIHNgKM0ICAAAwCCyALQRBBFCALKAIQIAhGG2ogADYCACAARQ0BCyAAIAs2AhgCQCAIKAIQIgNFDQAgACADNgIQIAMgADYCGAsgCEEUaigCACIDRQ0AIABBFGogAzYCACADIAA2AhgLAkACQCAEQQ9LDQAgCCAEIAJqIgNBA3I2AgQgCCADaiIDIAMoAgRBAXI2AgQMAQsgCCACaiIAIARBAXI2AgQgCCACQQNyNgIEIAAgBGogBDYCAAJAIARB/wFLDQAgBEF4cUGw0ICAAGohAwJAAkBBACgCiNCAgAAiBUEBIARBA3Z0IgRxDQBBACAFIARyNgKI0ICAACADIQQMAQsgAygCCCEECyAEIAA2AgwgAyAANgIIIAAgAzYCDCAAIAQ2AggMAQtBHyEDAkAgBEH///8HSw0AIARBCHYiAyADQYD+P2pBEHZBCHEiA3QiBSAFQYDgH2pBEHZBBHEiBXQiAiACQYCAD2pBEHZBAnEiAnRBD3YgAyAFciACcmsiA0EBdCAEIANBFWp2QQFxckEcaiEDCyAAIAM2AhwgAEIANwIQIANBAnRBuNKAgABqIQUCQCAHQQEgA3QiAnENACAFIAA2AgBBACAHIAJyNgKM0ICAACAAIAU2AhggACAANgIIIAAgADYCDAwBCyAEQQBBGSADQQF2ayADQR9GG3QhAyAFKAIAIQICQANAIAIiBSgCBEF4cSAERg0BIANBHXYhAiADQQF0IQMgBSACQQRxakEQaiIGKAIAIgINAAsgBiAANgIAIAAgBTYCGCAAIAA2AgwgACAANgIIDAELIAUoAggiAyAANgIMIAUgADYCCCAAQQA2AhggACAFNgIMIAAgAzYCCAsgCEEIaiEDDAELAkAgCkUNAAJAAkAgACAAKAIcIgVBAnRBuNKAgABqIgMoAgBHDQAgAyAINgIAIAgNAUEAIAlBfiAFd3E2AozQgIAADAILIApBEEEUIAooAhAgAEYbaiAINgIAIAhFDQELIAggCjYCGAJAIAAoAhAiA0UNACAIIAM2AhAgAyAINgIYCyAAQRRqKAIAIgNFDQAgCEEUaiADNgIAIAMgCDYCGAsCQAJAIARBD0sNACAAIAQgAmoiA0EDcjYCBCAAIANqIgMgAygCBEEBcjYCBAwBCyAAIAJqIgUgBEEBcjYCBCAAIAJBA3I2AgQgBSAEaiAENgIAAkAgB0UNACAHQXhxQbDQgIAAaiECQQAoApzQgIAAIQMCQAJAQQEgB0EDdnQiCCAGcQ0AQQAgCCAGcjYCiNCAgAAgAiEIDAELIAIoAgghCAsgCCADNgIMIAIgAzYCCCADIAI2AgwgAyAINgIIC0EAIAU2ApzQgIAAQQAgBDYCkNCAgAALIABBCGohAwsgAUEQaiSAgICAACADCwoAIAAQyYCAgAAL4g0BB38CQCAARQ0AIABBeGoiASAAQXxqKAIAIgJBeHEiAGohAwJAIAJBAXENACACQQNxRQ
0BIAEgASgCACICayIBQQAoApjQgIAAIgRJDQEgAiAAaiEAAkAgAUEAKAKc0ICAAEYNAAJAIAJB/wFLDQAgASgCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgASgCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAwsgAiAGRhogAiAENgIIIAQgAjYCDAwCCyABKAIYIQcCQAJAIAEoAgwiBiABRg0AIAEoAggiAiAESRogBiACNgIIIAIgBjYCDAwBCwJAIAFBFGoiAigCACIEDQAgAUEQaiICKAIAIgQNAEEAIQYMAQsDQCACIQUgBCIGQRRqIgIoAgAiBA0AIAZBEGohAiAGKAIQIgQNAAsgBUEANgIACyAHRQ0BAkACQCABIAEoAhwiBEECdEG40oCAAGoiAigCAEcNACACIAY2AgAgBg0BQQBBACgCjNCAgABBfiAEd3E2AozQgIAADAMLIAdBEEEUIAcoAhAgAUYbaiAGNgIAIAZFDQILIAYgBzYCGAJAIAEoAhAiAkUNACAGIAI2AhAgAiAGNgIYCyABKAIUIgJFDQEgBkEUaiACNgIAIAIgBjYCGAwBCyADKAIEIgJBA3FBA0cNACADIAJBfnE2AgRBACAANgKQ0ICAACABIABqIAA2AgAgASAAQQFyNgIEDwsgASADTw0AIAMoAgQiAkEBcUUNAAJAAkAgAkECcQ0AAkAgA0EAKAKg0ICAAEcNAEEAIAE2AqDQgIAAQQBBACgClNCAgAAgAGoiADYClNCAgAAgASAAQQFyNgIEIAFBACgCnNCAgABHDQNBAEEANgKQ0ICAAEEAQQA2ApzQgIAADwsCQCADQQAoApzQgIAARw0AQQAgATYCnNCAgABBAEEAKAKQ0ICAACAAaiIANgKQ0ICAACABIABBAXI2AgQgASAAaiAANgIADwsgAkF4cSAAaiEAAkACQCACQf8BSw0AIAMoAggiBCACQQN2IgVBA3RBsNCAgABqIgZGGgJAIAMoAgwiAiAERw0AQQBBACgCiNCAgABBfiAFd3E2AojQgIAADAILIAIgBkYaIAIgBDYCCCAEIAI2AgwMAQsgAygCGCEHAkACQCADKAIMIgYgA0YNACADKAIIIgJBACgCmNCAgABJGiAGIAI2AgggAiAGNgIMDAELAkAgA0EUaiICKAIAIgQNACADQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQACQAJAIAMgAygCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAgsgB0EQQRQgBygCECADRhtqIAY2AgAgBkUNAQsgBiAHNgIYAkAgAygCECICRQ0AIAYgAjYCECACIAY2AhgLIAMoAhQiAkUNACAGQRRqIAI2AgAgAiAGNgIYCyABIABqIAA2AgAgASAAQQFyNgIEIAFBACgCnNCAgABHDQFBACAANgKQ0ICAAA8LIAMgAkF+cTYCBCABIABqIAA2AgAgASAAQQFyNgIECwJAIABB/wFLDQAgAEF4cUGw0ICAAGohAgJAAkBBACgCiNCAgAAiBEEBIABBA3Z0IgBxDQBBACAEIAByNgKI0ICAACACIQAMAQsgAigCCCEACyAAIAE2AgwgAiABNgIIIAEgAjYCDCABIAA2AggPC0EfIQICQCAAQf///wdLDQAgAEEIdiICIAJBgP4/akEQdkEIcSICdCIEIARBgOAfakEQdkEEcSIEdCIGIAZBgIAPakEQdkECcSIGdEEPdiACIARyIAZyayICQQF0IAAgAkEVanZBAXFyQRxqIQILIAEgAjYCHCABQgA3AhAgAkECdEG40oCAAGohBAJAAkBBACgCjNCAgAAiBkEBIAJ0IgNxDQAgBCABNgIAQQAgBiADcjYCjNCAgAAgASAENgIYIAEgATYCCCABIAE2AgwMAQsgAEEAQRkgAkEBdmsgAkEfRht0IQIgBCgCACEGAkADQCAGIgQoAgRBeHEgAEYNASACQR12IQYgAkEBdCECIAQgBkEEcWpBEGoiAygCACIGDQALIAMgATYCACABIAQ2AhggASABNgIMIAEgATYCCAwBCyAEKAIIIgAgATYCDCAEIAE2AgggAUEANgIYIAEgBDYCDCABIAA2AggLQQBBACgCqNCAgABBf2oiAUF/IAEbNgKo0ICAAAsLBAAAAAtOAAJAIAANAD8AQRB0DwsCQCAAQf//A3ENACAAQX9MDQACQCAAQRB2QAAiAEF/Rw0AQQBBMDYC+NOAgABBfw8LIABBEHQPCxDKgICAAAAL8gICA38BfgJAIAJFDQAgACABOgAAIAIgAGoiA0F/aiABOgAAIAJBA0kNACAAIAE6AAIgACABOgABIANBfWogAToAACADQX5qIAE6AAAgAkEHSQ0AIAAgAToAAyADQXxqIAE6AAAgAkEJSQ0AIABBACAAa0EDcSIEaiIDIAFB/wFxQYGChAhsIgE2AgAgAyACIARrQXxxIgRqIgJBfGogATYCACAEQQlJDQAgAyABNgIIIAMgATYCBCACQXhqIAE2AgAgAkF0aiABNgIAIARBGUkNACADIAE2AhggAyABNgIUIAMgATYCECADIAE2AgwgAkFwaiABNgIAIAJBbGogATYCACACQWhqIAE2AgAgAkFkaiABNgIAIAQgA0EEcUEYciIFayICQSBJDQAgAa1CgYCAgBB+IQYgAyAFaiEBA0AgASAGNwMYIAEgBjcDECABIAY3AwggASAGNwMAIAFBIGohASACQWBqIgJBH0sNAAsLIAALC45IAQBBgAgLhkgBAAAAAgAAAAMAAAAAAAAAAAAAAAQAAAAFAAAAAAAAAAAAAAAGAAAABwAAAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEludmFsaWQgY2hhciBpbiB1cmwgcXVlcnkAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9ib2R5AENvbnRlbnQtTGVuZ3RoIG92ZXJmbG93AENodW5rIHNpemUgb3ZlcmZsb3cAUmVzcG9uc2Ugb3ZlcmZsb3cASW52YWxpZCBtZXRob2QgZm9yIEhUVFAveC54IHJlcXVlc3QASW52YWxpZCBtZXRob2QgZm9yIFJUU1AveC54IHJlcXVlc3QARXhwZWN0ZWQgU09VUkNFIG1ldGhvZCBmb3IgSUNFL3gueCByZXF1ZXN0AEludmFsaWQgY2hhciBpbiB1cmwgZnJhZ21lbnQgc3RhcnQARXhwZWN0ZWQgZG90AFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fc3RhdHVzAEludmFsaWQgcmVzcG9uc2Ugc3RhdHVzAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMAVXNlciBjYWxsYmFjayBlcnJvc
gBgb25fcmVzZXRgIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19oZWFkZXJgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2JlZ2luYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlYCBjYWxsYmFjayBlcnJvcgBgb25fc3RhdHVzX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdmVyc2lvbl9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX3VybF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWVzc2FnZV9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX21ldGhvZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZWAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lYCBjYWxsYmFjayBlcnJvcgBVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNlcnZlcgBJbnZhbGlkIGhlYWRlciB2YWx1ZSBjaGFyAEludmFsaWQgaGVhZGVyIGZpZWxkIGNoYXIAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl92ZXJzaW9uAEludmFsaWQgbWlub3IgdmVyc2lvbgBJbnZhbGlkIG1ham9yIHZlcnNpb24ARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgdmVyc2lvbgBFeHBlY3RlZCBDUkxGIGFmdGVyIHZlcnNpb24ASW52YWxpZCBIVFRQIHZlcnNpb24ASW52YWxpZCBoZWFkZXIgdG9rZW4AU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl91cmwASW52YWxpZCBjaGFyYWN0ZXJzIGluIHVybABVbmV4cGVjdGVkIHN0YXJ0IGNoYXIgaW4gdXJsAERvdWJsZSBAIGluIHVybABFbXB0eSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXJhY3RlciBpbiBDb250ZW50LUxlbmd0aABEdXBsaWNhdGUgQ29udGVudC1MZW5ndGgASW52YWxpZCBjaGFyIGluIHVybCBwYXRoAENvbnRlbnQtTGVuZ3RoIGNhbid0IGJlIHByZXNlbnQgd2l0aCBUcmFuc2Zlci1FbmNvZGluZwBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBzaXplAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX3ZhbHVlAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgdmFsdWUATWlzc2luZyBleHBlY3RlZCBMRiBhZnRlciBoZWFkZXIgdmFsdWUASW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIHF1b3RlIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGVkIHZhbHVlAFBhdXNlZCBieSBvbl9oZWFkZXJzX2NvbXBsZXRlAEludmFsaWQgRU9GIHN0YXRlAG9uX3Jlc2V0IHBhdXNlAG9uX2NodW5rX2hlYWRlciBwYXVzZQBvbl9tZXNzYWdlX2JlZ2luIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl92YWx1ZSBwYXVzZQBvbl9zdGF0dXNfY29tcGxldGUgcGF1c2UAb25fdmVyc2lvbl9jb21wbGV0ZSBwYXVzZQBvbl91cmxfY29tcGxldGUgcGF1c2UAb25fY2h1bmtfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX3ZhbHVlX2NvbXBsZXRlIHBhdXNlAG9uX21lc3NhZ2VfY29tcGxldGUgcGF1c2UAb25fbWV0aG9kX2NvbXBsZXRlIHBhdXNlAG9uX2hlYWRlcl9maWVsZF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19leHRlbnNpb25fbmFtZSBwYXVzZQBVbmV4cGVjdGVkIHNwYWNlIGFmdGVyIHN0YXJ0IGxpbmUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fbmFtZQBJbnZhbGlkIGNoYXJhY3RlciBpbiBjaHVuayBleHRlbnNpb25zIG5hbWUAUGF1c2Ugb24gQ09OTkVDVC9VcGdyYWRlAFBhdXNlIG9uIFBSSS9VcGdyYWRlAEV4cGVjdGVkIEhUVFAvMiBDb25uZWN0aW9uIFByZWZhY2UAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9tZXRob2QARXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgbWV0aG9kAFNwYW4gY2FsbGJhY2sgZXJyb3IgaW4gb25faGVhZGVyX2ZpZWxkAFBhdXNlZABJbnZhbGlkIHdvcmQgZW5jb3VudGVyZWQASW52YWxpZCBtZXRob2QgZW5jb3VudGVyZWQAVW5leHBlY3RlZCBjaGFyIGluIHVybCBzY2hlbWEAUmVxdWVzdCBoYXMgaW52YWxpZCBgVHJhbnNmZXItRW5jb2RpbmdgAFNXSVRDSF9QUk9YWQBVU0VfUFJPWFkATUtBQ1RJVklUWQBVTlBST0NFU1NBQkxFX0VOVElUWQBDT1BZAE1PVkVEX1BFUk1BTkVOVExZAFRPT19FQVJMWQBOT1RJRlkARkFJTEVEX0RFUEVOREVOQ1kAQkFEX0dBVEVXQVkAUExBWQBQVVQAQ0hFQ0tPVVQAR0FURVdBWV9USU1FT1VUAFJFUVVFU1RfVElNRU9VVABORVRXT1JLX0NPTk5FQ1RfVElNRU9VVABDT05ORUNUSU9OX1RJTUVPVVQATE9HSU5fVElNRU9VVABORVRXT1JLX1JFQURfVElNRU9VVABQT1NUAE1JU0RJUkVDVEVEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9SRVFVRVNUAENMSUVOVF9DTE9TRURfTE9BRF9CQUxBTkNFRF9SRVFVRVNUAEJBRF9SRVFVRVNUAEhUVFBfUkVRVUVTVF9TRU5UX1RPX0hUVFBTX1BPUlQAUkVQT1JUAElNX0FfVEVBUE9UAFJFU0VUX0NPTlRFTlQATk9fQ09OVEVOVABQQVJUSUFMX0NPTlRFTlQASFBFX0lOVkFMSURfQ09OU1RBTlQASFBFX0NCX1JFU0VUAEdFVABIUEVfU1RSSUNUAENPTkZMSUNUAFRFTVBPUkFSWV9SRURJ
UkVDVABQRVJNQU5FTlRfUkVESVJFQ1QAQ09OTkVDVABNVUxUSV9TVEFUVVMASFBFX0lOVkFMSURfU1RBVFVTAFRPT19NQU5ZX1JFUVVFU1RTAEVBUkxZX0hJTlRTAFVOQVZBSUxBQkxFX0ZPUl9MRUdBTF9SRUFTT05TAE9QVElPTlMAU1dJVENISU5HX1BST1RPQ09MUwBWQVJJQU5UX0FMU09fTkVHT1RJQVRFUwBNVUxUSVBMRV9DSE9JQ0VTAElOVEVSTkFMX1NFUlZFUl9FUlJPUgBXRUJfU0VSVkVSX1VOS05PV05fRVJST1IAUkFJTEdVTl9FUlJPUgBJREVOVElUWV9QUk9WSURFUl9BVVRIRU5USUNBVElPTl9FUlJPUgBTU0xfQ0VSVElGSUNBVEVfRVJST1IASU5WQUxJRF9YX0ZPUldBUkRFRF9GT1IAU0VUX1BBUkFNRVRFUgBHRVRfUEFSQU1FVEVSAEhQRV9VU0VSAFNFRV9PVEhFUgBIUEVfQ0JfQ0hVTktfSEVBREVSAE1LQ0FMRU5EQVIAU0VUVVAAV0VCX1NFUlZFUl9JU19ET1dOAFRFQVJET1dOAEhQRV9DTE9TRURfQ09OTkVDVElPTgBIRVVSSVNUSUNfRVhQSVJBVElPTgBESVNDT05ORUNURURfT1BFUkFUSU9OAE5PTl9BVVRIT1JJVEFUSVZFX0lORk9STUFUSU9OAEhQRV9JTlZBTElEX1ZFUlNJT04ASFBFX0NCX01FU1NBR0VfQkVHSU4AU0lURV9JU19GUk9aRU4ASFBFX0lOVkFMSURfSEVBREVSX1RPS0VOAElOVkFMSURfVE9LRU4ARk9SQklEREVOAEVOSEFOQ0VfWU9VUl9DQUxNAEhQRV9JTlZBTElEX1VSTABCTE9DS0VEX0JZX1BBUkVOVEFMX0NPTlRST0wATUtDT0wAQUNMAEhQRV9JTlRFUk5BTABSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFX1VOT0ZGSUNJQUwASFBFX09LAFVOTElOSwBVTkxPQ0sAUFJJAFJFVFJZX1dJVEgASFBFX0lOVkFMSURfQ09OVEVOVF9MRU5HVEgASFBFX1VORVhQRUNURURfQ09OVEVOVF9MRU5HVEgARkxVU0gAUFJPUFBBVENIAE0tU0VBUkNIAFVSSV9UT09fTE9ORwBQUk9DRVNTSU5HAE1JU0NFTExBTkVPVVNfUEVSU0lTVEVOVF9XQVJOSU5HAE1JU0NFTExBTkVPVVNfV0FSTklORwBIUEVfSU5WQUxJRF9UUkFOU0ZFUl9FTkNPRElORwBFeHBlY3RlZCBDUkxGAEhQRV9JTlZBTElEX0NIVU5LX1NJWkUATU9WRQBDT05USU5VRQBIUEVfQ0JfU1RBVFVTX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJTX0NPTVBMRVRFAEhQRV9DQl9WRVJTSU9OX0NPTVBMRVRFAEhQRV9DQl9VUkxfQ09NUExFVEUASFBFX0NCX0NIVU5LX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfVkFMVUVfQ09NUExFVEUASFBFX0NCX0NIVU5LX0VYVEVOU0lPTl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX05BTUVfQ09NUExFVEUASFBFX0NCX01FU1NBR0VfQ09NUExFVEUASFBFX0NCX01FVEhPRF9DT01QTEVURQBIUEVfQ0JfSEVBREVSX0ZJRUxEX0NPTVBMRVRFAERFTEVURQBIUEVfSU5WQUxJRF9FT0ZfU1RBVEUASU5WQUxJRF9TU0xfQ0VSVElGSUNBVEUAUEFVU0UATk9fUkVTUE9OU0UAVU5TVVBQT1JURURfTUVESUFfVFlQRQBHT05FAE5PVF9BQ0NFUFRBQkxFAFNFUlZJQ0VfVU5BVkFJTEFCTEUAUkFOR0VfTk9UX1NBVElTRklBQkxFAE9SSUdJTl9JU19VTlJFQUNIQUJMRQBSRVNQT05TRV9JU19TVEFMRQBQVVJHRQBNRVJHRQBSRVFVRVNUX0hFQURFUl9GSUVMRFNfVE9PX0xBUkdFAFJFUVVFU1RfSEVBREVSX1RPT19MQVJHRQBQQVlMT0FEX1RPT19MQVJHRQBJTlNVRkZJQ0lFTlRfU1RPUkFHRQBIUEVfUEFVU0VEX1VQR1JBREUASFBFX1BBVVNFRF9IMl9VUEdSQURFAFNPVVJDRQBBTk5PVU5DRQBUUkFDRQBIUEVfVU5FWFBFQ1RFRF9TUEFDRQBERVNDUklCRQBVTlNVQlNDUklCRQBSRUNPUkQASFBFX0lOVkFMSURfTUVUSE9EAE5PVF9GT1VORABQUk9QRklORABVTkJJTkQAUkVCSU5EAFVOQVVUSE9SSVpFRABNRVRIT0RfTk9UX0FMTE9XRUQASFRUUF9WRVJTSU9OX05PVF9TVVBQT1JURUQAQUxSRUFEWV9SRVBPUlRFRABBQ0NFUFRFRABOT1RfSU1QTEVNRU5URUQATE9PUF9ERVRFQ1RFRABIUEVfQ1JfRVhQRUNURUQASFBFX0xGX0VYUEVDVEVEAENSRUFURUQASU1fVVNFRABIUEVfUEFVU0VEAFRJTUVPVVRfT0NDVVJFRABQQVlNRU5UX1JFUVVJUkVEAFBSRUNPTkRJVElPTl9SRVFVSVJFRABQUk9YWV9BVVRIRU5USUNBVElPTl9SRVFVSVJFRABORVRXT1JLX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAExFTkdUSF9SRVFVSVJFRABTU0xfQ0VSVElGSUNBVEVfUkVRVUlSRUQAVVBHUkFERV9SRVFVSVJFRABQQUdFX0VYUElSRUQAUFJFQ09ORElUSU9OX0ZBSUxFRABFWFBFQ1RBVElPTl9GQUlMRUQAUkVWQUxJREFUSU9OX0ZBSUxFRABTU0xfSEFORFNIQUtFX0ZBSUxFRABMT0NLRUQAVFJBTlNGT1JNQVRJT05fQVBQTElFRABOT1RfTU9ESUZJRUQATk9UX0VYVEVOREVEAEJBTkRXSURUSF9MSU1JVF9FWENFRURFRABTSVRFX0lTX09WRVJMT0FERUQASEVBRABFeHBlY3RlZCBIVFRQLwAAXhMAACYTAAAwEAAA8BcAAJ0TAAAVEgAAORcAAPASAAAKEAAAdRIAAK0SAACCEwAATxQAAH8QAACgFQAAIxQAAIkSAACLFAAATRUAANQRAADPFAAAEBgAAMkWAADcFgAAwREAAOAXAAC7FAAAdBQAAHwVAADlFAAACBcAAB8QAABlFQAAoxQAACgVAAACFQAAmRUAACwQAACLGQAATw8AANQOAABqEAAAzhAAAAIXAACJDgAAbhMAABwTAABmFAAAVhcAAMETAADNEwAAbBMAAGgXAABmFwAAXxcAACITAADODwAAaQ4AANgOAABjFgAAyxMAAKoOAAAoFwAAJhcAAMUTAABdFgAA6BEAAGcTAABlEwAA8hYAAHMTAAA
dFwAA+RYAAPMRAADPDgAAzhUAAAwSAACzEQAApREAAGEQAAAyFwAAuxMAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIDAgICAgIAAAICAAICAAICAgICAgICAgIABAAAAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAACAAICAgICAAACAgACAgACAgICAgICAgICAAMABAAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgICAgIAAgACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAbG9zZWVlcC1hbGl2ZQAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAQEBAQEBAQEBAQIBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBY2h1bmtlZAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEAAAEBAAEBAAEBAQEBAQEBAQEAAAAAAAAAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABlY3Rpb25lbnQtbGVuZ3Rob25yb3h5LWNvbm5lY3Rpb24AAAAAAAAAAAAAAAAAAAByYW5zZmVyLWVuY29kaW5ncGdyYWRlDQoNCg0KU00NCg0KVFRQL0NFL1RTUC8AAAAAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQIAAQMAAAAAAAAAAAAAAAAAAAAAAAAEAQEFAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAAAAQAAAgAAAAAAAAAAAAAAAAAAAAAAAAMEAAAEBAQEBAQEBAQEBAUEBAQEBAQEBAQEBAQABAAGBwQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAABAAAAAAAAAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAIAAAAAAgAAAAAAAAAAAAAAAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABOT1VOQ0VFQ0tPVVRORUNURVRFQ1JJQkVMVVNIRVRFQURTRUFSQ0hSR0VDVElWSVRZTEVOREFSVkVPVElGWVBUSU9OU0NIU0VBWVNUQVRDSEdFT1JESVJFQ1RPUlRSQ0hQQVJBTUVURVJVUkNFQlNDUklCRUFSRE9XTkFDRUlORE5LQ0tVQlNDUklCRUhUVFAvQURUUC8=' /***/ }), /***/ 53434: /***/ ((module) => { module.exports = 'AGFzbQEAAAABMAhgAX8Bf2ADf39/AX9gBH9/f38Bf2AAAGADf39/AGABfwBgAn9/AGAGf39/f39/AALLAQgDZW52GHdhc21fb25faGVhZGVyc19jb21wbGV0ZQACA2VudhV3YXNtX29uX21lc3NhZ2VfYmVnaW4AAANlbnYLd2FzbV9vbl91cmwAAQNlbnYOd2FzbV9vbl9zdGF0dXMAAQNlbnYUd2FzbV9vbl9oZWFkZXJfZmllbGQAAQNlbnYUd2FzbV9vbl9oZWFkZXJfdmFsdWUAAQNlbnYMd2FzbV9vbl9ib2R5AAEDZW52GHdhc21fb25fbWVzc2FnZV9jb21wbGV0ZQAAA0ZFAwMEAAAFAAAAAAAABQEFAAUFBQAABgAAAAAGBgYGAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAAABAQcAAAUFAwABBAUBcAESEgUDAQACBggBfwFBgNQECwfRBSIGbWVtb3J5AgALX2luaXRpYWxpemUACRlfX2luZGlyZWN0X2Z1bmN0aW9uX3RhYmxlAQALbGxodHRwX2luaXQAChhsbGh0dHBfc2hvdWxkX2tlZXBfYWxpdmUAQQxsbGh0dHBfYWxsb2MADAZtYWxsb2MARgtsbGh0dHBfZnJlZQANBGZyZWUASA9sbGh0dHBfZ2V0X3R5cGUADhVsbGh0dHBfZ2V0X2h0dHBfbWFqb3IADxVsbGh0dHBfZ2V0X2h0dHBfbWlub3IAEBFsbGh0dHBfZ2V0X21ldGhvZAARFmxsaHR0cF9nZXRfc3RhdHVzX2NvZGUAEhJsbGh0dHBfZ2V0X3VwZ3JhZGUAEwxsbGh0dHBfcmVzZXQAFA5sbGh0dHBfZXhlY3V0ZQAVFGxsaHR0cF9zZXR0aW5nc19pbml0ABYNbGxodHRwX2ZpbmlzaAAXDGxsaHR0cF9wYXVzZQAYDWxsaHR0cF9yZXN1bWUAGRtsbGh0dHBfcmVzdW1lX2FmdGVyX3VwZ3JhZGUAGhBsbGh0dHBfZ2V0X2Vycm5vABsXbGxodHRwX2dldF9lcnJvcl9yZWFzb24AHBdsbGh0dHBfc2V0X2Vycm9yX3JlYXNvbgAdFGxsaHR0cF9nZXRfZXJyb3JfcG9zAB4RbGxodHRwX2Vycm5vX25hbWUAHxJsbGh0dHBfbWV0aG9kX25hbWUAIBJsbGh0dHBfc3RhdHVzX25hbWUAIRpsbGh0dHBfc2V0X2xlbmllbnRfaGVhZGVycwAiIWxsaHR0cF9zZXRfbGVuaWVudF9jaHVua2VkX2xlbmd0aAAjHWxsaHR0cF9zZXRfbGVuaWVudF9rZWVwX2FsaXZlACQkbGxodHRwX3NldF9sZW5pZW50X3RyYW5zZmVyX2VuY29kaW5nACUYbGxodHRwX21lc3NhZ2VfbmVlZHNfZW9mAD8JFwEAQQELEQECAwQFCwYHNTk3MS8tJyspCrLgAkUCAAsIABCIgICAAAsZACAAEMKAgIAAGiAAIAI2AjggACABOgAoCxwAIAAgAC8BMiAALQAuIAAQwYCAgAAQgICAgAALKgEBf0HAABDGgICAACIBEMKAgIAAGiABQYCIgIAANgI4IAEgADoAKCABCwoAIAAQyICAgAALBwAgAC0AKAsHACAALQAqCwcAIAAtACsLBwAgAC0AKQsHACAALwEyCwcAIAAtAC4LRQEEfyAAKAIYIQEgAC0ALSECIAAtACghAyAAKAI4IQQgABDCgICAABogACAENgI4IAAgAzoAKCAAIAI6AC0gACABNgIYCxEAIAAgASABIAJqEMOAgIAACxAAIABBAEHcABDMgICAABoLZwEBf0EAIQECQCAAKAIMDQACQAJAAkACQCAALQAvDgMBAAMCCyAAKAI4IgFFDQAgASgCLCIBRQ0AIAAgARGAgICAAAAiAQ0DC0EADwsQyoCAgAAACyAAQcOWgIAANgIQQQ4hAQsgAQseAAJAIAAoAgwNACAAQdGbgIAANgIQIABBFTYCDAsLFgACQCAAKAIMQRVHDQAgAEEANgIMCwsWAAJAIAAoAgxBFkcNACAAQQA2AgwLCwcAIAAoAgwLBwAgACgCEAsJACAAIAE2AhALBwAgACgCFAsiAAJAIABBJEkNABDKgICAAAALIABBAnRBoLOAgABqKAIACyIAAkAgAEEuSQ0AEMqAgIAAAAsgAEECdEGwtICAAGooAgAL7gsBAX9B66iAgAAhAQJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQ
AJAIABBnH9qDvQDY2IAAWFhYWFhYQIDBAVhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhBgcICQoLDA0OD2FhYWFhEGFhYWFhYWFhYWFhEWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYRITFBUWFxgZGhthYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhHB0eHyAhIiMkJSYnKCkqKywtLi8wMTIzNDU2YTc4OTphYWFhYWFhYTthYWE8YWFhYT0+P2FhYWFhYWFhQGFhQWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYUJDREVGR0hJSktMTU5PUFFSU2FhYWFhYWFhVFVWV1hZWlthXF1hYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFeYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhYWFhX2BhC0Hhp4CAAA8LQaShgIAADwtBy6yAgAAPC0H+sYCAAA8LQcCkgIAADwtBq6SAgAAPC0GNqICAAA8LQeKmgIAADwtBgLCAgAAPC0G5r4CAAA8LQdekgIAADwtB75+AgAAPC0Hhn4CAAA8LQfqfgIAADwtB8qCAgAAPC0Gor4CAAA8LQa6ygIAADwtBiLCAgAAPC0Hsp4CAAA8LQYKigIAADwtBjp2AgAAPC0HQroCAAA8LQcqjgIAADwtBxbKAgAAPC0HfnICAAA8LQdKcgIAADwtBxKCAgAAPC0HXoICAAA8LQaKfgIAADwtB7a6AgAAPC0GrsICAAA8LQdSlgIAADwtBzK6AgAAPC0H6roCAAA8LQfyrgIAADwtB0rCAgAAPC0HxnYCAAA8LQbuggIAADwtB96uAgAAPC0GQsYCAAA8LQdexgIAADwtBoq2AgAAPC0HUp4CAAA8LQeCrgIAADwtBn6yAgAAPC0HrsYCAAA8LQdWfgIAADwtByrGAgAAPC0HepYCAAA8LQdSegIAADwtB9JyAgAAPC0GnsoCAAA8LQbGdgIAADwtBoJ2AgAAPC0G5sYCAAA8LQbywgIAADwtBkqGAgAAPC0GzpoCAAA8LQemsgIAADwtBrJ6AgAAPC0HUq4CAAA8LQfemgIAADwtBgKaAgAAPC0GwoYCAAA8LQf6egIAADwtBjaOAgAAPC0GJrYCAAA8LQfeigIAADwtBoLGAgAAPC0Gun4CAAA8LQcalgIAADwtB6J6AgAAPC0GTooCAAA8LQcKvgIAADwtBw52AgAAPC0GLrICAAA8LQeGdgIAADwtBja+AgAAPC0HqoYCAAA8LQbStgIAADwtB0q+AgAAPC0HfsoCAAA8LQdKygIAADwtB8LCAgAAPC0GpooCAAA8LQfmjgIAADwtBmZ6AgAAPC0G1rICAAA8LQZuwgIAADwtBkrKAgAAPC0G2q4CAAA8LQcKigIAADwtB+LKAgAAPC0GepYCAAA8LQdCigIAADwtBup6AgAAPC0GBnoCAAA8LEMqAgIAAAAtB1qGAgAAhAQsgAQsWACAAIAAtAC1B/gFxIAFBAEdyOgAtCxkAIAAgAC0ALUH9AXEgAUEAR0EBdHI6AC0LGQAgACAALQAtQfsBcSABQQBHQQJ0cjoALQsZACAAIAAtAC1B9wFxIAFBAEdBA3RyOgAtCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAgAiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCBCIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQcaRgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIwIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAggiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2ioCAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCNCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIMIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZqAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAjgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCECIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZWQgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAI8IgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAhQiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEGqm4CAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCQCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIYIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABB7ZOAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkQiBEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCJCIERQ0AIAAgBBGAgICAAAAhAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIsIgRFDQAgACAEEYCAgIAAACEDCyADC0kBAn9BACEDAkAgACgCOCIERQ0AIAQoAigiBEUNACAAIAEgAiABayAEEYGAgIAAACIDQX9HDQAgAEH2iICAADYCEEEYIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCUCIERQ0AIAAgBBGAgICAAAAhAwsgAwtJAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAIcIgRFDQAgACABIAIgAWsgBBGBgICAAAAiA0F/Rw0AIABBwpmAgAA2AhBBGCEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAkgiBEUNACAAIAQRgICAgAAAIQMLIAMLSQECf0EAIQMCQCAAKAI4IgRFDQAgBCgCICIERQ0AIAAgASACIAFrIAQRgYCAgAAAIgNBf0cNACAAQZSUgIAANgIQQRghAwsgAwsuAQJ/QQAhAwJAIAAoAjgiBEUNACAEKAJMIgRFDQAgACAEEYCAgIAAACEDCyADCy4BAn9BACEDAkAgACgCOCIERQ0AIAQoAlQi
BEUNACAAIAQRgICAgAAAIQMLIAMLLgECf0EAIQMCQCAAKAI4IgRFDQAgBCgCWCIERQ0AIAAgBBGAgICAAAAhAwsgAwtFAQF/AkACQCAALwEwQRRxQRRHDQBBASEDIAAtAChBAUYNASAALwEyQeUARiEDDAELIAAtAClBBUYhAwsgACADOgAuQQAL/gEBA39BASEDAkAgAC8BMCIEQQhxDQAgACkDIEIAUiEDCwJAAkAgAC0ALkUNAEEBIQUgAC0AKUEFRg0BQQEhBSAEQcAAcUUgA3FBAUcNAQtBACEFIARBwABxDQBBAiEFIARB//8DcSIDQQhxDQACQCADQYAEcUUNAAJAIAAtAChBAUcNACAALQAtQQpxDQBBBQ8LQQQPCwJAIANBIHENAAJAIAAtAChBAUYNACAALwEyQf//A3EiAEGcf2pB5ABJDQAgAEHMAUYNACAAQbACRg0AQQQhBSAEQShxRQ0CIANBiARxQYAERg0CC0EADwtBAEEDIAApAyBQGyEFCyAFC2IBAn9BACEBAkAgAC0AKEEBRg0AIAAvATJB//8DcSICQZx/akHkAEkNACACQcwBRg0AIAJBsAJGDQAgAC8BMCIAQcAAcQ0AQQEhASAAQYgEcUGABEYNACAAQShxRSEBCyABC6cBAQN/AkACQAJAIAAtACpFDQAgAC0AK0UNAEEAIQMgAC8BMCIEQQJxRQ0BDAILQQAhAyAALwEwIgRBAXFFDQELQQEhAyAALQAoQQFGDQAgAC8BMkH//wNxIgVBnH9qQeQASQ0AIAVBzAFGDQAgBUGwAkYNACAEQcAAcQ0AQQAhAyAEQYgEcUGABEYNACAEQShxQQBHIQMLIABBADsBMCAAQQA6AC8gAwuZAQECfwJAAkACQCAALQAqRQ0AIAAtACtFDQBBACEBIAAvATAiAkECcUUNAQwCC0EAIQEgAC8BMCICQQFxRQ0BC0EBIQEgAC0AKEEBRg0AIAAvATJB//8DcSIAQZx/akHkAEkNACAAQcwBRg0AIABBsAJGDQAgAkHAAHENAEEAIQEgAkGIBHFBgARGDQAgAkEocUEARyEBCyABC0kBAXsgAEEQav0MAAAAAAAAAAAAAAAAAAAAACIB/QsDACAAIAH9CwMAIABBMGogAf0LAwAgAEEgaiAB/QsDACAAQd0BNgIcQQALewEBfwJAIAAoAgwiAw0AAkAgACgCBEUNACAAIAE2AgQLAkAgACABIAIQxICAgAAiAw0AIAAoAgwPCyAAIAM2AhxBACEDIAAoAgQiAUUNACAAIAEgAiAAKAIIEYGAgIAAACIBRQ0AIAAgAjYCFCAAIAE2AgwgASEDCyADC+TzAQMOfwN+BH8jgICAgABBEGsiAySAgICAACABIQQgASEFIAEhBiABIQcgASEIIAEhCSABIQogASELIAEhDCABIQ0gASEOIAEhDwJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAAKAIcIhBBf2oO3QHaAQHZAQIDBAUGBwgJCgsMDQ7YAQ8Q1wEREtYBExQVFhcYGRob4AHfARwdHtUBHyAhIiMkJdQBJicoKSorLNMB0gEtLtEB0AEvMDEyMzQ1Njc4OTo7PD0+P0BBQkNERUbbAUdISUrPAc4BS80BTMwBTU5PUFFSU1RVVldYWVpbXF1eX2BhYmNkZWZnaGlqa2xtbm9wcXJzdHV2d3h5ent8fX5/gAGBAYIBgwGEAYUBhgGHAYgBiQGKAYsBjAGNAY4BjwGQAZEBkgGTAZQBlQGWAZcBmAGZAZoBmwGcAZ0BngGfAaABoQGiAaMBpAGlAaYBpwGoAakBqgGrAawBrQGuAa8BsAGxAbIBswG0AbUBtgG3AcsBygG4AckBuQHIAboBuwG8Ab0BvgG/AcABwQHCAcMBxAHFAcYBANwBC0EAIRAMxgELQQ4hEAzFAQtBDSEQDMQBC0EPIRAMwwELQRAhEAzCAQtBEyEQDMEBC0EUIRAMwAELQRUhEAy/AQtBFiEQDL4BC0EXIRAMvQELQRghEAy8AQtBGSEQDLsBC0EaIRAMugELQRshEAy5AQtBHCEQDLgBC0EIIRAMtwELQR0hEAy2AQtBICEQDLUBC0EfIRAMtAELQQchEAyzAQtBISEQDLIBC0EiIRAMsQELQR4hEAywAQtBIyEQDK8BC0ESIRAMrgELQREhEAytAQtBJCEQDKwBC0ElIRAMqwELQSYhEAyqAQtBJyEQDKkBC0HDASEQDKgBC0EpIRAMpwELQSshEAymAQtBLCEQDKUBC0EtIRAMpAELQS4hEAyjAQtBLyEQDKIBC0HEASEQDKEBC0EwIRAMoAELQTQhEAyfAQtBDCEQDJ4BC0ExIRAMnQELQTIhEAycAQtBMyEQDJsBC0E5IRAMmgELQTUhEAyZAQtBxQEhEAyYAQtBCyEQDJcBC0E6IRAMlgELQTYhEAyVAQtBCiEQDJQBC0E3IRAMkwELQTghEAySAQtBPCEQDJEBC0E7IRAMkAELQT0hEAyPAQtBCSEQDI4BC0EoIRAMjQELQT4hEAyMAQtBPyEQDIsBC0HAACEQDIoBC0HBACEQDIkBC0HCACEQDIgBC0HDACEQDIcBC0HEACEQDIYBC0HFACEQDIUBC0HGACEQDIQBC0EqIRAMgwELQccAIRAMggELQcgAIRAMgQELQckAIRAMgAELQcoAIRAMfwtBywAhEAx+C0HNACEQDH0LQcwAIRAMfAtBzgAhEAx7C0HPACEQDHoLQdAAIRAMeQtB0QAhEAx4C0HSACEQDHcLQdMAIRAMdgtB1AAhEAx1C0HWACEQDHQLQdUAIRAMcwtBBiEQDHILQdcAIRAMcQtBBSEQDHALQdgAIRAMbwtBBCEQDG4LQdkAIRAMbQtB2gAhEAxsC0HbACEQDGsLQdwAIRAMagtBAyEQDGkLQd0AIRAMaAtB3gAhEAxnC0HfACEQDGYLQeEAIRAMZQtB4AA
hEAxkC0HiACEQDGMLQeMAIRAMYgtBAiEQDGELQeQAIRAMYAtB5QAhEAxfC0HmACEQDF4LQecAIRAMXQtB6AAhEAxcC0HpACEQDFsLQeoAIRAMWgtB6wAhEAxZC0HsACEQDFgLQe0AIRAMVwtB7gAhEAxWC0HvACEQDFULQfAAIRAMVAtB8QAhEAxTC0HyACEQDFILQfMAIRAMUQtB9AAhEAxQC0H1ACEQDE8LQfYAIRAMTgtB9wAhEAxNC0H4ACEQDEwLQfkAIRAMSwtB+gAhEAxKC0H7ACEQDEkLQfwAIRAMSAtB/QAhEAxHC0H+ACEQDEYLQf8AIRAMRQtBgAEhEAxEC0GBASEQDEMLQYIBIRAMQgtBgwEhEAxBC0GEASEQDEALQYUBIRAMPwtBhgEhEAw+C0GHASEQDD0LQYgBIRAMPAtBiQEhEAw7C0GKASEQDDoLQYsBIRAMOQtBjAEhEAw4C0GNASEQDDcLQY4BIRAMNgtBjwEhEAw1C0GQASEQDDQLQZEBIRAMMwtBkgEhEAwyC0GTASEQDDELQZQBIRAMMAtBlQEhEAwvC0GWASEQDC4LQZcBIRAMLQtBmAEhEAwsC0GZASEQDCsLQZoBIRAMKgtBmwEhEAwpC0GcASEQDCgLQZ0BIRAMJwtBngEhEAwmC0GfASEQDCULQaABIRAMJAtBoQEhEAwjC0GiASEQDCILQaMBIRAMIQtBpAEhEAwgC0GlASEQDB8LQaYBIRAMHgtBpwEhEAwdC0GoASEQDBwLQakBIRAMGwtBqgEhEAwaC0GrASEQDBkLQawBIRAMGAtBrQEhEAwXC0GuASEQDBYLQQEhEAwVC0GvASEQDBQLQbABIRAMEwtBsQEhEAwSC0GzASEQDBELQbIBIRAMEAtBtAEhEAwPC0G1ASEQDA4LQbYBIRAMDQtBtwEhEAwMC0G4ASEQDAsLQbkBIRAMCgtBugEhEAwJC0G7ASEQDAgLQcYBIRAMBwtBvAEhEAwGC0G9ASEQDAULQb4BIRAMBAtBvwEhEAwDC0HAASEQDAILQcIBIRAMAQtBwQEhEAsDQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAOxwEAAQIDBAUGBwgJCgsMDQ4PEBESExQVFhcYGRobHB4fICEjJSg/QEFERUZHSElKS0xNT1BRUlPeA1dZW1xdYGJlZmdoaWprbG1vcHFyc3R1dnd4eXp7fH1+gAGCAYUBhgGHAYkBiwGMAY0BjgGPAZABkQGUAZUBlgGXAZgBmQGaAZsBnAGdAZ4BnwGgAaEBogGjAaQBpQGmAacBqAGpAaoBqwGsAa0BrgGvAbABsQGyAbMBtAG1AbYBtwG4AbkBugG7AbwBvQG+Ab8BwAHBAcIBwwHEAcUBxgHHAcgByQHKAcsBzAHNAc4BzwHQAdEB0gHTAdQB1QHWAdcB2AHZAdoB2wHcAd0B3gHgAeEB4gHjAeQB5QHmAecB6AHpAeoB6wHsAe0B7gHvAfAB8QHyAfMBmQKkArAC/gL+AgsgASIEIAJHDfMBQd0BIRAM/wMLIAEiECACRw3dAUHDASEQDP4DCyABIgEgAkcNkAFB9wAhEAz9AwsgASIBIAJHDYYBQe8AIRAM/AMLIAEiASACRw1/QeoAIRAM+wMLIAEiASACRw17QegAIRAM+gMLIAEiASACRw14QeYAIRAM+QMLIAEiASACRw0aQRghEAz4AwsgASIBIAJHDRRBEiEQDPcDCyABIgEgAkcNWUHFACEQDPYDCyABIgEgAkcNSkE/IRAM9QMLIAEiASACRw1IQTwhEAz0AwsgASIBIAJHDUFBMSEQDPMDCyAALQAuQQFGDesDDIcCCyAAIAEiASACEMCAgIAAQQFHDeYBIABCADcDIAznAQsgACABIgEgAhC0gICAACIQDecBIAEhAQz1AgsCQCABIgEgAkcNAEEGIRAM8AMLIAAgAUEBaiIBIAIQu4CAgAAiEA3oASABIQEMMQsgAEIANwMgQRIhEAzVAwsgASIQIAJHDStBHSEQDO0DCwJAIAEiASACRg0AIAFBAWohAUEQIRAM1AMLQQchEAzsAwsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3lAUEIIRAM6wMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQRQhEAzSAwtBCSEQDOoDCyABIQEgACkDIFAN5AEgASEBDPICCwJAIAEiASACRw0AQQshEAzpAwsgACABQQFqIg
EgAhC2gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeUBIAEhAQzyAgsgACABIgEgAhC4gICAACIQDeYBIAEhAQwNCyAAIAEiASACELqAgIAAIhAN5wEgASEBDPACCwJAIAEiASACRw0AQQ8hEAzlAwsgAS0AACIQQTtGDQggEEENRw3oASABQQFqIQEM7wILIAAgASIBIAIQuoCAgAAiEA3oASABIQEM8gILA0ACQCABLQAAQfC1gIAAai0AACIQQQFGDQAgEEECRw3rASAAKAIEIRAgAEEANgIEIAAgECABQQFqIgEQuYCAgAAiEA3qASABIQEM9AILIAFBAWoiASACRw0AC0ESIRAM4gMLIAAgASIBIAIQuoCAgAAiEA3pASABIQEMCgsgASIBIAJHDQZBGyEQDOADCwJAIAEiASACRw0AQRYhEAzgAwsgAEGKgICAADYCCCAAIAE2AgQgACABIAIQuICAgAAiEA3qASABIQFBICEQDMYDCwJAIAEiASACRg0AA0ACQCABLQAAQfC3gIAAai0AACIQQQJGDQACQCAQQX9qDgTlAewBAOsB7AELIAFBAWohAUEIIRAMyAMLIAFBAWoiASACRw0AC0EVIRAM3wMLQRUhEAzeAwsDQAJAIAEtAABB8LmAgABqLQAAIhBBAkYNACAQQX9qDgTeAewB4AHrAewBCyABQQFqIgEgAkcNAAtBGCEQDN0DCwJAIAEiASACRg0AIABBi4CAgAA2AgggACABNgIEIAEhAUEHIRAMxAMLQRkhEAzcAwsgAUEBaiEBDAILAkAgASIUIAJHDQBBGiEQDNsDCyAUIQECQCAULQAAQXNqDhTdAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAu4C7gLuAgDuAgtBACEQIABBADYCHCAAQa+LgIAANgIQIABBAjYCDCAAIBRBAWo2AhQM2gMLAkAgAS0AACIQQTtGDQAgEEENRw3oASABQQFqIQEM5QILIAFBAWohAQtBIiEQDL8DCwJAIAEiECACRw0AQRwhEAzYAwtCACERIBAhASAQLQAAQVBqDjfnAeYBAQIDBAUGBwgAAAAAAAAACQoLDA0OAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAPEBESExQAC0EeIRAMvQMLQgIhEQzlAQtCAyERDOQBC0IEIREM4wELQgUhEQziAQtCBiERDOEBC0IHIREM4AELQgghEQzfAQtCCSERDN4BC0IKIREM3QELQgshEQzcAQtCDCERDNsBC0INIREM2gELQg4hEQzZAQtCDyERDNgBC0IKIREM1wELQgshEQzWAQtCDCERDNUBC0INIREM1AELQg4hEQzTAQtCDyERDNIBC0IAIRECQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAIBAtAABBUGoON+UB5AEAAQIDBAUGB+YB5gHmAeYB5gHmAeYBCAkKCwwN5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAeYB5gHmAQ4PEBESE+YBC0ICIREM5AELQgMhEQzjAQtCBCERDOIBC0IFIREM4QELQgYhEQzgAQtCByERDN8BC0IIIREM3gELQgkhEQzdAQtCCiERDNwBC0ILIREM2wELQgwhEQzaAQtCDSERDNkBC0IOIREM2AELQg8hEQzXAQtCCiERDNYBC0ILIREM1QELQgwhEQzUAQtCDSERDNMBC0IOIREM0gELQg8hEQzRAQsgAEIAIAApAyAiESACIAEiEGutIhJ9IhMgEyARVhs3AyAgESASViIURQ3SAUEfIRAMwAMLAkAgASIBIAJGDQAgAEGJgICAADYCCCAAIAE2AgQgASEBQSQhEAynAwtBICEQDL8DCyAAIAEiECACEL6AgIAAQX9qDgW2AQDFAgHRAdIBC0ERIRAMpAMLIABBAToALyAQIQEMuwMLIAEiASACRw3SAUEkIRAMuwMLIAEiDSACRw0eQcYAIRAMugMLIAAgASIBIAIQsoCAgAAiEA3UASABIQEMtQELIAEiECACRw0mQdAAIRAMuAMLAkAgASIBIAJHDQBBKCEQDLgDCyAAQQA2AgQgAEGMgICAADYCCCAAIAEgARCxgICAACIQDdMBIAEhAQzYAQsCQCABIhAgAkcNAEEpIRAMtwMLIBAtAAAiAUEgRg0UIAFBCUcN0wEgEEEBaiEBDBULAkAgASIBIAJGDQAgAUEBaiEBDBcLQSohEAy1AwsCQCABIhAgAkcNAEErIRAMtQMLAkAgEC0AACIBQQlGDQAgAUEgRw3VAQsgAC0ALEEIRg3TASAQIQEMkQMLAkAgASIBIAJHDQBBLCEQDLQDCyABLQAAQQpHDdUBIAFBAWohAQzJAgsgASIOIAJHDdUBQS8hEAyyAwsDQAJAIAEtAAAiEEEgRg0AAkAgEEF2ag4EANwB3AEA2gELIAEhAQzgAQsgAUEBaiIBIAJHDQALQTEhEAyxAwtBMiEQIAEiFCACRg2wAyACIBRrIAAoAgAiAWohFSAUIAFrQQNqIRYCQANAIBQtAAAiF0EgciAXIBdBv39qQf8BcUEaSRtB/wFxIAFB8LuAgABqLQAARw0BAkAgAUEDRw0AQQYhAQyWAwsgAUEBaiEBIBRBAWoiFCACRw0ACyAAIBU2AgAMsQMLIABBADYCACAUIQEM2QELQTMhECABIhQgAkYNrwMgAiAUayAAKAIAIgFqIRUgFCABa0EIaiEWAkADQCAULQAAIhdBIHIgFyAXQb9/akH/AXFBGkkbQf8BcSABQfS7gIAAai0AAEcNAQJAIAFBCEcNAEEFIQEMlQMLIAFBAWohASAUQQFqIhQgAkcNAAsgACAVNgIADLADCyAAQQA2AgAgFCEBDNgBC0E0IRAgASIUIAJGDa4DIAIgFGsgACgCACIBaiEVIBQgAWtBBWohFgJAA0AgFC0AACIXQSByIBcgF0G/f2pB/wFxQRpJG0H/AXEgAUHQwoCAAGotAABHDQECQCABQQVHDQBBByEBDJQDCyABQQFqIQEgFEEBaiIUIAJHDQALIAAgFTYCAAyvAwsgAEEANgIAIBQhAQzXAQsCQCABIgEgAkYNAANAAkAgAS0AAEGAvoCAAGotAAAiEEEBRg0AIBBBAkYNCiABIQEM3QELIAFBAWoiASACRw0AC0EwIRAMrgMLQTAhEAytAwsCQCABIgEgAkYNAANAAkAgAS0AACIQQSBGDQAgEEF2ag4E2QHaAdoB2QHaAQsgAUEBaiIBIAJHDQALQTghEAytAwtBOCEQDKwDCwNAAkAgAS0AACIQQSBGDQAgEEEJRw0DCyABQQFqIgEgAkcNAAtBPCEQDKsDCwNAAkAgAS0AACIQQSBGDQACQAJAIBBBdmoOBNoBAQHaAQALIBBBLEYN2wELIAEhAQwECyABQQFqIgEgAkcNAAtBPyEQDKoDCyABIQEM2wELQcAAIRAgASIUIAJGDagDIAIgFGsgACgCA
CIBaiEWIBQgAWtBBmohFwJAA0AgFC0AAEEgciABQYDAgIAAai0AAEcNASABQQZGDY4DIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADKkDCyAAQQA2AgAgFCEBC0E2IRAMjgMLAkAgASIPIAJHDQBBwQAhEAynAwsgAEGMgICAADYCCCAAIA82AgQgDyEBIAAtACxBf2oOBM0B1QHXAdkBhwMLIAFBAWohAQzMAQsCQCABIgEgAkYNAANAAkAgAS0AACIQQSByIBAgEEG/f2pB/wFxQRpJG0H/AXEiEEEJRg0AIBBBIEYNAAJAAkACQAJAIBBBnX9qDhMAAwMDAwMDAwEDAwMDAwMDAwMCAwsgAUEBaiEBQTEhEAyRAwsgAUEBaiEBQTIhEAyQAwsgAUEBaiEBQTMhEAyPAwsgASEBDNABCyABQQFqIgEgAkcNAAtBNSEQDKUDC0E1IRAMpAMLAkAgASIBIAJGDQADQAJAIAEtAABBgLyAgABqLQAAQQFGDQAgASEBDNMBCyABQQFqIgEgAkcNAAtBPSEQDKQDC0E9IRAMowMLIAAgASIBIAIQsICAgAAiEA3WASABIQEMAQsgEEEBaiEBC0E8IRAMhwMLAkAgASIBIAJHDQBBwgAhEAygAwsCQANAAkAgAS0AAEF3ag4YAAL+Av4ChAP+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gL+Av4C/gIA/gILIAFBAWoiASACRw0AC0HCACEQDKADCyABQQFqIQEgAC0ALUEBcUUNvQEgASEBC0EsIRAMhQMLIAEiASACRw3TAUHEACEQDJ0DCwNAAkAgAS0AAEGQwICAAGotAABBAUYNACABIQEMtwILIAFBAWoiASACRw0AC0HFACEQDJwDCyANLQAAIhBBIEYNswEgEEE6Rw2BAyAAKAIEIQEgAEEANgIEIAAgASANEK+AgIAAIgEN0AEgDUEBaiEBDLMCC0HHACEQIAEiDSACRg2aAyACIA1rIAAoAgAiAWohFiANIAFrQQVqIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQZDCgIAAai0AAEcNgAMgAUEFRg30AiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyaAwtByAAhECABIg0gAkYNmQMgAiANayAAKAIAIgFqIRYgDSABa0EJaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUGWwoCAAGotAABHDf8CAkAgAUEJRw0AQQIhAQz1AgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMmQMLAkAgASINIAJHDQBByQAhEAyZAwsCQAJAIA0tAAAiAUEgciABIAFBv39qQf8BcUEaSRtB/wFxQZJ/ag4HAIADgAOAA4ADgAMBgAMLIA1BAWohAUE+IRAMgAMLIA1BAWohAUE/IRAM/wILQcoAIRAgASINIAJGDZcDIAIgDWsgACgCACIBaiEWIA0gAWtBAWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFBoMKAgABqLQAARw39AiABQQFGDfACIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJcDC0HLACEQIAEiDSACRg2WAyACIA1rIAAoAgAiAWohFiANIAFrQQ5qIRcDQCANLQAAIhRBIHIgFCAUQb9/akH/AXFBGkkbQf8BcSABQaLCgIAAai0AAEcN/AIgAUEORg3wAiABQQFqIQEgDUEBaiINIAJHDQALIAAgFjYCAAyWAwtBzAAhECABIg0gAkYNlQMgAiANayAAKAIAIgFqIRYgDSABa0EPaiEXA0AgDS0AACIUQSByIBQgFEG/f2pB/wFxQRpJG0H/AXEgAUHAwoCAAGotAABHDfsCAkAgAUEPRw0AQQMhAQzxAgsgAUEBaiEBIA1BAWoiDSACRw0ACyAAIBY2AgAMlQMLQc0AIRAgASINIAJGDZQDIAIgDWsgACgCACIBaiEWIA0gAWtBBWohFwNAIA0tAAAiFEEgciAUIBRBv39qQf8BcUEaSRtB/wFxIAFB0MKAgABqLQAARw36AgJAIAFBBUcNAEEEIQEM8AILIAFBAWohASANQQFqIg0gAkcNAAsgACAWNgIADJQDCwJAIAEiDSACRw0AQc4AIRAMlAMLAkACQAJAAkAgDS0AACIBQSByIAEgAUG/f2pB/wFxQRpJG0H/AXFBnX9qDhMA/QL9Av0C/QL9Av0C/QL9Av0C/QL9Av0CAf0C/QL9AgID/QILIA1BAWohAUHBACEQDP0CCyANQQFqIQFBwgAhEAz8AgsgDUEBaiEBQcMAIRAM+wILIA1BAWohAUHEACEQDPoCCwJAIAEiASACRg0AIABBjYCAgAA2AgggACABNgIEIAEhAUHFACEQDPoCC0HPACEQDJIDCyAQIQECQAJAIBAtAABBdmoOBAGoAqgCAKgCCyAQQQFqIQELQSchEAz4AgsCQCABIgEgAkcNAEHRACEQDJEDCwJAIAEtAABBIEYNACABIQEMjQELIAFBAWohASAALQAtQQFxRQ3HASABIQEMjAELIAEiFyACRw3IAUHSACEQDI8DC0HTACEQIAEiFCACRg2OAyACIBRrIAAoAgAiAWohFiAUIAFrQQFqIRcDQCAULQAAIAFB1sKAgABqLQAARw3MASABQQFGDccBIAFBAWohASAUQQFqIhQgAkcNAAsgACAWNgIADI4DCwJAIAEiASACRw0AQdUAIRAMjgMLIAEtAABBCkcNzAEgAUEBaiEBDMcBCwJAIAEiASACRw0AQdYAIRAMjQMLAkACQCABLQAAQXZqDgQAzQHNAQHNAQsgAUEBaiEBDMcBCyABQQFqIQFBygAhEAzzAgsgACABIgEgAhCugICAACIQDcsBIAEhAUHNACEQDPICCyAALQApQSJGDYUDDKYCCwJAIAEiASACRw0AQdsAIRAMigMLQQAhFEEBIRdBASEWQQAhEAJAAkACQAJAAkACQAJAAkACQCABLQAAQVBqDgrUAdMBAAECAwQFBgjVAQtBAiEQDAYLQQMhEAwFC0EEIRAMBAtBBSEQDAMLQQYhEAwCC0EHIRAMAQtBCCEQC0EAIRdBACEWQQAhFAzMAQtBCSEQQQEhFEEAIRdBACEWDMsBCwJAIAEiASACRw0AQd0AIRAMiQMLIAEtAABBLkcNzAEgAUEBaiEBDKYCCyABIgEgAkcNzAFB3wAhEAyHAwsCQCABIgEgAkYNACAAQY6AgIAANgIIIAAgATYCBCABIQFB0AAhEAzuAgtB4AAhEAyGAwtB4QAhECABIgEgAkYNhQMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQeLCgIAAai0AAEcNzQEgFEEDRg3MASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyFAwtB4gAhECABIgEgAkYNhAMgAiABayAAKAIAIhRqIRYgASAUa0ECaiEXA0AgAS0AACAU
QebCgIAAai0AAEcNzAEgFEECRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyEAwtB4wAhECABIgEgAkYNgwMgAiABayAAKAIAIhRqIRYgASAUa0EDaiEXA0AgAS0AACAUQenCgIAAai0AAEcNywEgFEEDRg3OASAUQQFqIRQgAUEBaiIBIAJHDQALIAAgFjYCAAyDAwsCQCABIgEgAkcNAEHlACEQDIMDCyAAIAFBAWoiASACEKiAgIAAIhANzQEgASEBQdYAIRAM6QILAkAgASIBIAJGDQADQAJAIAEtAAAiEEEgRg0AAkACQAJAIBBBuH9qDgsAAc8BzwHPAc8BzwHPAc8BzwECzwELIAFBAWohAUHSACEQDO0CCyABQQFqIQFB0wAhEAzsAgsgAUEBaiEBQdQAIRAM6wILIAFBAWoiASACRw0AC0HkACEQDIIDC0HkACEQDIEDCwNAAkAgAS0AAEHwwoCAAGotAAAiEEEBRg0AIBBBfmoOA88B0AHRAdIBCyABQQFqIgEgAkcNAAtB5gAhEAyAAwsCQCABIgEgAkYNACABQQFqIQEMAwtB5wAhEAz/AgsDQAJAIAEtAABB8MSAgABqLQAAIhBBAUYNAAJAIBBBfmoOBNIB0wHUAQDVAQsgASEBQdcAIRAM5wILIAFBAWoiASACRw0AC0HoACEQDP4CCwJAIAEiASACRw0AQekAIRAM/gILAkAgAS0AACIQQXZqDhq6AdUB1QG8AdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAdUB1QHVAcoB1QHVAQDTAQsgAUEBaiEBC0EGIRAM4wILA0ACQCABLQAAQfDGgIAAai0AAEEBRg0AIAEhAQyeAgsgAUEBaiIBIAJHDQALQeoAIRAM+wILAkAgASIBIAJGDQAgAUEBaiEBDAMLQesAIRAM+gILAkAgASIBIAJHDQBB7AAhEAz6AgsgAUEBaiEBDAELAkAgASIBIAJHDQBB7QAhEAz5AgsgAUEBaiEBC0EEIRAM3gILAkAgASIUIAJHDQBB7gAhEAz3AgsgFCEBAkACQAJAIBQtAABB8MiAgABqLQAAQX9qDgfUAdUB1gEAnAIBAtcBCyAUQQFqIQEMCgsgFEEBaiEBDM0BC0EAIRAgAEEANgIcIABBm5KAgAA2AhAgAEEHNgIMIAAgFEEBajYCFAz2AgsCQANAAkAgAS0AAEHwyICAAGotAAAiEEEERg0AAkACQCAQQX9qDgfSAdMB1AHZAQAEAdkBCyABIQFB2gAhEAzgAgsgAUEBaiEBQdwAIRAM3wILIAFBAWoiASACRw0AC0HvACEQDPYCCyABQQFqIQEMywELAkAgASIUIAJHDQBB8AAhEAz1AgsgFC0AAEEvRw3UASAUQQFqIQEMBgsCQCABIhQgAkcNAEHxACEQDPQCCwJAIBQtAAAiAUEvRw0AIBRBAWohAUHdACEQDNsCCyABQXZqIgRBFksN0wFBASAEdEGJgIACcUUN0wEMygILAkAgASIBIAJGDQAgAUEBaiEBQd4AIRAM2gILQfIAIRAM8gILAkAgASIUIAJHDQBB9AAhEAzyAgsgFCEBAkAgFC0AAEHwzICAAGotAABBf2oOA8kClAIA1AELQeEAIRAM2AILAkAgASIUIAJGDQADQAJAIBQtAABB8MqAgABqLQAAIgFBA0YNAAJAIAFBf2oOAssCANUBCyAUIQFB3wAhEAzaAgsgFEEBaiIUIAJHDQALQfMAIRAM8QILQfMAIRAM8AILAkAgASIBIAJGDQAgAEGPgICAADYCCCAAIAE2AgQgASEBQeAAIRAM1wILQfUAIRAM7wILAkAgASIBIAJHDQBB9gAhEAzvAgsgAEGPgICAADYCCCAAIAE2AgQgASEBC0EDIRAM1AILA0AgAS0AAEEgRw3DAiABQQFqIgEgAkcNAAtB9wAhEAzsAgsCQCABIgEgAkcNAEH4ACEQDOwCCyABLQAAQSBHDc4BIAFBAWohAQzvAQsgACABIgEgAhCsgICAACIQDc4BIAEhAQyOAgsCQCABIgQgAkcNAEH6ACEQDOoCCyAELQAAQcwARw3RASAEQQFqIQFBEyEQDM8BCwJAIAEiBCACRw0AQfsAIRAM6QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEANAIAQtAAAgAUHwzoCAAGotAABHDdABIAFBBUYNzgEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBB+wAhEAzoAgsCQCABIgQgAkcNAEH8ACEQDOgCCwJAAkAgBC0AAEG9f2oODADRAdEB0QHRAdEB0QHRAdEB0QHRAQHRAQsgBEEBaiEBQeYAIRAMzwILIARBAWohAUHnACEQDM4CCwJAIAEiBCACRw0AQf0AIRAM5wILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNzwEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf0AIRAM5wILIABBADYCACAQQQFqIQFBECEQDMwBCwJAIAEiBCACRw0AQf4AIRAM5gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQfbOgIAAai0AAEcNzgEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf4AIRAM5gILIABBADYCACAQQQFqIQFBFiEQDMsBCwJAIAEiBCACRw0AQf8AIRAM5QILIAIgBGsgACgCACIBaiEUIAQgAWtBA2ohEAJAA0AgBC0AACABQfzOgIAAai0AAEcNzQEgAUEDRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQf8AIRAM5QILIABBADYCACAQQQFqIQFBBSEQDMoBCwJAIAEiBCACRw0AQYABIRAM5AILIAQtAABB2QBHDcsBIARBAWohAUEIIRAMyQELAkAgASIEIAJHDQBBgQEhEAzjAgsCQAJAIAQtAABBsn9qDgMAzAEBzAELIARBAWohAUHrACEQDMoCCyAEQQFqIQFB7AAhEAzJAgsCQCABIgQgAkcNAEGCASEQDOICCwJAAkAgBC0AAEG4f2oOCADLAcsBywHLAcsBywEBywELIARBAWohAUHqACEQDMkCCyAEQQFqIQFB7QAhEAzIAgsCQCABIgQgAkcNAEGDASEQDOECCyACIARrIAAoAgAiAWohECAEIAFrQQJqIRQCQANAIAQtAAAgAUGAz4CAAGotAABHDckBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgEDYCAEGDASEQDOECC0EAIRAgAEEANgIAIBRBAWohAQzGAQsCQCABIgQgAkcNAEGEASEQDOACCyACIARrIAAoAgAiAWohFCAEIAFrQQRqIRACQANAIAQtAAAgAUGDz4CAAGotAABHDcgBIAFBBEYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGEASEQDOACCyAAQQA2AgAgEEE
BaiEBQSMhEAzFAQsCQCABIgQgAkcNAEGFASEQDN8CCwJAAkAgBC0AAEG0f2oOCADIAcgByAHIAcgByAEByAELIARBAWohAUHvACEQDMYCCyAEQQFqIQFB8AAhEAzFAgsCQCABIgQgAkcNAEGGASEQDN4CCyAELQAAQcUARw3FASAEQQFqIQEMgwILAkAgASIEIAJHDQBBhwEhEAzdAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFBiM+AgABqLQAARw3FASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBhwEhEAzdAgsgAEEANgIAIBBBAWohAUEtIRAMwgELAkAgASIEIAJHDQBBiAEhEAzcAgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw3EASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiAEhEAzcAgsgAEEANgIAIBBBAWohAUEpIRAMwQELAkAgASIBIAJHDQBBiQEhEAzbAgtBASEQIAEtAABB3wBHDcABIAFBAWohAQyBAgsCQCABIgQgAkcNAEGKASEQDNoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRADQCAELQAAIAFBjM+AgABqLQAARw3BASABQQFGDa8CIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQYoBIRAM2QILAkAgASIEIAJHDQBBiwEhEAzZAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFBjs+AgABqLQAARw3BASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBiwEhEAzZAgsgAEEANgIAIBBBAWohAUECIRAMvgELAkAgASIEIAJHDQBBjAEhEAzYAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw3AASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjAEhEAzYAgsgAEEANgIAIBBBAWohAUEfIRAMvQELAkAgASIEIAJHDQBBjQEhEAzXAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8s+AgABqLQAARw2/ASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBjQEhEAzXAgsgAEEANgIAIBBBAWohAUEJIRAMvAELAkAgASIEIAJHDQBBjgEhEAzWAgsCQAJAIAQtAABBt39qDgcAvwG/Ab8BvwG/AQG/AQsgBEEBaiEBQfgAIRAMvQILIARBAWohAUH5ACEQDLwCCwJAIAEiBCACRw0AQY8BIRAM1QILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQZHPgIAAai0AAEcNvQEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQY8BIRAM1QILIABBADYCACAQQQFqIQFBGCEQDLoBCwJAIAEiBCACRw0AQZABIRAM1AILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQZfPgIAAai0AAEcNvAEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZABIRAM1AILIABBADYCACAQQQFqIQFBFyEQDLkBCwJAIAEiBCACRw0AQZEBIRAM0wILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQZrPgIAAai0AAEcNuwEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZEBIRAM0wILIABBADYCACAQQQFqIQFBFSEQDLgBCwJAIAEiBCACRw0AQZIBIRAM0gILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQaHPgIAAai0AAEcNugEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZIBIRAM0gILIABBADYCACAQQQFqIQFBHiEQDLcBCwJAIAEiBCACRw0AQZMBIRAM0QILIAQtAABBzABHDbgBIARBAWohAUEKIRAMtgELAkAgBCACRw0AQZQBIRAM0AILAkACQCAELQAAQb9/ag4PALkBuQG5AbkBuQG5AbkBuQG5AbkBuQG5AbkBAbkBCyAEQQFqIQFB/gAhEAy3AgsgBEEBaiEBQf8AIRAMtgILAkAgBCACRw0AQZUBIRAMzwILAkACQCAELQAAQb9/ag4DALgBAbgBCyAEQQFqIQFB/QAhEAy2AgsgBEEBaiEEQYABIRAMtQILAkAgBCACRw0AQZYBIRAMzgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQafPgIAAai0AAEcNtgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZYBIRAMzgILIABBADYCACAQQQFqIQFBCyEQDLMBCwJAIAQgAkcNAEGXASEQDM0CCwJAAkACQAJAIAQtAABBU2oOIwC4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBuAG4AbgBAbgBuAG4AbgBuAECuAG4AbgBA7gBCyAEQQFqIQFB+wAhEAy2AgsgBEEBaiEBQfwAIRAMtQILIARBAWohBEGBASEQDLQCCyAEQQFqIQRBggEhEAyzAgsCQCAEIAJHDQBBmAEhEAzMAgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBqc+AgABqLQAARw20ASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmAEhEAzMAgsgAEEANgIAIBBBAWohAUEZIRAMsQELAkAgBCACRw0AQZkBIRAMywILIAIgBGsgACgCACIBaiEUIAQgAWtBBWohEAJAA0AgBC0AACABQa7PgIAAai0AAEcNswEgAUEFRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZkBIRAMywILIABBADYCACAQQQFqIQFBBiEQDLABCwJAIAQgAkcNAEGaASEQDMoCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG0z4CAAGotAABHDbIBIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGaASEQDMoCCyAAQQA2AgAgEEEBaiEBQRwhEAyvAQsCQCAEIAJHDQBBmwEhEAzJAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBts+AgABqLQAARw2xASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBmwEhEAzJAgsgAEEANgIAIBBBAWohAUEnIRAMrgELAkAgBCACRw0AQZwBIRAMyAILAkACQC
AELQAAQax/ag4CAAGxAQsgBEEBaiEEQYYBIRAMrwILIARBAWohBEGHASEQDK4CCwJAIAQgAkcNAEGdASEQDMcCCyACIARrIAAoAgAiAWohFCAEIAFrQQFqIRACQANAIAQtAAAgAUG4z4CAAGotAABHDa8BIAFBAUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGdASEQDMcCCyAAQQA2AgAgEEEBaiEBQSYhEAysAQsCQCAEIAJHDQBBngEhEAzGAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFBus+AgABqLQAARw2uASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBngEhEAzGAgsgAEEANgIAIBBBAWohAUEDIRAMqwELAkAgBCACRw0AQZ8BIRAMxQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNrQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQZ8BIRAMxQILIABBADYCACAQQQFqIQFBDCEQDKoBCwJAIAQgAkcNAEGgASEQDMQCCyACIARrIAAoAgAiAWohFCAEIAFrQQNqIRACQANAIAQtAAAgAUG8z4CAAGotAABHDawBIAFBA0YNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGgASEQDMQCCyAAQQA2AgAgEEEBaiEBQQ0hEAypAQsCQCAEIAJHDQBBoQEhEAzDAgsCQAJAIAQtAABBun9qDgsArAGsAawBrAGsAawBrAGsAawBAawBCyAEQQFqIQRBiwEhEAyqAgsgBEEBaiEEQYwBIRAMqQILAkAgBCACRw0AQaIBIRAMwgILIAQtAABB0ABHDakBIARBAWohBAzpAQsCQCAEIAJHDQBBowEhEAzBAgsCQAJAIAQtAABBt39qDgcBqgGqAaoBqgGqAQCqAQsgBEEBaiEEQY4BIRAMqAILIARBAWohAUEiIRAMpgELAkAgBCACRw0AQaQBIRAMwAILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQcDPgIAAai0AAEcNqAEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaQBIRAMwAILIABBADYCACAQQQFqIQFBHSEQDKUBCwJAIAQgAkcNAEGlASEQDL8CCwJAAkAgBC0AAEGuf2oOAwCoAQGoAQsgBEEBaiEEQZABIRAMpgILIARBAWohAUEEIRAMpAELAkAgBCACRw0AQaYBIRAMvgILAkACQAJAAkACQCAELQAAQb9/ag4VAKoBqgGqAaoBqgGqAaoBqgGqAaoBAaoBqgECqgGqAQOqAaoBBKoBCyAEQQFqIQRBiAEhEAyoAgsgBEEBaiEEQYkBIRAMpwILIARBAWohBEGKASEQDKYCCyAEQQFqIQRBjwEhEAylAgsgBEEBaiEEQZEBIRAMpAILAkAgBCACRw0AQacBIRAMvQILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQe3PgIAAai0AAEcNpQEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQacBIRAMvQILIABBADYCACAQQQFqIQFBESEQDKIBCwJAIAQgAkcNAEGoASEQDLwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHCz4CAAGotAABHDaQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGoASEQDLwCCyAAQQA2AgAgEEEBaiEBQSwhEAyhAQsCQCAEIAJHDQBBqQEhEAy7AgsgAiAEayAAKAIAIgFqIRQgBCABa0EEaiEQAkADQCAELQAAIAFBxc+AgABqLQAARw2jASABQQRGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBqQEhEAy7AgsgAEEANgIAIBBBAWohAUErIRAMoAELAkAgBCACRw0AQaoBIRAMugILIAIgBGsgACgCACIBaiEUIAQgAWtBAmohEAJAA0AgBC0AACABQcrPgIAAai0AAEcNogEgAUECRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQaoBIRAMugILIABBADYCACAQQQFqIQFBFCEQDJ8BCwJAIAQgAkcNAEGrASEQDLkCCwJAAkACQAJAIAQtAABBvn9qDg8AAQKkAaQBpAGkAaQBpAGkAaQBpAGkAaQBA6QBCyAEQQFqIQRBkwEhEAyiAgsgBEEBaiEEQZQBIRAMoQILIARBAWohBEGVASEQDKACCyAEQQFqIQRBlgEhEAyfAgsCQCAEIAJHDQBBrAEhEAy4AgsgBC0AAEHFAEcNnwEgBEEBaiEEDOABCwJAIAQgAkcNAEGtASEQDLcCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHNz4CAAGotAABHDZ8BIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEGtASEQDLcCCyAAQQA2AgAgEEEBaiEBQQ4hEAycAQsCQCAEIAJHDQBBrgEhEAy2AgsgBC0AAEHQAEcNnQEgBEEBaiEBQSUhEAybAQsCQCAEIAJHDQBBrwEhEAy1AgsgAiAEayAAKAIAIgFqIRQgBCABa0EIaiEQAkADQCAELQAAIAFB0M+AgABqLQAARw2dASABQQhGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBrwEhEAy1AgsgAEEANgIAIBBBAWohAUEqIRAMmgELAkAgBCACRw0AQbABIRAMtAILAkACQCAELQAAQat/ag4LAJ0BnQGdAZ0BnQGdAZ0BnQGdAQGdAQsgBEEBaiEEQZoBIRAMmwILIARBAWohBEGbASEQDJoCCwJAIAQgAkcNAEGxASEQDLMCCwJAAkAgBC0AAEG/f2oOFACcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAGcAZwBnAEBnAELIARBAWohBEGZASEQDJoCCyAEQQFqIQRBnAEhEAyZAgsCQCAEIAJHDQBBsgEhEAyyAgsgAiAEayAAKAIAIgFqIRQgBCABa0EDaiEQAkADQCAELQAAIAFB2c+AgABqLQAARw2aASABQQNGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBsgEhEAyyAgsgAEEANgIAIBBBAWohAUEhIRAMlwELAkAgBCACRw0AQbMBIRAMsQILIAIgBGsgACgCACIBaiEUIAQgAWtBBmohEAJAA0AgBC0AACABQd3PgIAAai0AAEcNmQEgAUEGRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbMBIRAMsQILIABBADYCACAQQQFqIQFBGiEQDJYBCwJAIAQgAkcNAEG0ASEQDLACCwJAAkACQCAELQAAQbt/ag4RAJoBmgGaA
ZoBmgGaAZoBmgGaAQGaAZoBmgGaAZoBApoBCyAEQQFqIQRBnQEhEAyYAgsgBEEBaiEEQZ4BIRAMlwILIARBAWohBEGfASEQDJYCCwJAIAQgAkcNAEG1ASEQDK8CCyACIARrIAAoAgAiAWohFCAEIAFrQQVqIRACQANAIAQtAAAgAUHkz4CAAGotAABHDZcBIAFBBUYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG1ASEQDK8CCyAAQQA2AgAgEEEBaiEBQSghEAyUAQsCQCAEIAJHDQBBtgEhEAyuAgsgAiAEayAAKAIAIgFqIRQgBCABa0ECaiEQAkADQCAELQAAIAFB6s+AgABqLQAARw2WASABQQJGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBtgEhEAyuAgsgAEEANgIAIBBBAWohAUEHIRAMkwELAkAgBCACRw0AQbcBIRAMrQILAkACQCAELQAAQbt/ag4OAJYBlgGWAZYBlgGWAZYBlgGWAZYBlgGWAQGWAQsgBEEBaiEEQaEBIRAMlAILIARBAWohBEGiASEQDJMCCwJAIAQgAkcNAEG4ASEQDKwCCyACIARrIAAoAgAiAWohFCAEIAFrQQJqIRACQANAIAQtAAAgAUHtz4CAAGotAABHDZQBIAFBAkYNASABQQFqIQEgBEEBaiIEIAJHDQALIAAgFDYCAEG4ASEQDKwCCyAAQQA2AgAgEEEBaiEBQRIhEAyRAQsCQCAEIAJHDQBBuQEhEAyrAgsgAiAEayAAKAIAIgFqIRQgBCABa0EBaiEQAkADQCAELQAAIAFB8M+AgABqLQAARw2TASABQQFGDQEgAUEBaiEBIARBAWoiBCACRw0ACyAAIBQ2AgBBuQEhEAyrAgsgAEEANgIAIBBBAWohAUEgIRAMkAELAkAgBCACRw0AQboBIRAMqgILIAIgBGsgACgCACIBaiEUIAQgAWtBAWohEAJAA0AgBC0AACABQfLPgIAAai0AAEcNkgEgAUEBRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQboBIRAMqgILIABBADYCACAQQQFqIQFBDyEQDI8BCwJAIAQgAkcNAEG7ASEQDKkCCwJAAkAgBC0AAEG3f2oOBwCSAZIBkgGSAZIBAZIBCyAEQQFqIQRBpQEhEAyQAgsgBEEBaiEEQaYBIRAMjwILAkAgBCACRw0AQbwBIRAMqAILIAIgBGsgACgCACIBaiEUIAQgAWtBB2ohEAJAA0AgBC0AACABQfTPgIAAai0AAEcNkAEgAUEHRg0BIAFBAWohASAEQQFqIgQgAkcNAAsgACAUNgIAQbwBIRAMqAILIABBADYCACAQQQFqIQFBGyEQDI0BCwJAIAQgAkcNAEG9ASEQDKcCCwJAAkACQCAELQAAQb5/ag4SAJEBkQGRAZEBkQGRAZEBkQGRAQGRAZEBkQGRAZEBkQECkQELIARBAWohBEGkASEQDI8CCyAEQQFqIQRBpwEhEAyOAgsgBEEBaiEEQagBIRAMjQILAkAgBCACRw0AQb4BIRAMpgILIAQtAABBzgBHDY0BIARBAWohBAzPAQsCQCAEIAJHDQBBvwEhEAylAgsCQAJAAkACQAJAAkACQAJAAkACQAJAAkACQAJAAkACQCAELQAAQb9/ag4VAAECA5wBBAUGnAGcAZwBBwgJCgucAQwNDg+cAQsgBEEBaiEBQegAIRAMmgILIARBAWohAUHpACEQDJkCCyAEQQFqIQFB7gAhEAyYAgsgBEEBaiEBQfIAIRAMlwILIARBAWohAUHzACEQDJYCCyAEQQFqIQFB9gAhEAyVAgsgBEEBaiEBQfcAIRAMlAILIARBAWohAUH6ACEQDJMCCyAEQQFqIQRBgwEhEAySAgsgBEEBaiEEQYQBIRAMkQILIARBAWohBEGFASEQDJACCyAEQQFqIQRBkgEhEAyPAgsgBEEBaiEEQZgBIRAMjgILIARBAWohBEGgASEQDI0CCyAEQQFqIQRBowEhEAyMAgsgBEEBaiEEQaoBIRAMiwILAkAgBCACRg0AIABBkICAgAA2AgggACAENgIEQasBIRAMiwILQcABIRAMowILIAAgBSACEKqAgIAAIgENiwEgBSEBDFwLAkAgBiACRg0AIAZBAWohBQyNAQtBwgEhEAyhAgsDQAJAIBAtAABBdmoOBIwBAACPAQALIBBBAWoiECACRw0AC0HDASEQDKACCwJAIAcgAkYNACAAQZGAgIAANgIIIAAgBzYCBCAHIQFBASEQDIcCC0HEASEQDJ8CCwJAIAcgAkcNAEHFASEQDJ8CCwJAAkAgBy0AAEF2ag4EAc4BzgEAzgELIAdBAWohBgyNAQsgB0EBaiEFDIkBCwJAIAcgAkcNAEHGASEQDJ4CCwJAAkAgBy0AAEF2ag4XAY8BjwEBjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BAI8BCyAHQQFqIQcLQbABIRAMhAILAkAgCCACRw0AQcgBIRAMnQILIAgtAABBIEcNjQEgAEEAOwEyIAhBAWohAUGzASEQDIMCCyABIRcCQANAIBciByACRg0BIActAABBUGpB/wFxIhBBCk8NzAECQCAALwEyIhRBmTNLDQAgACAUQQpsIhQ7ATIgEEH//wNzIBRB/v8DcUkNACAHQQFqIRcgACAUIBBqIhA7ATIgEEH//wNxQegHSQ0BCwtBACEQIABBADYCHCAAQcGJgIAANgIQIABBDTYCDCAAIAdBAWo2AhQMnAILQccBIRAMmwILIAAgCCACEK6AgIAAIhBFDcoBIBBBFUcNjAEgAEHIATYCHCAAIAg2AhQgAEHJl4CAADYCECAAQRU2AgxBACEQDJoCCwJAIAkgAkcNAEHMASEQDJoCC0EAIRRBASEXQQEhFkEAIRACQAJAAkACQAJAAkACQAJAAkAgCS0AAEFQag4KlgGVAQABAgMEBQYIlwELQQIhEAwGC0EDIRAMBQtBBCEQDAQLQQUhEAwDC0EGIRAMAgtBByEQDAELQQghEAtBACEXQQAhFkEAIRQMjgELQQkhEEEBIRRBACEXQQAhFgyNAQsCQCAKIAJHDQBBzgEhEAyZAgsgCi0AAEEuRw2OASAKQQFqIQkMygELIAsgAkcNjgFB0AEhEAyXAgsCQCALIAJGDQAgAEGOgICAADYCCCAAIAs2AgRBtwEhEAz+AQtB0QEhEAyWAgsCQCAEIAJHDQBB0gEhEAyWAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EEaiELA0AgBC0AACAQQfzPgIAAai0AAEcNjgEgEEEERg3pASAQQQFqIRAgBEEBaiIEIAJHDQALIAAgFDYCAEHSASEQDJUCCyAAIAwgAhCsgICAACIBDY0BIAwhAQy4AQsCQCAEIAJHDQBB1AEhEAyUAgsgAiAEayAAKAIAIhBqIRQgBCAQa0EBaiEMA0AgBC0AACAQQYHQgIAAai0AAEcNjwEgEEEBRg2OASAQQQFqIRAg
BEEBaiIEIAJHDQALIAAgFDYCAEHUASEQDJMCCwJAIAQgAkcNAEHWASEQDJMCCyACIARrIAAoAgAiEGohFCAEIBBrQQJqIQsDQCAELQAAIBBBg9CAgABqLQAARw2OASAQQQJGDZABIBBBAWohECAEQQFqIgQgAkcNAAsgACAUNgIAQdYBIRAMkgILAkAgBCACRw0AQdcBIRAMkgILAkACQCAELQAAQbt/ag4QAI8BjwGPAY8BjwGPAY8BjwGPAY8BjwGPAY8BjwEBjwELIARBAWohBEG7ASEQDPkBCyAEQQFqIQRBvAEhEAz4AQsCQCAEIAJHDQBB2AEhEAyRAgsgBC0AAEHIAEcNjAEgBEEBaiEEDMQBCwJAIAQgAkYNACAAQZCAgIAANgIIIAAgBDYCBEG+ASEQDPcBC0HZASEQDI8CCwJAIAQgAkcNAEHaASEQDI8CCyAELQAAQcgARg3DASAAQQE6ACgMuQELIABBAjoALyAAIAQgAhCmgICAACIQDY0BQcIBIRAM9AELIAAtAChBf2oOArcBuQG4AQsDQAJAIAQtAABBdmoOBACOAY4BAI4BCyAEQQFqIgQgAkcNAAtB3QEhEAyLAgsgAEEAOgAvIAAtAC1BBHFFDYQCCyAAQQA6AC8gAEEBOgA0IAEhAQyMAQsgEEEVRg3aASAAQQA2AhwgACABNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAyIAgsCQCAAIBAgAhC0gICAACIEDQAgECEBDIECCwJAIARBFUcNACAAQQM2AhwgACAQNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAyIAgsgAEEANgIcIAAgEDYCFCAAQaeOgIAANgIQIABBEjYCDEEAIRAMhwILIBBBFUYN1gEgAEEANgIcIAAgATYCFCAAQdqNgIAANgIQIABBFDYCDEEAIRAMhgILIAAoAgQhFyAAQQA2AgQgECARp2oiFiEBIAAgFyAQIBYgFBsiEBC1gICAACIURQ2NASAAQQc2AhwgACAQNgIUIAAgFDYCDEEAIRAMhQILIAAgAC8BMEGAAXI7ATAgASEBC0EqIRAM6gELIBBBFUYN0QEgAEEANgIcIAAgATYCFCAAQYOMgIAANgIQIABBEzYCDEEAIRAMggILIBBBFUYNzwEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAMgQILIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDI0BCyAAQQw2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAMgAILIBBBFUYNzAEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM/wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIwBCyAAQQ02AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/gELIBBBFUYNyQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM/QELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIsBCyAAQQ42AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM/AELIABBADYCHCAAIAE2AhQgAEHAlYCAADYCECAAQQI2AgxBACEQDPsBCyAQQRVGDcUBIABBADYCHCAAIAE2AhQgAEHGjICAADYCECAAQSM2AgxBACEQDPoBCyAAQRA2AhwgACABNgIUIAAgEDYCDEEAIRAM+QELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDPEBCyAAQRE2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM+AELIBBBFUYNwQEgAEEANgIcIAAgATYCFCAAQcaMgIAANgIQIABBIzYCDEEAIRAM9wELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC5gICAACIQDQAgAUEBaiEBDIgBCyAAQRM2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM9gELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC5gICAACIEDQAgAUEBaiEBDO0BCyAAQRQ2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM9QELIBBBFUYNvQEgAEEANgIcIAAgATYCFCAAQZqPgIAANgIQIABBIjYCDEEAIRAM9AELIAAoAgQhECAAQQA2AgQCQCAAIBAgARC3gICAACIQDQAgAUEBaiEBDIYBCyAAQRY2AhwgACAQNgIMIAAgAUEBajYCFEEAIRAM8wELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARC3gICAACIEDQAgAUEBaiEBDOkBCyAAQRc2AhwgACAENgIMIAAgAUEBajYCFEEAIRAM8gELIABBADYCHCAAIAE2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDPEBC0IBIRELIBBBAWohAQJAIAApAyAiEkL//////////w9WDQAgACASQgSGIBGENwMgIAEhAQyEAQsgAEEANgIcIAAgATYCFCAAQa2JgIAANgIQIABBDDYCDEEAIRAM7wELIABBADYCHCAAIBA2AhQgAEHNk4CAADYCECAAQQw2AgxBACEQDO4BCyAAKAIEIRcgAEEANgIEIBAgEadqIhYhASAAIBcgECAWIBQbIhAQtYCAgAAiFEUNcyAAQQU2AhwgACAQNgIUIAAgFDYCDEEAIRAM7QELIABBADYCHCAAIBA2AhQgAEGqnICAADYCECAAQQ82AgxBACEQDOwBCyAAIBAgAhC0gICAACIBDQEgECEBC0EOIRAM0QELAkAgAUEVRw0AIABBAjYCHCAAIBA2AhQgAEGwmICAADYCECAAQRU2AgxBACEQDOoBCyAAQQA2AhwgACAQNgIUIABBp46AgAA2AhAgAEESNgIMQQAhEAzpAQsgAUEBaiEQAkAgAC8BMCIBQYABcUUNAAJAIAAgECACELuAgIAAIgENACAQIQEMcAsgAUEVRw26ASAAQQU2AhwgACAQNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAzpAQsCQCABQaAEcUGgBEcNACAALQAtQQJxDQAgAEEANgIcIAAgEDYCFCAAQZaTgIAANgIQIABBBDYCDEEAIRAM6QELIAAgECACEL2AgIAAGiAQIQECQAJAAkACQAJAIAAgECACELOAgIAADhYCAQAEBAQEBAQEBAQEBAQEBAQEBAQDBAsgAEEBOgAuCyAAIAAvATBBwAByOwEwIBAhAQtBJiEQDNEBCyAAQSM2AhwgACAQNgIUIABBpZaAgAA2AhAgAEEVNgIMQQAhEAzpAQsgAEEANgIcIAAgEDYCFCAAQdWLgIAANgIQIABBETYCDEEAIRAM6AELIAAtAC1BAXFFDQFBwwEhEAzOAQsCQCANIAJGDQADQAJAIA0tAABBIEYNACANIQEMxAELIA1BAWoiDSACRw0AC0ElIRAM5wELQSUhEAzmAQsgACgCBCEEIABBADYCBCA
AIAQgDRCvgICAACIERQ2tASAAQSY2AhwgACAENgIMIAAgDUEBajYCFEEAIRAM5QELIBBBFUYNqwEgAEEANgIcIAAgATYCFCAAQf2NgIAANgIQIABBHTYCDEEAIRAM5AELIABBJzYCHCAAIAE2AhQgACAQNgIMQQAhEAzjAQsgECEBQQEhFAJAAkACQAJAAkACQAJAIAAtACxBfmoOBwYFBQMBAgAFCyAAIAAvATBBCHI7ATAMAwtBAiEUDAELQQQhFAsgAEEBOgAsIAAgAC8BMCAUcjsBMAsgECEBC0ErIRAMygELIABBADYCHCAAIBA2AhQgAEGrkoCAADYCECAAQQs2AgxBACEQDOIBCyAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMQQAhEAzhAQsgAEEAOgAsIBAhAQy9AQsgECEBQQEhFAJAAkACQAJAAkAgAC0ALEF7ag4EAwECAAULIAAgAC8BMEEIcjsBMAwDC0ECIRQMAQtBBCEUCyAAQQE6ACwgACAALwEwIBRyOwEwCyAQIQELQSkhEAzFAQsgAEEANgIcIAAgATYCFCAAQfCUgIAANgIQIABBAzYCDEEAIRAM3QELAkAgDi0AAEENRw0AIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDkEBaiEBDHULIABBLDYCHCAAIAE2AgwgACAOQQFqNgIUQQAhEAzdAQsgAC0ALUEBcUUNAUHEASEQDMMBCwJAIA4gAkcNAEEtIRAM3AELAkACQANAAkAgDi0AAEF2ag4EAgAAAwALIA5BAWoiDiACRw0AC0EtIRAM3QELIAAoAgQhASAAQQA2AgQCQCAAIAEgDhCxgICAACIBDQAgDiEBDHQLIABBLDYCHCAAIA42AhQgACABNgIMQQAhEAzcAQsgACgCBCEBIABBADYCBAJAIAAgASAOELGAgIAAIgENACAOQQFqIQEMcwsgAEEsNgIcIAAgATYCDCAAIA5BAWo2AhRBACEQDNsBCyAAKAIEIQQgAEEANgIEIAAgBCAOELGAgIAAIgQNoAEgDiEBDM4BCyAQQSxHDQEgAUEBaiEQQQEhAQJAAkACQAJAAkAgAC0ALEF7ag4EAwECBAALIBAhAQwEC0ECIQEMAQtBBCEBCyAAQQE6ACwgACAALwEwIAFyOwEwIBAhAQwBCyAAIAAvATBBCHI7ATAgECEBC0E5IRAMvwELIABBADoALCABIQELQTQhEAy9AQsgACAALwEwQSByOwEwIAEhAQwCCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQsYCAgAAiBA0AIAEhAQzHAQsgAEE3NgIcIAAgATYCFCAAIAQ2AgxBACEQDNQBCyAAQQg6ACwgASEBC0EwIRAMuQELAkAgAC0AKEEBRg0AIAEhAQwECyAALQAtQQhxRQ2TASABIQEMAwsgAC0AMEEgcQ2UAUHFASEQDLcBCwJAIA8gAkYNAAJAA0ACQCAPLQAAQVBqIgFB/wFxQQpJDQAgDyEBQTUhEAy6AQsgACkDICIRQpmz5syZs+bMGVYNASAAIBFCCn4iETcDICARIAGtQv8BgyISQn+FVg0BIAAgESASfDcDICAPQQFqIg8gAkcNAAtBOSEQDNEBCyAAKAIEIQIgAEEANgIEIAAgAiAPQQFqIgQQsYCAgAAiAg2VASAEIQEMwwELQTkhEAzPAQsCQCAALwEwIgFBCHFFDQAgAC0AKEEBRw0AIAAtAC1BCHFFDZABCyAAIAFB9/sDcUGABHI7ATAgDyEBC0E3IRAMtAELIAAgAC8BMEEQcjsBMAyrAQsgEEEVRg2LASAAQQA2AhwgACABNgIUIABB8I6AgAA2AhAgAEEcNgIMQQAhEAzLAQsgAEHDADYCHCAAIAE2AgwgACANQQFqNgIUQQAhEAzKAQsCQCABLQAAQTpHDQAgACgCBCEQIABBADYCBAJAIAAgECABEK+AgIAAIhANACABQQFqIQEMYwsgAEHDADYCHCAAIBA2AgwgACABQQFqNgIUQQAhEAzKAQsgAEEANgIcIAAgATYCFCAAQbGRgIAANgIQIABBCjYCDEEAIRAMyQELIABBADYCHCAAIAE2AhQgAEGgmYCAADYCECAAQR42AgxBACEQDMgBCyAAQQA2AgALIABBgBI7ASogACAXQQFqIgEgAhCogICAACIQDQEgASEBC0HHACEQDKwBCyAQQRVHDYMBIABB0QA2AhwgACABNgIUIABB45eAgAA2AhAgAEEVNgIMQQAhEAzEAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMXgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAzDAQsgAEEANgIcIAAgFDYCFCAAQcGogIAANgIQIABBBzYCDCAAQQA2AgBBACEQDMIBCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxdCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDMEBC0EAIRAgAEEANgIcIAAgATYCFCAAQYCRgIAANgIQIABBCTYCDAzAAQsgEEEVRg19IABBADYCHCAAIAE2AhQgAEGUjYCAADYCECAAQSE2AgxBACEQDL8BC0EBIRZBACEXQQAhFEEBIRALIAAgEDoAKyABQQFqIQECQAJAIAAtAC1BEHENAAJAAkACQCAALQAqDgMBAAIECyAWRQ0DDAILIBQNAQwCCyAXRQ0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQrYCAgAAiEA0AIAEhAQxcCyAAQdgANgIcIAAgATYCFCAAIBA2AgxBACEQDL4BCyAAKAIEIQQgAEEANgIEAkAgACAEIAEQrYCAgAAiBA0AIAEhAQytAQsgAEHZADYCHCAAIAE2AhQgACAENgIMQQAhEAy9AQsgACgCBCEEIABBADYCBAJAIAAgBCABEK2AgIAAIgQNACABIQEMqwELIABB2gA2AhwgACABNgIUIAAgBDYCDEEAIRAMvAELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKkBCyAAQdwANgIcIAAgATYCFCAAIAQ2AgxBACEQDLsBCwJAIAEtAABBUGoiEEH/AXFBCk8NACAAIBA6ACogAUEBaiEBQc8AIRAMogELIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCtgICAACIEDQAgASEBDKcBCyAAQd4ANgIcIAAgATYCFCAAIAQ2AgxBACEQDLoBCyAAQQA2AgAgF0EBaiEBAkAgAC0AKUEjTw0AIAEhAQxZCyAAQQA2AhwgACABNgIUIABB04mAgAA2AhAgAEEINgIMQQAhEAy5AQsgAEEANgIAC0EAIRAgAEEANgIcIAAgATYCFCAAQZCzgIAANgIQIABBCDYCDAy3AQsgAEEANgIAIBdBAWohAQJAIAAtAClBIUcNACABIQEMVgsgAEEANgIcIAAgATYCFCAAQZuKgIAANgIQIABBCDYCDEEAIRAMtgELIA
BBADYCACAXQQFqIQECQCAALQApIhBBXWpBC08NACABIQEMVQsCQCAQQQZLDQBBASAQdEHKAHFFDQAgASEBDFULQQAhECAAQQA2AhwgACABNgIUIABB94mAgAA2AhAgAEEINgIMDLUBCyAQQRVGDXEgAEEANgIcIAAgATYCFCAAQbmNgIAANgIQIABBGjYCDEEAIRAMtAELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFQLIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMswELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0gA2AhwgACABNgIUIAAgEDYCDEEAIRAMsgELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDE0LIABB0wA2AhwgACABNgIUIAAgEDYCDEEAIRAMsQELIAAoAgQhECAAQQA2AgQCQCAAIBAgARCngICAACIQDQAgASEBDFELIABB5QA2AhwgACABNgIUIAAgEDYCDEEAIRAMsAELIABBADYCHCAAIAE2AhQgAEHGioCAADYCECAAQQc2AgxBACEQDK8BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdIANgIcIAAgATYCFCAAIBA2AgxBACEQDK4BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxJCyAAQdMANgIcIAAgATYCFCAAIBA2AgxBACEQDK0BCyAAKAIEIRAgAEEANgIEAkAgACAQIAEQp4CAgAAiEA0AIAEhAQxNCyAAQeUANgIcIAAgATYCFCAAIBA2AgxBACEQDKwBCyAAQQA2AhwgACABNgIUIABB3IiAgAA2AhAgAEEHNgIMQQAhEAyrAQsgEEE/Rw0BIAFBAWohAQtBBSEQDJABC0EAIRAgAEEANgIcIAAgATYCFCAAQf2SgIAANgIQIABBBzYCDAyoAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHSADYCHCAAIAE2AhQgACAQNgIMQQAhEAynAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMQgsgAEHTADYCHCAAIAE2AhQgACAQNgIMQQAhEAymAQsgACgCBCEQIABBADYCBAJAIAAgECABEKeAgIAAIhANACABIQEMRgsgAEHlADYCHCAAIAE2AhQgACAQNgIMQQAhEAylAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHSADYCHCAAIBQ2AhQgACABNgIMQQAhEAykAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMPwsgAEHTADYCHCAAIBQ2AhQgACABNgIMQQAhEAyjAQsgACgCBCEBIABBADYCBAJAIAAgASAUEKeAgIAAIgENACAUIQEMQwsgAEHlADYCHCAAIBQ2AhQgACABNgIMQQAhEAyiAQsgAEEANgIcIAAgFDYCFCAAQcOPgIAANgIQIABBBzYCDEEAIRAMoQELIABBADYCHCAAIAE2AhQgAEHDj4CAADYCECAAQQc2AgxBACEQDKABC0EAIRAgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDAyfAQsgAEEANgIcIAAgFDYCFCAAQYycgIAANgIQIABBBzYCDEEAIRAMngELIABBADYCHCAAIBQ2AhQgAEH+kYCAADYCECAAQQc2AgxBACEQDJ0BCyAAQQA2AhwgACABNgIUIABBjpuAgAA2AhAgAEEGNgIMQQAhEAycAQsgEEEVRg1XIABBADYCHCAAIAE2AhQgAEHMjoCAADYCECAAQSA2AgxBACEQDJsBCyAAQQA2AgAgEEEBaiEBQSQhEAsgACAQOgApIAAoAgQhECAAQQA2AgQgACAQIAEQq4CAgAAiEA1UIAEhAQw+CyAAQQA2AgALQQAhECAAQQA2AhwgACAENgIUIABB8ZuAgAA2AhAgAEEGNgIMDJcBCyABQRVGDVAgAEEANgIcIAAgBTYCFCAAQfCMgIAANgIQIABBGzYCDEEAIRAMlgELIAAoAgQhBSAAQQA2AgQgACAFIBAQqYCAgAAiBQ0BIBBBAWohBQtBrQEhEAx7CyAAQcEBNgIcIAAgBTYCDCAAIBBBAWo2AhRBACEQDJMBCyAAKAIEIQYgAEEANgIEIAAgBiAQEKmAgIAAIgYNASAQQQFqIQYLQa4BIRAMeAsgAEHCATYCHCAAIAY2AgwgACAQQQFqNgIUQQAhEAyQAQsgAEEANgIcIAAgBzYCFCAAQZeLgIAANgIQIABBDTYCDEEAIRAMjwELIABBADYCHCAAIAg2AhQgAEHjkICAADYCECAAQQk2AgxBACEQDI4BCyAAQQA2AhwgACAINgIUIABBlI2AgAA2AhAgAEEhNgIMQQAhEAyNAQtBASEWQQAhF0EAIRRBASEQCyAAIBA6ACsgCUEBaiEIAkACQCAALQAtQRBxDQACQAJAAkAgAC0AKg4DAQACBAsgFkUNAwwCCyAUDQEMAgsgF0UNAQsgACgCBCEQIABBADYCBCAAIBAgCBCtgICAACIQRQ09IABByQE2AhwgACAINgIUIAAgEDYCDEEAIRAMjAELIAAoAgQhBCAAQQA2AgQgACAEIAgQrYCAgAAiBEUNdiAAQcoBNgIcIAAgCDYCFCAAIAQ2AgxBACEQDIsBCyAAKAIEIQQgAEEANgIEIAAgBCAJEK2AgIAAIgRFDXQgAEHLATYCHCAAIAk2AhQgACAENgIMQQAhEAyKAQsgACgCBCEEIABBADYCBCAAIAQgChCtgICAACIERQ1yIABBzQE2AhwgACAKNgIUIAAgBDYCDEEAIRAMiQELAkAgCy0AAEFQaiIQQf8BcUEKTw0AIAAgEDoAKiALQQFqIQpBtgEhEAxwCyAAKAIEIQQgAEEANgIEIAAgBCALEK2AgIAAIgRFDXAgAEHPATYCHCAAIAs2AhQgACAENgIMQQAhEAyIAQsgAEEANgIcIAAgBDYCFCAAQZCzgIAANgIQIABBCDYCDCAAQQA2AgBBACEQDIcBCyABQRVGDT8gAEEANgIcIAAgDDYCFCAAQcyOgIAANgIQIABBIDYCDEEAIRAMhgELIABBgQQ7ASggACgCBCEQIABCADcDACAAIBAgDEEBaiIMEKuAgIAAIhBFDTggAEHTATYCHCAAIAw2AhQgACAQNgIMQQAhEAyFAQsgAEEANgIAC0EAIRAgAEEANgIcIAAgBDYCFCAAQdibgIAANgIQIABBCDYCDAyDAQsgACgCBCEQIABCADcDACAAIBAgC0EBaiILEKuAgIAAIhANAUHGASEQDGkLIABBAjoAKAxVCyAAQdUBNgIcIAAgCzYCFCAAIBA2AgxBACEQDIABCyAQQRVGDTcgAEEANgIcIAAgBDYCF
CAAQaSMgIAANgIQIABBEDYCDEEAIRAMfwsgAC0ANEEBRw00IAAgBCACELyAgIAAIhBFDTQgEEEVRw01IABB3AE2AhwgACAENgIUIABB1ZaAgAA2AhAgAEEVNgIMQQAhEAx+C0EAIRAgAEEANgIcIABBr4uAgAA2AhAgAEECNgIMIAAgFEEBajYCFAx9C0EAIRAMYwtBAiEQDGILQQ0hEAxhC0EPIRAMYAtBJSEQDF8LQRMhEAxeC0EVIRAMXQtBFiEQDFwLQRchEAxbC0EYIRAMWgtBGSEQDFkLQRohEAxYC0EbIRAMVwtBHCEQDFYLQR0hEAxVC0EfIRAMVAtBISEQDFMLQSMhEAxSC0HGACEQDFELQS4hEAxQC0EvIRAMTwtBOyEQDE4LQT0hEAxNC0HIACEQDEwLQckAIRAMSwtBywAhEAxKC0HMACEQDEkLQc4AIRAMSAtB0QAhEAxHC0HVACEQDEYLQdgAIRAMRQtB2QAhEAxEC0HbACEQDEMLQeQAIRAMQgtB5QAhEAxBC0HxACEQDEALQfQAIRAMPwtBjQEhEAw+C0GXASEQDD0LQakBIRAMPAtBrAEhEAw7C0HAASEQDDoLQbkBIRAMOQtBrwEhEAw4C0GxASEQDDcLQbIBIRAMNgtBtAEhEAw1C0G1ASEQDDQLQboBIRAMMwtBvQEhEAwyC0G/ASEQDDELQcEBIRAMMAsgAEEANgIcIAAgBDYCFCAAQemLgIAANgIQIABBHzYCDEEAIRAMSAsgAEHbATYCHCAAIAQ2AhQgAEH6loCAADYCECAAQRU2AgxBACEQDEcLIABB+AA2AhwgACAMNgIUIABBypiAgAA2AhAgAEEVNgIMQQAhEAxGCyAAQdEANgIcIAAgBTYCFCAAQbCXgIAANgIQIABBFTYCDEEAIRAMRQsgAEH5ADYCHCAAIAE2AhQgACAQNgIMQQAhEAxECyAAQfgANgIcIAAgATYCFCAAQcqYgIAANgIQIABBFTYCDEEAIRAMQwsgAEHkADYCHCAAIAE2AhQgAEHjl4CAADYCECAAQRU2AgxBACEQDEILIABB1wA2AhwgACABNgIUIABByZeAgAA2AhAgAEEVNgIMQQAhEAxBCyAAQQA2AhwgACABNgIUIABBuY2AgAA2AhAgAEEaNgIMQQAhEAxACyAAQcIANgIcIAAgATYCFCAAQeOYgIAANgIQIABBFTYCDEEAIRAMPwsgAEEANgIEIAAgDyAPELGAgIAAIgRFDQEgAEE6NgIcIAAgBDYCDCAAIA9BAWo2AhRBACEQDD4LIAAoAgQhBCAAQQA2AgQCQCAAIAQgARCxgICAACIERQ0AIABBOzYCHCAAIAQ2AgwgACABQQFqNgIUQQAhEAw+CyABQQFqIQEMLQsgD0EBaiEBDC0LIABBADYCHCAAIA82AhQgAEHkkoCAADYCECAAQQQ2AgxBACEQDDsLIABBNjYCHCAAIAQ2AhQgACACNgIMQQAhEAw6CyAAQS42AhwgACAONgIUIAAgBDYCDEEAIRAMOQsgAEHQADYCHCAAIAE2AhQgAEGRmICAADYCECAAQRU2AgxBACEQDDgLIA1BAWohAQwsCyAAQRU2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAw2CyAAQRs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw1CyAAQQ82AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAw0CyAAQQs2AhwgACABNgIUIABBkZeAgAA2AhAgAEEVNgIMQQAhEAwzCyAAQRo2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwyCyAAQQs2AhwgACABNgIUIABBgpmAgAA2AhAgAEEVNgIMQQAhEAwxCyAAQQo2AhwgACABNgIUIABB5JaAgAA2AhAgAEEVNgIMQQAhEAwwCyAAQR42AhwgACABNgIUIABB+ZeAgAA2AhAgAEEVNgIMQQAhEAwvCyAAQQA2AhwgACAQNgIUIABB2o2AgAA2AhAgAEEUNgIMQQAhEAwuCyAAQQQ2AhwgACABNgIUIABBsJiAgAA2AhAgAEEVNgIMQQAhEAwtCyAAQQA2AgAgC0EBaiELC0G4ASEQDBILIABBADYCACAQQQFqIQFB9QAhEAwRCyABIQECQCAALQApQQVHDQBB4wAhEAwRC0HiACEQDBALQQAhECAAQQA2AhwgAEHkkYCAADYCECAAQQc2AgwgACAUQQFqNgIUDCgLIABBADYCACAXQQFqIQFBwAAhEAwOC0EBIQELIAAgAToALCAAQQA2AgAgF0EBaiEBC0EoIRAMCwsgASEBC0E4IRAMCQsCQCABIg8gAkYNAANAAkAgDy0AAEGAvoCAAGotAAAiAUEBRg0AIAFBAkcNAyAPQQFqIQEMBAsgD0EBaiIPIAJHDQALQT4hEAwiC0E+IRAMIQsgAEEAOgAsIA8hAQwBC0ELIRAMBgtBOiEQDAULIAFBAWohAUEtIRAMBAsgACABOgAsIABBADYCACAWQQFqIQFBDCEQDAMLIABBADYCACAXQQFqIQFBCiEQDAILIABBADYCAAsgAEEAOgAsIA0hAUEJIRAMAAsLQQAhECAAQQA2AhwgACALNgIUIABBzZCAgAA2AhAgAEEJNgIMDBcLQQAhECAAQQA2AhwgACAKNgIUIABB6YqAgAA2AhAgAEEJNgIMDBYLQQAhECAAQQA2AhwgACAJNgIUIABBt5CAgAA2AhAgAEEJNgIMDBULQQAhECAAQQA2AhwgACAINgIUIABBnJGAgAA2AhAgAEEJNgIMDBQLQQAhECAAQQA2AhwgACABNgIUIABBzZCAgAA2AhAgAEEJNgIMDBMLQQAhECAAQQA2AhwgACABNgIUIABB6YqAgAA2AhAgAEEJNgIMDBILQQAhECAAQQA2AhwgACABNgIUIABBt5CAgAA2AhAgAEEJNgIMDBELQQAhECAAQQA2AhwgACABNgIUIABBnJGAgAA2AhAgAEEJNgIMDBALQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA8LQQAhECAAQQA2AhwgACABNgIUIABBl5WAgAA2AhAgAEEPNgIMDA4LQQAhECAAQQA2AhwgACABNgIUIABBwJKAgAA2AhAgAEELNgIMDA0LQQAhECAAQQA2AhwgACABNgIUIABBlYmAgAA2AhAgAEELNgIMDAwLQQAhECAAQQA2AhwgACABNgIUIABB4Y+AgAA2AhAgAEEKNgIMDAsLQQAhECAAQQA2AhwgACABNgIUIABB+4+AgAA2AhAgAEEKNgIMDAoLQQAhECAAQQA2AhwgACABNgIUIABB8ZmAgAA2AhAgAEECNgIMDAkLQQAhECAAQQA2AhwgACABNgIUIABBxJSAgAA2AhAgAEECNgIMDAgLQQAhECAAQQA2AhwgACABNgIUIABB8pWAgAA2AhAgAEECNgIMDAcLIABBAjYCHCAAIAE2AhQgAEGcmoCA
ADYCECAAQRY2AgxBACEQDAYLQQEhEAwFC0HUACEQIAEiBCACRg0EIANBCGogACAEIAJB2MKAgABBChDFgICAACADKAIMIQQgAygCCA4DAQQCAAsQyoCAgAAACyAAQQA2AhwgAEG1moCAADYCECAAQRc2AgwgACAEQQFqNgIUQQAhEAwCCyAAQQA2AhwgACAENgIUIABBypqAgAA2AhAgAEEJNgIMQQAhEAwBCwJAIAEiBCACRw0AQSIhEAwBCyAAQYmAgIAANgIIIAAgBDYCBEEhIRALIANBEGokgICAgAAgEAuvAQECfyABKAIAIQYCQAJAIAIgA0YNACAEIAZqIQQgBiADaiACayEHIAIgBkF/cyAFaiIGaiEFA0ACQCACLQAAIAQtAABGDQBBAiEEDAMLAkAgBg0AQQAhBCAFIQIMAwsgBkF/aiEGIARBAWohBCACQQFqIgIgA0cNAAsgByEGIAMhAgsgAEEBNgIAIAEgBjYCACAAIAI2AgQPCyABQQA2AgAgACAENgIAIAAgAjYCBAsKACAAEMeAgIAAC/I2AQt/I4CAgIAAQRBrIgEkgICAgAACQEEAKAKg0ICAAA0AQQAQy4CAgABBgNSEgABrIgJB2QBJDQBBACEDAkBBACgC4NOAgAAiBA0AQQBCfzcC7NOAgABBAEKAgISAgIDAADcC5NOAgABBACABQQhqQXBxQdiq1aoFcyIENgLg04CAAEEAQQA2AvTTgIAAQQBBADYCxNOAgAALQQAgAjYCzNOAgABBAEGA1ISAADYCyNOAgABBAEGA1ISAADYCmNCAgABBACAENgKs0ICAAEEAQX82AqjQgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAtBgNSEgABBeEGA1ISAAGtBD3FBAEGA1ISAAEEIakEPcRsiA2oiBEEEaiACQUhqIgUgA2siA0EBcjYCAEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgABBgNSEgAAgBWpBODYCBAsCQAJAAkACQAJAAkACQAJAAkACQAJAAkAgAEHsAUsNAAJAQQAoAojQgIAAIgZBECAAQRNqQXBxIABBC0kbIgJBA3YiBHYiA0EDcUUNAAJAAkAgA0EBcSAEckEBcyIFQQN0IgRBsNCAgABqIgMgBEG40ICAAGooAgAiBCgCCCICRw0AQQAgBkF+IAV3cTYCiNCAgAAMAQsgAyACNgIIIAIgAzYCDAsgBEEIaiEDIAQgBUEDdCIFQQNyNgIEIAQgBWoiBCAEKAIEQQFyNgIEDAwLIAJBACgCkNCAgAAiB00NAQJAIANFDQACQAJAIAMgBHRBAiAEdCIDQQAgA2tycSIDQQAgA2txQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmoiBEEDdCIDQbDQgIAAaiIFIANBuNCAgABqKAIAIgMoAggiAEcNAEEAIAZBfiAEd3EiBjYCiNCAgAAMAQsgBSAANgIIIAAgBTYCDAsgAyACQQNyNgIEIAMgBEEDdCIEaiAEIAJrIgU2AgAgAyACaiIAIAVBAXI2AgQCQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhBAJAAkAgBkEBIAdBA3Z0IghxDQBBACAGIAhyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAQ2AgwgAiAENgIIIAQgAjYCDCAEIAg2AggLIANBCGohA0EAIAA2ApzQgIAAQQAgBTYCkNCAgAAMDAtBACgCjNCAgAAiCUUNASAJQQAgCWtxQX9qIgMgA0EMdkEQcSIDdiIEQQV2QQhxIgUgA3IgBCAFdiIDQQJ2QQRxIgRyIAMgBHYiA0EBdkECcSIEciADIAR2IgNBAXZBAXEiBHIgAyAEdmpBAnRBuNKAgABqKAIAIgAoAgRBeHEgAmshBCAAIQUCQANAAkAgBSgCECIDDQAgBUEUaigCACIDRQ0CCyADKAIEQXhxIAJrIgUgBCAFIARJIgUbIQQgAyAAIAUbIQAgAyEFDAALCyAAKAIYIQoCQCAAKAIMIgggAEYNACAAKAIIIgNBACgCmNCAgABJGiAIIAM2AgggAyAINgIMDAsLAkAgAEEUaiIFKAIAIgMNACAAKAIQIgNFDQMgAEEQaiEFCwNAIAUhCyADIghBFGoiBSgCACIDDQAgCEEQaiEFIAgoAhAiAw0ACyALQQA2AgAMCgtBfyECIABBv39LDQAgAEETaiIDQXBxIQJBACgCjNCAgAAiB0UNAEEAIQsCQCACQYACSQ0AQR8hCyACQf///wdLDQAgA0EIdiIDIANBgP4/akEQdkEIcSIDdCIEIARBgOAfakEQdkEEcSIEdCIFIAVBgIAPakEQdkECcSIFdEEPdiADIARyIAVyayIDQQF0IAIgA0EVanZBAXFyQRxqIQsLQQAgAmshBAJAAkACQAJAIAtBAnRBuNKAgABqKAIAIgUNAEEAIQNBACEIDAELQQAhAyACQQBBGSALQQF2ayALQR9GG3QhAEEAIQgDQAJAIAUoAgRBeHEgAmsiBiAETw0AIAYhBCAFIQggBg0AQQAhBCAFIQggBSEDDAMLIAMgBUEUaigCACIGIAYgBSAAQR12QQRxakEQaigCACIFRhsgAyAGGyEDIABBAXQhACAFDQALCwJAIAMgCHINAEEAIQhBAiALdCIDQQAgA2tyIAdxIgNFDQMgA0EAIANrcUF/aiIDIANBDHZBEHEiA3YiBUEFdkEIcSIAIANyIAUgAHYiA0ECdkEEcSIFciADIAV2IgNBAXZBAnEiBXIgAyAFdiIDQQF2QQFxIgVyIAMgBXZqQQJ0QbjSgIAAaigCACEDCyADRQ0BCwNAIAMoAgRBeHEgAmsiBiAESSEAAkAgAygCECIFDQAgA0EUaigCACEFCyAGIAQgABshBCADIAggABshCCAFIQMgBQ0ACwsgCEUNACAEQQAoApDQgIAAIAJrTw0AIAgoAhghCwJAIAgoAgwiACAIRg0AIAgoAggiA0EAKAKY0ICAAEkaIAAgAzYCCCADIAA2AgwMCQsCQCAIQRRqIgUoAgAiAw0AIAgoAhAiA0UNAyAIQRBqIQULA0AgBSEGIAMiAEEUaiIFKAIAIgMNACAAQRBqIQUgACgCECIDDQALIAZBADYCAAwICwJAQQAoApDQgIAAIgMgAkkNAEEAKAKc0ICAACEEAkACQCADIAJrIgVBEEkNACAEIAJqIgAgBUEBcjYCBEEAIAU2ApDQgIAAQQAgADYCnNCAgAAgBCADaiAFNgIAIAQgAkEDcjYCBAwBCyAEIAN
BA3I2AgQgBCADaiIDIAMoAgRBAXI2AgRBAEEANgKc0ICAAEEAQQA2ApDQgIAACyAEQQhqIQMMCgsCQEEAKAKU0ICAACIAIAJNDQBBACgCoNCAgAAiAyACaiIEIAAgAmsiBUEBcjYCBEEAIAU2ApTQgIAAQQAgBDYCoNCAgAAgAyACQQNyNgIEIANBCGohAwwKCwJAAkBBACgC4NOAgABFDQBBACgC6NOAgAAhBAwBC0EAQn83AuzTgIAAQQBCgICEgICAwAA3AuTTgIAAQQAgAUEMakFwcUHYqtWqBXM2AuDTgIAAQQBBADYC9NOAgABBAEEANgLE04CAAEGAgAQhBAtBACEDAkAgBCACQccAaiIHaiIGQQAgBGsiC3EiCCACSw0AQQBBMDYC+NOAgAAMCgsCQEEAKALA04CAACIDRQ0AAkBBACgCuNOAgAAiBCAIaiIFIARNDQAgBSADTQ0BC0EAIQNBAEEwNgL404CAAAwKC0EALQDE04CAAEEEcQ0EAkACQAJAQQAoAqDQgIAAIgRFDQBByNOAgAAhAwNAAkAgAygCACIFIARLDQAgBSADKAIEaiAESw0DCyADKAIIIgMNAAsLQQAQy4CAgAAiAEF/Rg0FIAghBgJAQQAoAuTTgIAAIgNBf2oiBCAAcUUNACAIIABrIAQgAGpBACADa3FqIQYLIAYgAk0NBSAGQf7///8HSw0FAkBBACgCwNOAgAAiA0UNAEEAKAK404CAACIEIAZqIgUgBE0NBiAFIANLDQYLIAYQy4CAgAAiAyAARw0BDAcLIAYgAGsgC3EiBkH+////B0sNBCAGEMuAgIAAIgAgAygCACADKAIEakYNAyAAIQMLAkAgA0F/Rg0AIAJByABqIAZNDQACQCAHIAZrQQAoAujTgIAAIgRqQQAgBGtxIgRB/v///wdNDQAgAyEADAcLAkAgBBDLgICAAEF/Rg0AIAQgBmohBiADIQAMBwtBACAGaxDLgICAABoMBAsgAyEAIANBf0cNBQwDC0EAIQgMBwtBACEADAULIABBf0cNAgtBAEEAKALE04CAAEEEcjYCxNOAgAALIAhB/v///wdLDQEgCBDLgICAACEAQQAQy4CAgAAhAyAAQX9GDQEgA0F/Rg0BIAAgA08NASADIABrIgYgAkE4ak0NAQtBAEEAKAK404CAACAGaiIDNgK404CAAAJAIANBACgCvNOAgABNDQBBACADNgK804CAAAsCQAJAAkACQEEAKAKg0ICAACIERQ0AQcjTgIAAIQMDQCAAIAMoAgAiBSADKAIEIghqRg0CIAMoAggiAw0ADAMLCwJAAkBBACgCmNCAgAAiA0UNACAAIANPDQELQQAgADYCmNCAgAALQQAhA0EAIAY2AszTgIAAQQAgADYCyNOAgABBAEF/NgKo0ICAAEEAQQAoAuDTgIAANgKs0ICAAEEAQQA2AtTTgIAAA0AgA0HE0ICAAGogA0G40ICAAGoiBDYCACAEIANBsNCAgABqIgU2AgAgA0G80ICAAGogBTYCACADQczQgIAAaiADQcDQgIAAaiIFNgIAIAUgBDYCACADQdTQgIAAaiADQcjQgIAAaiIENgIAIAQgBTYCACADQdDQgIAAaiAENgIAIANBIGoiA0GAAkcNAAsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiBCAGQUhqIgUgA2siA0EBcjYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAM2ApTQgIAAQQAgBDYCoNCAgAAgACAFakE4NgIEDAILIAMtAAxBCHENACAEIAVJDQAgBCAATw0AIARBeCAEa0EPcUEAIARBCGpBD3EbIgVqIgBBACgClNCAgAAgBmoiCyAFayIFQQFyNgIEIAMgCCAGajYCBEEAQQAoAvDTgIAANgKk0ICAAEEAIAU2ApTQgIAAQQAgADYCoNCAgAAgBCALakE4NgIEDAELAkAgAEEAKAKY0ICAACIITw0AQQAgADYCmNCAgAAgACEICyAAIAZqIQVByNOAgAAhAwJAAkACQAJAAkACQAJAA0AgAygCACAFRg0BIAMoAggiAw0ADAILCyADLQAMQQhxRQ0BC0HI04CAACEDA0ACQCADKAIAIgUgBEsNACAFIAMoAgRqIgUgBEsNAwsgAygCCCEDDAALCyADIAA2AgAgAyADKAIEIAZqNgIEIABBeCAAa0EPcUEAIABBCGpBD3EbaiILIAJBA3I2AgQgBUF4IAVrQQ9xQQAgBUEIakEPcRtqIgYgCyACaiICayEDAkAgBiAERw0AQQAgAjYCoNCAgABBAEEAKAKU0ICAACADaiIDNgKU0ICAACACIANBAXI2AgQMAwsCQCAGQQAoApzQgIAARw0AQQAgAjYCnNCAgABBAEEAKAKQ0ICAACADaiIDNgKQ0ICAACACIANBAXI2AgQgAiADaiADNgIADAMLAkAgBigCBCIEQQNxQQFHDQAgBEF4cSEHAkACQCAEQf8BSw0AIAYoAggiBSAEQQN2IghBA3RBsNCAgABqIgBGGgJAIAYoAgwiBCAFRw0AQQBBACgCiNCAgABBfiAId3E2AojQgIAADAILIAQgAEYaIAQgBTYCCCAFIAQ2AgwMAQsgBigCGCEJAkACQCAGKAIMIgAgBkYNACAGKAIIIgQgCEkaIAAgBDYCCCAEIAA2AgwMAQsCQCAGQRRqIgQoAgAiBQ0AIAZBEGoiBCgCACIFDQBBACEADAELA0AgBCEIIAUiAEEUaiIEKAIAIgUNACAAQRBqIQQgACgCECIFDQALIAhBADYCAAsgCUUNAAJAAkAgBiAGKAIcIgVBAnRBuNKAgABqIgQoAgBHDQAgBCAANgIAIAANAUEAQQAoAozQgIAAQX4gBXdxNgKM0ICAAAwCCyAJQRBBFCAJKAIQIAZGG2ogADYCACAARQ0BCyAAIAk2AhgCQCAGKAIQIgRFDQAgACAENgIQIAQgADYCGAsgBigCFCIERQ0AIABBFGogBDYCACAEIAA2AhgLIAcgA2ohAyAGIAdqIgYoAgQhBAsgBiAEQX5xNgIEIAIgA2ogAzYCACACIANBAXI2AgQCQCADQf8BSw0AIANBeHFBsNCAgABqIQQCQAJAQQAoAojQgIAAIgVBASADQQN2dCIDcQ0AQQAgBSADcjYCiNCAgAAgBCEDDAELIAQoAgghAwsgAyACNgIMIAQgAjYCCCACIAQ2AgwgAiADNgIIDAMLQR8hBAJAIANB////B0sNACADQQh2IgQgBEGA/j9qQRB2QQhxIgR0IgUgBUGA4B9qQRB2QQRxIgV0IgAgAEGAgA9qQRB2QQJxIgB0QQ92IAQgBXIgAHJrIgRBAXQgAyAEQRVqdkEBcXJBHGohBAsgAiAENgIcIAJCADcCECAEQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiAEEBIAR0IghxDQAgBSACNgIAQQAgACAIcjYCjNCAgAAgAiAFNgIYIAIgAjYCCCACIAI2AgwMAwsgA0EAQRkgBEEBdmsgBEEfRht0IQQgBSgCACEAA0
AgACIFKAIEQXhxIANGDQIgBEEddiEAIARBAXQhBCAFIABBBHFqQRBqIggoAgAiAA0ACyAIIAI2AgAgAiAFNgIYIAIgAjYCDCACIAI2AggMAgsgAEF4IABrQQ9xQQAgAEEIakEPcRsiA2oiCyAGQUhqIgggA2siA0EBcjYCBCAAIAhqQTg2AgQgBCAFQTcgBWtBD3FBACAFQUlqQQ9xG2pBQWoiCCAIIARBEGpJGyIIQSM2AgRBAEEAKALw04CAADYCpNCAgABBACADNgKU0ICAAEEAIAs2AqDQgIAAIAhBEGpBACkC0NOAgAA3AgAgCEEAKQLI04CAADcCCEEAIAhBCGo2AtDTgIAAQQAgBjYCzNOAgABBACAANgLI04CAAEEAQQA2AtTTgIAAIAhBJGohAwNAIANBBzYCACADQQRqIgMgBUkNAAsgCCAERg0DIAggCCgCBEF+cTYCBCAIIAggBGsiADYCACAEIABBAXI2AgQCQCAAQf8BSw0AIABBeHFBsNCAgABqIQMCQAJAQQAoAojQgIAAIgVBASAAQQN2dCIAcQ0AQQAgBSAAcjYCiNCAgAAgAyEFDAELIAMoAgghBQsgBSAENgIMIAMgBDYCCCAEIAM2AgwgBCAFNgIIDAQLQR8hAwJAIABB////B0sNACAAQQh2IgMgA0GA/j9qQRB2QQhxIgN0IgUgBUGA4B9qQRB2QQRxIgV0IgggCEGAgA9qQRB2QQJxIgh0QQ92IAMgBXIgCHJrIgNBAXQgACADQRVqdkEBcXJBHGohAwsgBCADNgIcIARCADcCECADQQJ0QbjSgIAAaiEFAkBBACgCjNCAgAAiCEEBIAN0IgZxDQAgBSAENgIAQQAgCCAGcjYCjNCAgAAgBCAFNgIYIAQgBDYCCCAEIAQ2AgwMBAsgAEEAQRkgA0EBdmsgA0EfRht0IQMgBSgCACEIA0AgCCIFKAIEQXhxIABGDQMgA0EddiEIIANBAXQhAyAFIAhBBHFqQRBqIgYoAgAiCA0ACyAGIAQ2AgAgBCAFNgIYIAQgBDYCDCAEIAQ2AggMAwsgBSgCCCIDIAI2AgwgBSACNgIIIAJBADYCGCACIAU2AgwgAiADNgIICyALQQhqIQMMBQsgBSgCCCIDIAQ2AgwgBSAENgIIIARBADYCGCAEIAU2AgwgBCADNgIIC0EAKAKU0ICAACIDIAJNDQBBACgCoNCAgAAiBCACaiIFIAMgAmsiA0EBcjYCBEEAIAM2ApTQgIAAQQAgBTYCoNCAgAAgBCACQQNyNgIEIARBCGohAwwDC0EAIQNBAEEwNgL404CAAAwCCwJAIAtFDQACQAJAIAggCCgCHCIFQQJ0QbjSgIAAaiIDKAIARw0AIAMgADYCACAADQFBACAHQX4gBXdxIgc2AozQgIAADAILIAtBEEEUIAsoAhAgCEYbaiAANgIAIABFDQELIAAgCzYCGAJAIAgoAhAiA0UNACAAIAM2AhAgAyAANgIYCyAIQRRqKAIAIgNFDQAgAEEUaiADNgIAIAMgADYCGAsCQAJAIARBD0sNACAIIAQgAmoiA0EDcjYCBCAIIANqIgMgAygCBEEBcjYCBAwBCyAIIAJqIgAgBEEBcjYCBCAIIAJBA3I2AgQgACAEaiAENgIAAkAgBEH/AUsNACAEQXhxQbDQgIAAaiEDAkACQEEAKAKI0ICAACIFQQEgBEEDdnQiBHENAEEAIAUgBHI2AojQgIAAIAMhBAwBCyADKAIIIQQLIAQgADYCDCADIAA2AgggACADNgIMIAAgBDYCCAwBC0EfIQMCQCAEQf///wdLDQAgBEEIdiIDIANBgP4/akEQdkEIcSIDdCIFIAVBgOAfakEQdkEEcSIFdCICIAJBgIAPakEQdkECcSICdEEPdiADIAVyIAJyayIDQQF0IAQgA0EVanZBAXFyQRxqIQMLIAAgAzYCHCAAQgA3AhAgA0ECdEG40oCAAGohBQJAIAdBASADdCICcQ0AIAUgADYCAEEAIAcgAnI2AozQgIAAIAAgBTYCGCAAIAA2AgggACAANgIMDAELIARBAEEZIANBAXZrIANBH0YbdCEDIAUoAgAhAgJAA0AgAiIFKAIEQXhxIARGDQEgA0EddiECIANBAXQhAyAFIAJBBHFqQRBqIgYoAgAiAg0ACyAGIAA2AgAgACAFNgIYIAAgADYCDCAAIAA2AggMAQsgBSgCCCIDIAA2AgwgBSAANgIIIABBADYCGCAAIAU2AgwgACADNgIICyAIQQhqIQMMAQsCQCAKRQ0AAkACQCAAIAAoAhwiBUECdEG40oCAAGoiAygCAEcNACADIAg2AgAgCA0BQQAgCUF+IAV3cTYCjNCAgAAMAgsgCkEQQRQgCigCECAARhtqIAg2AgAgCEUNAQsgCCAKNgIYAkAgACgCECIDRQ0AIAggAzYCECADIAg2AhgLIABBFGooAgAiA0UNACAIQRRqIAM2AgAgAyAINgIYCwJAAkAgBEEPSw0AIAAgBCACaiIDQQNyNgIEIAAgA2oiAyADKAIEQQFyNgIEDAELIAAgAmoiBSAEQQFyNgIEIAAgAkEDcjYCBCAFIARqIAQ2AgACQCAHRQ0AIAdBeHFBsNCAgABqIQJBACgCnNCAgAAhAwJAAkBBASAHQQN2dCIIIAZxDQBBACAIIAZyNgKI0ICAACACIQgMAQsgAigCCCEICyAIIAM2AgwgAiADNgIIIAMgAjYCDCADIAg2AggLQQAgBTYCnNCAgABBACAENgKQ0ICAAAsgAEEIaiEDCyABQRBqJICAgIAAIAMLCgAgABDJgICAAAviDQEHfwJAIABFDQAgAEF4aiIBIABBfGooAgAiAkF4cSIAaiEDAkAgAkEBcQ0AIAJBA3FFDQEgASABKAIAIgJrIgFBACgCmNCAgAAiBEkNASACIABqIQACQCABQQAoApzQgIAARg0AAkAgAkH/AUsNACABKAIIIgQgAkEDdiIFQQN0QbDQgIAAaiIGRhoCQCABKAIMIgIgBEcNAEEAQQAoAojQgIAAQX4gBXdxNgKI0ICAAAwDCyACIAZGGiACIAQ2AgggBCACNgIMDAILIAEoAhghBwJAAkAgASgCDCIGIAFGDQAgASgCCCICIARJGiAGIAI2AgggAiAGNgIMDAELAkAgAUEUaiICKAIAIgQNACABQRBqIgIoAgAiBA0AQQAhBgwBCwNAIAIhBSAEIgZBFGoiAigCACIEDQAgBkEQaiECIAYoAhAiBA0ACyAFQQA2AgALIAdFDQECQAJAIAEgASgCHCIEQQJ0QbjSgIAAaiICKAIARw0AIAIgBjYCACAGDQFBAEEAKAKM0ICAAEF+IAR3cTYCjNCAgAAMAwsgB0EQQRQgBygCECABRhtqIAY2AgAgBkUNAgsgBiAHNgIYAkAgASgCECICRQ0AIAYgAjYCECACIAY2AhgLIAEoAhQiAkUNASAGQRRqIAI2AgAgAiAGNgIYDAELIAMoAgQiAkEDcUEDRw0AIAMgAkF+cTYCBEEAIAA2ApDQgIAAIAEgA
GogADYCACABIABBAXI2AgQPCyABIANPDQAgAygCBCICQQFxRQ0AAkACQCACQQJxDQACQCADQQAoAqDQgIAARw0AQQAgATYCoNCAgABBAEEAKAKU0ICAACAAaiIANgKU0ICAACABIABBAXI2AgQgAUEAKAKc0ICAAEcNA0EAQQA2ApDQgIAAQQBBADYCnNCAgAAPCwJAIANBACgCnNCAgABHDQBBACABNgKc0ICAAEEAQQAoApDQgIAAIABqIgA2ApDQgIAAIAEgAEEBcjYCBCABIABqIAA2AgAPCyACQXhxIABqIQACQAJAIAJB/wFLDQAgAygCCCIEIAJBA3YiBUEDdEGw0ICAAGoiBkYaAkAgAygCDCICIARHDQBBAEEAKAKI0ICAAEF+IAV3cTYCiNCAgAAMAgsgAiAGRhogAiAENgIIIAQgAjYCDAwBCyADKAIYIQcCQAJAIAMoAgwiBiADRg0AIAMoAggiAkEAKAKY0ICAAEkaIAYgAjYCCCACIAY2AgwMAQsCQCADQRRqIgIoAgAiBA0AIANBEGoiAigCACIEDQBBACEGDAELA0AgAiEFIAQiBkEUaiICKAIAIgQNACAGQRBqIQIgBigCECIEDQALIAVBADYCAAsgB0UNAAJAAkAgAyADKAIcIgRBAnRBuNKAgABqIgIoAgBHDQAgAiAGNgIAIAYNAUEAQQAoAozQgIAAQX4gBHdxNgKM0ICAAAwCCyAHQRBBFCAHKAIQIANGG2ogBjYCACAGRQ0BCyAGIAc2AhgCQCADKAIQIgJFDQAgBiACNgIQIAIgBjYCGAsgAygCFCICRQ0AIAZBFGogAjYCACACIAY2AhgLIAEgAGogADYCACABIABBAXI2AgQgAUEAKAKc0ICAAEcNAUEAIAA2ApDQgIAADwsgAyACQX5xNgIEIAEgAGogADYCACABIABBAXI2AgQLAkAgAEH/AUsNACAAQXhxQbDQgIAAaiECAkACQEEAKAKI0ICAACIEQQEgAEEDdnQiAHENAEEAIAQgAHI2AojQgIAAIAIhAAwBCyACKAIIIQALIAAgATYCDCACIAE2AgggASACNgIMIAEgADYCCA8LQR8hAgJAIABB////B0sNACAAQQh2IgIgAkGA/j9qQRB2QQhxIgJ0IgQgBEGA4B9qQRB2QQRxIgR0IgYgBkGAgA9qQRB2QQJxIgZ0QQ92IAIgBHIgBnJrIgJBAXQgACACQRVqdkEBcXJBHGohAgsgASACNgIcIAFCADcCECACQQJ0QbjSgIAAaiEEAkACQEEAKAKM0ICAACIGQQEgAnQiA3ENACAEIAE2AgBBACAGIANyNgKM0ICAACABIAQ2AhggASABNgIIIAEgATYCDAwBCyAAQQBBGSACQQF2ayACQR9GG3QhAiAEKAIAIQYCQANAIAYiBCgCBEF4cSAARg0BIAJBHXYhBiACQQF0IQIgBCAGQQRxakEQaiIDKAIAIgYNAAsgAyABNgIAIAEgBDYCGCABIAE2AgwgASABNgIIDAELIAQoAggiACABNgIMIAQgATYCCCABQQA2AhggASAENgIMIAEgADYCCAtBAEEAKAKo0ICAAEF/aiIBQX8gARs2AqjQgIAACwsEAAAAC04AAkAgAA0APwBBEHQPCwJAIABB//8DcQ0AIABBf0wNAAJAIABBEHZAACIAQX9HDQBBAEEwNgL404CAAEF/DwsgAEEQdA8LEMqAgIAAAAvyAgIDfwF+AkAgAkUNACAAIAE6AAAgAiAAaiIDQX9qIAE6AAAgAkEDSQ0AIAAgAToAAiAAIAE6AAEgA0F9aiABOgAAIANBfmogAToAACACQQdJDQAgACABOgADIANBfGogAToAACACQQlJDQAgAEEAIABrQQNxIgRqIgMgAUH/AXFBgYKECGwiATYCACADIAIgBGtBfHEiBGoiAkF8aiABNgIAIARBCUkNACADIAE2AgggAyABNgIEIAJBeGogATYCACACQXRqIAE2AgAgBEEZSQ0AIAMgATYCGCADIAE2AhQgAyABNgIQIAMgATYCDCACQXBqIAE2AgAgAkFsaiABNgIAIAJBaGogATYCACACQWRqIAE2AgAgBCADQQRxQRhyIgVrIgJBIEkNACABrUKBgICAEH4hBiADIAVqIQEDQCABIAY3AxggASAGNwMQIAEgBjcDCCABIAY3AwAgAUEgaiEBIAJBYGoiAkEfSw0ACwsgAAsLjkgBAEGACAuGSAEAAAACAAAAAwAAAAAAAAAAAAAABAAAAAUAAAAAAAAAAAAAAAYAAAAHAAAACAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAASW52YWxpZCBjaGFyIGluIHVybCBxdWVyeQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2JvZHkAQ29udGVudC1MZW5ndGggb3ZlcmZsb3cAQ2h1bmsgc2l6ZSBvdmVyZmxvdwBSZXNwb25zZSBvdmVyZmxvdwBJbnZhbGlkIG1ldGhvZCBmb3IgSFRUUC94LnggcmVxdWVzdABJbnZhbGlkIG1ldGhvZCBmb3IgUlRTUC94LnggcmVxdWVzdABFeHBlY3RlZCBTT1VSQ0UgbWV0aG9kIGZvciBJQ0UveC54IHJlcXVlc3QASW52YWxpZCBjaGFyIGluIHVybCBmcmFnbWVudCBzdGFydABFeHBlY3RlZCBkb3QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9zdGF0dXMASW52YWxpZCByZXNwb25zZSBzdGF0dXMASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucwBVc2VyIGNhbGxiYWNrIGVycm9yAGBvbl9yZXNldGAgY2FsbGJhY2sgZXJyb3IAYG9uX2NodW5rX2hlYWRlcmAgY2FsbGJhY2sgZXJyb3IAYG9uX21lc3NhZ2VfYmVnaW5gIGNhbGxiYWNrIGVycm9yAGBvbl9jaHVua19leHRlbnNpb25fdmFsdWVgIGNhbGxiYWNrIGVycm9yAGBvbl9zdGF0dXNfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl92ZXJzaW9uX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fdXJsX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGVgIGNhbGxiYWNrIGVycm9yAGBvbl9tZXNzYWdlX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fbWV0aG9kX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlYCBjYWxsYmFjayBlcnJvcgBgb25fY2h1bmtfZXh0ZW5zaW9uX25hbWVgIGNhbGxiYWNrIGVycm9yAFVuZXhwZWN0ZWQgY2hhciBp
biB1cmwgc2VydmVyAEludmFsaWQgaGVhZGVyIHZhbHVlIGNoYXIASW52YWxpZCBoZWFkZXIgZmllbGQgY2hhcgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3ZlcnNpb24ASW52YWxpZCBtaW5vciB2ZXJzaW9uAEludmFsaWQgbWFqb3IgdmVyc2lvbgBFeHBlY3RlZCBzcGFjZSBhZnRlciB2ZXJzaW9uAEV4cGVjdGVkIENSTEYgYWZ0ZXIgdmVyc2lvbgBJbnZhbGlkIEhUVFAgdmVyc2lvbgBJbnZhbGlkIGhlYWRlciB0b2tlbgBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX3VybABJbnZhbGlkIGNoYXJhY3RlcnMgaW4gdXJsAFVuZXhwZWN0ZWQgc3RhcnQgY2hhciBpbiB1cmwARG91YmxlIEAgaW4gdXJsAEVtcHR5IENvbnRlbnQtTGVuZ3RoAEludmFsaWQgY2hhcmFjdGVyIGluIENvbnRlbnQtTGVuZ3RoAER1cGxpY2F0ZSBDb250ZW50LUxlbmd0aABJbnZhbGlkIGNoYXIgaW4gdXJsIHBhdGgAQ29udGVudC1MZW5ndGggY2FuJ3QgYmUgcHJlc2VudCB3aXRoIFRyYW5zZmVyLUVuY29kaW5nAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIHNpemUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfdmFsdWUAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9jaHVua19leHRlbnNpb25fdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyB2YWx1ZQBNaXNzaW5nIGV4cGVjdGVkIExGIGFmdGVyIGhlYWRlciB2YWx1ZQBJbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AgaGVhZGVyIHZhbHVlAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgcXVvdGUgdmFsdWUASW52YWxpZCBjaGFyYWN0ZXIgaW4gY2h1bmsgZXh0ZW5zaW9ucyBxdW90ZWQgdmFsdWUAUGF1c2VkIGJ5IG9uX2hlYWRlcnNfY29tcGxldGUASW52YWxpZCBFT0Ygc3RhdGUAb25fcmVzZXQgcGF1c2UAb25fY2h1bmtfaGVhZGVyIHBhdXNlAG9uX21lc3NhZ2VfYmVnaW4gcGF1c2UAb25fY2h1bmtfZXh0ZW5zaW9uX3ZhbHVlIHBhdXNlAG9uX3N0YXR1c19jb21wbGV0ZSBwYXVzZQBvbl92ZXJzaW9uX2NvbXBsZXRlIHBhdXNlAG9uX3VybF9jb21wbGV0ZSBwYXVzZQBvbl9jaHVua19jb21wbGV0ZSBwYXVzZQBvbl9oZWFkZXJfdmFsdWVfY29tcGxldGUgcGF1c2UAb25fbWVzc2FnZV9jb21wbGV0ZSBwYXVzZQBvbl9tZXRob2RfY29tcGxldGUgcGF1c2UAb25faGVhZGVyX2ZpZWxkX2NvbXBsZXRlIHBhdXNlAG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lIHBhdXNlAFVuZXhwZWN0ZWQgc3BhY2UgYWZ0ZXIgc3RhcnQgbGluZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX2NodW5rX2V4dGVuc2lvbl9uYW1lAEludmFsaWQgY2hhcmFjdGVyIGluIGNodW5rIGV4dGVuc2lvbnMgbmFtZQBQYXVzZSBvbiBDT05ORUNUL1VwZ3JhZGUAUGF1c2Ugb24gUFJJL1VwZ3JhZGUARXhwZWN0ZWQgSFRUUC8yIENvbm5lY3Rpb24gUHJlZmFjZQBTcGFuIGNhbGxiYWNrIGVycm9yIGluIG9uX21ldGhvZABFeHBlY3RlZCBzcGFjZSBhZnRlciBtZXRob2QAU3BhbiBjYWxsYmFjayBlcnJvciBpbiBvbl9oZWFkZXJfZmllbGQAUGF1c2VkAEludmFsaWQgd29yZCBlbmNvdW50ZXJlZABJbnZhbGlkIG1ldGhvZCBlbmNvdW50ZXJlZABVbmV4cGVjdGVkIGNoYXIgaW4gdXJsIHNjaGVtYQBSZXF1ZXN0IGhhcyBpbnZhbGlkIGBUcmFuc2Zlci1FbmNvZGluZ2AAU1dJVENIX1BST1hZAFVTRV9QUk9YWQBNS0FDVElWSVRZAFVOUFJPQ0VTU0FCTEVfRU5USVRZAENPUFkATU9WRURfUEVSTUFORU5UTFkAVE9PX0VBUkxZAE5PVElGWQBGQUlMRURfREVQRU5ERU5DWQBCQURfR0FURVdBWQBQTEFZAFBVVABDSEVDS09VVABHQVRFV0FZX1RJTUVPVVQAUkVRVUVTVF9USU1FT1VUAE5FVFdPUktfQ09OTkVDVF9USU1FT1VUAENPTk5FQ1RJT05fVElNRU9VVABMT0dJTl9USU1FT1VUAE5FVFdPUktfUkVBRF9USU1FT1VUAFBPU1QATUlTRElSRUNURURfUkVRVUVTVABDTElFTlRfQ0xPU0VEX1JFUVVFU1QAQ0xJRU5UX0NMT1NFRF9MT0FEX0JBTEFOQ0VEX1JFUVVFU1QAQkFEX1JFUVVFU1QASFRUUF9SRVFVRVNUX1NFTlRfVE9fSFRUUFNfUE9SVABSRVBPUlQASU1fQV9URUFQT1QAUkVTRVRfQ09OVEVOVABOT19DT05URU5UAFBBUlRJQUxfQ09OVEVOVABIUEVfSU5WQUxJRF9DT05TVEFOVABIUEVfQ0JfUkVTRVQAR0VUAEhQRV9TVFJJQ1QAQ09ORkxJQ1QAVEVNUE9SQVJZX1JFRElSRUNUAFBFUk1BTkVOVF9SRURJUkVDVABDT05ORUNUAE1VTFRJX1NUQVRVUwBIUEVfSU5WQUxJRF9TVEFUVVMAVE9PX01BTllfUkVRVUVTVFMARUFSTFlfSElOVFMAVU5BVkFJTEFCTEVfRk9SX0xFR0FMX1JFQVNPTlMAT1BUSU9OUwBTV0lUQ0hJTkdfUFJPVE9DT0xTAFZBUklBTlRfQUxTT19ORUdPVElBVEVTAE1VTFRJUExFX0NIT0lDRVMASU5URVJOQUxfU0VSVkVSX0VSUk9SAFdFQl9TRVJWRVJfVU5LTk9XTl9FUlJPUgBSQUlMR1VOX0VSUk9SAElERU5USVRZX1BST1ZJREVSX0FVVEhFTlRJQ0FUSU9OX0VSUk9SAFNTTF9DRVJUSUZJQ0FURV9FUlJPUgBJTlZBTElEX1hfRk9SV0FSREVEX0ZPUgBTRVRfUEFSQU1FVEVSAEdFVF9QQVJBTUVURVIASFBFX1VTRVIAU0VFX09USEVSAEhQRV9DQl9DSFVOS19IRUFERVIATUtDQUxFTkRBUgBTRVRVUABXRUJfU0VSVkVSX0lTX0RPV04AVEVBUkRPV04ASFBFX0NMT1NFRF9DT05ORUNUSU9OAEhFVVJJU1RJQ19FWFBJUkFUSU9
OAERJU0NPTk5FQ1RFRF9PUEVSQVRJT04ATk9OX0FVVEhPUklUQVRJVkVfSU5GT1JNQVRJT04ASFBFX0lOVkFMSURfVkVSU0lPTgBIUEVfQ0JfTUVTU0FHRV9CRUdJTgBTSVRFX0lTX0ZST1pFTgBIUEVfSU5WQUxJRF9IRUFERVJfVE9LRU4ASU5WQUxJRF9UT0tFTgBGT1JCSURERU4ARU5IQU5DRV9ZT1VSX0NBTE0ASFBFX0lOVkFMSURfVVJMAEJMT0NLRURfQllfUEFSRU5UQUxfQ09OVFJPTABNS0NPTABBQ0wASFBFX0lOVEVSTkFMAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0VfVU5PRkZJQ0lBTABIUEVfT0sAVU5MSU5LAFVOTE9DSwBQUkkAUkVUUllfV0lUSABIUEVfSU5WQUxJRF9DT05URU5UX0xFTkdUSABIUEVfVU5FWFBFQ1RFRF9DT05URU5UX0xFTkdUSABGTFVTSABQUk9QUEFUQ0gATS1TRUFSQ0gAVVJJX1RPT19MT05HAFBST0NFU1NJTkcATUlTQ0VMTEFORU9VU19QRVJTSVNURU5UX1dBUk5JTkcATUlTQ0VMTEFORU9VU19XQVJOSU5HAEhQRV9JTlZBTElEX1RSQU5TRkVSX0VOQ09ESU5HAEV4cGVjdGVkIENSTEYASFBFX0lOVkFMSURfQ0hVTktfU0laRQBNT1ZFAENPTlRJTlVFAEhQRV9DQl9TVEFUVVNfQ09NUExFVEUASFBFX0NCX0hFQURFUlNfQ09NUExFVEUASFBFX0NCX1ZFUlNJT05fQ09NUExFVEUASFBFX0NCX1VSTF9DT01QTEVURQBIUEVfQ0JfQ0hVTktfQ09NUExFVEUASFBFX0NCX0hFQURFUl9WQUxVRV9DT01QTEVURQBIUEVfQ0JfQ0hVTktfRVhURU5TSU9OX1ZBTFVFX0NPTVBMRVRFAEhQRV9DQl9DSFVOS19FWFRFTlNJT05fTkFNRV9DT01QTEVURQBIUEVfQ0JfTUVTU0FHRV9DT01QTEVURQBIUEVfQ0JfTUVUSE9EX0NPTVBMRVRFAEhQRV9DQl9IRUFERVJfRklFTERfQ09NUExFVEUAREVMRVRFAEhQRV9JTlZBTElEX0VPRl9TVEFURQBJTlZBTElEX1NTTF9DRVJUSUZJQ0FURQBQQVVTRQBOT19SRVNQT05TRQBVTlNVUFBPUlRFRF9NRURJQV9UWVBFAEdPTkUATk9UX0FDQ0VQVEFCTEUAU0VSVklDRV9VTkFWQUlMQUJMRQBSQU5HRV9OT1RfU0FUSVNGSUFCTEUAT1JJR0lOX0lTX1VOUkVBQ0hBQkxFAFJFU1BPTlNFX0lTX1NUQUxFAFBVUkdFAE1FUkdFAFJFUVVFU1RfSEVBREVSX0ZJRUxEU19UT09fTEFSR0UAUkVRVUVTVF9IRUFERVJfVE9PX0xBUkdFAFBBWUxPQURfVE9PX0xBUkdFAElOU1VGRklDSUVOVF9TVE9SQUdFAEhQRV9QQVVTRURfVVBHUkFERQBIUEVfUEFVU0VEX0gyX1VQR1JBREUAU09VUkNFAEFOTk9VTkNFAFRSQUNFAEhQRV9VTkVYUEVDVEVEX1NQQUNFAERFU0NSSUJFAFVOU1VCU0NSSUJFAFJFQ09SRABIUEVfSU5WQUxJRF9NRVRIT0QATk9UX0ZPVU5EAFBST1BGSU5EAFVOQklORABSRUJJTkQAVU5BVVRIT1JJWkVEAE1FVEhPRF9OT1RfQUxMT1dFRABIVFRQX1ZFUlNJT05fTk9UX1NVUFBPUlRFRABBTFJFQURZX1JFUE9SVEVEAEFDQ0VQVEVEAE5PVF9JTVBMRU1FTlRFRABMT09QX0RFVEVDVEVEAEhQRV9DUl9FWFBFQ1RFRABIUEVfTEZfRVhQRUNURUQAQ1JFQVRFRABJTV9VU0VEAEhQRV9QQVVTRUQAVElNRU9VVF9PQ0NVUkVEAFBBWU1FTlRfUkVRVUlSRUQAUFJFQ09ORElUSU9OX1JFUVVJUkVEAFBST1hZX0FVVEhFTlRJQ0FUSU9OX1JFUVVJUkVEAE5FVFdPUktfQVVUSEVOVElDQVRJT05fUkVRVUlSRUQATEVOR1RIX1JFUVVJUkVEAFNTTF9DRVJUSUZJQ0FURV9SRVFVSVJFRABVUEdSQURFX1JFUVVJUkVEAFBBR0VfRVhQSVJFRABQUkVDT05ESVRJT05fRkFJTEVEAEVYUEVDVEFUSU9OX0ZBSUxFRABSRVZBTElEQVRJT05fRkFJTEVEAFNTTF9IQU5EU0hBS0VfRkFJTEVEAExPQ0tFRABUUkFOU0ZPUk1BVElPTl9BUFBMSUVEAE5PVF9NT0RJRklFRABOT1RfRVhURU5ERUQAQkFORFdJRFRIX0xJTUlUX0VYQ0VFREVEAFNJVEVfSVNfT1ZFUkxPQURFRABIRUFEAEV4cGVjdGVkIEhUVFAvAABeEwAAJhMAADAQAADwFwAAnRMAABUSAAA5FwAA8BIAAAoQAAB1EgAArRIAAIITAABPFAAAfxAAAKAVAAAjFAAAiRIAAIsUAABNFQAA1BEAAM8UAAAQGAAAyRYAANwWAADBEQAA4BcAALsUAAB0FAAAfBUAAOUUAAAIFwAAHxAAAGUVAACjFAAAKBUAAAIVAACZFQAALBAAAIsZAABPDwAA1A4AAGoQAADOEAAAAhcAAIkOAABuEwAAHBMAAGYUAABWFwAAwRMAAM0TAABsEwAAaBcAAGYXAABfFwAAIhMAAM4PAABpDgAA2A4AAGMWAADLEwAAqg4AACgXAAAmFwAAxRMAAF0WAADoEQAAZxMAAGUTAADyFgAAcxMAAB0XAAD5FgAA8xEAAM8OAADOFQAADBIAALMRAAClEQAAYRAAADIXAAC7EwAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAgMCAgICAgAAAgIAAgIAAgICAgICAgICAgAEAAAAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgAAAAICAgICAgICAgICAgICAgICAgICAgICAgICAgICAAIAAgAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAAAAAAAAAAAAAAIAAgICAgIAAAICAAICAAICAgICAgICAgIAAwAEAAAAAgICAgICAgICAgICAgICAgICAgICAgICAgIAAAACAgICAgICAgICAgICAgICAgICAgICAgICAgICAgACAAIAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABsb3NlZWVwLWFsaXZlAAAAAAAAAAAAAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQEBAQEBAQEBAQEBAgEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQFjaHVua2VkAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAABAQABAQEBAQAAAQEAAQEAAQEBAQEBAQEBAQAAAAAAAAABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAGVjdGlvbmVudC1sZW5ndGhvbnJveHktY29ubmVjdGlvbgAAAAAAAAAAAAAAAAAAAHJhbnNmZXItZW5jb2RpbmdwZ3JhZGUNCg0KDQpTTQ0KDQpUVFAvQ0UvVFNQLwAAAAAAAAAAAAAAAAECAAEDAAAAAAAAAAAAAAAAAAAAAAAABAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAAAAAAAAAAABAgABAwAAAAAAAAAAAAAAAAAAAAAAAAQBAQUBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAQEAAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQABAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQAAAAAAAAAAAAABAAACAAAAAAAAAAAAAAAAAAAAAAAAAwQAAAQEBAQEBAQEBAQEBQQEBAQEBAQEBAQEBAAEAAYHBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEAAQABAAEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAQEBAAAAAQAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAQAAAQAAAAAAAAAAAAAAAAAAAAAAAAEAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAgAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAEAAAEAAAAAAAAAAAAAAAAAAAAAAAABAAAAAAAAAAAAAgAAAAACAAAAAAAAAAAAAAAAAAAAAAADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwAAAAAAAAMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA
AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAE5PVU5DRUVDS09VVE5FQ1RFVEVDUklCRUxVU0hFVEVBRFNFQVJDSFJHRUNUSVZJVFlMRU5EQVJWRU9USUZZUFRJT05TQ0hTRUFZU1RBVENIR0VPUkRJUkVDVE9SVFJDSFBBUkFNRVRFUlVSQ0VCU0NSSUJFQVJET1dOQUNFSU5ETktDS1VCU0NSSUJFSFRUUC9BRFRQLw==' /***/ }), /***/ 50172: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.enumToMap = void 0; function enumToMap(obj) { const res = {}; Object.keys(obj).forEach((key) => { const value = obj[key]; if (typeof value === 'number') { res[key] = value; } }); return res; } exports.enumToMap = enumToMap; //# sourceMappingURL=utils.js.map /***/ }), /***/ 47501: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { kClients } = __nccwpck_require__(36443) const Agent = __nccwpck_require__(59965) const { kAgent, kMockAgentSet, kMockAgentGet, kDispatches, kIsMockActive, kNetConnect, kGetNetConnect, kOptions, kFactory } = __nccwpck_require__(91117) const MockClient = __nccwpck_require__(47365) const MockPool = __nccwpck_require__(94004) const { matchValue, buildMockOptions } = __nccwpck_require__(53397) const { InvalidArgumentError, UndiciError } = __nccwpck_require__(68707) const Dispatcher = __nccwpck_require__(28611) const Pluralizer = __nccwpck_require__(91529) const PendingInterceptorsFormatter = __nccwpck_require__(56142) class FakeWeakRef { constructor (value) { this.value = value } deref () { return this.value } } class MockAgent extends Dispatcher { constructor (opts) { super(opts) this[kNetConnect] = true this[kIsMockActive] = true // Instantiate Agent and encapsulate if ((opts && opts.agent && typeof opts.agent.dispatch !== 'function')) { throw new InvalidArgumentError('Argument opts.agent must implement Agent') } const agent = opts && opts.agent ? opts.agent : new Agent(opts) this[kAgent] = agent this[kClients] = agent[kClients] this[kOptions] = buildMockOptions(opts) } get (origin) { let dispatcher = this[kMockAgentGet](origin) if (!dispatcher) { dispatcher = this[kFactory](origin) this[kMockAgentSet](origin, dispatcher) } return dispatcher } dispatch (opts, handler) { // Call MockAgent.get to perform additional setup before dispatching as normal this.get(opts.origin) return this[kAgent].dispatch(opts, handler) } async close () { await this[kAgent].close() this[kClients].clear() } deactivate () { this[kIsMockActive] = false } activate () { this[kIsMockActive] = true } enableNetConnect (matcher) { if (typeof matcher === 'string' || typeof matcher === 'function' || matcher instanceof RegExp) { if (Array.isArray(this[kNetConnect])) { this[kNetConnect].push(matcher) } else { this[kNetConnect] = [matcher] } } else if (typeof matcher === 'undefined') { this[kNetConnect] = true } else { throw new InvalidArgumentError('Unsupported matcher. Must be one of String|Function|RegExp.') } } disableNetConnect () { this[kNetConnect] = false } // This is required to bypass issues caused by using global symbols - see: // https://github.com/nodejs/undici/issues/1447 get isMockActive () { return this[kIsMockActive] } [kMockAgentSet] (origin, dispatcher) { this[kClients].set(origin, new FakeWeakRef(dispatcher)) } [kFactory] (origin) { const mockOptions = Object.assign({ agent: this }, this[kOptions]) return this[kOptions] && this[kOptions].connections === 1 ? 
new MockClient(origin, mockOptions) : new MockPool(origin, mockOptions) } [kMockAgentGet] (origin) { // First check if we can immediately find it const ref = this[kClients].get(origin) if (ref) { return ref.deref() } // If the origin is not a string create a dummy parent pool and return to user if (typeof origin !== 'string') { const dispatcher = this[kFactory]('http://localhost:9999') this[kMockAgentSet](origin, dispatcher) return dispatcher } // If we match, create a pool and assign the same dispatches for (const [keyMatcher, nonExplicitRef] of Array.from(this[kClients])) { const nonExplicitDispatcher = nonExplicitRef.deref() if (nonExplicitDispatcher && typeof keyMatcher !== 'string' && matchValue(keyMatcher, origin)) { const dispatcher = this[kFactory](origin) this[kMockAgentSet](origin, dispatcher) dispatcher[kDispatches] = nonExplicitDispatcher[kDispatches] return dispatcher } } } [kGetNetConnect] () { return this[kNetConnect] } pendingInterceptors () { const mockAgentClients = this[kClients] return Array.from(mockAgentClients.entries()) .flatMap(([origin, scope]) => scope.deref()[kDispatches].map(dispatch => ({ ...dispatch, origin }))) .filter(({ pending }) => pending) } assertNoPendingInterceptors ({ pendingInterceptorsFormatter = new PendingInterceptorsFormatter() } = {}) { const pending = this.pendingInterceptors() if (pending.length === 0) { return } const pluralizer = new Pluralizer('interceptor', 'interceptors').pluralize(pending.length) throw new UndiciError(` ${pluralizer.count} ${pluralizer.noun} ${pluralizer.is} pending: ${pendingInterceptorsFormatter.format(pending)} `.trim()) } } module.exports = MockAgent /***/ }), /***/ 47365: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { promisify } = __nccwpck_require__(39023) const Client = __nccwpck_require__(86197) const { buildMockDispatch } = __nccwpck_require__(53397) const { kDispatches, kMockAgent, kClose, kOriginalClose, kOrigin, kOriginalDispatch, kConnected } = __nccwpck_require__(91117) const { MockInterceptor } = __nccwpck_require__(31511) const Symbols = __nccwpck_require__(36443) const { InvalidArgumentError } = __nccwpck_require__(68707) /** * MockClient provides an API that extends the Client to influence the mockDispatches. */ class MockClient extends Client { constructor (origin, opts) { super(origin, opts) if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { throw new InvalidArgumentError('Argument opts.agent must implement Agent') } this[kMockAgent] = opts.agent this[kOrigin] = origin this[kDispatches] = [] this[kConnected] = 1 this[kOriginalDispatch] = this.dispatch this[kOriginalClose] = this.close.bind(this) this.dispatch = buildMockDispatch.call(this) this.close = this[kClose] } get [Symbols.kConnected] () { return this[kConnected] } /** * Sets up the base interceptor for mocking replies from undici. 
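 * @example
 * // Minimal usage sketch (not taken from the original comments; it assumes
 * // undici's public MockAgent/setGlobalDispatcher/request exports). With
 * // `connections: 1` the agent's factory above hands back a MockClient.
 * const { MockAgent, setGlobalDispatcher, request } = require('undici')
 * const mockAgent = new MockAgent({ connections: 1 })
 * setGlobalDispatcher(mockAgent)
 * mockAgent.get('http://localhost:3000')
 *   .intercept({ path: '/ping', method: 'GET' })
 *   .reply(200, 'pong')
 * // request('http://localhost:3000/ping') now resolves with the mocked body.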
*/ intercept (opts) { return new MockInterceptor(opts, this[kDispatches]) } async [kClose] () { await promisify(this[kOriginalClose])() this[kConnected] = 0 this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) } } module.exports = MockClient /***/ }), /***/ 52429: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { UndiciError } = __nccwpck_require__(68707) class MockNotMatchedError extends UndiciError { constructor (message) { super(message) Error.captureStackTrace(this, MockNotMatchedError) this.name = 'MockNotMatchedError' this.message = message || 'The request does not match any registered mock dispatches' this.code = 'UND_MOCK_ERR_MOCK_NOT_MATCHED' } } module.exports = { MockNotMatchedError } /***/ }), /***/ 31511: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { getResponseData, buildKey, addMockDispatch } = __nccwpck_require__(53397) const { kDispatches, kDispatchKey, kDefaultHeaders, kDefaultTrailers, kContentLength, kMockDispatch } = __nccwpck_require__(91117) const { InvalidArgumentError } = __nccwpck_require__(68707) const { buildURL } = __nccwpck_require__(3440) /** * Defines the scope API for an interceptor reply */ class MockScope { constructor (mockDispatch) { this[kMockDispatch] = mockDispatch } /** * Delay a reply by a set amount in ms. */ delay (waitInMs) { if (typeof waitInMs !== 'number' || !Number.isInteger(waitInMs) || waitInMs <= 0) { throw new InvalidArgumentError('waitInMs must be a valid integer > 0') } this[kMockDispatch].delay = waitInMs return this } /** * For a defined reply, never mark as consumed. */ persist () { this[kMockDispatch].persist = true return this } /** * Allow one to define a reply for a set amount of matching requests. */ times (repeatTimes) { if (typeof repeatTimes !== 'number' || !Number.isInteger(repeatTimes) || repeatTimes <= 0) { throw new InvalidArgumentError('repeatTimes must be a valid integer > 0') } this[kMockDispatch].times = repeatTimes return this } } /** * Defines an interceptor for a Mock */ class MockInterceptor { constructor (opts, mockDispatches) { if (typeof opts !== 'object') { throw new InvalidArgumentError('opts must be an object') } if (typeof opts.path === 'undefined') { throw new InvalidArgumentError('opts.path must be defined') } if (typeof opts.method === 'undefined') { opts.method = 'GET' } // See https://github.com/nodejs/undici/issues/1245 // As per RFC 3986, clients are not supposed to send URI // fragments to servers when they retrieve a document, if (typeof opts.path === 'string') { if (opts.query) { opts.path = buildURL(opts.path, opts.query) } else { // Matches https://github.com/nodejs/undici/blob/main/lib/fetch/index.js#L1811 const parsedURL = new URL(opts.path, 'data://') opts.path = parsedURL.pathname + parsedURL.search } } if (typeof opts.method === 'string') { opts.method = opts.method.toUpperCase() } this[kDispatchKey] = buildKey(opts) this[kDispatches] = mockDispatches this[kDefaultHeaders] = {} this[kDefaultTrailers] = {} this[kContentLength] = false } createMockScopeDispatchData (statusCode, data, responseOptions = {}) { const responseData = getResponseData(data) const contentLength = this[kContentLength] ? 
{ 'content-length': responseData.length } : {} const headers = { ...this[kDefaultHeaders], ...contentLength, ...responseOptions.headers } const trailers = { ...this[kDefaultTrailers], ...responseOptions.trailers } return { statusCode, data, headers, trailers } } validateReplyParameters (statusCode, data, responseOptions) { if (typeof statusCode === 'undefined') { throw new InvalidArgumentError('statusCode must be defined') } if (typeof data === 'undefined') { throw new InvalidArgumentError('data must be defined') } if (typeof responseOptions !== 'object') { throw new InvalidArgumentError('responseOptions must be an object') } } /** * Mock an undici request with a defined reply. */ reply (replyData) { // Values of reply aren't available right now as they // can only be available when the reply callback is invoked. if (typeof replyData === 'function') { // We'll first wrap the provided callback in another function, // this function will properly resolve the data from the callback // when invoked. const wrappedDefaultsCallback = (opts) => { // Our reply options callback contains the parameter for statusCode, data and options. const resolvedData = replyData(opts) // Check if it is in the right format if (typeof resolvedData !== 'object') { throw new InvalidArgumentError('reply options callback must return an object') } const { statusCode, data = '', responseOptions = {} } = resolvedData this.validateReplyParameters(statusCode, data, responseOptions) // Since the values can be obtained immediately we return them // from this higher order function that will be resolved later. return { ...this.createMockScopeDispatchData(statusCode, data, responseOptions) } } // Add usual dispatch data, but this time set the data parameter to function that will eventually provide data. const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], wrappedDefaultsCallback) return new MockScope(newMockDispatch) } // We can have either one or three parameters, if we get here, // we should have 1-3 parameters. So we spread the arguments of // this function to obtain the parameters, since replyData will always // just be the statusCode. const [statusCode, data = '', responseOptions = {}] = [...arguments] this.validateReplyParameters(statusCode, data, responseOptions) // Send in-already provided data like usual const dispatchData = this.createMockScopeDispatchData(statusCode, data, responseOptions) const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], dispatchData) return new MockScope(newMockDispatch) } /** * Mock an undici request with a defined error. 
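 * @example
 * // Sketch only, assuming `mockPool` was obtained via MockAgent#get(origin):
 * mockPool.intercept({ path: '/boom', method: 'GET' })
 *   .replyWithError(new Error('connection reset'))
 * // A matching request is then rejected with that error instead of receiving a reply.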
*/ replyWithError (error) { if (typeof error === 'undefined') { throw new InvalidArgumentError('error must be defined') } const newMockDispatch = addMockDispatch(this[kDispatches], this[kDispatchKey], { error }) return new MockScope(newMockDispatch) } /** * Set default reply headers on the interceptor for subsequent replies */ defaultReplyHeaders (headers) { if (typeof headers === 'undefined') { throw new InvalidArgumentError('headers must be defined') } this[kDefaultHeaders] = headers return this } /** * Set default reply trailers on the interceptor for subsequent replies */ defaultReplyTrailers (trailers) { if (typeof trailers === 'undefined') { throw new InvalidArgumentError('trailers must be defined') } this[kDefaultTrailers] = trailers return this } /** * Set reply content length header for replies on the interceptor */ replyContentLength () { this[kContentLength] = true return this } } module.exports.MockInterceptor = MockInterceptor module.exports.MockScope = MockScope /***/ }), /***/ 94004: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { promisify } = __nccwpck_require__(39023) const Pool = __nccwpck_require__(35076) const { buildMockDispatch } = __nccwpck_require__(53397) const { kDispatches, kMockAgent, kClose, kOriginalClose, kOrigin, kOriginalDispatch, kConnected } = __nccwpck_require__(91117) const { MockInterceptor } = __nccwpck_require__(31511) const Symbols = __nccwpck_require__(36443) const { InvalidArgumentError } = __nccwpck_require__(68707) /** * MockPool provides an API that extends the Pool to influence the mockDispatches. */ class MockPool extends Pool { constructor (origin, opts) { super(origin, opts) if (!opts || !opts.agent || typeof opts.agent.dispatch !== 'function') { throw new InvalidArgumentError('Argument opts.agent must implement Agent') } this[kMockAgent] = opts.agent this[kOrigin] = origin this[kDispatches] = [] this[kConnected] = 1 this[kOriginalDispatch] = this.dispatch this[kOriginalClose] = this.close.bind(this) this.dispatch = buildMockDispatch.call(this) this.close = this[kClose] } get [Symbols.kConnected] () { return this[kConnected] } /** * Sets up the base interceptor for mocking replies from undici. 
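 * @example
 * // Sketch of the pooled path, assuming `mockAgent` is an active MockAgent:
 * // without `connections: 1` its factory returns a MockPool backed by this class.
 * const mockPool = mockAgent.get('https://api.example.com')
 * mockPool.intercept({ path: '/users', method: 'GET' })
 *   .reply(200, [{ id: 1 }], { headers: { 'content-type': 'application/json' } })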
*/ intercept (opts) { return new MockInterceptor(opts, this[kDispatches]) } async [kClose] () { await promisify(this[kOriginalClose])() this[kConnected] = 0 this[kMockAgent][Symbols.kClients].delete(this[kOrigin]) } } module.exports = MockPool /***/ }), /***/ 91117: /***/ ((module) => { "use strict"; module.exports = { kAgent: Symbol('agent'), kOptions: Symbol('options'), kFactory: Symbol('factory'), kDispatches: Symbol('dispatches'), kDispatchKey: Symbol('dispatch key'), kDefaultHeaders: Symbol('default headers'), kDefaultTrailers: Symbol('default trailers'), kContentLength: Symbol('content length'), kMockAgent: Symbol('mock agent'), kMockAgentSet: Symbol('mock agent set'), kMockAgentGet: Symbol('mock agent get'), kMockDispatch: Symbol('mock dispatch'), kClose: Symbol('close'), kOriginalClose: Symbol('original agent close'), kOrigin: Symbol('origin'), kIsMockActive: Symbol('is mock active'), kNetConnect: Symbol('net connect'), kGetNetConnect: Symbol('get net connect'), kConnected: Symbol('connected') } /***/ }), /***/ 53397: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { MockNotMatchedError } = __nccwpck_require__(52429) const { kDispatches, kMockAgent, kOriginalDispatch, kOrigin, kGetNetConnect } = __nccwpck_require__(91117) const { buildURL, nop } = __nccwpck_require__(3440) const { STATUS_CODES } = __nccwpck_require__(58611) const { types: { isPromise } } = __nccwpck_require__(39023) function matchValue (match, value) { if (typeof match === 'string') { return match === value } if (match instanceof RegExp) { return match.test(value) } if (typeof match === 'function') { return match(value) === true } return false } function lowerCaseEntries (headers) { return Object.fromEntries( Object.entries(headers).map(([headerName, headerValue]) => { return [headerName.toLocaleLowerCase(), headerValue] }) ) } /** * @param {import('../../index').Headers|string[]|Record} headers * @param {string} key */ function getHeaderByName (headers, key) { if (Array.isArray(headers)) { for (let i = 0; i < headers.length; i += 2) { if (headers[i].toLocaleLowerCase() === key.toLocaleLowerCase()) { return headers[i + 1] } } return undefined } else if (typeof headers.get === 'function') { return headers.get(key) } else { return lowerCaseEntries(headers)[key.toLocaleLowerCase()] } } /** @param {string[]} headers */ function buildHeadersFromArray (headers) { // fetch HeadersList const clone = headers.slice() const entries = [] for (let index = 0; index < clone.length; index += 2) { entries.push([clone[index], clone[index + 1]]) } return Object.fromEntries(entries) } function matchHeaders (mockDispatch, headers) { if (typeof mockDispatch.headers === 'function') { if (Array.isArray(headers)) { // fetch HeadersList headers = buildHeadersFromArray(headers) } return mockDispatch.headers(headers ? 
lowerCaseEntries(headers) : {}) } if (typeof mockDispatch.headers === 'undefined') { return true } if (typeof headers !== 'object' || typeof mockDispatch.headers !== 'object') { return false } for (const [matchHeaderName, matchHeaderValue] of Object.entries(mockDispatch.headers)) { const headerValue = getHeaderByName(headers, matchHeaderName) if (!matchValue(matchHeaderValue, headerValue)) { return false } } return true } function safeUrl (path) { if (typeof path !== 'string') { return path } const pathSegments = path.split('?') if (pathSegments.length !== 2) { return path } const qp = new URLSearchParams(pathSegments.pop()) qp.sort() return [...pathSegments, qp.toString()].join('?') } function matchKey (mockDispatch, { path, method, body, headers }) { const pathMatch = matchValue(mockDispatch.path, path) const methodMatch = matchValue(mockDispatch.method, method) const bodyMatch = typeof mockDispatch.body !== 'undefined' ? matchValue(mockDispatch.body, body) : true const headersMatch = matchHeaders(mockDispatch, headers) return pathMatch && methodMatch && bodyMatch && headersMatch } function getResponseData (data) { if (Buffer.isBuffer(data)) { return data } else if (typeof data === 'object') { return JSON.stringify(data) } else { return data.toString() } } function getMockDispatch (mockDispatches, key) { const basePath = key.query ? buildURL(key.path, key.query) : key.path const resolvedPath = typeof basePath === 'string' ? safeUrl(basePath) : basePath // Match path let matchedMockDispatches = mockDispatches.filter(({ consumed }) => !consumed).filter(({ path }) => matchValue(safeUrl(path), resolvedPath)) if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for path '${resolvedPath}'`) } // Match method matchedMockDispatches = matchedMockDispatches.filter(({ method }) => matchValue(method, key.method)) if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for method '${key.method}'`) } // Match body matchedMockDispatches = matchedMockDispatches.filter(({ body }) => typeof body !== 'undefined' ? matchValue(body, key.body) : true) if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for body '${key.body}'`) } // Match headers matchedMockDispatches = matchedMockDispatches.filter((mockDispatch) => matchHeaders(mockDispatch, key.headers)) if (matchedMockDispatches.length === 0) { throw new MockNotMatchedError(`Mock dispatch not matched for headers '${typeof key.headers === 'object' ? JSON.stringify(key.headers) : key.headers}'`) } return matchedMockDispatches[0] } function addMockDispatch (mockDispatches, key, data) { const baseData = { timesInvoked: 0, times: 1, persist: false, consumed: false } const replyData = typeof data === 'function' ? { callback: data } : { ...data } const newMockDispatch = { ...baseData, ...key, pending: true, data: { error: null, ...replyData } } mockDispatches.push(newMockDispatch) return newMockDispatch } function deleteMockDispatch (mockDispatches, key) { const index = mockDispatches.findIndex(dispatch => { if (!dispatch.consumed) { return false } return matchKey(dispatch, key) }) if (index !== -1) { mockDispatches.splice(index, 1) } } function buildKey (opts) { const { path, method, body, headers, query } = opts return { path, method, body, headers, query } } function generateKeyValues (data) { return Object.entries(data).reduce((keyValuePairs, [key, value]) => [ ...keyValuePairs, Buffer.from(`${key}`), Array.isArray(value) ? 
value.map(x => Buffer.from(`${x}`)) : Buffer.from(`${value}`) ], []) } /** * @see https://developer.mozilla.org/en-US/docs/Web/HTTP/Status * @param {number} statusCode */ function getStatusText (statusCode) { return STATUS_CODES[statusCode] || 'unknown' } async function getResponse (body) { const buffers = [] for await (const data of body) { buffers.push(data) } return Buffer.concat(buffers).toString('utf8') } /** * Mock dispatch function used to simulate undici dispatches */ function mockDispatch (opts, handler) { // Get mock dispatch from built key const key = buildKey(opts) const mockDispatch = getMockDispatch(this[kDispatches], key) mockDispatch.timesInvoked++ // Here's where we resolve a callback if a callback is present for the dispatch data. if (mockDispatch.data.callback) { mockDispatch.data = { ...mockDispatch.data, ...mockDispatch.data.callback(opts) } } // Parse mockDispatch data const { data: { statusCode, data, headers, trailers, error }, delay, persist } = mockDispatch const { timesInvoked, times } = mockDispatch // If it's used up and not persistent, mark as consumed mockDispatch.consumed = !persist && timesInvoked >= times mockDispatch.pending = timesInvoked < times // If specified, trigger dispatch error if (error !== null) { deleteMockDispatch(this[kDispatches], key) handler.onError(error) return true } // Handle the request with a delay if necessary if (typeof delay === 'number' && delay > 0) { setTimeout(() => { handleReply(this[kDispatches]) }, delay) } else { handleReply(this[kDispatches]) } function handleReply (mockDispatches, _data = data) { // fetch's HeadersList is a 1D string array const optsHeaders = Array.isArray(opts.headers) ? buildHeadersFromArray(opts.headers) : opts.headers const body = typeof _data === 'function' ? _data({ ...opts, headers: optsHeaders }) : _data // util.types.isPromise is likely needed for jest. if (isPromise(body)) { // If handleReply is asynchronous, throwing an error // in the callback will reject the promise, rather than // synchronously throw the error, which breaks some tests. // Rather, we wait for the callback to resolve if it is a // promise, and then re-run handleReply with the new body. 
body.then((newData) => handleReply(mockDispatches, newData)) return } const responseData = getResponseData(body) const responseHeaders = generateKeyValues(headers) const responseTrailers = generateKeyValues(trailers) handler.abort = nop handler.onHeaders(statusCode, responseHeaders, resume, getStatusText(statusCode)) handler.onData(Buffer.from(responseData)) handler.onComplete(responseTrailers) deleteMockDispatch(mockDispatches, key) } function resume () {} return true } function buildMockDispatch () { const agent = this[kMockAgent] const origin = this[kOrigin] const originalDispatch = this[kOriginalDispatch] return function dispatch (opts, handler) { if (agent.isMockActive) { try { mockDispatch.call(this, opts, handler) } catch (error) { if (error instanceof MockNotMatchedError) { const netConnect = agent[kGetNetConnect]() if (netConnect === false) { throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect disabled)`) } if (checkNetConnect(netConnect, origin)) { originalDispatch.call(this, opts, handler) } else { throw new MockNotMatchedError(`${error.message}: subsequent request to origin ${origin} was not allowed (net.connect is not enabled for this origin)`) } } else { throw error } } } else { originalDispatch.call(this, opts, handler) } } } function checkNetConnect (netConnect, origin) { const url = new URL(origin) if (netConnect === true) { return true } else if (Array.isArray(netConnect) && netConnect.some((matcher) => matchValue(matcher, url.host))) { return true } return false } function buildMockOptions (opts) { if (opts) { const { agent, ...mockOptions } = opts return mockOptions } } module.exports = { getResponseData, getMockDispatch, addMockDispatch, deleteMockDispatch, buildKey, generateKeyValues, matchValue, getResponse, getStatusText, mockDispatch, buildMockDispatch, checkNetConnect, buildMockOptions, getHeaderByName } /***/ }), /***/ 56142: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { Transform } = __nccwpck_require__(2203) const { Console } = __nccwpck_require__(64236) /** * Gets the output of `console.table(…)` as a string. */ module.exports = class PendingInterceptorsFormatter { constructor ({ disableColors } = {}) { this.transform = new Transform({ transform (chunk, _enc, cb) { cb(null, chunk) } }) this.logger = new Console({ stdout: this.transform, inspectOptions: { colors: !disableColors && !process.env.CI } }) } format (pendingInterceptors) { const withPrettyHeaders = pendingInterceptors.map( ({ method, path, data: { statusCode }, persist, times, timesInvoked, origin }) => ({ Method: method, Origin: origin, Path: path, 'Status code': statusCode, Persistent: persist ? '✅' : '❌', Invocations: timesInvoked, Remaining: persist ? Infinity : times - timesInvoked })) this.logger.table(withPrettyHeaders) return this.transform.read().toString() } } /***/ }), /***/ 91529: /***/ ((module) => { "use strict"; const singulars = { pronoun: 'it', is: 'is', was: 'was', this: 'this' } const plurals = { pronoun: 'they', is: 'are', was: 'were', this: 'these' } module.exports = class Pluralizer { constructor (singular, plural) { this.singular = singular this.plural = plural } pluralize (count) { const one = count === 1 const keys = one ? singulars : plurals const noun = one ? 
this.singular : this.plural return { ...keys, count, noun } } } /***/ }), /***/ 34869: /***/ ((module) => { "use strict"; /* eslint-disable */ // Extracted from node/lib/internal/fixed_queue.js // Currently optimal queue size, tested on V8 6.0 - 6.6. Must be power of two. const kSize = 2048; const kMask = kSize - 1; // The FixedQueue is implemented as a singly-linked list of fixed-size // circular buffers. It looks something like this: // // head tail // | | // v v // +-----------+ <-----\ +-----------+ <------\ +-----------+ // | [null] | \----- | next | \------- | next | // +-----------+ +-----------+ +-----------+ // | item | <-- bottom | item | <-- bottom | [empty] | // | item | | item | | [empty] | // | item | | item | | [empty] | // | item | | item | | [empty] | // | item | | item | bottom --> | item | // | item | | item | | item | // | ... | | ... | | ... | // | item | | item | | item | // | item | | item | | item | // | [empty] | <-- top | item | | item | // | [empty] | | item | | item | // | [empty] | | [empty] | <-- top top --> | [empty] | // +-----------+ +-----------+ +-----------+ // // Or, if there is only one circular buffer, it looks something // like either of these: // // head tail head tail // | | | | // v v v v // +-----------+ +-----------+ // | [null] | | [null] | // +-----------+ +-----------+ // | [empty] | | item | // | [empty] | | item | // | item | <-- bottom top --> | [empty] | // | item | | [empty] | // | [empty] | <-- top bottom --> | item | // | [empty] | | item | // +-----------+ +-----------+ // // Adding a value means moving `top` forward by one, removing means // moving `bottom` forward by one. After reaching the end, the queue // wraps around. // // When `top === bottom` the current queue is empty and when // `top + 1 === bottom` it's full. This wastes a single space of storage // but allows much quicker checks. class FixedCircularBuffer { constructor() { this.bottom = 0; this.top = 0; this.list = new Array(kSize); this.next = null; } isEmpty() { return this.top === this.bottom; } isFull() { return ((this.top + 1) & kMask) === this.bottom; } push(data) { this.list[this.top] = data; this.top = (this.top + 1) & kMask; } shift() { const nextItem = this.list[this.bottom]; if (nextItem === undefined) return null; this.list[this.bottom] = undefined; this.bottom = (this.bottom + 1) & kMask; return nextItem; } } module.exports = class FixedQueue { constructor() { this.head = this.tail = new FixedCircularBuffer(); } isEmpty() { return this.head.isEmpty(); } push(data) { if (this.head.isFull()) { // Head is full: Creates a new queue, sets the old queue's `.next` to it, // and sets it as the new main queue. this.head = this.head.next = new FixedCircularBuffer(); } this.head.push(data); } shift() { const tail = this.tail; const next = tail.shift(); if (tail.isEmpty() && tail.next !== null) { // If there is another queue, it forms the new tail. 
this.tail = tail.next; } return next; } }; /***/ }), /***/ 58640: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const DispatcherBase = __nccwpck_require__(50001) const FixedQueue = __nccwpck_require__(34869) const { kConnected, kSize, kRunning, kPending, kQueued, kBusy, kFree, kUrl, kClose, kDestroy, kDispatch } = __nccwpck_require__(36443) const PoolStats = __nccwpck_require__(24622) const kClients = Symbol('clients') const kNeedDrain = Symbol('needDrain') const kQueue = Symbol('queue') const kClosedResolve = Symbol('closed resolve') const kOnDrain = Symbol('onDrain') const kOnConnect = Symbol('onConnect') const kOnDisconnect = Symbol('onDisconnect') const kOnConnectionError = Symbol('onConnectionError') const kGetDispatcher = Symbol('get dispatcher') const kAddClient = Symbol('add client') const kRemoveClient = Symbol('remove client') const kStats = Symbol('stats') class PoolBase extends DispatcherBase { constructor () { super() this[kQueue] = new FixedQueue() this[kClients] = [] this[kQueued] = 0 const pool = this this[kOnDrain] = function onDrain (origin, targets) { const queue = pool[kQueue] let needDrain = false while (!needDrain) { const item = queue.shift() if (!item) { break } pool[kQueued]-- needDrain = !this.dispatch(item.opts, item.handler) } this[kNeedDrain] = needDrain if (!this[kNeedDrain] && pool[kNeedDrain]) { pool[kNeedDrain] = false pool.emit('drain', origin, [pool, ...targets]) } if (pool[kClosedResolve] && queue.isEmpty()) { Promise .all(pool[kClients].map(c => c.close())) .then(pool[kClosedResolve]) } } this[kOnConnect] = (origin, targets) => { pool.emit('connect', origin, [pool, ...targets]) } this[kOnDisconnect] = (origin, targets, err) => { pool.emit('disconnect', origin, [pool, ...targets], err) } this[kOnConnectionError] = (origin, targets, err) => { pool.emit('connectionError', origin, [pool, ...targets], err) } this[kStats] = new PoolStats(this) } get [kBusy] () { return this[kNeedDrain] } get [kConnected] () { return this[kClients].filter(client => client[kConnected]).length } get [kFree] () { return this[kClients].filter(client => client[kConnected] && !client[kNeedDrain]).length } get [kPending] () { let ret = this[kQueued] for (const { [kPending]: pending } of this[kClients]) { ret += pending } return ret } get [kRunning] () { let ret = 0 for (const { [kRunning]: running } of this[kClients]) { ret += running } return ret } get [kSize] () { let ret = this[kQueued] for (const { [kSize]: size } of this[kClients]) { ret += size } return ret } get stats () { return this[kStats] } async [kClose] () { if (this[kQueue].isEmpty()) { return Promise.all(this[kClients].map(c => c.close())) } else { return new Promise((resolve) => { this[kClosedResolve] = resolve }) } } async [kDestroy] (err) { while (true) { const item = this[kQueue].shift() if (!item) { break } item.handler.onError(err) } return Promise.all(this[kClients].map(c => c.destroy(err))) } [kDispatch] (opts, handler) { const dispatcher = this[kGetDispatcher]() if (!dispatcher) { this[kNeedDrain] = true this[kQueue].push({ opts, handler }) this[kQueued]++ } else if (!dispatcher.dispatch(opts, handler)) { dispatcher[kNeedDrain] = true this[kNeedDrain] = !this[kGetDispatcher]() } return !this[kNeedDrain] } [kAddClient] (client) { client .on('drain', this[kOnDrain]) .on('connect', this[kOnConnect]) .on('disconnect', this[kOnDisconnect]) .on('connectionError', this[kOnConnectionError]) this[kClients].push(client) if (this[kNeedDrain]) { process.nextTick(() => { if 
(this[kNeedDrain]) { this[kOnDrain](client[kUrl], [this, client]) } }) } return this } [kRemoveClient] (client) { client.close(() => { const idx = this[kClients].indexOf(client) if (idx !== -1) { this[kClients].splice(idx, 1) } }) this[kNeedDrain] = this[kClients].some(dispatcher => ( !dispatcher[kNeedDrain] && dispatcher.closed !== true && dispatcher.destroyed !== true )) } } module.exports = { PoolBase, kClients, kNeedDrain, kAddClient, kRemoveClient, kGetDispatcher } /***/ }), /***/ 24622: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const { kFree, kConnected, kPending, kQueued, kRunning, kSize } = __nccwpck_require__(36443) const kPool = Symbol('pool') class PoolStats { constructor (pool) { this[kPool] = pool } get connected () { return this[kPool][kConnected] } get free () { return this[kPool][kFree] } get pending () { return this[kPool][kPending] } get queued () { return this[kPool][kQueued] } get running () { return this[kPool][kRunning] } get size () { return this[kPool][kSize] } } module.exports = PoolStats /***/ }), /***/ 35076: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { PoolBase, kClients, kNeedDrain, kAddClient, kGetDispatcher } = __nccwpck_require__(58640) const Client = __nccwpck_require__(86197) const { InvalidArgumentError } = __nccwpck_require__(68707) const util = __nccwpck_require__(3440) const { kUrl, kInterceptors } = __nccwpck_require__(36443) const buildConnector = __nccwpck_require__(59136) const kOptions = Symbol('options') const kConnections = Symbol('connections') const kFactory = Symbol('factory') function defaultFactory (origin, opts) { return new Client(origin, opts) } class Pool extends PoolBase { constructor (origin, { connections, factory = defaultFactory, connect, connectTimeout, tls, maxCachedSessions, socketPath, autoSelectFamily, autoSelectFamilyAttemptTimeout, allowH2, ...options } = {}) { super() if (connections != null && (!Number.isFinite(connections) || connections < 0)) { throw new InvalidArgumentError('invalid connections') } if (typeof factory !== 'function') { throw new InvalidArgumentError('factory must be a function.') } if (connect != null && typeof connect !== 'function' && typeof connect !== 'object') { throw new InvalidArgumentError('connect must be a function or an object') } if (typeof connect !== 'function') { connect = buildConnector({ ...tls, maxCachedSessions, allowH2, socketPath, timeout: connectTimeout, ...(util.nodeHasAutoSelectFamily && autoSelectFamily ? { autoSelectFamily, autoSelectFamilyAttemptTimeout } : undefined), ...connect }) } this[kInterceptors] = options.interceptors && options.interceptors.Pool && Array.isArray(options.interceptors.Pool) ? options.interceptors.Pool : [] this[kConnections] = connections || null this[kUrl] = util.parseOrigin(origin) this[kOptions] = { ...util.deepClone(options), connect, allowH2 } this[kOptions].interceptors = options.interceptors ? { ...options.interceptors } : undefined this[kFactory] = factory this.on('connectionError', (origin, targets, error) => { // If a connection error occurs, we remove the client from the pool, // and emit a connectionError event. They will not be re-used. // Fixes https://github.com/nodejs/undici/issues/3895 for (const target of targets) { // Do not use kRemoveClient here, as it will close the client, // but the client cannot be closed in this state. 
const idx = this[kClients].indexOf(target) if (idx !== -1) { this[kClients].splice(idx, 1) } } }) } [kGetDispatcher] () { let dispatcher = this[kClients].find(dispatcher => !dispatcher[kNeedDrain]) if (dispatcher) { return dispatcher } if (!this[kConnections] || this[kClients].length < this[kConnections]) { dispatcher = this[kFactory](this[kUrl], this[kOptions]) this[kAddClient](dispatcher) } return dispatcher } } module.exports = Pool /***/ }), /***/ 22720: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { kProxy, kClose, kDestroy, kInterceptors } = __nccwpck_require__(36443) const { URL } = __nccwpck_require__(87016) const Agent = __nccwpck_require__(59965) const Pool = __nccwpck_require__(35076) const DispatcherBase = __nccwpck_require__(50001) const { InvalidArgumentError, RequestAbortedError } = __nccwpck_require__(68707) const buildConnector = __nccwpck_require__(59136) const kAgent = Symbol('proxy agent') const kClient = Symbol('proxy client') const kProxyHeaders = Symbol('proxy headers') const kRequestTls = Symbol('request tls settings') const kProxyTls = Symbol('proxy tls settings') const kConnectEndpoint = Symbol('connect endpoint function') function defaultProtocolPort (protocol) { return protocol === 'https:' ? 443 : 80 } function buildProxyOptions (opts) { if (typeof opts === 'string') { opts = { uri: opts } } if (!opts || !opts.uri) { throw new InvalidArgumentError('Proxy opts.uri is mandatory') } return { uri: opts.uri, protocol: opts.protocol || 'https' } } function defaultFactory (origin, opts) { return new Pool(origin, opts) } class ProxyAgent extends DispatcherBase { constructor (opts) { super(opts) this[kProxy] = buildProxyOptions(opts) this[kAgent] = new Agent(opts) this[kInterceptors] = opts.interceptors && opts.interceptors.ProxyAgent && Array.isArray(opts.interceptors.ProxyAgent) ? 
opts.interceptors.ProxyAgent : [] if (typeof opts === 'string') { opts = { uri: opts } } if (!opts || !opts.uri) { throw new InvalidArgumentError('Proxy opts.uri is mandatory') } const { clientFactory = defaultFactory } = opts if (typeof clientFactory !== 'function') { throw new InvalidArgumentError('Proxy opts.clientFactory must be a function.') } this[kRequestTls] = opts.requestTls this[kProxyTls] = opts.proxyTls this[kProxyHeaders] = opts.headers || {} const resolvedUrl = new URL(opts.uri) const { origin, port, host, username, password } = resolvedUrl if (opts.auth && opts.token) { throw new InvalidArgumentError('opts.auth cannot be used in combination with opts.token') } else if (opts.auth) { /* @deprecated in favour of opts.token */ this[kProxyHeaders]['proxy-authorization'] = `Basic ${opts.auth}` } else if (opts.token) { this[kProxyHeaders]['proxy-authorization'] = opts.token } else if (username && password) { this[kProxyHeaders]['proxy-authorization'] = `Basic ${Buffer.from(`${decodeURIComponent(username)}:${decodeURIComponent(password)}`).toString('base64')}` } const connect = buildConnector({ ...opts.proxyTls }) this[kConnectEndpoint] = buildConnector({ ...opts.requestTls }) this[kClient] = clientFactory(resolvedUrl, { connect }) this[kAgent] = new Agent({ ...opts, connect: async (opts, callback) => { let requestedHost = opts.host if (!opts.port) { requestedHost += `:${defaultProtocolPort(opts.protocol)}` } try { const { socket, statusCode } = await this[kClient].connect({ origin, port, path: requestedHost, signal: opts.signal, headers: { ...this[kProxyHeaders], host } }) if (statusCode !== 200) { socket.on('error', () => {}).destroy() callback(new RequestAbortedError(`Proxy response (${statusCode}) !== 200 when HTTP Tunneling`)) } if (opts.protocol !== 'https:') { callback(null, socket) return } let servername if (this[kRequestTls]) { servername = this[kRequestTls].servername } else { servername = opts.servername } this[kConnectEndpoint]({ ...opts, servername, httpSocket: socket }, callback) } catch (err) { callback(err) } } }) } dispatch (opts, handler) { const { host } = new URL(opts.origin) const headers = buildHeaders(opts.headers) throwIfProxyAuthIsSent(headers) return this[kAgent].dispatch( { ...opts, headers: { ...headers, host } }, handler ) } async [kClose] () { await this[kAgent].close() await this[kClient].close() } async [kDestroy] () { await this[kAgent].destroy() await this[kClient].destroy() } } /** * @param {string[] | Record} headers * @returns {Record} */ function buildHeaders (headers) { // When using undici.fetch, the headers list is stored // as an array. if (Array.isArray(headers)) { /** @type {Record} */ const headersPair = {} for (let i = 0; i < headers.length; i += 2) { headersPair[headers[i]] = headers[i + 1] } return headersPair } return headers } /** * @param {Record} headers * * Previous versions of ProxyAgent suggests the Proxy-Authorization in request headers * Nevertheless, it was changed and to avoid a security vulnerability by end users * this check was created. 
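 * Proxy credentials should instead be supplied through the ProxyAgent constructor
 * (the `token` or `auth` options, or userinfo in the proxy URI, as handled above).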
* It should be removed in the next major version for performance reasons */ function throwIfProxyAuthIsSent (headers) { const existProxyAuth = headers && Object.keys(headers) .find((key) => key.toLowerCase() === 'proxy-authorization') if (existProxyAuth) { throw new InvalidArgumentError('Proxy-Authorization should be sent in ProxyAgent constructor') } } module.exports = ProxyAgent /***/ }), /***/ 28804: /***/ ((module) => { "use strict"; let fastNow = Date.now() let fastNowTimeout const fastTimers = [] function onTimeout () { fastNow = Date.now() let len = fastTimers.length let idx = 0 while (idx < len) { const timer = fastTimers[idx] if (timer.state === 0) { timer.state = fastNow + timer.delay } else if (timer.state > 0 && fastNow >= timer.state) { timer.state = -1 timer.callback(timer.opaque) } if (timer.state === -1) { timer.state = -2 if (idx !== len - 1) { fastTimers[idx] = fastTimers.pop() } else { fastTimers.pop() } len -= 1 } else { idx += 1 } } if (fastTimers.length > 0) { refreshTimeout() } } function refreshTimeout () { if (fastNowTimeout && fastNowTimeout.refresh) { fastNowTimeout.refresh() } else { clearTimeout(fastNowTimeout) fastNowTimeout = setTimeout(onTimeout, 1e3) if (fastNowTimeout.unref) { fastNowTimeout.unref() } } } class Timeout { constructor (callback, delay, opaque) { this.callback = callback this.delay = delay this.opaque = opaque // -2 not in timer list // -1 in timer list but inactive // 0 in timer list waiting for time // > 0 in timer list waiting for time to expire this.state = -2 this.refresh() } refresh () { if (this.state === -2) { fastTimers.push(this) if (!fastNowTimeout || fastTimers.length === 1) { refreshTimeout() } } this.state = 0 } clear () { this.state = -1 } } module.exports = { setTimeout (callback, delay, opaque) { return delay < 1e3 ? setTimeout(callback, delay, opaque) : new Timeout(callback, delay, opaque) }, clearTimeout (timeout) { if (timeout instanceof Timeout) { timeout.clear() } else { clearTimeout(timeout) } } } /***/ }), /***/ 68550: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const diagnosticsChannel = __nccwpck_require__(31637) const { uid, states } = __nccwpck_require__(45913) const { kReadyState, kSentClose, kByteParser, kReceivedClose } = __nccwpck_require__(62933) const { fireEvent, failWebsocketConnection } = __nccwpck_require__(3574) const { CloseEvent } = __nccwpck_require__(46255) const { makeRequest } = __nccwpck_require__(25194) const { fetching } = __nccwpck_require__(12315) const { Headers } = __nccwpck_require__(26349) const { getGlobalDispatcher } = __nccwpck_require__(32581) const { kHeadersList } = __nccwpck_require__(36443) const channels = {} channels.open = diagnosticsChannel.channel('undici:websocket:open') channels.close = diagnosticsChannel.channel('undici:websocket:close') channels.socketError = diagnosticsChannel.channel('undici:websocket:socket_error') /** @type {import('crypto')} */ let crypto try { crypto = __nccwpck_require__(76982) } catch { } /** * @see https://websockets.spec.whatwg.org/#concept-websocket-establish * @param {URL} url * @param {string|string[]} protocols * @param {import('./websocket').WebSocket} ws * @param {(response: any) => void} onEstablish * @param {Partial} options */ function establishWebSocketConnection (url, protocols, ws, onEstablish, options) { // 1. Let requestURL be a copy of url, with its scheme set to "http", if url’s // scheme is "ws", and to "https" otherwise. const requestURL = url requestURL.protocol = url.protocol === 'ws:' ? 
'http:' : 'https:' // 2. Let request be a new request, whose URL is requestURL, client is client, // service-workers mode is "none", referrer is "no-referrer", mode is // "websocket", credentials mode is "include", cache mode is "no-store" , // and redirect mode is "error". const request = makeRequest({ urlList: [requestURL], serviceWorkers: 'none', referrer: 'no-referrer', mode: 'websocket', credentials: 'include', cache: 'no-store', redirect: 'error' }) // Note: undici extension, allow setting custom headers. if (options.headers) { const headersList = new Headers(options.headers)[kHeadersList] request.headersList = headersList } // 3. Append (`Upgrade`, `websocket`) to request’s header list. // 4. Append (`Connection`, `Upgrade`) to request’s header list. // Note: both of these are handled by undici currently. // https://github.com/nodejs/undici/blob/68c269c4144c446f3f1220951338daef4a6b5ec4/lib/client.js#L1397 // 5. Let keyValue be a nonce consisting of a randomly selected // 16-byte value that has been forgiving-base64-encoded and // isomorphic encoded. const keyValue = crypto.randomBytes(16).toString('base64') // 6. Append (`Sec-WebSocket-Key`, keyValue) to request’s // header list. request.headersList.append('sec-websocket-key', keyValue) // 7. Append (`Sec-WebSocket-Version`, `13`) to request’s // header list. request.headersList.append('sec-websocket-version', '13') // 8. For each protocol in protocols, combine // (`Sec-WebSocket-Protocol`, protocol) in request’s header // list. for (const protocol of protocols) { request.headersList.append('sec-websocket-protocol', protocol) } // 9. Let permessageDeflate be a user-agent defined // "permessage-deflate" extension header value. // https://github.com/mozilla/gecko-dev/blob/ce78234f5e653a5d3916813ff990f053510227bc/netwerk/protocol/websocket/WebSocketChannel.cpp#L2673 // TODO: enable once permessage-deflate is supported const permessageDeflate = '' // 'permessage-deflate; 15' // 10. Append (`Sec-WebSocket-Extensions`, permessageDeflate) to // request’s header list. // request.headersList.append('sec-websocket-extensions', permessageDeflate) // 11. Fetch request with useParallelQueue set to true, and // processResponse given response being these steps: const controller = fetching({ request, useParallelQueue: true, dispatcher: options.dispatcher ?? getGlobalDispatcher(), processResponse (response) { // 1. If response is a network error or its status is not 101, // fail the WebSocket connection. if (response.type === 'error' || response.status !== 101) { failWebsocketConnection(ws, 'Received network error or non-101 status code.') return } // 2. If protocols is not the empty list and extracting header // list values given `Sec-WebSocket-Protocol` and response’s // header list results in null, failure, or the empty byte // sequence, then fail the WebSocket connection. if (protocols.length !== 0 && !response.headersList.get('Sec-WebSocket-Protocol')) { failWebsocketConnection(ws, 'Server did not respond with sent protocols.') return } // 3. Follow the requirements stated step 2 to step 6, inclusive, // of the last set of steps in section 4.1 of The WebSocket // Protocol to validate response. This either results in fail // the WebSocket connection or the WebSocket connection is // established. // 2. If the response lacks an |Upgrade| header field or the |Upgrade| // header field contains a value that is not an ASCII case- // insensitive match for the value "websocket", the client MUST // _Fail the WebSocket Connection_. 
if (response.headersList.get('Upgrade')?.toLowerCase() !== 'websocket') { failWebsocketConnection(ws, 'Server did not set Upgrade header to "websocket".') return } // 3. If the response lacks a |Connection| header field or the // |Connection| header field doesn't contain a token that is an // ASCII case-insensitive match for the value "Upgrade", the client // MUST _Fail the WebSocket Connection_. if (response.headersList.get('Connection')?.toLowerCase() !== 'upgrade') { failWebsocketConnection(ws, 'Server did not set Connection header to "upgrade".') return } // 4. If the response lacks a |Sec-WebSocket-Accept| header field or // the |Sec-WebSocket-Accept| contains a value other than the // base64-encoded SHA-1 of the concatenation of the |Sec-WebSocket- // Key| (as a string, not base64-decoded) with the string "258EAFA5- // E914-47DA-95CA-C5AB0DC85B11" but ignoring any leading and // trailing whitespace, the client MUST _Fail the WebSocket // Connection_. const secWSAccept = response.headersList.get('Sec-WebSocket-Accept') const digest = crypto.createHash('sha1').update(keyValue + uid).digest('base64') if (secWSAccept !== digest) { failWebsocketConnection(ws, 'Incorrect hash received in Sec-WebSocket-Accept header.') return } // 5. If the response includes a |Sec-WebSocket-Extensions| header // field and this header field indicates the use of an extension // that was not present in the client's handshake (the server has // indicated an extension not requested by the client), the client // MUST _Fail the WebSocket Connection_. (The parsing of this // header field to determine which extensions are requested is // discussed in Section 9.1.) const secExtension = response.headersList.get('Sec-WebSocket-Extensions') if (secExtension !== null && secExtension !== permessageDeflate) { failWebsocketConnection(ws, 'Received different permessage-deflate than the one set.') return } // 6. If the response includes a |Sec-WebSocket-Protocol| header field // and this header field indicates the use of a subprotocol that was // not present in the client's handshake (the server has indicated a // subprotocol not requested by the client), the client MUST _Fail // the WebSocket Connection_. const secProtocol = response.headersList.get('Sec-WebSocket-Protocol') if (secProtocol !== null && secProtocol !== request.headersList.get('Sec-WebSocket-Protocol')) { failWebsocketConnection(ws, 'Protocol was not set in the opening handshake.') return } response.socket.on('data', onSocketData) response.socket.on('close', onSocketClose) response.socket.on('error', onSocketError) if (channels.open.hasSubscribers) { channels.open.publish({ address: response.socket.address(), protocol: secProtocol, extensions: secExtension }) } onEstablish(response) } }) return controller } /** * @param {Buffer} chunk */ function onSocketData (chunk) { if (!this.ws[kByteParser].write(chunk)) { this.pause() } } /** * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.4 */ function onSocketClose () { const { ws } = this // If the TCP connection was closed after the // WebSocket closing handshake was completed, the WebSocket connection // is said to have been closed _cleanly_. const wasClean = ws[kSentClose] && ws[kReceivedClose] let code = 1005 let reason = '' const result = ws[kByteParser].closingInfo if (result) { code = result.code ?? 
1005 reason = result.reason } else if (!ws[kSentClose]) { // If _The WebSocket // Connection is Closed_ and no Close control frame was received by the // endpoint (such as could occur if the underlying transport connection // is lost), _The WebSocket Connection Close Code_ is considered to be // 1006. code = 1006 } // 1. Change the ready state to CLOSED (3). ws[kReadyState] = states.CLOSED // 2. If the user agent was required to fail the WebSocket // connection, or if the WebSocket connection was closed // after being flagged as full, fire an event named error // at the WebSocket object. // TODO // 3. Fire an event named close at the WebSocket object, // using CloseEvent, with the wasClean attribute // initialized to true if the connection closed cleanly // and false otherwise, the code attribute initialized to // the WebSocket connection close code, and the reason // attribute initialized to the result of applying UTF-8 // decode without BOM to the WebSocket connection close // reason. fireEvent('close', ws, CloseEvent, { wasClean, code, reason }) if (channels.close.hasSubscribers) { channels.close.publish({ websocket: ws, code, reason }) } } function onSocketError (error) { const { ws } = this ws[kReadyState] = states.CLOSING if (channels.socketError.hasSubscribers) { channels.socketError.publish(error) } this.destroy() } module.exports = { establishWebSocketConnection } /***/ }), /***/ 45913: /***/ ((module) => { "use strict"; // This is a Globally Unique Identifier unique used // to validate that the endpoint accepts websocket // connections. // See https://www.rfc-editor.org/rfc/rfc6455.html#section-1.3 const uid = '258EAFA5-E914-47DA-95CA-C5AB0DC85B11' /** @type {PropertyDescriptor} */ const staticPropertyDescriptors = { enumerable: true, writable: false, configurable: false } const states = { CONNECTING: 0, OPEN: 1, CLOSING: 2, CLOSED: 3 } const opcodes = { CONTINUATION: 0x0, TEXT: 0x1, BINARY: 0x2, CLOSE: 0x8, PING: 0x9, PONG: 0xA } const maxUnsigned16Bit = 2 ** 16 - 1 // 65535 const parserStates = { INFO: 0, PAYLOADLENGTH_16: 2, PAYLOADLENGTH_64: 3, READ_DATA: 4 } const emptyBuffer = Buffer.allocUnsafe(0) module.exports = { uid, staticPropertyDescriptors, states, opcodes, maxUnsigned16Bit, parserStates, emptyBuffer } /***/ }), /***/ 46255: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { webidl } = __nccwpck_require__(74222) const { kEnumerableProperty } = __nccwpck_require__(3440) const { MessagePort } = __nccwpck_require__(28167) /** * @see https://html.spec.whatwg.org/multipage/comms.html#messageevent */ class MessageEvent extends Event { #eventInit constructor (type, eventInitDict = {}) { webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent constructor' }) type = webidl.converters.DOMString(type) eventInitDict = webidl.converters.MessageEventInit(eventInitDict) super(type, eventInitDict) this.#eventInit = eventInitDict } get data () { webidl.brandCheck(this, MessageEvent) return this.#eventInit.data } get origin () { webidl.brandCheck(this, MessageEvent) return this.#eventInit.origin } get lastEventId () { webidl.brandCheck(this, MessageEvent) return this.#eventInit.lastEventId } get source () { webidl.brandCheck(this, MessageEvent) return this.#eventInit.source } get ports () { webidl.brandCheck(this, MessageEvent) if (!Object.isFrozen(this.#eventInit.ports)) { Object.freeze(this.#eventInit.ports) } return this.#eventInit.ports } initMessageEvent ( type, bubbles = false, cancelable = false, data = null, origin = '', 
lastEventId = '', source = null, ports = [] ) { webidl.brandCheck(this, MessageEvent) webidl.argumentLengthCheck(arguments, 1, { header: 'MessageEvent.initMessageEvent' }) return new MessageEvent(type, { bubbles, cancelable, data, origin, lastEventId, source, ports }) } } /** * @see https://websockets.spec.whatwg.org/#the-closeevent-interface */ class CloseEvent extends Event { #eventInit constructor (type, eventInitDict = {}) { webidl.argumentLengthCheck(arguments, 1, { header: 'CloseEvent constructor' }) type = webidl.converters.DOMString(type) eventInitDict = webidl.converters.CloseEventInit(eventInitDict) super(type, eventInitDict) this.#eventInit = eventInitDict } get wasClean () { webidl.brandCheck(this, CloseEvent) return this.#eventInit.wasClean } get code () { webidl.brandCheck(this, CloseEvent) return this.#eventInit.code } get reason () { webidl.brandCheck(this, CloseEvent) return this.#eventInit.reason } } // https://html.spec.whatwg.org/multipage/webappapis.html#the-errorevent-interface class ErrorEvent extends Event { #eventInit constructor (type, eventInitDict) { webidl.argumentLengthCheck(arguments, 1, { header: 'ErrorEvent constructor' }) super(type, eventInitDict) type = webidl.converters.DOMString(type) eventInitDict = webidl.converters.ErrorEventInit(eventInitDict ?? {}) this.#eventInit = eventInitDict } get message () { webidl.brandCheck(this, ErrorEvent) return this.#eventInit.message } get filename () { webidl.brandCheck(this, ErrorEvent) return this.#eventInit.filename } get lineno () { webidl.brandCheck(this, ErrorEvent) return this.#eventInit.lineno } get colno () { webidl.brandCheck(this, ErrorEvent) return this.#eventInit.colno } get error () { webidl.brandCheck(this, ErrorEvent) return this.#eventInit.error } } Object.defineProperties(MessageEvent.prototype, { [Symbol.toStringTag]: { value: 'MessageEvent', configurable: true }, data: kEnumerableProperty, origin: kEnumerableProperty, lastEventId: kEnumerableProperty, source: kEnumerableProperty, ports: kEnumerableProperty, initMessageEvent: kEnumerableProperty }) Object.defineProperties(CloseEvent.prototype, { [Symbol.toStringTag]: { value: 'CloseEvent', configurable: true }, reason: kEnumerableProperty, code: kEnumerableProperty, wasClean: kEnumerableProperty }) Object.defineProperties(ErrorEvent.prototype, { [Symbol.toStringTag]: { value: 'ErrorEvent', configurable: true }, message: kEnumerableProperty, filename: kEnumerableProperty, lineno: kEnumerableProperty, colno: kEnumerableProperty, error: kEnumerableProperty }) webidl.converters.MessagePort = webidl.interfaceConverter(MessagePort) webidl.converters['sequence'] = webidl.sequenceConverter( webidl.converters.MessagePort ) const eventInit = [ { key: 'bubbles', converter: webidl.converters.boolean, defaultValue: false }, { key: 'cancelable', converter: webidl.converters.boolean, defaultValue: false }, { key: 'composed', converter: webidl.converters.boolean, defaultValue: false } ] webidl.converters.MessageEventInit = webidl.dictionaryConverter([ ...eventInit, { key: 'data', converter: webidl.converters.any, defaultValue: null }, { key: 'origin', converter: webidl.converters.USVString, defaultValue: '' }, { key: 'lastEventId', converter: webidl.converters.DOMString, defaultValue: '' }, { key: 'source', // Node doesn't implement WindowProxy or ServiceWorker, so the only // valid value for source is a MessagePort. 
converter: webidl.nullableConverter(webidl.converters.MessagePort), defaultValue: null }, { key: 'ports', converter: webidl.converters['sequence'], get defaultValue () { return [] } } ]) webidl.converters.CloseEventInit = webidl.dictionaryConverter([ ...eventInit, { key: 'wasClean', converter: webidl.converters.boolean, defaultValue: false }, { key: 'code', converter: webidl.converters['unsigned short'], defaultValue: 0 }, { key: 'reason', converter: webidl.converters.USVString, defaultValue: '' } ]) webidl.converters.ErrorEventInit = webidl.dictionaryConverter([ ...eventInit, { key: 'message', converter: webidl.converters.DOMString, defaultValue: '' }, { key: 'filename', converter: webidl.converters.USVString, defaultValue: '' }, { key: 'lineno', converter: webidl.converters['unsigned long'], defaultValue: 0 }, { key: 'colno', converter: webidl.converters['unsigned long'], defaultValue: 0 }, { key: 'error', converter: webidl.converters.any } ]) module.exports = { MessageEvent, CloseEvent, ErrorEvent } /***/ }), /***/ 31237: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { maxUnsigned16Bit } = __nccwpck_require__(45913) /** @type {import('crypto')} */ let crypto try { crypto = __nccwpck_require__(76982) } catch { } class WebsocketFrameSend { /** * @param {Buffer|undefined} data */ constructor (data) { this.frameData = data this.maskKey = crypto.randomBytes(4) } createFrame (opcode) { const bodyLength = this.frameData?.byteLength ?? 0 /** @type {number} */ let payloadLength = bodyLength // 0-125 let offset = 6 if (bodyLength > maxUnsigned16Bit) { offset += 8 // payload length is next 8 bytes payloadLength = 127 } else if (bodyLength > 125) { offset += 2 // payload length is next 2 bytes payloadLength = 126 } const buffer = Buffer.allocUnsafe(bodyLength + offset) // Clear first 2 bytes, everything else is overwritten buffer[0] = buffer[1] = 0 buffer[0] |= 0x80 // FIN buffer[0] = (buffer[0] & 0xF0) + opcode // opcode /*! ws. MIT License. Einar Otto Stangvik */ buffer[offset - 4] = this.maskKey[0] buffer[offset - 3] = this.maskKey[1] buffer[offset - 2] = this.maskKey[2] buffer[offset - 1] = this.maskKey[3] buffer[1] = payloadLength if (payloadLength === 126) { buffer.writeUInt16BE(bodyLength, 2) } else if (payloadLength === 127) { // Clear extended payload length buffer[2] = buffer[3] = 0 buffer.writeUIntBE(bodyLength, 4, 6) } buffer[1] |= 0x80 // MASK // mask body for (let i = 0; i < bodyLength; i++) { buffer[offset + i] = this.frameData[i] ^ this.maskKey[i % 4] } return buffer } } module.exports = { WebsocketFrameSend } /***/ }), /***/ 43171: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { Writable } = __nccwpck_require__(2203) const diagnosticsChannel = __nccwpck_require__(31637) const { parserStates, opcodes, states, emptyBuffer } = __nccwpck_require__(45913) const { kReadyState, kSentClose, kResponse, kReceivedClose } = __nccwpck_require__(62933) const { isValidStatusCode, failWebsocketConnection, websocketMessageReceived } = __nccwpck_require__(3574) const { WebsocketFrameSend } = __nccwpck_require__(31237) // This code was influenced by ws released under the MIT license. 
// Copyright (c) 2011 Einar Otto Stangvik // Copyright (c) 2013 Arnout Kazemier and contributors // Copyright (c) 2016 Luigi Pinca and contributors const channels = {} channels.ping = diagnosticsChannel.channel('undici:websocket:ping') channels.pong = diagnosticsChannel.channel('undici:websocket:pong') class ByteParser extends Writable { #buffers = [] #byteOffset = 0 #state = parserStates.INFO #info = {} #fragments = [] constructor (ws) { super() this.ws = ws } /** * @param {Buffer} chunk * @param {() => void} callback */ _write (chunk, _, callback) { this.#buffers.push(chunk) this.#byteOffset += chunk.length this.run(callback) } /** * Runs whenever a new chunk is received. * Callback is called whenever there are no more chunks buffering, * or not enough bytes are buffered to parse. */ run (callback) { while (true) { if (this.#state === parserStates.INFO) { // If there aren't enough bytes to parse the payload length, etc. if (this.#byteOffset < 2) { return callback() } const buffer = this.consume(2) this.#info.fin = (buffer[0] & 0x80) !== 0 this.#info.opcode = buffer[0] & 0x0F // If we receive a fragmented message, we use the type of the first // frame to parse the full message as binary/text, when it's terminated this.#info.originalOpcode ??= this.#info.opcode this.#info.fragmented = !this.#info.fin && this.#info.opcode !== opcodes.CONTINUATION if (this.#info.fragmented && this.#info.opcode !== opcodes.BINARY && this.#info.opcode !== opcodes.TEXT) { // Only text and binary frames can be fragmented failWebsocketConnection(this.ws, 'Invalid frame type was fragmented.') return } const payloadLength = buffer[1] & 0x7F if (payloadLength <= 125) { this.#info.payloadLength = payloadLength this.#state = parserStates.READ_DATA } else if (payloadLength === 126) { this.#state = parserStates.PAYLOADLENGTH_16 } else if (payloadLength === 127) { this.#state = parserStates.PAYLOADLENGTH_64 } if (this.#info.fragmented && payloadLength > 125) { // A fragmented frame can't be fragmented itself failWebsocketConnection(this.ws, 'Fragmented frame exceeded 125 bytes.') return } else if ( (this.#info.opcode === opcodes.PING || this.#info.opcode === opcodes.PONG || this.#info.opcode === opcodes.CLOSE) && payloadLength > 125 ) { // Control frames can have a payload length of 125 bytes MAX failWebsocketConnection(this.ws, 'Payload length for control frame exceeded 125 bytes.') return } else if (this.#info.opcode === opcodes.CLOSE) { if (payloadLength === 1) { failWebsocketConnection(this.ws, 'Received close frame with a 1-byte body.') return } const body = this.consume(payloadLength) this.#info.closeInfo = this.parseCloseBody(false, body) if (!this.ws[kSentClose]) { // If an endpoint receives a Close frame and did not previously send a // Close frame, the endpoint MUST send a Close frame in response. (When // sending a Close frame in response, the endpoint typically echos the // status code it received.) const body = Buffer.allocUnsafe(2) body.writeUInt16BE(this.#info.closeInfo.code, 0) const closeFrame = new WebsocketFrameSend(body) this.ws[kResponse].socket.write( closeFrame.createFrame(opcodes.CLOSE), (err) => { if (!err) { this.ws[kSentClose] = true } } ) } // Upon either sending or receiving a Close control frame, it is said // that _The WebSocket Closing Handshake is Started_ and that the // WebSocket connection is in the CLOSING state. 
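// For illustration (a sketch, not executed here): the body of a Close frame is a
// 2-byte big-endian status code optionally followed by a UTF-8 reason, which is
// what this.parseCloseBody() decodes. A close with code 1000 and reason 'bye'
// would carry the bytes:
//
//   const example = Buffer.from([0x03, 0xE8, 0x62, 0x79, 0x65]) // 1000 = 0x03E8, then 'bye'
//   // parseCloseBody(false, example) -> { code: 1000, reason: 'bye' }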
this.ws[kReadyState] = states.CLOSING this.ws[kReceivedClose] = true this.end() return } else if (this.#info.opcode === opcodes.PING) { // Upon receipt of a Ping frame, an endpoint MUST send a Pong frame in // response, unless it already received a Close frame. // A Pong frame sent in response to a Ping frame must have identical // "Application data" const body = this.consume(payloadLength) if (!this.ws[kReceivedClose]) { const frame = new WebsocketFrameSend(body) this.ws[kResponse].socket.write(frame.createFrame(opcodes.PONG)) if (channels.ping.hasSubscribers) { channels.ping.publish({ payload: body }) } } this.#state = parserStates.INFO if (this.#byteOffset > 0) { continue } else { callback() return } } else if (this.#info.opcode === opcodes.PONG) { // A Pong frame MAY be sent unsolicited. This serves as a // unidirectional heartbeat. A response to an unsolicited Pong frame is // not expected. const body = this.consume(payloadLength) if (channels.pong.hasSubscribers) { channels.pong.publish({ payload: body }) } if (this.#byteOffset > 0) { continue } else { callback() return } } } else if (this.#state === parserStates.PAYLOADLENGTH_16) { if (this.#byteOffset < 2) { return callback() } const buffer = this.consume(2) this.#info.payloadLength = buffer.readUInt16BE(0) this.#state = parserStates.READ_DATA } else if (this.#state === parserStates.PAYLOADLENGTH_64) { if (this.#byteOffset < 8) { return callback() } const buffer = this.consume(8) const upper = buffer.readUInt32BE(0) // 2^31 is the maxinimum bytes an arraybuffer can contain // on 32-bit systems. Although, on 64-bit systems, this is // 2^53-1 bytes. // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Errors/Invalid_array_length // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/common/globals.h;drc=1946212ac0100668f14eb9e2843bdd846e510a1e;bpv=1;bpt=1;l=1275 // https://source.chromium.org/chromium/chromium/src/+/main:v8/src/objects/js-array-buffer.h;l=34;drc=1946212ac0100668f14eb9e2843bdd846e510a1e if (upper > 2 ** 31 - 1) { failWebsocketConnection(this.ws, 'Received payload length > 2^31 bytes.') return } const lower = buffer.readUInt32BE(4) this.#info.payloadLength = (upper << 8) + lower this.#state = parserStates.READ_DATA } else if (this.#state === parserStates.READ_DATA) { if (this.#byteOffset < this.#info.payloadLength) { // If there is still more data in this chunk that needs to be read return callback() } else if (this.#byteOffset >= this.#info.payloadLength) { // If the server sent multiple frames in a single chunk const body = this.consume(this.#info.payloadLength) this.#fragments.push(body) // If the frame is unfragmented, or a fragmented frame was terminated, // a message was received if (!this.#info.fragmented || (this.#info.fin && this.#info.opcode === opcodes.CONTINUATION)) { const fullMessage = Buffer.concat(this.#fragments) websocketMessageReceived(this.ws, this.#info.originalOpcode, fullMessage) this.#info = {} this.#fragments.length = 0 } this.#state = parserStates.INFO } } if (this.#byteOffset > 0) { continue } else { callback() break } } } /** * Take n bytes from the buffered Buffers * @param {number} n * @returns {Buffer|null} */ consume (n) { if (n > this.#byteOffset) { return null } else if (n === 0) { return emptyBuffer } if (this.#buffers[0].length === n) { this.#byteOffset -= this.#buffers[0].length return this.#buffers.shift() } const buffer = Buffer.allocUnsafe(n) let offset = 0 while (offset !== n) { const next = this.#buffers[0] const { length } = next if (length + offset 
=== n) { buffer.set(this.#buffers.shift(), offset) break } else if (length + offset > n) { buffer.set(next.subarray(0, n - offset), offset) this.#buffers[0] = next.subarray(n - offset) break } else { buffer.set(this.#buffers.shift(), offset) offset += next.length } } this.#byteOffset -= n return buffer } parseCloseBody (onlyCode, data) { // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.5 /** @type {number|undefined} */ let code if (data.length >= 2) { // _The WebSocket Connection Close Code_ is // defined as the status code (Section 7.4) contained in the first Close // control frame received by the application code = data.readUInt16BE(0) } if (onlyCode) { if (!isValidStatusCode(code)) { return null } return { code } } // https://datatracker.ietf.org/doc/html/rfc6455#section-7.1.6 /** @type {Buffer} */ let reason = data.subarray(2) // Remove BOM if (reason[0] === 0xEF && reason[1] === 0xBB && reason[2] === 0xBF) { reason = reason.subarray(3) } if (code !== undefined && !isValidStatusCode(code)) { return null } try { // TODO: optimize this reason = new TextDecoder('utf-8', { fatal: true }).decode(reason) } catch { return null } return { code, reason } } get closingInfo () { return this.#info.closeInfo } } module.exports = { ByteParser } /***/ }), /***/ 62933: /***/ ((module) => { "use strict"; module.exports = { kWebSocketURL: Symbol('url'), kReadyState: Symbol('ready state'), kController: Symbol('controller'), kResponse: Symbol('response'), kBinaryType: Symbol('binary type'), kSentClose: Symbol('sent close'), kReceivedClose: Symbol('received close'), kByteParser: Symbol('byte parser') } /***/ }), /***/ 3574: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { kReadyState, kController, kResponse, kBinaryType, kWebSocketURL } = __nccwpck_require__(62933) const { states, opcodes } = __nccwpck_require__(45913) const { MessageEvent, ErrorEvent } = __nccwpck_require__(46255) /* globals Blob */ /** * @param {import('./websocket').WebSocket} ws */ function isEstablished (ws) { // If the server's response is validated as provided for above, it is // said that _The WebSocket Connection is Established_ and that the // WebSocket Connection is in the OPEN state. return ws[kReadyState] === states.OPEN } /** * @param {import('./websocket').WebSocket} ws */ function isClosing (ws) { // Upon either sending or receiving a Close control frame, it is said // that _The WebSocket Closing Handshake is Started_ and that the // WebSocket connection is in the CLOSING state. return ws[kReadyState] === states.CLOSING } /** * @param {import('./websocket').WebSocket} ws */ function isClosed (ws) { return ws[kReadyState] === states.CLOSED } /** * @see https://dom.spec.whatwg.org/#concept-event-fire * @param {string} e * @param {EventTarget} target * @param {EventInit | undefined} eventInitDict */ function fireEvent (e, target, eventConstructor = Event, eventInitDict) { // 1. If eventConstructor is not given, then let eventConstructor be Event. // 2. Let event be the result of creating an event given eventConstructor, // in the relevant realm of target. // 3. Initialize event’s type attribute to e. const event = new eventConstructor(e, eventInitDict) // eslint-disable-line new-cap // 4. Initialize any other IDL attributes of event as described in the // invocation of this algorithm. // 5. Return the result of dispatching event at target, with legacy target // override flag set if set. 
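// For example, the close path in this file invokes the helper roughly like
// (values shown are illustrative):
//
//   fireEvent('close', ws, CloseEvent, { wasClean: true, code: 1000, reason: '' })
//
// which constructs `new CloseEvent('close', { wasClean: true, code: 1000, reason: '' })`
// and dispatches it on the WebSocket instance.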
target.dispatchEvent(event) } /** * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol * @param {import('./websocket').WebSocket} ws * @param {number} type Opcode * @param {Buffer} data application data */ function websocketMessageReceived (ws, type, data) { // 1. If ready state is not OPEN (1), then return. if (ws[kReadyState] !== states.OPEN) { return } // 2. Let dataForEvent be determined by switching on type and binary type: let dataForEvent if (type === opcodes.TEXT) { // -> type indicates that the data is Text // a new DOMString containing data try { dataForEvent = new TextDecoder('utf-8', { fatal: true }).decode(data) } catch { failWebsocketConnection(ws, 'Received invalid UTF-8 in text frame.') return } } else if (type === opcodes.BINARY) { if (ws[kBinaryType] === 'blob') { // -> type indicates that the data is Binary and binary type is "blob" // a new Blob object, created in the relevant Realm of the WebSocket // object, that represents data as its raw data dataForEvent = new Blob([data]) } else { // -> type indicates that the data is Binary and binary type is "arraybuffer" // a new ArrayBuffer object, created in the relevant Realm of the // WebSocket object, whose contents are data dataForEvent = new Uint8Array(data).buffer } } // 3. Fire an event named message at the WebSocket object, using MessageEvent, // with the origin attribute initialized to the serialization of the WebSocket // object’s url's origin, and the data attribute initialized to dataForEvent. fireEvent('message', ws, MessageEvent, { origin: ws[kWebSocketURL].origin, data: dataForEvent }) } /** * @see https://datatracker.ietf.org/doc/html/rfc6455 * @see https://datatracker.ietf.org/doc/html/rfc2616 * @see https://bugs.chromium.org/p/chromium/issues/detail?id=398407 * @param {string} protocol */ function isValidSubprotocol (protocol) { // If present, this value indicates one // or more comma-separated subprotocol the client wishes to speak, // ordered by preference. The elements that comprise this value // MUST be non-empty strings with characters in the range U+0021 to // U+007E not including separator characters as defined in // [RFC2616] and MUST all be unique strings. if (protocol.length === 0) { return false } for (const char of protocol) { const code = char.charCodeAt(0) if ( code < 0x21 || code > 0x7E || char === '(' || char === ')' || char === '<' || char === '>' || char === '@' || char === ',' || char === ';' || char === ':' || char === '\\' || char === '"' || char === '/' || char === '[' || char === ']' || char === '?' 
|| char === '=' || char === '{' || char === '}' || code === 32 || // SP code === 9 // HT ) { return false } } return true } /** * @see https://datatracker.ietf.org/doc/html/rfc6455#section-7-4 * @param {number} code */ function isValidStatusCode (code) { if (code >= 1000 && code < 1015) { return ( code !== 1004 && // reserved code !== 1005 && // "MUST NOT be set as a status code" code !== 1006 // "MUST NOT be set as a status code" ) } return code >= 3000 && code <= 4999 } /** * @param {import('./websocket').WebSocket} ws * @param {string|undefined} reason */ function failWebsocketConnection (ws, reason) { const { [kController]: controller, [kResponse]: response } = ws controller.abort() if (response?.socket && !response.socket.destroyed) { response.socket.destroy() } if (reason) { fireEvent('error', ws, ErrorEvent, { error: new Error(reason) }) } } module.exports = { isEstablished, isClosing, isClosed, fireEvent, isValidSubprotocol, isValidStatusCode, failWebsocketConnection, websocketMessageReceived } /***/ }), /***/ 55171: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const { webidl } = __nccwpck_require__(74222) const { DOMException } = __nccwpck_require__(87326) const { URLSerializer } = __nccwpck_require__(94322) const { getGlobalOrigin } = __nccwpck_require__(75628) const { staticPropertyDescriptors, states, opcodes, emptyBuffer } = __nccwpck_require__(45913) const { kWebSocketURL, kReadyState, kController, kBinaryType, kResponse, kSentClose, kByteParser } = __nccwpck_require__(62933) const { isEstablished, isClosing, isValidSubprotocol, failWebsocketConnection, fireEvent } = __nccwpck_require__(3574) const { establishWebSocketConnection } = __nccwpck_require__(68550) const { WebsocketFrameSend } = __nccwpck_require__(31237) const { ByteParser } = __nccwpck_require__(43171) const { kEnumerableProperty, isBlobLike } = __nccwpck_require__(3440) const { getGlobalDispatcher } = __nccwpck_require__(32581) const { types } = __nccwpck_require__(39023) let experimentalWarned = false // https://websockets.spec.whatwg.org/#interface-definition class WebSocket extends EventTarget { #events = { open: null, error: null, close: null, message: null } #bufferedAmount = 0 #protocol = '' #extensions = '' /** * @param {string} url * @param {string|string[]} protocols */ constructor (url, protocols = []) { super() webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket constructor' }) if (!experimentalWarned) { experimentalWarned = true process.emitWarning('WebSockets are experimental, expect them to change at any time.', { code: 'UNDICI-WS' }) } const options = webidl.converters['DOMString or sequence or WebSocketInit'](protocols) url = webidl.converters.USVString(url) protocols = options.protocols // 1. Let baseURL be this's relevant settings object's API base URL. const baseURL = getGlobalOrigin() // 1. Let urlRecord be the result of applying the URL parser to url with baseURL. let urlRecord try { urlRecord = new URL(url, baseURL) } catch (e) { // 3. If urlRecord is failure, then throw a "SyntaxError" DOMException. throw new DOMException(e, 'SyntaxError') } // 4. If urlRecord’s scheme is "http", then set urlRecord’s scheme to "ws". if (urlRecord.protocol === 'http:') { urlRecord.protocol = 'ws:' } else if (urlRecord.protocol === 'https:') { // 5. Otherwise, if urlRecord’s scheme is "https", set urlRecord’s scheme to "wss". urlRecord.protocol = 'wss:' } // 6. If urlRecord’s scheme is not "ws" or "wss", then throw a "SyntaxError" DOMException. 
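// For illustration (hypothetical URLs), the scheme handling in steps 4-6 means:
//
//   new WebSocket('http://example.com/chat')  // connects to ws://example.com/chat
//   new WebSocket('https://example.com/chat') // connects to wss://example.com/chat
//   new WebSocket('ftp://example.com/chat')   // throws a "SyntaxError" DOMException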
if (urlRecord.protocol !== 'ws:' && urlRecord.protocol !== 'wss:') { throw new DOMException( `Expected a ws: or wss: protocol, got ${urlRecord.protocol}`, 'SyntaxError' ) } // 7. If urlRecord’s fragment is non-null, then throw a "SyntaxError" // DOMException. if (urlRecord.hash || urlRecord.href.endsWith('#')) { throw new DOMException('Got fragment', 'SyntaxError') } // 8. If protocols is a string, set protocols to a sequence consisting // of just that string. if (typeof protocols === 'string') { protocols = [protocols] } // 9. If any of the values in protocols occur more than once or otherwise // fail to match the requirements for elements that comprise the value // of `Sec-WebSocket-Protocol` fields as defined by The WebSocket // protocol, then throw a "SyntaxError" DOMException. if (protocols.length !== new Set(protocols.map(p => p.toLowerCase())).size) { throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') } if (protocols.length > 0 && !protocols.every(p => isValidSubprotocol(p))) { throw new DOMException('Invalid Sec-WebSocket-Protocol value', 'SyntaxError') } // 10. Set this's url to urlRecord. this[kWebSocketURL] = new URL(urlRecord.href) // 11. Let client be this's relevant settings object. // 12. Run this step in parallel: // 1. Establish a WebSocket connection given urlRecord, protocols, // and client. this[kController] = establishWebSocketConnection( urlRecord, protocols, this, (response) => this.#onConnectionEstablished(response), options ) // Each WebSocket object has an associated ready state, which is a // number representing the state of the connection. Initially it must // be CONNECTING (0). this[kReadyState] = WebSocket.CONNECTING // The extensions attribute must initially return the empty string. // The protocol attribute must initially return the empty string. // Each WebSocket object has an associated binary type, which is a // BinaryType. Initially it must be "blob". this[kBinaryType] = 'blob' } /** * @see https://websockets.spec.whatwg.org/#dom-websocket-close * @param {number|undefined} code * @param {string|undefined} reason */ close (code = undefined, reason = undefined) { webidl.brandCheck(this, WebSocket) if (code !== undefined) { code = webidl.converters['unsigned short'](code, { clamp: true }) } if (reason !== undefined) { reason = webidl.converters.USVString(reason) } // 1. If code is present, but is neither an integer equal to 1000 nor an // integer in the range 3000 to 4999, inclusive, throw an // "InvalidAccessError" DOMException. if (code !== undefined) { if (code !== 1000 && (code < 3000 || code > 4999)) { throw new DOMException('invalid code', 'InvalidAccessError') } } let reasonByteLength = 0 // 2. If reason is present, then run these substeps: if (reason !== undefined) { // 1. Let reasonBytes be the result of encoding reason. // 2. If reasonBytes is longer than 123 bytes, then throw a // "SyntaxError" DOMException. reasonByteLength = Buffer.byteLength(reason) if (reasonByteLength > 123) { throw new DOMException( `Reason must be less than 123 bytes; received ${reasonByteLength}`, 'SyntaxError' ) } } // 3. Run the first matching steps from the following list: if (this[kReadyState] === WebSocket.CLOSING || this[kReadyState] === WebSocket.CLOSED) { // If this's ready state is CLOSING (2) or CLOSED (3) // Do nothing. } else if (!isEstablished(this)) { // If the WebSocket connection is not yet established // Fail the WebSocket connection and set this's ready state // to CLOSING (2). 
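// For illustration (hypothetical endpoint): a close() issued while the opening
// handshake is still in flight lands in this branch -- the connection is failed
// rather than closed gracefully:
//
//   const ws = new WebSocket('wss://example.com/chat')
//   ws.close() // readyState is still CONNECTING, so the connection is failed and moves to CLOSING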
failWebsocketConnection(this, 'Connection was closed before it was established.') this[kReadyState] = WebSocket.CLOSING } else if (!isClosing(this)) { // If the WebSocket closing handshake has not yet been started // Start the WebSocket closing handshake and set this's ready // state to CLOSING (2). // - If neither code nor reason is present, the WebSocket Close // message must not have a body. // - If code is present, then the status code to use in the // WebSocket Close message must be the integer given by code. // - If reason is also present, then reasonBytes must be // provided in the Close message after the status code. const frame = new WebsocketFrameSend() // If neither code nor reason is present, the WebSocket Close // message must not have a body. // If code is present, then the status code to use in the // WebSocket Close message must be the integer given by code. if (code !== undefined && reason === undefined) { frame.frameData = Buffer.allocUnsafe(2) frame.frameData.writeUInt16BE(code, 0) } else if (code !== undefined && reason !== undefined) { // If reason is also present, then reasonBytes must be // provided in the Close message after the status code. frame.frameData = Buffer.allocUnsafe(2 + reasonByteLength) frame.frameData.writeUInt16BE(code, 0) // the body MAY contain UTF-8-encoded data with value /reason/ frame.frameData.write(reason, 2, 'utf-8') } else { frame.frameData = emptyBuffer } /** @type {import('stream').Duplex} */ const socket = this[kResponse].socket socket.write(frame.createFrame(opcodes.CLOSE), (err) => { if (!err) { this[kSentClose] = true } }) // Upon either sending or receiving a Close control frame, it is said // that _The WebSocket Closing Handshake is Started_ and that the // WebSocket connection is in the CLOSING state. this[kReadyState] = states.CLOSING } else { // Otherwise // Set this's ready state to CLOSING (2). this[kReadyState] = WebSocket.CLOSING } } /** * @see https://websockets.spec.whatwg.org/#dom-websocket-send * @param {NodeJS.TypedArray|ArrayBuffer|Blob|string} data */ send (data) { webidl.brandCheck(this, WebSocket) webidl.argumentLengthCheck(arguments, 1, { header: 'WebSocket.send' }) data = webidl.converters.WebSocketSendData(data) // 1. If this's ready state is CONNECTING, then throw an // "InvalidStateError" DOMException. if (this[kReadyState] === WebSocket.CONNECTING) { throw new DOMException('Sent before connected.', 'InvalidStateError') } // 2. Run the appropriate set of steps from the following list: // https://datatracker.ietf.org/doc/html/rfc6455#section-6.1 // https://datatracker.ietf.org/doc/html/rfc6455#section-5.2 if (!isEstablished(this) || isClosing(this)) { return } /** @type {import('stream').Duplex} */ const socket = this[kResponse].socket // If data is a string if (typeof data === 'string') { // If the WebSocket connection is established and the WebSocket // closing handshake has not yet started, then the user agent // must send a WebSocket Message comprised of the data argument // using a text frame opcode; if the data cannot be sent, e.g. // because it would need to be buffered but the buffer is full, // the user agent must flag the WebSocket as full and then close // the WebSocket connection. Any invocation of this method with a // string argument that does not throw an exception must increase // the bufferedAmount attribute by the number of bytes needed to // express the argument as UTF-8. 
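// For illustration: bufferedAmount is accounted in UTF-8 bytes, not string length,
// so sending a string with non-ASCII characters grows it by the encoded size:
//
//   Buffer.byteLength('héllo') // 6 -- 'é' encodes to two bytes
//   // ws.send('héllo') therefore increases bufferedAmount by 6, not 5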
const value = Buffer.from(data) const frame = new WebsocketFrameSend(value) const buffer = frame.createFrame(opcodes.TEXT) this.#bufferedAmount += value.byteLength socket.write(buffer, () => { this.#bufferedAmount -= value.byteLength }) } else if (types.isArrayBuffer(data)) { // If the WebSocket connection is established, and the WebSocket // closing handshake has not yet started, then the user agent must // send a WebSocket Message comprised of data using a binary frame // opcode; if the data cannot be sent, e.g. because it would need // to be buffered but the buffer is full, the user agent must flag // the WebSocket as full and then close the WebSocket connection. // The data to be sent is the data stored in the buffer described // by the ArrayBuffer object. Any invocation of this method with an // ArrayBuffer argument that does not throw an exception must // increase the bufferedAmount attribute by the length of the // ArrayBuffer in bytes. const value = Buffer.from(data) const frame = new WebsocketFrameSend(value) const buffer = frame.createFrame(opcodes.BINARY) this.#bufferedAmount += value.byteLength socket.write(buffer, () => { this.#bufferedAmount -= value.byteLength }) } else if (ArrayBuffer.isView(data)) { // If the WebSocket connection is established, and the WebSocket // closing handshake has not yet started, then the user agent must // send a WebSocket Message comprised of data using a binary frame // opcode; if the data cannot be sent, e.g. because it would need to // be buffered but the buffer is full, the user agent must flag the // WebSocket as full and then close the WebSocket connection. The // data to be sent is the data stored in the section of the buffer // described by the ArrayBuffer object that data references. Any // invocation of this method with this kind of argument that does // not throw an exception must increase the bufferedAmount attribute // by the length of data’s buffer in bytes. const ab = Buffer.from(data, data.byteOffset, data.byteLength) const frame = new WebsocketFrameSend(ab) const buffer = frame.createFrame(opcodes.BINARY) this.#bufferedAmount += ab.byteLength socket.write(buffer, () => { this.#bufferedAmount -= ab.byteLength }) } else if (isBlobLike(data)) { // If the WebSocket connection is established, and the WebSocket // closing handshake has not yet started, then the user agent must // send a WebSocket Message comprised of data using a binary frame // opcode; if the data cannot be sent, e.g. because it would need to // be buffered but the buffer is full, the user agent must flag the // WebSocket as full and then close the WebSocket connection. The data // to be sent is the raw data represented by the Blob object. Any // invocation of this method with a Blob argument that does not throw // an exception must increase the bufferedAmount attribute by the size // of the Blob object’s raw data, in bytes. const frame = new WebsocketFrameSend() data.arrayBuffer().then((ab) => { const value = Buffer.from(ab) frame.frameData = value const buffer = frame.createFrame(opcodes.BINARY) this.#bufferedAmount += value.byteLength socket.write(buffer, () => { this.#bufferedAmount -= value.byteLength }) }) } } get readyState () { webidl.brandCheck(this, WebSocket) // The readyState getter steps are to return this's ready state. return this[kReadyState] } get bufferedAmount () { webidl.brandCheck(this, WebSocket) return this.#bufferedAmount } get url () { webidl.brandCheck(this, WebSocket) // The url getter steps are to return this's url, serialized. 
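// For illustration (hypothetical host): the URL is returned in serialized form,
// which can differ from the string passed to the constructor:
//
//   new WebSocket('wss://example.com').url // 'wss://example.com/' -- an empty path serializes as '/'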
return URLSerializer(this[kWebSocketURL]) } get extensions () { webidl.brandCheck(this, WebSocket) return this.#extensions } get protocol () { webidl.brandCheck(this, WebSocket) return this.#protocol } get onopen () { webidl.brandCheck(this, WebSocket) return this.#events.open } set onopen (fn) { webidl.brandCheck(this, WebSocket) if (this.#events.open) { this.removeEventListener('open', this.#events.open) } if (typeof fn === 'function') { this.#events.open = fn this.addEventListener('open', fn) } else { this.#events.open = null } } get onerror () { webidl.brandCheck(this, WebSocket) return this.#events.error } set onerror (fn) { webidl.brandCheck(this, WebSocket) if (this.#events.error) { this.removeEventListener('error', this.#events.error) } if (typeof fn === 'function') { this.#events.error = fn this.addEventListener('error', fn) } else { this.#events.error = null } } get onclose () { webidl.brandCheck(this, WebSocket) return this.#events.close } set onclose (fn) { webidl.brandCheck(this, WebSocket) if (this.#events.close) { this.removeEventListener('close', this.#events.close) } if (typeof fn === 'function') { this.#events.close = fn this.addEventListener('close', fn) } else { this.#events.close = null } } get onmessage () { webidl.brandCheck(this, WebSocket) return this.#events.message } set onmessage (fn) { webidl.brandCheck(this, WebSocket) if (this.#events.message) { this.removeEventListener('message', this.#events.message) } if (typeof fn === 'function') { this.#events.message = fn this.addEventListener('message', fn) } else { this.#events.message = null } } get binaryType () { webidl.brandCheck(this, WebSocket) return this[kBinaryType] } set binaryType (type) { webidl.brandCheck(this, WebSocket) if (type !== 'blob' && type !== 'arraybuffer') { this[kBinaryType] = 'blob' } else { this[kBinaryType] = type } } /** * @see https://websockets.spec.whatwg.org/#feedback-from-the-protocol */ #onConnectionEstablished (response) { // processResponse is called when the "response’s header list has been received and initialized." // once this happens, the connection is open this[kResponse] = response const parser = new ByteParser(this) parser.on('drain', function onParserDrain () { this.ws[kResponse].socket.resume() }) response.socket.ws = this this[kByteParser] = parser // 1. Change the ready state to OPEN (1). this[kReadyState] = states.OPEN // 2. Change the extensions attribute’s value to the extensions in use, if // it is not the null value. // https://datatracker.ietf.org/doc/html/rfc6455#section-9.1 const extensions = response.headersList.get('sec-websocket-extensions') if (extensions !== null) { this.#extensions = extensions } // 3. Change the protocol attribute’s value to the subprotocol in use, if // it is not the null value. // https://datatracker.ietf.org/doc/html/rfc6455#section-1.9 const protocol = response.headersList.get('sec-websocket-protocol') if (protocol !== null) { this.#protocol = protocol } // 4. Fire an event named open at the WebSocket object. 
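// For illustration (hypothetical endpoint): this open event is what a typical
// consumer waits for before sending:
//
//   const ws = new WebSocket('wss://example.com/chat')
//   ws.addEventListener('open', () => ws.send('hello'))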
fireEvent('open', this) } } // https://websockets.spec.whatwg.org/#dom-websocket-connecting WebSocket.CONNECTING = WebSocket.prototype.CONNECTING = states.CONNECTING // https://websockets.spec.whatwg.org/#dom-websocket-open WebSocket.OPEN = WebSocket.prototype.OPEN = states.OPEN // https://websockets.spec.whatwg.org/#dom-websocket-closing WebSocket.CLOSING = WebSocket.prototype.CLOSING = states.CLOSING // https://websockets.spec.whatwg.org/#dom-websocket-closed WebSocket.CLOSED = WebSocket.prototype.CLOSED = states.CLOSED Object.defineProperties(WebSocket.prototype, { CONNECTING: staticPropertyDescriptors, OPEN: staticPropertyDescriptors, CLOSING: staticPropertyDescriptors, CLOSED: staticPropertyDescriptors, url: kEnumerableProperty, readyState: kEnumerableProperty, bufferedAmount: kEnumerableProperty, onopen: kEnumerableProperty, onerror: kEnumerableProperty, onclose: kEnumerableProperty, close: kEnumerableProperty, onmessage: kEnumerableProperty, binaryType: kEnumerableProperty, send: kEnumerableProperty, extensions: kEnumerableProperty, protocol: kEnumerableProperty, [Symbol.toStringTag]: { value: 'WebSocket', writable: false, enumerable: false, configurable: true } }) Object.defineProperties(WebSocket, { CONNECTING: staticPropertyDescriptors, OPEN: staticPropertyDescriptors, CLOSING: staticPropertyDescriptors, CLOSED: staticPropertyDescriptors }) webidl.converters['sequence'] = webidl.sequenceConverter( webidl.converters.DOMString ) webidl.converters['DOMString or sequence'] = function (V) { if (webidl.util.Type(V) === 'Object' && Symbol.iterator in V) { return webidl.converters['sequence'](V) } return webidl.converters.DOMString(V) } // This implements the propsal made in https://github.com/whatwg/websockets/issues/42 webidl.converters.WebSocketInit = webidl.dictionaryConverter([ { key: 'protocols', converter: webidl.converters['DOMString or sequence'], get defaultValue () { return [] } }, { key: 'dispatcher', converter: (V) => V, get defaultValue () { return getGlobalDispatcher() } }, { key: 'headers', converter: webidl.nullableConverter(webidl.converters.HeadersInit) } ]) webidl.converters['DOMString or sequence or WebSocketInit'] = function (V) { if (webidl.util.Type(V) === 'Object' && !(Symbol.iterator in V)) { return webidl.converters.WebSocketInit(V) } return { protocols: webidl.converters['DOMString or sequence'](V) } } webidl.converters.WebSocketSendData = function (V) { if (webidl.util.Type(V) === 'Object') { if (isBlobLike(V)) { return webidl.converters.Blob(V, { strict: false }) } if (ArrayBuffer.isView(V) || types.isAnyArrayBuffer(V)) { return webidl.converters.BufferSource(V) } } return webidl.converters.USVString(V) } module.exports = { WebSocket } /***/ }), /***/ 33843: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); function getUserAgent() { if (typeof navigator === "object" && "userAgent" in navigator) { return navigator.userAgent; } if (typeof process === "object" && process.version !== undefined) { return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; } return ""; } exports.getUserAgent = getUserAgent; //# sourceMappingURL=index.js.map /***/ }), /***/ 24488: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /** * For Node.js, simply re-export the core `util.deprecate` function. 
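 *
 * A minimal usage sketch (hypothetical function name and message):
 *
 *   const deprecate = require('util').deprecate;
 *   const oldFn = deprecate(function oldFn () {}, 'oldFn() is deprecated');
 *   oldFn(); // emits a one-time DeprecationWarning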
*/ module.exports = __nccwpck_require__(39023).deprecate; /***/ }), /***/ 87723: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var v1 = __nccwpck_require__(46626); var v4 = __nccwpck_require__(99021); var uuid = v4; uuid.v1 = v1; uuid.v4 = v4; module.exports = uuid; /***/ }), /***/ 38682: /***/ ((module) => { /** * Convert array of 16 byte values to UUID string format of the form: * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX */ var byteToHex = []; for (var i = 0; i < 256; ++i) { byteToHex[i] = (i + 0x100).toString(16).substr(1); } function bytesToUuid(buf, offset) { var i = offset || 0; var bth = byteToHex; // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 return ([ bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], '-', bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]], bth[buf[i++]] ]).join(''); } module.exports = bytesToUuid; /***/ }), /***/ 61694: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { // Unique ID creation requires a high quality random # generator. In node.js // this is pretty straight-forward - we use the crypto API. var crypto = __nccwpck_require__(76982); module.exports = function nodeRNG() { return crypto.randomBytes(16); }; /***/ }), /***/ 46626: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var rng = __nccwpck_require__(61694); var bytesToUuid = __nccwpck_require__(38682); // **`v1()` - Generate time-based UUID** // // Inspired by https://github.com/LiosK/UUID.js // and http://docs.python.org/library/uuid.html var _nodeId; var _clockseq; // Previous uuid creation time var _lastMSecs = 0; var _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details function v1(options, buf, offset) { var i = buf && offset || 0; var b = buf || []; options = options || {}; var node = options.node || _nodeId; var clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not // specified. We do this lazily to minimize issues related to insufficient // system entropy. See #189 if (node == null || clockseq == null) { var seedBytes = rng(); if (node == null) { // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) node = _nodeId = [ seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5] ]; } if (clockseq == null) { // Per 4.2.2, randomize (14 bit) clockseq clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; } } // UUID timestamps are 100 nano-second units since the Gregorian epoch, // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. var msecs = options.msecs !== undefined ? options.msecs : new Date().getTime(); // Per 4.2.1.2, use count of uuid's generated during the current clock // cycle to simulate higher resolution clock var nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) var dt = (msecs - _lastMSecs) + (nsecs - _lastNSecs)/10000; // Per 4.2.1.2, Bump clockseq on clock regression if (dt < 0 && options.clockseq === undefined) { clockseq = clockseq + 1 & 0x3fff; } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new // time interval if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { nsecs = 0; } // Per 4.2.1.2 Throw error if too many uuids are requested if (nsecs >= 10000) { throw new Error('uuid.v1(): Can\'t create more than 10M uuids/sec'); } _lastMSecs = msecs; _lastNSecs = nsecs; _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch msecs += 12219292800000; // `time_low` var tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; b[i++] = tl >>> 24 & 0xff; b[i++] = tl >>> 16 & 0xff; b[i++] = tl >>> 8 & 0xff; b[i++] = tl & 0xff; // `time_mid` var tmh = (msecs / 0x100000000 * 10000) & 0xfffffff; b[i++] = tmh >>> 8 & 0xff; b[i++] = tmh & 0xff; // `time_high_and_version` b[i++] = tmh >>> 24 & 0xf | 0x10; // include version b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` b[i++] = clockseq & 0xff; // `node` for (var n = 0; n < 6; ++n) { b[i + n] = node[n]; } return buf ? buf : bytesToUuid(b); } module.exports = v1; /***/ }), /***/ 99021: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { var rng = __nccwpck_require__(61694); var bytesToUuid = __nccwpck_require__(38682); function v4(options, buf, offset) { var i = buf && offset || 0; if (typeof(options) == 'string') { buf = options === 'binary' ? new Array(16) : null; options = null; } options = options || {}; var rnds = options.random || (options.rng || rng)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` rnds[6] = (rnds[6] & 0x0f) | 0x40; rnds[8] = (rnds[8] & 0x3f) | 0x80; // Copy bytes to buffer, if provided if (buf) { for (var ii = 0; ii < 16; ++ii) { buf[i + ii] = rnds[ii]; } } return buf || bytesToUuid(rnds); } module.exports = v4; /***/ }), /***/ 37125: /***/ ((module) => { "use strict"; var conversions = {}; module.exports = conversions; function sign(x) { return x < 0 ? -1 : 1; } function evenRound(x) { // Round x to the nearest integer, choosing the even integer if it lies halfway between two. if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor) return Math.floor(x); } else { return Math.round(x); } } function createNumberConversion(bitLength, typeOpts) { if (!typeOpts.unsigned) { --bitLength; } const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength); const upperBound = Math.pow(2, bitLength) - 1; const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength); const moduloBound = typeOpts.moduloBitLength ? 
Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1); return function(V, opts) { if (!opts) opts = {}; let x = +V; if (opts.enforceRange) { if (!Number.isFinite(x)) { throw new TypeError("Argument is not a finite number"); } x = sign(x) * Math.floor(Math.abs(x)); if (x < lowerBound || x > upperBound) { throw new TypeError("Argument is not in byte range"); } return x; } if (!isNaN(x) && opts.clamp) { x = evenRound(x); if (x < lowerBound) x = lowerBound; if (x > upperBound) x = upperBound; return x; } if (!Number.isFinite(x) || x === 0) { return 0; } x = sign(x) * Math.floor(Math.abs(x)); x = x % moduloVal; if (!typeOpts.unsigned && x >= moduloBound) { return x - moduloVal; } else if (typeOpts.unsigned) { if (x < 0) { x += moduloVal; } else if (x === -0) { // don't return negative zero return 0; } } return x; } } conversions["void"] = function () { return undefined; }; conversions["boolean"] = function (val) { return !!val; }; conversions["byte"] = createNumberConversion(8, { unsigned: false }); conversions["octet"] = createNumberConversion(8, { unsigned: true }); conversions["short"] = createNumberConversion(16, { unsigned: false }); conversions["unsigned short"] = createNumberConversion(16, { unsigned: true }); conversions["long"] = createNumberConversion(32, { unsigned: false }); conversions["unsigned long"] = createNumberConversion(32, { unsigned: true }); conversions["long long"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 }); conversions["unsigned long long"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 }); conversions["double"] = function (V) { const x = +V; if (!Number.isFinite(x)) { throw new TypeError("Argument is not a finite floating-point value"); } return x; }; conversions["unrestricted double"] = function (V) { const x = +V; if (isNaN(x)) { throw new TypeError("Argument is NaN"); } return x; }; // not quite valid, but good enough for JS conversions["float"] = conversions["double"]; conversions["unrestricted float"] = conversions["unrestricted double"]; conversions["DOMString"] = function (V, opts) { if (!opts) opts = {}; if (opts.treatNullAsEmptyString && V === null) { return ""; } return String(V); }; conversions["ByteString"] = function (V, opts) { const x = String(V); let c = undefined; for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) { if (c > 255) { throw new TypeError("Argument is not a valid bytestring"); } } return x; }; conversions["USVString"] = function (V) { const S = String(V); const n = S.length; const U = []; for (let i = 0; i < n; ++i) { const c = S.charCodeAt(i); if (c < 0xD800 || c > 0xDFFF) { U.push(String.fromCodePoint(c)); } else if (0xDC00 <= c && c <= 0xDFFF) { U.push(String.fromCodePoint(0xFFFD)); } else { if (i === n - 1) { U.push(String.fromCodePoint(0xFFFD)); } else { const d = S.charCodeAt(i + 1); if (0xDC00 <= d && d <= 0xDFFF) { const a = c & 0x3FF; const b = d & 0x3FF; U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b)); ++i; } else { U.push(String.fromCodePoint(0xFFFD)); } } } } return U.join(''); }; conversions["Date"] = function (V, opts) { if (!(V instanceof Date)) { throw new TypeError("Argument is not a Date object"); } if (isNaN(V)) { return undefined; } return V; }; conversions["RegExp"] = function (V, opts) { if (!(V instanceof RegExp)) { V = new RegExp(V); } return V; }; /***/ }), /***/ 23184: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; const usm = __nccwpck_require__(20905); exports.implementation = class URLImpl { 
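  // A rough sketch of how the wrapper in module 66633 below constructs this
  // implementation class (assuming well-formed inputs):
  //
  //   const impl = new URLImpl(['/path?q=1', 'https://example.com']);
  //   impl.href     // 'https://example.com/path?q=1'
  //   impl.protocol // 'https:'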
constructor(constructorArgs) { const url = constructorArgs[0]; const base = constructorArgs[1]; let parsedBase = null; if (base !== undefined) { parsedBase = usm.basicURLParse(base); if (parsedBase === "failure") { throw new TypeError("Invalid base URL"); } } const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase }); if (parsedURL === "failure") { throw new TypeError("Invalid URL"); } this._url = parsedURL; // TODO: query stuff } get href() { return usm.serializeURL(this._url); } set href(v) { const parsedURL = usm.basicURLParse(v); if (parsedURL === "failure") { throw new TypeError("Invalid URL"); } this._url = parsedURL; } get origin() { return usm.serializeURLOrigin(this._url); } get protocol() { return this._url.scheme + ":"; } set protocol(v) { usm.basicURLParse(v + ":", { url: this._url, stateOverride: "scheme start" }); } get username() { return this._url.username; } set username(v) { if (usm.cannotHaveAUsernamePasswordPort(this._url)) { return; } usm.setTheUsername(this._url, v); } get password() { return this._url.password; } set password(v) { if (usm.cannotHaveAUsernamePasswordPort(this._url)) { return; } usm.setThePassword(this._url, v); } get host() { const url = this._url; if (url.host === null) { return ""; } if (url.port === null) { return usm.serializeHost(url.host); } return usm.serializeHost(url.host) + ":" + usm.serializeInteger(url.port); } set host(v) { if (this._url.cannotBeABaseURL) { return; } usm.basicURLParse(v, { url: this._url, stateOverride: "host" }); } get hostname() { if (this._url.host === null) { return ""; } return usm.serializeHost(this._url.host); } set hostname(v) { if (this._url.cannotBeABaseURL) { return; } usm.basicURLParse(v, { url: this._url, stateOverride: "hostname" }); } get port() { if (this._url.port === null) { return ""; } return usm.serializeInteger(this._url.port); } set port(v) { if (usm.cannotHaveAUsernamePasswordPort(this._url)) { return; } if (v === "") { this._url.port = null; } else { usm.basicURLParse(v, { url: this._url, stateOverride: "port" }); } } get pathname() { if (this._url.cannotBeABaseURL) { return this._url.path[0]; } if (this._url.path.length === 0) { return ""; } return "/" + this._url.path.join("/"); } set pathname(v) { if (this._url.cannotBeABaseURL) { return; } this._url.path = []; usm.basicURLParse(v, { url: this._url, stateOverride: "path start" }); } get search() { if (this._url.query === null || this._url.query === "") { return ""; } return "?" + this._url.query; } set search(v) { // TODO: query stuff const url = this._url; if (v === "") { url.query = null; return; } const input = v[0] === "?" ? v.substring(1) : v; url.query = ""; usm.basicURLParse(input, { url, stateOverride: "query" }); } get hash() { if (this._url.fragment === null || this._url.fragment === "") { return ""; } return "#" + this._url.fragment; } set hash(v) { if (v === "") { this._url.fragment = null; return; } const input = v[0] === "#" ? 
v.substring(1) : v; this._url.fragment = ""; usm.basicURLParse(input, { url: this._url, stateOverride: "fragment" }); } toJSON() { return this.href; } }; /***/ }), /***/ 66633: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const conversions = __nccwpck_require__(37125); const utils = __nccwpck_require__(39857); const Impl = __nccwpck_require__(23184); const impl = utils.implSymbol; function URL(url) { if (!this || this[impl] || !(this instanceof URL)) { throw new TypeError("Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function."); } if (arguments.length < 1) { throw new TypeError("Failed to construct 'URL': 1 argument required, but only " + arguments.length + " present."); } const args = []; for (let i = 0; i < arguments.length && i < 2; ++i) { args[i] = arguments[i]; } args[0] = conversions["USVString"](args[0]); if (args[1] !== undefined) { args[1] = conversions["USVString"](args[1]); } module.exports.setup(this, args); } URL.prototype.toJSON = function toJSON() { if (!this || !module.exports.is(this)) { throw new TypeError("Illegal invocation"); } const args = []; for (let i = 0; i < arguments.length && i < 0; ++i) { args[i] = arguments[i]; } return this[impl].toJSON.apply(this[impl], args); }; Object.defineProperty(URL.prototype, "href", { get() { return this[impl].href; }, set(V) { V = conversions["USVString"](V); this[impl].href = V; }, enumerable: true, configurable: true }); URL.prototype.toString = function () { if (!this || !module.exports.is(this)) { throw new TypeError("Illegal invocation"); } return this.href; }; Object.defineProperty(URL.prototype, "origin", { get() { return this[impl].origin; }, enumerable: true, configurable: true }); Object.defineProperty(URL.prototype, "protocol", { get() { return this[impl].protocol; }, set(V) { V = conversions["USVString"](V); this[impl].protocol = V; }, enumerable: true, configurable: true }); Object.defineProperty(URL.prototype, "username", { get() { return this[impl].username; }, set(V) { V = conversions["USVString"](V); this[impl].username = V; }, enumerable: true, configurable: true }); Object.defineProperty(URL.prototype, "password", { get() { return this[impl].password; }, set(V) { V = conversions["USVString"](V); this[impl].password = V; }, enumerable: true, configurable: true }); Object.defineProperty(URL.prototype, "host", { get() { return this[impl].host; }, set(V) { V = conversions["USVString"](V); this[impl].host = V; }, enumerable: true, configurable: true }); Object.defineProperty(URL.prototype, "hostname", { get() { return this[impl].hostname; }, set(V) { V = conversions["USVString"](V); this[impl].hostname = V; }, enumerable: true, configurable: true }); Object.defineProperty(URL.prototype, "port", { get() { return this[impl].port; }, set(V) { V = conversions["USVString"](V); this[impl].port = V; }, enumerable: true, configurable: true }); Object.defineProperty(URL.prototype, "pathname", { get() { return this[impl].pathname; }, set(V) { V = conversions["USVString"](V); this[impl].pathname = V; }, enumerable: true, configurable: true }); Object.defineProperty(URL.prototype, "search", { get() { return this[impl].search; }, set(V) { V = conversions["USVString"](V); this[impl].search = V; }, enumerable: true, configurable: true }); Object.defineProperty(URL.prototype, "hash", { get() { return this[impl].hash; }, set(V) { V = conversions["USVString"](V); this[impl].hash = V; }, enumerable: true, configurable: true }); 
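// For illustration: each accessor above funnels assignments through the
// conversions["USVString"] coercion before reaching the implementation, e.g.
//
//   const u = new URL('https://example.com/');
//   u.port = 8080;   // the number is coerced to the string '8080'
//   u.href;          // 'https://example.com:8080/'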
module.exports = { is(obj) { return !!obj && obj[impl] instanceof Impl.implementation; }, create(constructorArgs, privateData) { let obj = Object.create(URL.prototype); this.setup(obj, constructorArgs, privateData); return obj; }, setup(obj, constructorArgs, privateData) { if (!privateData) privateData = {}; privateData.wrapper = obj; obj[impl] = new Impl.implementation(constructorArgs, privateData); obj[impl][utils.wrapperSymbol] = obj; }, interface: URL, expose: { Window: { URL: URL }, Worker: { URL: URL } } }; /***/ }), /***/ 62686: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; exports.URL = __nccwpck_require__(66633)["interface"]; exports.serializeURL = __nccwpck_require__(20905).serializeURL; exports.serializeURLOrigin = __nccwpck_require__(20905).serializeURLOrigin; exports.basicURLParse = __nccwpck_require__(20905).basicURLParse; exports.setTheUsername = __nccwpck_require__(20905).setTheUsername; exports.setThePassword = __nccwpck_require__(20905).setThePassword; exports.serializeHost = __nccwpck_require__(20905).serializeHost; exports.serializeInteger = __nccwpck_require__(20905).serializeInteger; exports.parseURL = __nccwpck_require__(20905).parseURL; /***/ }), /***/ 20905: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const punycode = __nccwpck_require__(24876); const tr46 = __nccwpck_require__(1552); const specialSchemes = { ftp: 21, file: null, gopher: 70, http: 80, https: 443, ws: 80, wss: 443 }; const failure = Symbol("failure"); function countSymbols(str) { return punycode.ucs2.decode(str).length; } function at(input, idx) { const c = input[idx]; return isNaN(c) ? undefined : String.fromCodePoint(c); } function isASCIIDigit(c) { return c >= 0x30 && c <= 0x39; } function isASCIIAlpha(c) { return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); } function isASCIIAlphanumeric(c) { return isASCIIAlpha(c) || isASCIIDigit(c); } function isASCIIHex(c) { return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); } function isSingleDot(buffer) { return buffer === "." || buffer.toLowerCase() === "%2e"; } function isDoubleDot(buffer) { buffer = buffer.toLowerCase(); return buffer === ".." || buffer === "%2e." 
|| buffer === ".%2e" || buffer === "%2e%2e"; } function isWindowsDriveLetterCodePoints(cp1, cp2) { return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); } function isWindowsDriveLetterString(string) { return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); } function isNormalizedWindowsDriveLetterString(string) { return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; } function containsForbiddenHostCodePoint(string) { return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; } function containsForbiddenHostCodePointExcludingPercent(string) { return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; } function isSpecialScheme(scheme) { return specialSchemes[scheme] !== undefined; } function isSpecial(url) { return isSpecialScheme(url.scheme); } function defaultPort(scheme) { return specialSchemes[scheme]; } function percentEncode(c) { let hex = c.toString(16).toUpperCase(); if (hex.length === 1) { hex = "0" + hex; } return "%" + hex; } function utf8PercentEncode(c) { const buf = new Buffer(c); let str = ""; for (let i = 0; i < buf.length; ++i) { str += percentEncode(buf[i]); } return str; } function utf8PercentDecode(str) { const input = new Buffer(str); const output = []; for (let i = 0; i < input.length; ++i) { if (input[i] !== 37) { output.push(input[i]); } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); i += 2; } else { output.push(input[i]); } } return new Buffer(output).toString(); } function isC0ControlPercentEncode(c) { return c <= 0x1F || c > 0x7E; } const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); function isPathPercentEncode(c) { return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); } const extraUserinfoPercentEncodeSet = new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); function isUserinfoPercentEncode(c) { return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); } function percentEncodeChar(c, encodeSetPredicate) { const cStr = String.fromCodePoint(c); if (encodeSetPredicate(c)) { return utf8PercentEncode(cStr); } return cStr; } function parseIPv4Number(input) { let R = 10; if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { input = input.substring(2); R = 16; } else if (input.length >= 2 && input.charAt(0) === "0") { input = input.substring(1); R = 8; } if (input === "") { return 0; } const regex = R === 10 ? /[^0-9]/ : (R === 16 ? 
/[^0-9A-Fa-f]/ : /[^0-7]/); if (regex.test(input)) { return failure; } return parseInt(input, R); } function parseIPv4(input) { const parts = input.split("."); if (parts[parts.length - 1] === "") { if (parts.length > 1) { parts.pop(); } } if (parts.length > 4) { return input; } const numbers = []; for (const part of parts) { if (part === "") { return input; } const n = parseIPv4Number(part); if (n === failure) { return input; } numbers.push(n); } for (let i = 0; i < numbers.length - 1; ++i) { if (numbers[i] > 255) { return failure; } } if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { return failure; } let ipv4 = numbers.pop(); let counter = 0; for (const n of numbers) { ipv4 += n * Math.pow(256, 3 - counter); ++counter; } return ipv4; } function serializeIPv4(address) { let output = ""; let n = address; for (let i = 1; i <= 4; ++i) { output = String(n % 256) + output; if (i !== 4) { output = "." + output; } n = Math.floor(n / 256); } return output; } function parseIPv6(input) { const address = [0, 0, 0, 0, 0, 0, 0, 0]; let pieceIndex = 0; let compress = null; let pointer = 0; input = punycode.ucs2.decode(input); if (input[pointer] === 58) { if (input[pointer + 1] !== 58) { return failure; } pointer += 2; ++pieceIndex; compress = pieceIndex; } while (pointer < input.length) { if (pieceIndex === 8) { return failure; } if (input[pointer] === 58) { if (compress !== null) { return failure; } ++pointer; ++pieceIndex; compress = pieceIndex; continue; } let value = 0; let length = 0; while (length < 4 && isASCIIHex(input[pointer])) { value = value * 0x10 + parseInt(at(input, pointer), 16); ++pointer; ++length; } if (input[pointer] === 46) { if (length === 0) { return failure; } pointer -= length; if (pieceIndex > 6) { return failure; } let numbersSeen = 0; while (input[pointer] !== undefined) { let ipv4Piece = null; if (numbersSeen > 0) { if (input[pointer] === 46 && numbersSeen < 4) { ++pointer; } else { return failure; } } if (!isASCIIDigit(input[pointer])) { return failure; } while (isASCIIDigit(input[pointer])) { const number = parseInt(at(input, pointer)); if (ipv4Piece === null) { ipv4Piece = number; } else if (ipv4Piece === 0) { return failure; } else { ipv4Piece = ipv4Piece * 10 + number; } if (ipv4Piece > 255) { return failure; } ++pointer; } address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; ++numbersSeen; if (numbersSeen === 2 || numbersSeen === 4) { ++pieceIndex; } } if (numbersSeen !== 4) { return failure; } break; } else if (input[pointer] === 58) { ++pointer; if (input[pointer] === undefined) { return failure; } } else if (input[pointer] !== undefined) { return failure; } address[pieceIndex] = value; ++pieceIndex; } if (compress !== null) { let swaps = pieceIndex - compress; pieceIndex = 7; while (pieceIndex !== 0 && swaps > 0) { const temp = address[compress + swaps - 1]; address[compress + swaps - 1] = address[pieceIndex]; address[pieceIndex] = temp; --pieceIndex; --swaps; } } else if (compress === null && pieceIndex !== 8) { return failure; } return address; } function serializeIPv6(address) { let output = ""; const seqResult = findLongestZeroSequence(address); const compress = seqResult.idx; let ignore0 = false; for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { if (ignore0 && address[pieceIndex] === 0) { continue; } else if (ignore0) { ignore0 = false; } if (compress === pieceIndex) { const separator = pieceIndex === 0 ? 
"::" : ":"; output += separator; ignore0 = true; continue; } output += address[pieceIndex].toString(16); if (pieceIndex !== 7) { output += ":"; } } return output; } function parseHost(input, isSpecialArg) { if (input[0] === "[") { if (input[input.length - 1] !== "]") { return failure; } return parseIPv6(input.substring(1, input.length - 1)); } if (!isSpecialArg) { return parseOpaqueHost(input); } const domain = utf8PercentDecode(input); const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); if (asciiDomain === null) { return failure; } if (containsForbiddenHostCodePoint(asciiDomain)) { return failure; } const ipv4Host = parseIPv4(asciiDomain); if (typeof ipv4Host === "number" || ipv4Host === failure) { return ipv4Host; } return asciiDomain; } function parseOpaqueHost(input) { if (containsForbiddenHostCodePointExcludingPercent(input)) { return failure; } let output = ""; const decoded = punycode.ucs2.decode(input); for (let i = 0; i < decoded.length; ++i) { output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); } return output; } function findLongestZeroSequence(arr) { let maxIdx = null; let maxLen = 1; // only find elements > 1 let currStart = null; let currLen = 0; for (let i = 0; i < arr.length; ++i) { if (arr[i] !== 0) { if (currLen > maxLen) { maxIdx = currStart; maxLen = currLen; } currStart = null; currLen = 0; } else { if (currStart === null) { currStart = i; } ++currLen; } } // if trailing zeros if (currLen > maxLen) { maxIdx = currStart; maxLen = currLen; } return { idx: maxIdx, len: maxLen }; } function serializeHost(host) { if (typeof host === "number") { return serializeIPv4(host); } // IPv6 serializer if (host instanceof Array) { return "[" + serializeIPv6(host) + "]"; } return host; } function trimControlChars(url) { return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); } function trimTabAndNewline(url) { return url.replace(/\u0009|\u000A|\u000D/g, ""); } function shortenPath(url) { const path = url.path; if (path.length === 0) { return; } if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { return; } path.pop(); } function includesCredentials(url) { return url.username !== "" || url.password !== ""; } function cannotHaveAUsernamePasswordPort(url) { return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; } function isNormalizedWindowsDriveLetter(string) { return /^[A-Za-z]:$/.test(string); } function URLStateMachine(input, base, encodingOverride, url, stateOverride) { this.pointer = 0; this.input = input; this.base = base || null; this.encodingOverride = encodingOverride || "utf-8"; this.stateOverride = stateOverride; this.url = url; this.failure = false; this.parseError = false; if (!this.url) { this.url = { scheme: "", username: "", password: "", host: null, port: null, path: [], query: null, fragment: null, cannotBeABaseURL: false }; const res = trimControlChars(this.input); if (res !== this.input) { this.parseError = true; } this.input = res; } const res = trimTabAndNewline(this.input); if (res !== this.input) { this.parseError = true; } this.input = res; this.state = stateOverride || "scheme start"; this.buffer = ""; this.atFlag = false; this.arrFlag = false; this.passwordTokenSeenFlag = false; this.input = punycode.ucs2.decode(this.input); for (; this.pointer <= this.input.length; ++this.pointer) { const c = this.input[this.pointer]; const cStr = isNaN(c) ? 
undefined : String.fromCodePoint(c); // exec state machine const ret = this["parse " + this.state](c, cStr); if (!ret) { break; // terminate algorithm } else if (ret === failure) { this.failure = true; break; } } } URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { if (isASCIIAlpha(c)) { this.buffer += cStr.toLowerCase(); this.state = "scheme"; } else if (!this.stateOverride) { this.state = "no scheme"; --this.pointer; } else { this.parseError = true; return failure; } return true; }; URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { this.buffer += cStr.toLowerCase(); } else if (c === 58) { if (this.stateOverride) { if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { return false; } if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { return false; } if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { return false; } if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { return false; } } this.url.scheme = this.buffer; this.buffer = ""; if (this.stateOverride) { return false; } if (this.url.scheme === "file") { if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { this.parseError = true; } this.state = "file"; } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { this.state = "special relative or authority"; } else if (isSpecial(this.url)) { this.state = "special authority slashes"; } else if (this.input[this.pointer + 1] === 47) { this.state = "path or authority"; ++this.pointer; } else { this.url.cannotBeABaseURL = true; this.url.path.push(""); this.state = "cannot-be-a-base-URL path"; } } else if (!this.stateOverride) { this.buffer = ""; this.state = "no scheme"; this.pointer = -1; } else { this.parseError = true; return failure; } return true; }; URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { return failure; } else if (this.base.cannotBeABaseURL && c === 35) { this.url.scheme = this.base.scheme; this.url.path = this.base.path.slice(); this.url.query = this.base.query; this.url.fragment = ""; this.url.cannotBeABaseURL = true; this.state = "fragment"; } else if (this.base.scheme === "file") { this.state = "file"; --this.pointer; } else { this.state = "relative"; --this.pointer; } return true; }; URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { if (c === 47 && this.input[this.pointer + 1] === 47) { this.state = "special authority ignore slashes"; ++this.pointer; } else { this.parseError = true; this.state = "relative"; --this.pointer; } return true; }; URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { if (c === 47) { this.state = "authority"; } else { this.state = "path"; --this.pointer; } return true; }; URLStateMachine.prototype["parse relative"] = function parseRelative(c) { this.url.scheme = this.base.scheme; if (isNaN(c)) { this.url.username = this.base.username; this.url.password = this.base.password; this.url.host = this.base.host; this.url.port = this.base.port; this.url.path = this.base.path.slice(); this.url.query = this.base.query; } else if (c === 47) { this.state = "relative slash"; } else if (c === 63) { this.url.username = this.base.username; this.url.password = this.base.password; this.url.host = 
this.base.host; this.url.port = this.base.port; this.url.path = this.base.path.slice(); this.url.query = ""; this.state = "query"; } else if (c === 35) { this.url.username = this.base.username; this.url.password = this.base.password; this.url.host = this.base.host; this.url.port = this.base.port; this.url.path = this.base.path.slice(); this.url.query = this.base.query; this.url.fragment = ""; this.state = "fragment"; } else if (isSpecial(this.url) && c === 92) { this.parseError = true; this.state = "relative slash"; } else { this.url.username = this.base.username; this.url.password = this.base.password; this.url.host = this.base.host; this.url.port = this.base.port; this.url.path = this.base.path.slice(0, this.base.path.length - 1); this.state = "path"; --this.pointer; } return true; }; URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { if (isSpecial(this.url) && (c === 47 || c === 92)) { if (c === 92) { this.parseError = true; } this.state = "special authority ignore slashes"; } else if (c === 47) { this.state = "authority"; } else { this.url.username = this.base.username; this.url.password = this.base.password; this.url.host = this.base.host; this.url.port = this.base.port; this.state = "path"; --this.pointer; } return true; }; URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { if (c === 47 && this.input[this.pointer + 1] === 47) { this.state = "special authority ignore slashes"; ++this.pointer; } else { this.parseError = true; this.state = "special authority ignore slashes"; --this.pointer; } return true; }; URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { if (c !== 47 && c !== 92) { this.state = "authority"; --this.pointer; } else { this.parseError = true; } return true; }; URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { if (c === 64) { this.parseError = true; if (this.atFlag) { this.buffer = "%40" + this.buffer; } this.atFlag = true; // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars const len = countSymbols(this.buffer); for (let pointer = 0; pointer < len; ++pointer) { const codePoint = this.buffer.codePointAt(pointer); if (codePoint === 58 && !this.passwordTokenSeenFlag) { this.passwordTokenSeenFlag = true; continue; } const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); if (this.passwordTokenSeenFlag) { this.url.password += encodedCodePoints; } else { this.url.username += encodedCodePoints; } } this.buffer = ""; } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || (isSpecial(this.url) && c === 92)) { if (this.atFlag && this.buffer === "") { this.parseError = true; return failure; } this.pointer -= countSymbols(this.buffer) + 1; this.buffer = ""; this.state = "host"; } else { this.buffer += cStr; } return true; }; URLStateMachine.prototype["parse hostname"] = URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { if (this.stateOverride && this.url.scheme === "file") { --this.pointer; this.state = "file host"; } else if (c === 58 && !this.arrFlag) { if (this.buffer === "") { this.parseError = true; return failure; } const host = parseHost(this.buffer, isSpecial(this.url)); if (host === failure) { return failure; } this.url.host = host; this.buffer = ""; this.state = "port"; if (this.stateOverride === "hostname") { return false; } } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || 
(isSpecial(this.url) && c === 92)) { --this.pointer; if (isSpecial(this.url) && this.buffer === "") { this.parseError = true; return failure; } else if (this.stateOverride && this.buffer === "" && (includesCredentials(this.url) || this.url.port !== null)) { this.parseError = true; return false; } const host = parseHost(this.buffer, isSpecial(this.url)); if (host === failure) { return failure; } this.url.host = host; this.buffer = ""; this.state = "path start"; if (this.stateOverride) { return false; } } else { if (c === 91) { this.arrFlag = true; } else if (c === 93) { this.arrFlag = false; } this.buffer += cStr; } return true; }; URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { if (isASCIIDigit(c)) { this.buffer += cStr; } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || (isSpecial(this.url) && c === 92) || this.stateOverride) { if (this.buffer !== "") { const port = parseInt(this.buffer); if (port > Math.pow(2, 16) - 1) { this.parseError = true; return failure; } this.url.port = port === defaultPort(this.url.scheme) ? null : port; this.buffer = ""; } if (this.stateOverride) { return false; } this.state = "path start"; --this.pointer; } else { this.parseError = true; return failure; } return true; }; const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); URLStateMachine.prototype["parse file"] = function parseFile(c) { this.url.scheme = "file"; if (c === 47 || c === 92) { if (c === 92) { this.parseError = true; } this.state = "file slash"; } else if (this.base !== null && this.base.scheme === "file") { if (isNaN(c)) { this.url.host = this.base.host; this.url.path = this.base.path.slice(); this.url.query = this.base.query; } else if (c === 63) { this.url.host = this.base.host; this.url.path = this.base.path.slice(); this.url.query = ""; this.state = "query"; } else if (c === 35) { this.url.host = this.base.host; this.url.path = this.base.path.slice(); this.url.query = this.base.query; this.url.fragment = ""; this.state = "fragment"; } else { if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { this.url.host = this.base.host; this.url.path = this.base.path.slice(); shortenPath(this.url); } else { this.parseError = true; } this.state = "path"; --this.pointer; } } else { this.state = "path"; --this.pointer; } return true; }; URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { if (c === 47 || c === 92) { if (c === 92) { this.parseError = true; } this.state = "file host"; } else { if (this.base !== null && this.base.scheme === "file") { if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { this.url.path.push(this.base.path[0]); } else { this.url.host = this.base.host; } } this.state = "path"; --this.pointer; } return true; }; URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { --this.pointer; if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { this.parseError = true; this.state = "path"; } else if (this.buffer === "") { this.url.host = ""; if (this.stateOverride) { return false; } this.state = "path start"; } else { let host = parseHost(this.buffer, isSpecial(this.url)); if (host === failure) { return failure; } if (host === "localhost") { host = ""; } this.url.host = 
host; if (this.stateOverride) { return false; } this.buffer = ""; this.state = "path start"; } } else { this.buffer += cStr; } return true; }; URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { if (isSpecial(this.url)) { if (c === 92) { this.parseError = true; } this.state = "path"; if (c !== 47 && c !== 92) { --this.pointer; } } else if (!this.stateOverride && c === 63) { this.url.query = ""; this.state = "query"; } else if (!this.stateOverride && c === 35) { this.url.fragment = ""; this.state = "fragment"; } else if (c !== undefined) { this.state = "path"; if (c !== 47) { --this.pointer; } } return true; }; URLStateMachine.prototype["parse path"] = function parsePath(c) { if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || (!this.stateOverride && (c === 63 || c === 35))) { if (isSpecial(this.url) && c === 92) { this.parseError = true; } if (isDoubleDot(this.buffer)) { shortenPath(this.url); if (c !== 47 && !(isSpecial(this.url) && c === 92)) { this.url.path.push(""); } } else if (isSingleDot(this.buffer) && c !== 47 && !(isSpecial(this.url) && c === 92)) { this.url.path.push(""); } else if (!isSingleDot(this.buffer)) { if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { if (this.url.host !== "" && this.url.host !== null) { this.parseError = true; this.url.host = ""; } this.buffer = this.buffer[0] + ":"; } this.url.path.push(this.buffer); } this.buffer = ""; if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { while (this.url.path.length > 1 && this.url.path[0] === "") { this.parseError = true; this.url.path.shift(); } } if (c === 63) { this.url.query = ""; this.state = "query"; } if (c === 35) { this.url.fragment = ""; this.state = "fragment"; } } else { // TODO: If c is not a URL code point and not "%", parse error. if (c === 37 && (!isASCIIHex(this.input[this.pointer + 1]) || !isASCIIHex(this.input[this.pointer + 2]))) { this.parseError = true; } this.buffer += percentEncodeChar(c, isPathPercentEncode); } return true; }; URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { if (c === 63) { this.url.query = ""; this.state = "query"; } else if (c === 35) { this.url.fragment = ""; this.state = "fragment"; } else { // TODO: Add: not a URL code point if (!isNaN(c) && c !== 37) { this.parseError = true; } if (c === 37 && (!isASCIIHex(this.input[this.pointer + 1]) || !isASCIIHex(this.input[this.pointer + 2]))) { this.parseError = true; } if (!isNaN(c)) { this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); } } return true; }; URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { if (isNaN(c) || (!this.stateOverride && c === 35)) { if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { this.encodingOverride = "utf-8"; } const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead for (let i = 0; i < buffer.length; ++i) { if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || buffer[i] === 0x3C || buffer[i] === 0x3E) { this.url.query += percentEncode(buffer[i]); } else { this.url.query += String.fromCodePoint(buffer[i]); } } this.buffer = ""; if (c === 35) { this.url.fragment = ""; this.state = "fragment"; } } else { // TODO: If c is not a URL code point and not "%", parse error. 
if (c === 37 && (!isASCIIHex(this.input[this.pointer + 1]) || !isASCIIHex(this.input[this.pointer + 2]))) { this.parseError = true; } this.buffer += cStr; } return true; }; URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { if (isNaN(c)) { // do nothing } else if (c === 0x0) { this.parseError = true; } else { // TODO: If c is not a URL code point and not "%", parse error. if (c === 37 && (!isASCIIHex(this.input[this.pointer + 1]) || !isASCIIHex(this.input[this.pointer + 2]))) { this.parseError = true; } this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); } return true; }; function serializeURL(url, excludeFragment) { let output = url.scheme + ":"; if (url.host !== null) { output += "//"; if (url.username !== "" || url.password !== "") { output += url.username; if (url.password !== "") { output += ":" + url.password; } output += "@"; } output += serializeHost(url.host); if (url.port !== null) { output += ":" + url.port; } } else if (url.host === null && url.scheme === "file") { output += "//"; } if (url.cannotBeABaseURL) { output += url.path[0]; } else { for (const string of url.path) { output += "/" + string; } } if (url.query !== null) { output += "?" + url.query; } if (!excludeFragment && url.fragment !== null) { output += "#" + url.fragment; } return output; } function serializeOrigin(tuple) { let result = tuple.scheme + "://"; result += serializeHost(tuple.host); if (tuple.port !== null) { result += ":" + tuple.port; } return result; } module.exports.serializeURL = serializeURL; module.exports.serializeURLOrigin = function (url) { // https://url.spec.whatwg.org/#concept-url-origin switch (url.scheme) { case "blob": try { return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); } catch (e) { // serializing an opaque origin returns "null" return "null"; } case "ftp": case "gopher": case "http": case "https": case "ws": case "wss": return serializeOrigin({ scheme: url.scheme, host: url.host, port: url.port }); case "file": // spec says "exercise to the reader", chrome says "file://" return "file://"; default: // serializing an opaque origin returns "null" return "null"; } }; module.exports.basicURLParse = function (input, options) { if (options === undefined) { options = {}; } const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); if (usm.failure) { return "failure"; } return usm.url; }; module.exports.setTheUsername = function (url, username) { url.username = ""; const decoded = punycode.ucs2.decode(username); for (let i = 0; i < decoded.length; ++i) { url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); } }; module.exports.setThePassword = function (url, password) { url.password = ""; const decoded = punycode.ucs2.decode(password); for (let i = 0; i < decoded.length; ++i) { url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); } }; module.exports.serializeHost = serializeHost; module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; module.exports.serializeInteger = function (integer) { return String(integer); }; module.exports.parseURL = function (input, options) { if (options === undefined) { options = {}; } // We don't handle blobs, so this just delegates: return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); }; /***/ }), /***/ 39857: /***/ ((module) => { "use strict"; module.exports.mixin = function mixin(target, source) { const keys 
= Object.getOwnPropertyNames(source); for (let i = 0; i < keys.length; ++i) { Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i])); } }; module.exports.wrapperSymbol = Symbol("wrapper"); module.exports.implSymbol = Symbol("impl"); module.exports.wrapperForImpl = function (impl) { return impl[module.exports.wrapperSymbol]; }; module.exports.implForWrapper = function (wrapper) { return wrapper[module.exports.implSymbol]; }; /***/ }), /***/ 58264: /***/ ((module) => { // Returns a wrapper function that returns a wrapped callback // The wrapper function should do some stuff, and return a // presumably different callback function. // This makes sure that own properties are retained, so that // decorations and such are not lost along the way. module.exports = wrappy function wrappy (fn, cb) { if (fn && cb) return wrappy(fn)(cb) if (typeof fn !== 'function') throw new TypeError('need wrapper function') Object.keys(fn).forEach(function (k) { wrapper[k] = fn[k] }) return wrapper function wrapper() { var args = new Array(arguments.length) for (var i = 0; i < args.length; i++) { args[i] = arguments[i] } var ret = fn.apply(this, args) var cb = args[args.length-1] if (typeof ret === 'function' && ret !== cb) { Object.keys(cb).forEach(function (k) { ret[k] = cb[k] }) } return ret } } /***/ }), /***/ 30538: /***/ ((module) => { class Node { /// value; /// next; constructor(value) { this.value = value; // TODO: Remove this when targeting Node.js 12. this.next = undefined; } } class Queue { // TODO: Use private class fields when targeting Node.js 12. // #_head; // #_tail; // #_size; constructor() { this.clear(); } enqueue(value) { const node = new Node(value); if (this._head) { this._tail.next = node; this._tail = node; } else { this._head = node; this._tail = node; } this._size++; } dequeue() { const current = this._head; if (!current) { return; } this._head = this._head.next; this._size--; return current.value; } clear() { this._head = undefined; this._tail = undefined; this._size = 0; } get size() { return this._size; } * [Symbol.iterator]() { let current = this._head; while (current) { yield current.value; current = current.next; } } } module.exports = Queue; /***/ }), /***/ 42078: /***/ ((module) => { module.exports = eval("require")("encoding"); /***/ }), /***/ 60075: /***/ ((module) => { module.exports = eval("require")("supports-color"); /***/ }), /***/ 42613: /***/ ((module) => { "use strict"; module.exports = require("assert"); /***/ }), /***/ 90290: /***/ ((module) => { "use strict"; module.exports = require("async_hooks"); /***/ }), /***/ 20181: /***/ ((module) => { "use strict"; module.exports = require("buffer"); /***/ }), /***/ 35317: /***/ ((module) => { "use strict"; module.exports = require("child_process"); /***/ }), /***/ 64236: /***/ ((module) => { "use strict"; module.exports = require("console"); /***/ }), /***/ 76982: /***/ ((module) => { "use strict"; module.exports = require("crypto"); /***/ }), /***/ 31637: /***/ ((module) => { "use strict"; module.exports = require("diagnostics_channel"); /***/ }), /***/ 24434: /***/ ((module) => { "use strict"; module.exports = require("events"); /***/ }), /***/ 79896: /***/ ((module) => { "use strict"; module.exports = require("fs"); /***/ }), /***/ 91943: /***/ ((module) => { "use strict"; module.exports = require("fs/promises"); /***/ }), /***/ 58611: /***/ ((module) => { "use strict"; module.exports = require("http"); /***/ }), /***/ 85675: /***/ ((module) => { "use strict"; module.exports = 
require("http2"); /***/ }), /***/ 65692: /***/ ((module) => { "use strict"; module.exports = require("https"); /***/ }), /***/ 69278: /***/ ((module) => { "use strict"; module.exports = require("net"); /***/ }), /***/ 77598: /***/ ((module) => { "use strict"; module.exports = require("node:crypto"); /***/ }), /***/ 78474: /***/ ((module) => { "use strict"; module.exports = require("node:events"); /***/ }), /***/ 37067: /***/ ((module) => { "use strict"; module.exports = require("node:http"); /***/ }), /***/ 44708: /***/ ((module) => { "use strict"; module.exports = require("node:https"); /***/ }), /***/ 48161: /***/ ((module) => { "use strict"; module.exports = require("node:os"); /***/ }), /***/ 1708: /***/ ((module) => { "use strict"; module.exports = require("node:process"); /***/ }), /***/ 57075: /***/ ((module) => { "use strict"; module.exports = require("node:stream"); /***/ }), /***/ 57975: /***/ ((module) => { "use strict"; module.exports = require("node:util"); /***/ }), /***/ 38522: /***/ ((module) => { "use strict"; module.exports = require("node:zlib"); /***/ }), /***/ 70857: /***/ ((module) => { "use strict"; module.exports = require("os"); /***/ }), /***/ 16928: /***/ ((module) => { "use strict"; module.exports = require("path"); /***/ }), /***/ 82987: /***/ ((module) => { "use strict"; module.exports = require("perf_hooks"); /***/ }), /***/ 24876: /***/ ((module) => { "use strict"; module.exports = require("punycode"); /***/ }), /***/ 83480: /***/ ((module) => { "use strict"; module.exports = require("querystring"); /***/ }), /***/ 2203: /***/ ((module) => { "use strict"; module.exports = require("stream"); /***/ }), /***/ 63774: /***/ ((module) => { "use strict"; module.exports = require("stream/web"); /***/ }), /***/ 13193: /***/ ((module) => { "use strict"; module.exports = require("string_decoder"); /***/ }), /***/ 53557: /***/ ((module) => { "use strict"; module.exports = require("timers"); /***/ }), /***/ 64756: /***/ ((module) => { "use strict"; module.exports = require("tls"); /***/ }), /***/ 52018: /***/ ((module) => { "use strict"; module.exports = require("tty"); /***/ }), /***/ 87016: /***/ ((module) => { "use strict"; module.exports = require("url"); /***/ }), /***/ 39023: /***/ ((module) => { "use strict"; module.exports = require("util"); /***/ }), /***/ 98253: /***/ ((module) => { "use strict"; module.exports = require("util/types"); /***/ }), /***/ 28167: /***/ ((module) => { "use strict"; module.exports = require("worker_threads"); /***/ }), /***/ 43106: /***/ ((module) => { "use strict"; module.exports = require("zlib"); /***/ }), /***/ 50198: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AzureKeyCredential = void 0; /** * A static-key-based credential that supports updating * the underlying key value. */ class AzureKeyCredential { /** * The value of the key to be used in authentication */ get key() { return this._key; } /** * Create an instance of an AzureKeyCredential for use * with a service client. * * @param key - The initial value of the key to use in authentication */ constructor(key) { if (!key) { throw new Error("key must be a non-empty string"); } this._key = key; } /** * Change the value of the key. * * Updates will take effect upon the next request after * updating the key value. 
* * @param newKey - The new key value to be used */ update(newKey) { this._key = newKey; } } exports.AzureKeyCredential = AzureKeyCredential; //# sourceMappingURL=azureKeyCredential.js.map /***/ }), /***/ 41295: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AzureNamedKeyCredential = void 0; exports.isNamedKeyCredential = isNamedKeyCredential; const core_util_1 = __nccwpck_require__(87779); /** * A static name/key-based credential that supports updating * the underlying name and key values. */ class AzureNamedKeyCredential { /** * The value of the key to be used in authentication. */ get key() { return this._key; } /** * The value of the name to be used in authentication. */ get name() { return this._name; } /** * Create an instance of an AzureNamedKeyCredential for use * with a service client. * * @param name - The initial value of the name to use in authentication. * @param key - The initial value of the key to use in authentication. */ constructor(name, key) { if (!name || !key) { throw new TypeError("name and key must be non-empty strings"); } this._name = name; this._key = key; } /** * Change the value of the key. * * Updates will take effect upon the next request after * updating the key value. * * @param newName - The new name value to be used. * @param newKey - The new key value to be used. */ update(newName, newKey) { if (!newName || !newKey) { throw new TypeError("newName and newKey must be non-empty strings"); } this._name = newName; this._key = newKey; } } exports.AzureNamedKeyCredential = AzureNamedKeyCredential; /** * Tests an object to determine whether it implements NamedKeyCredential. * * @param credential - The assumed NamedKeyCredential to be tested. */ function isNamedKeyCredential(credential) { return ((0, core_util_1.isObjectWithProperties)(credential, ["name", "key"]) && typeof credential.key === "string" && typeof credential.name === "string"); } //# sourceMappingURL=azureNamedKeyCredential.js.map /***/ }), /***/ 56608: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AzureSASCredential = void 0; exports.isSASCredential = isSASCredential; const core_util_1 = __nccwpck_require__(87779); /** * A static-signature-based credential that supports updating * the underlying signature value. */ class AzureSASCredential { /** * The value of the shared access signature to be used in authentication */ get signature() { return this._signature; } /** * Create an instance of an AzureSASCredential for use * with a service client. * * @param signature - The initial value of the shared access signature to use in authentication */ constructor(signature) { if (!signature) { throw new Error("shared access signature must be a non-empty string"); } this._signature = signature; } /** * Change the value of the signature. * * Updates will take effect upon the next request after * updating the signature value. 
* * @param newSignature - The new shared access signature value to be used */ update(newSignature) { if (!newSignature) { throw new Error("shared access signature must be a non-empty string"); } this._signature = newSignature; } } exports.AzureSASCredential = AzureSASCredential; /** * Tests an object to determine whether it implements SASCredential. * * @param credential - The assumed SASCredential to be tested. */ function isSASCredential(credential) { return ((0, core_util_1.isObjectWithProperties)(credential, ["signature"]) && typeof credential.signature === "string"); } //# sourceMappingURL=azureSASCredential.js.map /***/ }), /***/ 50417: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isTokenCredential = exports.isSASCredential = exports.AzureSASCredential = exports.isNamedKeyCredential = exports.AzureNamedKeyCredential = exports.isKeyCredential = exports.AzureKeyCredential = void 0; var azureKeyCredential_js_1 = __nccwpck_require__(50198); Object.defineProperty(exports, "AzureKeyCredential", ({ enumerable: true, get: function () { return azureKeyCredential_js_1.AzureKeyCredential; } })); var keyCredential_js_1 = __nccwpck_require__(99155); Object.defineProperty(exports, "isKeyCredential", ({ enumerable: true, get: function () { return keyCredential_js_1.isKeyCredential; } })); var azureNamedKeyCredential_js_1 = __nccwpck_require__(41295); Object.defineProperty(exports, "AzureNamedKeyCredential", ({ enumerable: true, get: function () { return azureNamedKeyCredential_js_1.AzureNamedKeyCredential; } })); Object.defineProperty(exports, "isNamedKeyCredential", ({ enumerable: true, get: function () { return azureNamedKeyCredential_js_1.isNamedKeyCredential; } })); var azureSASCredential_js_1 = __nccwpck_require__(56608); Object.defineProperty(exports, "AzureSASCredential", ({ enumerable: true, get: function () { return azureSASCredential_js_1.AzureSASCredential; } })); Object.defineProperty(exports, "isSASCredential", ({ enumerable: true, get: function () { return azureSASCredential_js_1.isSASCredential; } })); var tokenCredential_js_1 = __nccwpck_require__(86881); Object.defineProperty(exports, "isTokenCredential", ({ enumerable: true, get: function () { return tokenCredential_js_1.isTokenCredential; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 99155: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isKeyCredential = isKeyCredential; const core_util_1 = __nccwpck_require__(87779); /** * Tests an object to determine whether it implements KeyCredential. * * @param credential - The assumed KeyCredential to be tested. */ function isKeyCredential(credential) { return (0, core_util_1.isObjectWithProperties)(credential, ["key"]) && typeof credential.key === "string"; } //# sourceMappingURL=keyCredential.js.map /***/ }), /***/ 86881: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
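/**
 * Editorial illustration (not part of the original bundle): isTokenCredential, defined in this
 * module, is a duck-type check rather than an instanceof check. A minimal sketch of what passes
 * and what does not (the credential objects below are hypothetical):
 *
 * ```js
 * const cred = {
 *   // any object whose getToken is a function (and that lacks signRequest) is accepted
 *   getToken: async (scopes) => ({ token: "<jwt>", expiresOnTimestamp: Date.now() + 3600 * 1000 }),
 * };
 * isTokenCredential(cred);           // true
 * isTokenCredential({ key: "abc" }); // false – no getToken function
 * // A ServiceClientCredentials-style object (signRequest plus a zero-argument getToken) is rejected:
 * isTokenCredential({ getToken: () => null, signRequest: () => null }); // false
 * ```
 */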
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isBearerToken = isBearerToken; exports.isPopToken = isPopToken; exports.isTokenCredential = isTokenCredential; /** * @internal * @param accessToken - Access token * @returns Whether a token is bearer type or not */ function isBearerToken(accessToken) { return !accessToken.tokenType || accessToken.tokenType === "Bearer"; } /** * @internal * @param accessToken - Access token * @returns Whether a token is Pop token or not */ function isPopToken(accessToken) { return accessToken.tokenType === "pop"; } /** * Tests an object to determine whether it implements TokenCredential. * * @param credential - The assumed TokenCredential to be tested. */ function isTokenCredential(credential) { // Check for an object with a 'getToken' function and possibly with // a 'signRequest' function. We do this check to make sure that // a ServiceClientCredentials implementor (like TokenClientCredentials // in ms-rest-nodeauth) doesn't get mistaken for a TokenCredential if // it doesn't actually implement TokenCredential also. const castCredential = credential; return (castCredential && typeof castCredential.getToken === "function" && (castCredential.signRequest === undefined || castCredential.getToken.length > 0)); } //# sourceMappingURL=tokenCredential.js.map /***/ }), /***/ 17698: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.authorizeRequestOnClaimChallenge = exports.parseCAEChallenge = void 0; const log_js_1 = __nccwpck_require__(89994); const base64_js_1 = __nccwpck_require__(20741); /** * Converts: `Bearer a="b", c="d", Bearer d="e", f="g"`. * Into: `[ { a: 'b', c: 'd' }, { d: 'e', f: 'g' } ]`. * * @internal */ function parseCAEChallenge(challenges) { const bearerChallenges = `, ${challenges.trim()}`.split(", Bearer ").filter((x) => x); return bearerChallenges.map((challenge) => { const challengeParts = `${challenge.trim()}, `.split('", ').filter((x) => x); const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split('="'))); // Key-value pairs to plain object: return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); }); } exports.parseCAEChallenge = parseCAEChallenge; /** * This function can be used as a callback for the `bearerTokenAuthenticationPolicy` of `@azure/core-rest-pipeline`, to support CAE challenges: * [Continuous Access Evaluation](https://docs.microsoft.com/azure/active-directory/conditional-access/concept-continuous-access-evaluation). * * Call the `bearerTokenAuthenticationPolicy` with the following options: * * ```ts * import { bearerTokenAuthenticationPolicy } from "@azure/core-rest-pipeline"; * import { authorizeRequestOnClaimChallenge } from "@azure/core-client"; * * const bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy({ * authorizeRequestOnChallenge: authorizeRequestOnClaimChallenge * }); * ``` * * Once provided, the `bearerTokenAuthenticationPolicy` policy will internally handle Continuous Access Evaluation (CAE) challenges. * When it can't complete a challenge it will return the 401 (unauthorized) response from ARM. 
* * Example challenge with claims: * * ``` * Bearer authorization_uri="https://login.windows-ppe.net/", error="invalid_token", * error_description="User session has been revoked", * claims="eyJhY2Nlc3NfdG9rZW4iOnsibmJmIjp7ImVzc2VudGlhbCI6dHJ1ZSwgInZhbHVlIjoiMTYwMzc0MjgwMCJ9fX0=" * ``` */ async function authorizeRequestOnClaimChallenge(onChallengeOptions) { const { scopes, response } = onChallengeOptions; const logger = onChallengeOptions.logger || log_js_1.logger; const challenge = response.headers.get("WWW-Authenticate"); if (!challenge) { logger.info(`The WWW-Authenticate header was missing. Failed to perform the Continuous Access Evaluation authentication flow.`); return false; } const challenges = parseCAEChallenge(challenge) || []; const parsedChallenge = challenges.find((x) => x.claims); if (!parsedChallenge) { logger.info(`The WWW-Authenticate header was missing the necessary "claims" to perform the Continuous Access Evaluation authentication flow.`); return false; } const accessToken = await onChallengeOptions.getAccessToken(parsedChallenge.scope ? [parsedChallenge.scope] : scopes, { claims: (0, base64_js_1.decodeStringToString)(parsedChallenge.claims), }); if (!accessToken) { return false; } onChallengeOptions.request.headers.set("Authorization", `Bearer ${accessToken.token}`); return true; } exports.authorizeRequestOnClaimChallenge = authorizeRequestOnClaimChallenge; //# sourceMappingURL=authorizeRequestOnClaimChallenge.js.map /***/ }), /***/ 97454: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.authorizeRequestOnTenantChallenge = void 0; /** * A set of constants used internally when processing requests. */ const Constants = { DefaultScope: "/.default", /** * Defines constants for use with HTTP headers. */ HeaderConstants: { /** * The Authorization header. */ AUTHORIZATION: "authorization", }, }; function isUuid(text) { return /^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$/.test(text); } /** * Defines a callback to handle auth challenge for Storage APIs. * This implements the bearer challenge process described here: https://docs.microsoft.com/rest/api/storageservices/authorize-with-azure-active-directory#bearer-challenge * Handling has specific features for storage that departs to the general AAD challenge docs. **/ const authorizeRequestOnTenantChallenge = async (challengeOptions) => { const requestOptions = requestToOptions(challengeOptions.request); const challenge = getChallenge(challengeOptions.response); if (challenge) { const challengeInfo = parseChallenge(challenge); const challengeScopes = buildScopes(challengeOptions, challengeInfo); const tenantId = extractTenantId(challengeInfo); if (!tenantId) { return false; } const accessToken = await challengeOptions.getAccessToken(challengeScopes, Object.assign(Object.assign({}, requestOptions), { tenantId })); if (!accessToken) { return false; } challengeOptions.request.headers.set(Constants.HeaderConstants.AUTHORIZATION, `Bearer ${accessToken.token}`); return true; } return false; }; exports.authorizeRequestOnTenantChallenge = authorizeRequestOnTenantChallenge; /** * Extracts the tenant id from the challenge information * The tenant id is contained in the authorization_uri as the first * path part. 
*/ function extractTenantId(challengeInfo) { const parsedAuthUri = new URL(challengeInfo.authorization_uri); const pathSegments = parsedAuthUri.pathname.split("/"); const tenantId = pathSegments[1]; if (tenantId && isUuid(tenantId)) { return tenantId; } return undefined; } /** * Builds the authentication scopes based on the information that comes in the * challenge information. Scopes url is present in the resource_id, if it is empty * we keep using the original scopes. */ function buildScopes(challengeOptions, challengeInfo) { if (!challengeInfo.resource_id) { return challengeOptions.scopes; } const challengeScopes = new URL(challengeInfo.resource_id); challengeScopes.pathname = Constants.DefaultScope; let scope = challengeScopes.toString(); if (scope === "https://disk.azure.com/.default") { // the extra slash is required by the service scope = "https://disk.azure.com//.default"; } return [scope]; } /** * We will retrieve the challenge only if the response status code was 401, * and if the response contained the header "WWW-Authenticate" with a non-empty value. */ function getChallenge(response) { const challenge = response.headers.get("WWW-Authenticate"); if (response.status === 401 && challenge) { return challenge; } return; } /** * Converts: `Bearer a="b" c="d"`. * Into: `[ { a: 'b', c: 'd' }]`. * * @internal */ function parseChallenge(challenge) { const bearerChallenge = challenge.slice("Bearer ".length); const challengeParts = `${bearerChallenge.trim()} `.split(" ").filter((x) => x); const keyValuePairs = challengeParts.map((keyValue) => (([key, value]) => ({ [key]: value }))(keyValue.trim().split("="))); // Key-value pairs to plain object: return keyValuePairs.reduce((a, b) => (Object.assign(Object.assign({}, a), b)), {}); } /** * Extracts the options form a Pipeline Request for later re-use */ function requestToOptions(request) { return { abortSignal: request.abortSignal, requestOptions: { timeout: request.timeout, }, tracingOptions: request.tracingOptions, }; } //# sourceMappingURL=authorizeRequestOnTenantChallenge.js.map /***/ }), /***/ 20741: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.decodeStringToString = exports.decodeString = exports.encodeByteArray = exports.encodeString = void 0; /** * Encodes a string in base64 format. * @param value - the string to encode * @internal */ function encodeString(value) { return Buffer.from(value).toString("base64"); } exports.encodeString = encodeString; /** * Encodes a byte array in base64 format. * @param value - the Uint8Aray to encode * @internal */ function encodeByteArray(value) { const bufferValue = value instanceof Buffer ? value : Buffer.from(value.buffer); return bufferValue.toString("base64"); } exports.encodeByteArray = encodeByteArray; /** * Decodes a base64 string into a byte array. * @param value - the base64 string to decode * @internal */ function decodeString(value) { return Buffer.from(value, "base64"); } exports.decodeString = decodeString; /** * Decodes a base64 string into a string. * @param value - the base64 string to decode * @internal */ function decodeStringToString(value) { return Buffer.from(value, "base64").toString(); } exports.decodeStringToString = decodeStringToString; //# sourceMappingURL=base64.js.map /***/ }), /***/ 90111: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. 
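/**
 * Editorial illustration (not part of the original bundle): the deserializationPolicy factory
 * defined in this module accepts optional expectedContentTypes overrides for the JSON and XML
 * content types it will parse. A minimal sketch, assuming a Pipeline instance from
 * @azure/core-rest-pipeline (the `pipeline` variable below is hypothetical):
 *
 * ```js
 * const policy = deserializationPolicy({
 *   expectedContentTypes: {
 *     json: ["application/json", "text/json", "application/merge-patch+json"],
 *     xml: ["application/xml", "application/atom+xml"],
 *   },
 *   // parseXML is only required when XML responses are expected; it is omitted here.
 * });
 * pipeline.addPolicy(policy);
 * ```
 */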
// Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.deserializationPolicy = exports.deserializationPolicyName = void 0; const interfaces_js_1 = __nccwpck_require__(56058); const core_rest_pipeline_1 = __nccwpck_require__(20778); const serializer_js_1 = __nccwpck_require__(31530); const operationHelpers_js_1 = __nccwpck_require__(19688); const defaultJsonContentTypes = ["application/json", "text/json"]; const defaultXmlContentTypes = ["application/xml", "application/atom+xml"]; /** * The programmatic identifier of the deserializationPolicy. */ exports.deserializationPolicyName = "deserializationPolicy"; /** * This policy handles parsing out responses according to OperationSpecs on the request. */ function deserializationPolicy(options = {}) { var _a, _b, _c, _d, _e, _f, _g; const jsonContentTypes = (_b = (_a = options.expectedContentTypes) === null || _a === void 0 ? void 0 : _a.json) !== null && _b !== void 0 ? _b : defaultJsonContentTypes; const xmlContentTypes = (_d = (_c = options.expectedContentTypes) === null || _c === void 0 ? void 0 : _c.xml) !== null && _d !== void 0 ? _d : defaultXmlContentTypes; const parseXML = options.parseXML; const serializerOptions = options.serializerOptions; const updatedOptions = { xml: { rootName: (_e = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _e !== void 0 ? _e : "", includeRoot: (_f = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _f !== void 0 ? _f : false, xmlCharKey: (_g = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _g !== void 0 ? _g : interfaces_js_1.XML_CHARKEY, }, }; return { name: exports.deserializationPolicyName, async sendRequest(request, next) { const response = await next(request); return deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, updatedOptions, parseXML); }, }; } exports.deserializationPolicy = deserializationPolicy; function getOperationResponseMap(parsedResponse) { let result; const request = parsedResponse.request; const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; if (operationSpec) { if (!(operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter)) { result = operationSpec.responses[parsedResponse.status]; } else { result = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationResponseGetter(operationSpec, parsedResponse); } } return result; } function shouldDeserializeResponse(parsedResponse) { const request = parsedResponse.request; const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); const shouldDeserialize = operationInfo === null || operationInfo === void 0 ? 
void 0 : operationInfo.shouldDeserialize; let result; if (shouldDeserialize === undefined) { result = true; } else if (typeof shouldDeserialize === "boolean") { result = shouldDeserialize; } else { result = shouldDeserialize(parsedResponse); } return result; } async function deserializeResponseBody(jsonContentTypes, xmlContentTypes, response, options, parseXML) { const parsedResponse = await parse(jsonContentTypes, xmlContentTypes, response, options, parseXML); if (!shouldDeserializeResponse(parsedResponse)) { return parsedResponse; } const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(parsedResponse.request); const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; if (!operationSpec || !operationSpec.responses) { return parsedResponse; } const responseSpec = getOperationResponseMap(parsedResponse); const { error, shouldReturnResponse } = handleErrorResponse(parsedResponse, operationSpec, responseSpec, options); if (error) { throw error; } else if (shouldReturnResponse) { return parsedResponse; } // An operation response spec does exist for current status code, so // use it to deserialize the response. if (responseSpec) { if (responseSpec.bodyMapper) { let valueToDeserialize = parsedResponse.parsedBody; if (operationSpec.isXML && responseSpec.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { valueToDeserialize = typeof valueToDeserialize === "object" ? valueToDeserialize[responseSpec.bodyMapper.xmlElementName] : []; } try { parsedResponse.parsedBody = operationSpec.serializer.deserialize(responseSpec.bodyMapper, valueToDeserialize, "operationRes.parsedBody", options); } catch (deserializeError) { const restError = new core_rest_pipeline_1.RestError(`Error ${deserializeError} occurred in deserializing the responseBody - ${parsedResponse.bodyAsText}`, { statusCode: parsedResponse.status, request: parsedResponse.request, response: parsedResponse, }); throw restError; } } else if (operationSpec.httpMethod === "HEAD") { // head methods never have a body, but we return a boolean to indicate presence/absence of the resource parsedResponse.parsedBody = response.status >= 200 && response.status < 300; } if (responseSpec.headersMapper) { parsedResponse.parsedHeaders = operationSpec.serializer.deserialize(responseSpec.headersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders", { xml: {}, ignoreUnknownProperties: true }); } } return parsedResponse; } function isOperationSpecEmpty(operationSpec) { const expectedStatusCodes = Object.keys(operationSpec.responses); return (expectedStatusCodes.length === 0 || (expectedStatusCodes.length === 1 && expectedStatusCodes[0] === "default")); } function handleErrorResponse(parsedResponse, operationSpec, responseSpec, options) { var _a; const isSuccessByStatus = 200 <= parsedResponse.status && parsedResponse.status < 300; const isExpectedStatusCode = isOperationSpecEmpty(operationSpec) ? isSuccessByStatus : !!responseSpec; if (isExpectedStatusCode) { if (responseSpec) { if (!responseSpec.isError) { return { error: null, shouldReturnResponse: false }; } } else { return { error: null, shouldReturnResponse: false }; } } const errorResponseSpec = responseSpec !== null && responseSpec !== void 0 ? responseSpec : operationSpec.responses.default; const initialErrorMessage = ((_a = parsedResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(parsedResponse.status)) ? 
`Unexpected status code: ${parsedResponse.status}` : parsedResponse.bodyAsText; const error = new core_rest_pipeline_1.RestError(initialErrorMessage, { statusCode: parsedResponse.status, request: parsedResponse.request, response: parsedResponse, }); // If the item failed but there's no error spec or default spec to deserialize the error, // we should fail so we just throw the parsed response if (!errorResponseSpec) { throw error; } const defaultBodyMapper = errorResponseSpec.bodyMapper; const defaultHeadersMapper = errorResponseSpec.headersMapper; try { // If error response has a body, try to deserialize it using default body mapper. // Then try to extract error code & message from it if (parsedResponse.parsedBody) { const parsedBody = parsedResponse.parsedBody; let deserializedError; if (defaultBodyMapper) { let valueToDeserialize = parsedBody; if (operationSpec.isXML && defaultBodyMapper.type.name === serializer_js_1.MapperTypeNames.Sequence) { valueToDeserialize = []; const elementName = defaultBodyMapper.xmlElementName; if (typeof parsedBody === "object" && elementName) { valueToDeserialize = parsedBody[elementName]; } } deserializedError = operationSpec.serializer.deserialize(defaultBodyMapper, valueToDeserialize, "error.response.parsedBody", options); } const internalError = parsedBody.error || deserializedError || parsedBody; error.code = internalError.code; if (internalError.message) { error.message = internalError.message; } if (defaultBodyMapper) { error.response.parsedBody = deserializedError; } } // If error response has headers, try to deserialize it using default header mapper if (parsedResponse.headers && defaultHeadersMapper) { error.response.parsedHeaders = operationSpec.serializer.deserialize(defaultHeadersMapper, parsedResponse.headers.toJSON(), "operationRes.parsedHeaders"); } } catch (defaultError) { error.message = `Error "${defaultError.message}" occurred in deserializing the responseBody - "${parsedResponse.bodyAsText}" for the default response.`; } return { error, shouldReturnResponse: false }; } async function parse(jsonContentTypes, xmlContentTypes, operationResponse, opts, parseXML) { var _a; if (!((_a = operationResponse.request.streamResponseStatusCodes) === null || _a === void 0 ? void 0 : _a.has(operationResponse.status)) && operationResponse.bodyAsText) { const text = operationResponse.bodyAsText; const contentType = operationResponse.headers.get("Content-Type") || ""; const contentComponents = !contentType ? 
[] : contentType.split(";").map((component) => component.toLowerCase()); try { if (contentComponents.length === 0 || contentComponents.some((component) => jsonContentTypes.indexOf(component) !== -1)) { operationResponse.parsedBody = JSON.parse(text); return operationResponse; } else if (contentComponents.some((component) => xmlContentTypes.indexOf(component) !== -1)) { if (!parseXML) { throw new Error("Parsing XML not supported."); } const body = await parseXML(text, opts.xml); operationResponse.parsedBody = body; return operationResponse; } } catch (err) { const msg = `Error "${err}" occurred while parsing the response body - ${operationResponse.bodyAsText}.`; const errCode = err.code || core_rest_pipeline_1.RestError.PARSE_ERROR; const e = new core_rest_pipeline_1.RestError(msg, { code: errCode, statusCode: operationResponse.status, request: operationResponse.request, response: operationResponse, }); throw e; } } return operationResponse; } //# sourceMappingURL=deserializationPolicy.js.map /***/ }), /***/ 26323: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getCachedDefaultHttpClient = void 0; const core_rest_pipeline_1 = __nccwpck_require__(20778); let cachedHttpClient; function getCachedDefaultHttpClient() { if (!cachedHttpClient) { cachedHttpClient = (0, core_rest_pipeline_1.createDefaultHttpClient)(); } return cachedHttpClient; } exports.getCachedDefaultHttpClient = getCachedDefaultHttpClient; //# sourceMappingURL=httpClientCache.js.map /***/ }), /***/ 60160: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
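/*
 * Usage sketch (illustrative only; kept as a comment so it is not executed as part of
 * this bundle): the module below is the public re-export surface of @azure/core-client.
 * When the package is consumed un-bundled, a caller might exercise the serializer like
 * this; the require() specifier is an assumption, since inside this webpack bundle
 * modules are addressed by numeric id instead.
 *
 *   const { createSerializer, MapperTypeNames } = require("@azure/core-client");
 *   const serializer = createSerializer({}, false);
 *   // A String mapper type-checks the value and passes it through, so body === "hello".
 *   const body = serializer.serialize(
 *     { serializedName: "greeting", type: { name: MapperTypeNames.String } },
 *     "hello",
 *     "greeting"
 *   );
 */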
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.authorizeRequestOnTenantChallenge = exports.authorizeRequestOnClaimChallenge = exports.serializationPolicyName = exports.serializationPolicy = exports.deserializationPolicyName = exports.deserializationPolicy = exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.createClientPipeline = exports.ServiceClient = exports.MapperTypeNames = exports.createSerializer = void 0; var serializer_js_1 = __nccwpck_require__(31530); Object.defineProperty(exports, "createSerializer", ({ enumerable: true, get: function () { return serializer_js_1.createSerializer; } })); Object.defineProperty(exports, "MapperTypeNames", ({ enumerable: true, get: function () { return serializer_js_1.MapperTypeNames; } })); var serviceClient_js_1 = __nccwpck_require__(89544); Object.defineProperty(exports, "ServiceClient", ({ enumerable: true, get: function () { return serviceClient_js_1.ServiceClient; } })); var pipeline_js_1 = __nccwpck_require__(74136); Object.defineProperty(exports, "createClientPipeline", ({ enumerable: true, get: function () { return pipeline_js_1.createClientPipeline; } })); var interfaces_js_1 = __nccwpck_require__(56058); Object.defineProperty(exports, "XML_ATTRKEY", ({ enumerable: true, get: function () { return interfaces_js_1.XML_ATTRKEY; } })); Object.defineProperty(exports, "XML_CHARKEY", ({ enumerable: true, get: function () { return interfaces_js_1.XML_CHARKEY; } })); var deserializationPolicy_js_1 = __nccwpck_require__(90111); Object.defineProperty(exports, "deserializationPolicy", ({ enumerable: true, get: function () { return deserializationPolicy_js_1.deserializationPolicy; } })); Object.defineProperty(exports, "deserializationPolicyName", ({ enumerable: true, get: function () { return deserializationPolicy_js_1.deserializationPolicyName; } })); var serializationPolicy_js_1 = __nccwpck_require__(56234); Object.defineProperty(exports, "serializationPolicy", ({ enumerable: true, get: function () { return serializationPolicy_js_1.serializationPolicy; } })); Object.defineProperty(exports, "serializationPolicyName", ({ enumerable: true, get: function () { return serializationPolicy_js_1.serializationPolicyName; } })); var authorizeRequestOnClaimChallenge_js_1 = __nccwpck_require__(17698); Object.defineProperty(exports, "authorizeRequestOnClaimChallenge", ({ enumerable: true, get: function () { return authorizeRequestOnClaimChallenge_js_1.authorizeRequestOnClaimChallenge; } })); var authorizeRequestOnTenantChallenge_js_1 = __nccwpck_require__(97454); Object.defineProperty(exports, "authorizeRequestOnTenantChallenge", ({ enumerable: true, get: function () { return authorizeRequestOnTenantChallenge_js_1.authorizeRequestOnTenantChallenge; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 92066: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getPathStringFromParameter = exports.getStreamingResponseStatusCodes = void 0; const serializer_js_1 = __nccwpck_require__(31530); /** * Gets the list of status codes for streaming responses. 
* @internal */ function getStreamingResponseStatusCodes(operationSpec) { const result = new Set(); for (const statusCode in operationSpec.responses) { const operationResponse = operationSpec.responses[statusCode]; if (operationResponse.bodyMapper && operationResponse.bodyMapper.type.name === serializer_js_1.MapperTypeNames.Stream) { result.add(Number(statusCode)); } } return result; } exports.getStreamingResponseStatusCodes = getStreamingResponseStatusCodes; /** * Get the path to this parameter's value as a dotted string (a.b.c). * @param parameter - The parameter to get the path string for. * @returns The path to this parameter's value as a dotted string. * @internal */ function getPathStringFromParameter(parameter) { const { parameterPath, mapper } = parameter; let result; if (typeof parameterPath === "string") { result = parameterPath; } else if (Array.isArray(parameterPath)) { result = parameterPath.join("."); } else { result = mapper.serializedName; } return result; } exports.getPathStringFromParameter = getPathStringFromParameter; //# sourceMappingURL=interfaceHelpers.js.map /***/ }), /***/ 56058: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0; /** * Default key used to access the XML attributes. */ exports.XML_ATTRKEY = "$"; /** * Default key used to access the XML value content. */ exports.XML_CHARKEY = "_"; //# sourceMappingURL=interfaces.js.map /***/ }), /***/ 89994: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.logger = void 0; const logger_1 = __nccwpck_require__(26515); exports.logger = (0, logger_1.createClientLogger)("core-client"); //# sourceMappingURL=log.js.map /***/ }), /***/ 19688: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getOperationRequestInfo = exports.getOperationArgumentValueFromParameter = void 0; const state_js_1 = __nccwpck_require__(33345); /** * @internal * Retrieves the value to use for a given operation argument * @param operationArguments - The arguments passed from the generated client * @param parameter - The parameter description * @param fallbackObject - If something isn't found in the arguments bag, look here. * Generally used to look at the service client properties. 
*/ function getOperationArgumentValueFromParameter(operationArguments, parameter, fallbackObject) { let parameterPath = parameter.parameterPath; const parameterMapper = parameter.mapper; let value; if (typeof parameterPath === "string") { parameterPath = [parameterPath]; } if (Array.isArray(parameterPath)) { if (parameterPath.length > 0) { if (parameterMapper.isConstant) { value = parameterMapper.defaultValue; } else { let propertySearchResult = getPropertyFromParameterPath(operationArguments, parameterPath); if (!propertySearchResult.propertyFound && fallbackObject) { propertySearchResult = getPropertyFromParameterPath(fallbackObject, parameterPath); } let useDefaultValue = false; if (!propertySearchResult.propertyFound) { useDefaultValue = parameterMapper.required || (parameterPath[0] === "options" && parameterPath.length === 2); } value = useDefaultValue ? parameterMapper.defaultValue : propertySearchResult.propertyValue; } } } else { if (parameterMapper.required) { value = {}; } for (const propertyName in parameterPath) { const propertyMapper = parameterMapper.type.modelProperties[propertyName]; const propertyPath = parameterPath[propertyName]; const propertyValue = getOperationArgumentValueFromParameter(operationArguments, { parameterPath: propertyPath, mapper: propertyMapper, }, fallbackObject); if (propertyValue !== undefined) { if (!value) { value = {}; } value[propertyName] = propertyValue; } } } return value; } exports.getOperationArgumentValueFromParameter = getOperationArgumentValueFromParameter; function getPropertyFromParameterPath(parent, parameterPath) { const result = { propertyFound: false }; let i = 0; for (; i < parameterPath.length; ++i) { const parameterPathPart = parameterPath[i]; // Make sure to check inherited properties too, so don't use hasOwnProperty(). if (parent && parameterPathPart in parent) { parent = parent[parameterPathPart]; } else { break; } } if (i === parameterPath.length) { result.propertyValue = parent; result.propertyFound = true; } return result; } const originalRequestSymbol = Symbol.for("@azure/core-client original request"); function hasOriginalRequest(request) { return originalRequestSymbol in request; } function getOperationRequestInfo(request) { if (hasOriginalRequest(request)) { return getOperationRequestInfo(request[originalRequestSymbol]); } let info = state_js_1.state.operationRequestMap.get(request); if (!info) { info = {}; state_js_1.state.operationRequestMap.set(request, info); } return info; } exports.getOperationRequestInfo = getOperationRequestInfo; //# sourceMappingURL=operationHelpers.js.map /***/ }), /***/ 74136: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createClientPipeline = void 0; const deserializationPolicy_js_1 = __nccwpck_require__(90111); const core_rest_pipeline_1 = __nccwpck_require__(20778); const serializationPolicy_js_1 = __nccwpck_require__(56234); /** * Creates a new Pipeline for use with a Service Client. * Adds in deserializationPolicy by default. * Also adds in bearerTokenAuthenticationPolicy if passed a TokenCredential. * @param options - Options to customize the created pipeline. */ function createClientPipeline(options = {}) { const pipeline = (0, core_rest_pipeline_1.createPipelineFromOptions)(options !== null && options !== void 0 ? 
options : {}); if (options.credentialOptions) { pipeline.addPolicy((0, core_rest_pipeline_1.bearerTokenAuthenticationPolicy)({ credential: options.credentialOptions.credential, scopes: options.credentialOptions.credentialScopes, })); } pipeline.addPolicy((0, serializationPolicy_js_1.serializationPolicy)(options.serializationOptions), { phase: "Serialize" }); pipeline.addPolicy((0, deserializationPolicy_js_1.deserializationPolicy)(options.deserializationOptions), { phase: "Deserialize", }); return pipeline; } exports.createClientPipeline = createClientPipeline; //# sourceMappingURL=pipeline.js.map /***/ }), /***/ 56234: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.serializeRequestBody = exports.serializeHeaders = exports.serializationPolicy = exports.serializationPolicyName = void 0; const interfaces_js_1 = __nccwpck_require__(56058); const operationHelpers_js_1 = __nccwpck_require__(19688); const serializer_js_1 = __nccwpck_require__(31530); const interfaceHelpers_js_1 = __nccwpck_require__(92066); /** * The programmatic identifier of the serializationPolicy. */ exports.serializationPolicyName = "serializationPolicy"; /** * This policy handles assembling the request body and headers using * an OperationSpec and OperationArguments on the request. */ function serializationPolicy(options = {}) { const stringifyXML = options.stringifyXML; return { name: exports.serializationPolicyName, async sendRequest(request, next) { const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); const operationSpec = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationSpec; const operationArguments = operationInfo === null || operationInfo === void 0 ? void 0 : operationInfo.operationArguments; if (operationSpec && operationArguments) { serializeHeaders(request, operationArguments, operationSpec); serializeRequestBody(request, operationArguments, operationSpec, stringifyXML); } return next(request); }, }; } exports.serializationPolicy = serializationPolicy; /** * @internal */ function serializeHeaders(request, operationArguments, operationSpec) { var _a, _b; if (operationSpec.headerParameters) { for (const headerParameter of operationSpec.headerParameters) { let headerValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, headerParameter); if ((headerValue !== null && headerValue !== undefined) || headerParameter.mapper.required) { headerValue = operationSpec.serializer.serialize(headerParameter.mapper, headerValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter)); const headerCollectionPrefix = headerParameter.mapper .headerCollectionPrefix; if (headerCollectionPrefix) { for (const key of Object.keys(headerValue)) { request.headers.set(headerCollectionPrefix + key, headerValue[key]); } } else { request.headers.set(headerParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(headerParameter), headerValue); } } } } const customHeaders = (_b = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.requestOptions) === null || _b === void 0 ? 
void 0 : _b.customHeaders; if (customHeaders) { for (const customHeaderName of Object.keys(customHeaders)) { request.headers.set(customHeaderName, customHeaders[customHeaderName]); } } } exports.serializeHeaders = serializeHeaders; /** * @internal */ function serializeRequestBody(request, operationArguments, operationSpec, stringifyXML = function () { throw new Error("XML serialization unsupported!"); }) { var _a, _b, _c, _d, _e; const serializerOptions = (_a = operationArguments.options) === null || _a === void 0 ? void 0 : _a.serializerOptions; const updatedOptions = { xml: { rootName: (_b = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.rootName) !== null && _b !== void 0 ? _b : "", includeRoot: (_c = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.includeRoot) !== null && _c !== void 0 ? _c : false, xmlCharKey: (_d = serializerOptions === null || serializerOptions === void 0 ? void 0 : serializerOptions.xml.xmlCharKey) !== null && _d !== void 0 ? _d : interfaces_js_1.XML_CHARKEY, }, }; const xmlCharKey = updatedOptions.xml.xmlCharKey; if (operationSpec.requestBody && operationSpec.requestBody.mapper) { request.body = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, operationSpec.requestBody); const bodyMapper = operationSpec.requestBody.mapper; const { required, serializedName, xmlName, xmlElementName, xmlNamespace, xmlNamespacePrefix, nullable, } = bodyMapper; const typeName = bodyMapper.type.name; try { if ((request.body !== undefined && request.body !== null) || (nullable && request.body === null) || required) { const requestBodyParameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(operationSpec.requestBody); request.body = operationSpec.serializer.serialize(bodyMapper, request.body, requestBodyParameterPathString, updatedOptions); const isStream = typeName === serializer_js_1.MapperTypeNames.Stream; if (operationSpec.isXML) { const xmlnsKey = xmlNamespacePrefix ? `xmlns:${xmlNamespacePrefix}` : "xmlns"; const value = getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, request.body, updatedOptions); if (typeName === serializer_js_1.MapperTypeNames.Sequence) { request.body = stringifyXML(prepareXMLRootList(value, xmlElementName || xmlName || serializedName, xmlnsKey, xmlNamespace), { rootName: xmlName || serializedName, xmlCharKey }); } else if (!isStream) { request.body = stringifyXML(value, { rootName: xmlName || serializedName, xmlCharKey, }); } } else if (typeName === serializer_js_1.MapperTypeNames.String && (((_e = operationSpec.contentType) === null || _e === void 0 ? void 0 : _e.match("text/plain")) || operationSpec.mediaType === "text")) { // the String serializer has validated that request body is a string // so just send the string. 
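// Returning here skips the JSON.stringify branch below, so a text/plain String body such as "hello" is sent as-is rather than as the quoted JSON literal "\"hello\"".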
return; } else if (!isStream) { request.body = JSON.stringify(request.body); } } } catch (error) { throw new Error(`Error "${error.message}" occurred in serializing the payload - ${JSON.stringify(serializedName, undefined, " ")}.`); } } else if (operationSpec.formDataParameters && operationSpec.formDataParameters.length > 0) { request.formData = {}; for (const formDataParameter of operationSpec.formDataParameters) { const formDataParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, formDataParameter); if (formDataParameterValue !== undefined && formDataParameterValue !== null) { const formDataParameterPropertyName = formDataParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter); request.formData[formDataParameterPropertyName] = operationSpec.serializer.serialize(formDataParameter.mapper, formDataParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(formDataParameter), updatedOptions); } } } } exports.serializeRequestBody = serializeRequestBody; /** * Adds an xml namespace to the xml serialized object if needed, otherwise it just returns the value itself */ function getXmlValueWithNamespace(xmlNamespace, xmlnsKey, typeName, serializedValue, options) { // Composite and Sequence schemas already got their root namespace set during serialization // We just need to add xmlns to the other schema types if (xmlNamespace && !["Composite", "Sequence", "Dictionary"].includes(typeName)) { const result = {}; result[options.xml.xmlCharKey] = serializedValue; result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: xmlNamespace }; return result; } return serializedValue; } function prepareXMLRootList(obj, elementName, xmlNamespaceKey, xmlNamespace) { if (!Array.isArray(obj)) { obj = [obj]; } if (!xmlNamespaceKey || !xmlNamespace) { return { [elementName]: obj }; } const result = { [elementName]: obj }; result[interfaces_js_1.XML_ATTRKEY] = { [xmlNamespaceKey]: xmlNamespace }; return result; } //# sourceMappingURL=serializationPolicy.js.map /***/ }), /***/ 31530: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.MapperTypeNames = exports.createSerializer = void 0; const tslib_1 = __nccwpck_require__(61860); const base64 = tslib_1.__importStar(__nccwpck_require__(20741)); const interfaces_js_1 = __nccwpck_require__(56058); const utils_js_1 = __nccwpck_require__(31193); class SerializerImpl { constructor(modelMappers = {}, isXML = false) { this.modelMappers = modelMappers; this.isXML = isXML; } /** * @deprecated Removing the constraints validation on client side. 
*/ validateConstraints(mapper, value, objectName) { const failValidation = (constraintName, constraintValue) => { throw new Error(`"${objectName}" with value "${value}" should satisfy the constraint "${constraintName}": ${constraintValue}.`); }; if (mapper.constraints && value !== undefined && value !== null) { const { ExclusiveMaximum, ExclusiveMinimum, InclusiveMaximum, InclusiveMinimum, MaxItems, MaxLength, MinItems, MinLength, MultipleOf, Pattern, UniqueItems, } = mapper.constraints; if (ExclusiveMaximum !== undefined && value >= ExclusiveMaximum) { failValidation("ExclusiveMaximum", ExclusiveMaximum); } if (ExclusiveMinimum !== undefined && value <= ExclusiveMinimum) { failValidation("ExclusiveMinimum", ExclusiveMinimum); } if (InclusiveMaximum !== undefined && value > InclusiveMaximum) { failValidation("InclusiveMaximum", InclusiveMaximum); } if (InclusiveMinimum !== undefined && value < InclusiveMinimum) { failValidation("InclusiveMinimum", InclusiveMinimum); } if (MaxItems !== undefined && value.length > MaxItems) { failValidation("MaxItems", MaxItems); } if (MaxLength !== undefined && value.length > MaxLength) { failValidation("MaxLength", MaxLength); } if (MinItems !== undefined && value.length < MinItems) { failValidation("MinItems", MinItems); } if (MinLength !== undefined && value.length < MinLength) { failValidation("MinLength", MinLength); } if (MultipleOf !== undefined && value % MultipleOf !== 0) { failValidation("MultipleOf", MultipleOf); } if (Pattern) { const pattern = typeof Pattern === "string" ? new RegExp(Pattern) : Pattern; if (typeof value !== "string" || value.match(pattern) === null) { failValidation("Pattern", Pattern); } } if (UniqueItems && value.some((item, i, ar) => ar.indexOf(item) !== i)) { failValidation("UniqueItems", UniqueItems); } } } /** * Serialize the given object based on its metadata defined in the mapper * * @param mapper - The mapper which defines the metadata of the serializable object * * @param object - A valid Javascript object to be serialized * * @param objectName - Name of the serialized object * * @param options - additional options to serialization * * @returns A valid serialized Javascript object */ serialize(mapper, object, objectName, options = { xml: {} }) { var _a, _b, _c; const updatedOptions = { xml: { rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : interfaces_js_1.XML_CHARKEY, }, }; let payload = {}; const mapperType = mapper.type.name; if (!objectName) { objectName = mapper.serializedName; } if (mapperType.match(/^Sequence$/i) !== null) { payload = []; } if (mapper.isConstant) { object = mapper.defaultValue; } // This table of allowed values should help explain // the mapper.required and mapper.nullable properties. // X means "neither undefined or null are allowed". 
// || required // || true | false // nullable || ========================== // true || null | undefined/null // false || X | undefined // undefined || X | undefined/null const { required, nullable } = mapper; if (required && nullable && object === undefined) { throw new Error(`${objectName} cannot be undefined.`); } if (required && !nullable && (object === undefined || object === null)) { throw new Error(`${objectName} cannot be null or undefined.`); } if (!required && nullable === false && object === null) { throw new Error(`${objectName} cannot be null.`); } if (object === undefined || object === null) { payload = object; } else { if (mapperType.match(/^any$/i) !== null) { payload = object; } else if (mapperType.match(/^(Number|String|Boolean|Object|Stream|Uuid)$/i) !== null) { payload = serializeBasicTypes(mapperType, objectName, object); } else if (mapperType.match(/^Enum$/i) !== null) { const enumMapper = mapper; payload = serializeEnumType(objectName, enumMapper.type.allowedValues, object); } else if (mapperType.match(/^(Date|DateTime|TimeSpan|DateTimeRfc1123|UnixTime)$/i) !== null) { payload = serializeDateTypes(mapperType, object, objectName); } else if (mapperType.match(/^ByteArray$/i) !== null) { payload = serializeByteArrayType(objectName, object); } else if (mapperType.match(/^Base64Url$/i) !== null) { payload = serializeBase64UrlType(objectName, object); } else if (mapperType.match(/^Sequence$/i) !== null) { payload = serializeSequenceType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); } else if (mapperType.match(/^Dictionary$/i) !== null) { payload = serializeDictionaryType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); } else if (mapperType.match(/^Composite$/i) !== null) { payload = serializeCompositeType(this, mapper, object, objectName, Boolean(this.isXML), updatedOptions); } } return payload; } /** * Deserialize the given object based on its metadata defined in the mapper * * @param mapper - The mapper which defines the metadata of the serializable object * * @param responseBody - A valid Javascript entity to be deserialized * * @param objectName - Name of the deserialized object * * @param options - Controls behavior of XML parser and builder. * * @returns A valid deserialized Javascript object */ deserialize(mapper, responseBody, objectName, options = { xml: {} }) { var _a, _b, _c, _d; const updatedOptions = { xml: { rootName: (_a = options.xml.rootName) !== null && _a !== void 0 ? _a : "", includeRoot: (_b = options.xml.includeRoot) !== null && _b !== void 0 ? _b : false, xmlCharKey: (_c = options.xml.xmlCharKey) !== null && _c !== void 0 ? _c : interfaces_js_1.XML_CHARKEY, }, ignoreUnknownProperties: (_d = options.ignoreUnknownProperties) !== null && _d !== void 0 ? _d : false, }; if (responseBody === undefined || responseBody === null) { if (this.isXML && mapper.type.name === "Sequence" && !mapper.xmlIsWrapped) { // Edge case for empty XML non-wrapped lists. xml2js can't distinguish // between the list being empty versus being missing, // so let's do the more user-friendly thing and return an empty list. 
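// (Illustrative) An unwrapped XML list whose element is entirely absent from the payload therefore comes back as [] rather than undefined.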
responseBody = []; } // specifically check for undefined as default value can be a falsey value `0, "", false, null` if (mapper.defaultValue !== undefined) { responseBody = mapper.defaultValue; } return responseBody; } let payload; const mapperType = mapper.type.name; if (!objectName) { objectName = mapper.serializedName; } if (mapperType.match(/^Composite$/i) !== null) { payload = deserializeCompositeType(this, mapper, responseBody, objectName, updatedOptions); } else { if (this.isXML) { const xmlCharKey = updatedOptions.xml.xmlCharKey; /** * If the mapper specifies this as a non-composite type value but the responseBody contains * both header ("$" i.e., XML_ATTRKEY) and body ("#" i.e., XML_CHARKEY) properties, * then just reduce the responseBody value to the body ("#" i.e., XML_CHARKEY) property. */ if (responseBody[interfaces_js_1.XML_ATTRKEY] !== undefined && responseBody[xmlCharKey] !== undefined) { responseBody = responseBody[xmlCharKey]; } } if (mapperType.match(/^Number$/i) !== null) { payload = parseFloat(responseBody); if (isNaN(payload)) { payload = responseBody; } } else if (mapperType.match(/^Boolean$/i) !== null) { if (responseBody === "true") { payload = true; } else if (responseBody === "false") { payload = false; } else { payload = responseBody; } } else if (mapperType.match(/^(String|Enum|Object|Stream|Uuid|TimeSpan|any)$/i) !== null) { payload = responseBody; } else if (mapperType.match(/^(Date|DateTime|DateTimeRfc1123)$/i) !== null) { payload = new Date(responseBody); } else if (mapperType.match(/^UnixTime$/i) !== null) { payload = unixTimeToDate(responseBody); } else if (mapperType.match(/^ByteArray$/i) !== null) { payload = base64.decodeString(responseBody); } else if (mapperType.match(/^Base64Url$/i) !== null) { payload = base64UrlToByteArray(responseBody); } else if (mapperType.match(/^Sequence$/i) !== null) { payload = deserializeSequenceType(this, mapper, responseBody, objectName, updatedOptions); } else if (mapperType.match(/^Dictionary$/i) !== null) { payload = deserializeDictionaryType(this, mapper, responseBody, objectName, updatedOptions); } } if (mapper.isConstant) { payload = mapper.defaultValue; } return payload; } } /** * Method that creates and returns a Serializer. * @param modelMappers - Known models to map * @param isXML - If XML should be supported */ function createSerializer(modelMappers = {}, isXML = false) { return new SerializerImpl(modelMappers, isXML); } exports.createSerializer = createSerializer; function trimEnd(str, ch) { let len = str.length; while (len - 1 >= 0 && str[len - 1] === ch) { --len; } return str.substr(0, len); } function bufferToBase64Url(buffer) { if (!buffer) { return undefined; } if (!(buffer instanceof Uint8Array)) { throw new Error(`Please provide an input of type Uint8Array for converting to Base64Url.`); } // Uint8Array to Base64. const str = base64.encodeByteArray(buffer); // Base64 to Base64Url. return trimEnd(str, "=").replace(/\+/g, "-").replace(/\//g, "_"); } function base64UrlToByteArray(str) { if (!str) { return undefined; } if (str && typeof str.valueOf() !== "string") { throw new Error("Please provide an input of type string for converting to Uint8Array"); } // Base64Url to Base64. str = str.replace(/-/g, "+").replace(/_/g, "/"); // Base64 to Uint8Array. 
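// e.g. the base64url string "-_8" becomes base64 "+/8" above and decodes to the bytes 0xfb 0xff.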
return base64.decodeString(str); } function splitSerializeName(prop) { const classes = []; let partialclass = ""; if (prop) { const subwords = prop.split("."); for (const item of subwords) { if (item.charAt(item.length - 1) === "\\") { partialclass += item.substr(0, item.length - 1) + "."; } else { partialclass += item; classes.push(partialclass); partialclass = ""; } } } return classes; } function dateToUnixTime(d) { if (!d) { return undefined; } if (typeof d.valueOf() === "string") { d = new Date(d); } return Math.floor(d.getTime() / 1000); } function unixTimeToDate(n) { if (!n) { return undefined; } return new Date(n * 1000); } function serializeBasicTypes(typeName, objectName, value) { if (value !== null && value !== undefined) { if (typeName.match(/^Number$/i) !== null) { if (typeof value !== "number") { throw new Error(`${objectName} with value ${value} must be of type number.`); } } else if (typeName.match(/^String$/i) !== null) { if (typeof value.valueOf() !== "string") { throw new Error(`${objectName} with value "${value}" must be of type string.`); } } else if (typeName.match(/^Uuid$/i) !== null) { if (!(typeof value.valueOf() === "string" && (0, utils_js_1.isValidUuid)(value))) { throw new Error(`${objectName} with value "${value}" must be of type string and a valid uuid.`); } } else if (typeName.match(/^Boolean$/i) !== null) { if (typeof value !== "boolean") { throw new Error(`${objectName} with value ${value} must be of type boolean.`); } } else if (typeName.match(/^Stream$/i) !== null) { const objectType = typeof value; if (objectType !== "string" && typeof value.pipe !== "function" && // NodeJS.ReadableStream typeof value.tee !== "function" && // browser ReadableStream !(value instanceof ArrayBuffer) && !ArrayBuffer.isView(value) && // File objects count as a type of Blob, so we want to use instanceof explicitly !((typeof Blob === "function" || typeof Blob === "object") && value instanceof Blob) && objectType !== "function") { throw new Error(`${objectName} must be a string, Blob, ArrayBuffer, ArrayBufferView, ReadableStream, or () => ReadableStream.`); } } } return value; } function serializeEnumType(objectName, allowedValues, value) { if (!allowedValues) { throw new Error(`Please provide a set of allowedValues to validate ${objectName} as an Enum Type.`); } const isPresent = allowedValues.some((item) => { if (typeof item.valueOf() === "string") { return item.toLowerCase() === value.toLowerCase(); } return item === value; }); if (!isPresent) { throw new Error(`${value} is not a valid value for ${objectName}. The valid values are: ${JSON.stringify(allowedValues)}.`); } return value; } function serializeByteArrayType(objectName, value) { if (value !== undefined && value !== null) { if (!(value instanceof Uint8Array)) { throw new Error(`${objectName} must be of type Uint8Array.`); } value = base64.encodeByteArray(value); } return value; } function serializeBase64UrlType(objectName, value) { if (value !== undefined && value !== null) { if (!(value instanceof Uint8Array)) { throw new Error(`${objectName} must be of type Uint8Array.`); } value = bufferToBase64Url(value); } return value; } function serializeDateTypes(typeName, value, objectName) { if (value !== undefined && value !== null) { if (typeName.match(/^Date$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); } value = value instanceof Date ? 
value.toISOString().substring(0, 10) : new Date(value).toISOString().substring(0, 10); } else if (typeName.match(/^DateTime$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { throw new Error(`${objectName} must be an instanceof Date or a string in ISO8601 format.`); } value = value instanceof Date ? value.toISOString() : new Date(value).toISOString(); } else if (typeName.match(/^DateTimeRfc1123$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123 format.`); } value = value instanceof Date ? value.toUTCString() : new Date(value).toUTCString(); } else if (typeName.match(/^UnixTime$/i) !== null) { if (!(value instanceof Date || (typeof value.valueOf() === "string" && !isNaN(Date.parse(value))))) { throw new Error(`${objectName} must be an instanceof Date or a string in RFC-1123/ISO8601 format ` + `for it to be serialized in UnixTime/Epoch format.`); } value = dateToUnixTime(value); } else if (typeName.match(/^TimeSpan$/i) !== null) { if (!(0, utils_js_1.isDuration)(value)) { throw new Error(`${objectName} must be a string in ISO 8601 format. Instead was "${value}".`); } } } return value; } function serializeSequenceType(serializer, mapper, object, objectName, isXml, options) { var _a; if (!Array.isArray(object)) { throw new Error(`${objectName} must be of type Array.`); } let elementType = mapper.type.element; if (!elementType || typeof elementType !== "object") { throw new Error(`element" metadata for an Array must be defined in the ` + `mapper and it must of type "object" in ${objectName}.`); } // Quirk: Composite mappers referenced by `element` might // not have *all* properties declared (like uberParent), // so let's try to look up the full definition by name. if (elementType.type.name === "Composite" && elementType.type.className) { elementType = (_a = serializer.modelMappers[elementType.type.className]) !== null && _a !== void 0 ? _a : elementType; } const tempArray = []; for (let i = 0; i < object.length; i++) { const serializedValue = serializer.serialize(elementType, object[i], objectName, options); if (isXml && elementType.xmlNamespace) { const xmlnsKey = elementType.xmlNamespacePrefix ? 
`xmlns:${elementType.xmlNamespacePrefix}` : "xmlns"; if (elementType.type.name === "Composite") { tempArray[i] = Object.assign({}, serializedValue); tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; } else { tempArray[i] = {}; tempArray[i][options.xml.xmlCharKey] = serializedValue; tempArray[i][interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: elementType.xmlNamespace }; } } else { tempArray[i] = serializedValue; } } return tempArray; } function serializeDictionaryType(serializer, mapper, object, objectName, isXml, options) { if (typeof object !== "object") { throw new Error(`${objectName} must be of type object.`); } const valueType = mapper.type.value; if (!valueType || typeof valueType !== "object") { throw new Error(`"value" metadata for a Dictionary must be defined in the ` + `mapper and it must of type "object" in ${objectName}.`); } const tempDictionary = {}; for (const key of Object.keys(object)) { const serializedValue = serializer.serialize(valueType, object[key], objectName, options); // If the element needs an XML namespace we need to add it within the $ property tempDictionary[key] = getXmlObjectValue(valueType, serializedValue, isXml, options); } // Add the namespace to the root element if needed if (isXml && mapper.xmlNamespace) { const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; const result = tempDictionary; result[interfaces_js_1.XML_ATTRKEY] = { [xmlnsKey]: mapper.xmlNamespace }; return result; } return tempDictionary; } /** * Resolves the additionalProperties property from a referenced mapper * @param serializer - the serializer containing the entire set of mappers * @param mapper - the composite mapper to resolve * @param objectName - name of the object being serialized */ function resolveAdditionalProperties(serializer, mapper, objectName) { const additionalProperties = mapper.type.additionalProperties; if (!additionalProperties && mapper.type.className) { const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); return modelMapper === null || modelMapper === void 0 ? void 0 : modelMapper.type.additionalProperties; } return additionalProperties; } /** * Finds the mapper referenced by className * @param serializer - the serializer containing the entire set of mappers * @param mapper - the composite mapper to resolve * @param objectName - name of the object being serialized */ function resolveReferencedMapper(serializer, mapper, objectName) { const className = mapper.type.className; if (!className) { throw new Error(`Class name for model "${objectName}" is not provided in the mapper "${JSON.stringify(mapper, undefined, 2)}".`); } return serializer.modelMappers[className]; } /** * Resolves a composite mapper's modelProperties. * @param serializer - the serializer containing the entire set of mappers * @param mapper - the composite mapper to resolve */ function resolveModelProperties(serializer, mapper, objectName) { let modelProps = mapper.type.modelProperties; if (!modelProps) { const modelMapper = resolveReferencedMapper(serializer, mapper, objectName); if (!modelMapper) { throw new Error(`mapper() cannot be null or undefined for model "${mapper.type.className}".`); } modelProps = modelMapper === null || modelMapper === void 0 ? 
void 0 : modelMapper.type.modelProperties; if (!modelProps) { throw new Error(`modelProperties cannot be null or undefined in the ` + `mapper "${JSON.stringify(modelMapper)}" of type "${mapper.type.className}" for object "${objectName}".`); } } return modelProps; } function serializeCompositeType(serializer, mapper, object, objectName, isXml, options) { if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { mapper = getPolymorphicMapper(serializer, mapper, object, "clientName"); } if (object !== undefined && object !== null) { const payload = {}; const modelProps = resolveModelProperties(serializer, mapper, objectName); for (const key of Object.keys(modelProps)) { const propertyMapper = modelProps[key]; if (propertyMapper.readOnly) { continue; } let propName; let parentObject = payload; if (serializer.isXML) { if (propertyMapper.xmlIsWrapped) { propName = propertyMapper.xmlName; } else { propName = propertyMapper.xmlElementName || propertyMapper.xmlName; } } else { const paths = splitSerializeName(propertyMapper.serializedName); propName = paths.pop(); for (const pathName of paths) { const childObject = parentObject[pathName]; if ((childObject === undefined || childObject === null) && ((object[key] !== undefined && object[key] !== null) || propertyMapper.defaultValue !== undefined)) { parentObject[pathName] = {}; } parentObject = parentObject[pathName]; } } if (parentObject !== undefined && parentObject !== null) { if (isXml && mapper.xmlNamespace) { const xmlnsKey = mapper.xmlNamespacePrefix ? `xmlns:${mapper.xmlNamespacePrefix}` : "xmlns"; parentObject[interfaces_js_1.XML_ATTRKEY] = Object.assign(Object.assign({}, parentObject[interfaces_js_1.XML_ATTRKEY]), { [xmlnsKey]: mapper.xmlNamespace }); } const propertyObjectName = propertyMapper.serializedName !== "" ? objectName + "." + propertyMapper.serializedName : objectName; let toSerialize = object[key]; const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); if (polymorphicDiscriminator && polymorphicDiscriminator.clientName === key && (toSerialize === undefined || toSerialize === null)) { toSerialize = mapper.serializedName; } const serializedValue = serializer.serialize(propertyMapper, toSerialize, propertyObjectName, options); if (serializedValue !== undefined && propName !== undefined && propName !== null) { const value = getXmlObjectValue(propertyMapper, serializedValue, isXml, options); if (isXml && propertyMapper.xmlIsAttribute) { // XML_ATTRKEY, i.e., $ is the key attributes are kept under in xml2js. // This keeps things simple while preventing name collision // with names in user documents. 
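// e.g. a property with xmlIsAttribute set and xmlName "id" lands in the attribute bag as { $: { id: serializedValue } }.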
parentObject[interfaces_js_1.XML_ATTRKEY] = parentObject[interfaces_js_1.XML_ATTRKEY] || {}; parentObject[interfaces_js_1.XML_ATTRKEY][propName] = serializedValue; } else if (isXml && propertyMapper.xmlIsWrapped) { parentObject[propName] = { [propertyMapper.xmlElementName]: value }; } else { parentObject[propName] = value; } } } } const additionalPropertiesMapper = resolveAdditionalProperties(serializer, mapper, objectName); if (additionalPropertiesMapper) { const propNames = Object.keys(modelProps); for (const clientPropName in object) { const isAdditionalProperty = propNames.every((pn) => pn !== clientPropName); if (isAdditionalProperty) { payload[clientPropName] = serializer.serialize(additionalPropertiesMapper, object[clientPropName], objectName + '["' + clientPropName + '"]', options); } } } return payload; } return object; } function getXmlObjectValue(propertyMapper, serializedValue, isXml, options) { if (!isXml || !propertyMapper.xmlNamespace) { return serializedValue; } const xmlnsKey = propertyMapper.xmlNamespacePrefix ? `xmlns:${propertyMapper.xmlNamespacePrefix}` : "xmlns"; const xmlNamespace = { [xmlnsKey]: propertyMapper.xmlNamespace }; if (["Composite"].includes(propertyMapper.type.name)) { if (serializedValue[interfaces_js_1.XML_ATTRKEY]) { return serializedValue; } else { const result = Object.assign({}, serializedValue); result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; return result; } } const result = {}; result[options.xml.xmlCharKey] = serializedValue; result[interfaces_js_1.XML_ATTRKEY] = xmlNamespace; return result; } function isSpecialXmlProperty(propertyName, options) { return [interfaces_js_1.XML_ATTRKEY, options.xml.xmlCharKey].includes(propertyName); } function deserializeCompositeType(serializer, mapper, responseBody, objectName, options) { var _a, _b; const xmlCharKey = (_a = options.xml.xmlCharKey) !== null && _a !== void 0 ? _a : interfaces_js_1.XML_CHARKEY; if (getPolymorphicDiscriminatorRecursively(serializer, mapper)) { mapper = getPolymorphicMapper(serializer, mapper, responseBody, "serializedName"); } const modelProps = resolveModelProperties(serializer, mapper, objectName); let instance = {}; const handledPropertyNames = []; for (const key of Object.keys(modelProps)) { const propertyMapper = modelProps[key]; const paths = splitSerializeName(modelProps[key].serializedName); handledPropertyNames.push(paths[0]); const { serializedName, xmlName, xmlElementName } = propertyMapper; let propertyObjectName = objectName; if (serializedName !== "" && serializedName !== undefined) { propertyObjectName = objectName + "." 
+ serializedName;
        }
        const headerCollectionPrefix = propertyMapper.headerCollectionPrefix;
        if (headerCollectionPrefix) {
            const dictionary = {};
            for (const headerKey of Object.keys(responseBody)) {
                if (headerKey.startsWith(headerCollectionPrefix)) {
                    dictionary[headerKey.substring(headerCollectionPrefix.length)] = serializer.deserialize(propertyMapper.type.value, responseBody[headerKey], propertyObjectName, options);
                }
                handledPropertyNames.push(headerKey);
            }
            instance[key] = dictionary;
        }
        else if (serializer.isXML) {
            if (propertyMapper.xmlIsAttribute && responseBody[interfaces_js_1.XML_ATTRKEY]) {
                instance[key] = serializer.deserialize(propertyMapper, responseBody[interfaces_js_1.XML_ATTRKEY][xmlName], propertyObjectName, options);
            }
            else if (propertyMapper.xmlIsMsText) {
                if (responseBody[xmlCharKey] !== undefined) {
                    instance[key] = responseBody[xmlCharKey];
                }
                else if (typeof responseBody === "string") {
                    // The special case where xml parser parses "<name>content</name>" into JSON of
                    // `{ name: "content"}` instead of `{ name: { "_": "content" }}`
                    instance[key] = responseBody;
                }
            }
            else {
                const propertyName = xmlElementName || xmlName || serializedName;
                if (propertyMapper.xmlIsWrapped) {
                    /* a list of <ElementName> wrapped by <XmlName>.
                       For the xml example below
                         <Cors>
                           <CorsRule>...</CorsRule>
                           <CorsRule>...</CorsRule>
                         </Cors>
                       the responseBody has
                         { Cors: { CorsRule: [{...}, {...}] } }
                       xmlName is "Cors" and xmlElementName is "CorsRule".
                    */
                    const wrapped = responseBody[xmlName];
                    const elementList = (_b = wrapped === null || wrapped === void 0 ? void 0 : wrapped[xmlElementName]) !== null && _b !== void 0 ? _b : [];
                    instance[key] = serializer.deserialize(propertyMapper, elementList, propertyObjectName, options);
                    handledPropertyNames.push(xmlName);
                }
                else {
                    const property = responseBody[propertyName];
                    instance[key] = serializer.deserialize(propertyMapper, property, propertyObjectName, options);
                    handledPropertyNames.push(propertyName);
                }
            }
        }
        else {
            // deserialize the property if it is present in the provided responseBody instance
            let propertyInstance;
            let res = responseBody;
            // traversing the object step by step.
            let steps = 0;
            for (const item of paths) {
                if (!res)
                    break;
                steps++;
                res = res[item];
            }
            // only accept null when reaching the last position of object otherwise it would be undefined
            if (res === null && steps < paths.length) {
                res = undefined;
            }
            propertyInstance = res;
            const polymorphicDiscriminator = mapper.type.polymorphicDiscriminator;
            // checking that the model property name (key)(ex: "fishtype") and the
            // clientName of the polymorphicDiscriminator {metadata} (ex: "fishtype")
            // instead of the serializedName of the polymorphicDiscriminator (ex: "fish.type")
            // is a better approach. The generator is not consistent with escaping '\.' in the
            // serializedName of the property (ex: "fish\.type") that is marked as polymorphic discriminator
            // and the serializedName of the metadata polymorphicDiscriminator (ex: "fish.type"). However,
            // the clientName transformation of the polymorphicDiscriminator (ex: "fishtype") and
            // the transformation of model property name (ex: "fishtype") is done consistently.
            // Hence, it is a safer bet to rely on the clientName of the polymorphicDiscriminator.
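            // (Illustrative) If the discriminator's clientName is "fishtype" and the payload omits that property, the branch below backfills it with this mapper's own serializedName (for example "Salmon").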
if (polymorphicDiscriminator && key === polymorphicDiscriminator.clientName && (propertyInstance === undefined || propertyInstance === null)) { propertyInstance = mapper.serializedName; } let serializedValue; // paging if (Array.isArray(responseBody[key]) && modelProps[key].serializedName === "") { propertyInstance = responseBody[key]; const arrayInstance = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); // Copy over any properties that have already been added into the instance, where they do // not exist on the newly de-serialized array for (const [k, v] of Object.entries(instance)) { if (!Object.prototype.hasOwnProperty.call(arrayInstance, k)) { arrayInstance[k] = v; } } instance = arrayInstance; } else if (propertyInstance !== undefined || propertyMapper.defaultValue !== undefined) { serializedValue = serializer.deserialize(propertyMapper, propertyInstance, propertyObjectName, options); instance[key] = serializedValue; } } } const additionalPropertiesMapper = mapper.type.additionalProperties; if (additionalPropertiesMapper) { const isAdditionalProperty = (responsePropName) => { for (const clientPropName in modelProps) { const paths = splitSerializeName(modelProps[clientPropName].serializedName); if (paths[0] === responsePropName) { return false; } } return true; }; for (const responsePropName in responseBody) { if (isAdditionalProperty(responsePropName)) { instance[responsePropName] = serializer.deserialize(additionalPropertiesMapper, responseBody[responsePropName], objectName + '["' + responsePropName + '"]', options); } } } else if (responseBody && !options.ignoreUnknownProperties) { for (const key of Object.keys(responseBody)) { if (instance[key] === undefined && !handledPropertyNames.includes(key) && !isSpecialXmlProperty(key, options)) { instance[key] = responseBody[key]; } } } return instance; } function deserializeDictionaryType(serializer, mapper, responseBody, objectName, options) { /* jshint validthis: true */ const value = mapper.type.value; if (!value || typeof value !== "object") { throw new Error(`"value" metadata for a Dictionary must be defined in the ` + `mapper and it must of type "object" in ${objectName}`); } if (responseBody) { const tempDictionary = {}; for (const key of Object.keys(responseBody)) { tempDictionary[key] = serializer.deserialize(value, responseBody[key], objectName, options); } return tempDictionary; } return responseBody; } function deserializeSequenceType(serializer, mapper, responseBody, objectName, options) { var _a; let element = mapper.type.element; if (!element || typeof element !== "object") { throw new Error(`element" metadata for an Array must be defined in the ` + `mapper and it must of type "object" in ${objectName}`); } if (responseBody) { if (!Array.isArray(responseBody)) { // xml2js will interpret a single element array as just the element, so force it to be an array responseBody = [responseBody]; } // Quirk: Composite mappers referenced by `element` might // not have *all* properties declared (like uberParent), // so let's try to look up the full definition by name. if (element.type.name === "Composite" && element.type.className) { element = (_a = serializer.modelMappers[element.type.className]) !== null && _a !== void 0 ? 
_a : element; } const tempArray = []; for (let i = 0; i < responseBody.length; i++) { tempArray[i] = serializer.deserialize(element, responseBody[i], `${objectName}[${i}]`, options); } return tempArray; } return responseBody; } function getIndexDiscriminator(discriminators, discriminatorValue, typeName) { const typeNamesToCheck = [typeName]; while (typeNamesToCheck.length) { const currentName = typeNamesToCheck.shift(); const indexDiscriminator = discriminatorValue === currentName ? discriminatorValue : currentName + "." + discriminatorValue; if (Object.prototype.hasOwnProperty.call(discriminators, indexDiscriminator)) { return discriminators[indexDiscriminator]; } else { for (const [name, mapper] of Object.entries(discriminators)) { if (name.startsWith(currentName + ".") && mapper.type.uberParent === currentName && mapper.type.className) { typeNamesToCheck.push(mapper.type.className); } } } } return undefined; } function getPolymorphicMapper(serializer, mapper, object, polymorphicPropertyName) { var _a; const polymorphicDiscriminator = getPolymorphicDiscriminatorRecursively(serializer, mapper); if (polymorphicDiscriminator) { let discriminatorName = polymorphicDiscriminator[polymorphicPropertyName]; if (discriminatorName) { // The serializedName might have \\, which we just want to ignore if (polymorphicPropertyName === "serializedName") { discriminatorName = discriminatorName.replace(/\\/gi, ""); } const discriminatorValue = object[discriminatorName]; const typeName = (_a = mapper.type.uberParent) !== null && _a !== void 0 ? _a : mapper.type.className; if (typeof discriminatorValue === "string" && typeName) { const polymorphicMapper = getIndexDiscriminator(serializer.modelMappers.discriminators, discriminatorValue, typeName); if (polymorphicMapper) { mapper = polymorphicMapper; } } } } return mapper; } function getPolymorphicDiscriminatorRecursively(serializer, mapper) { return (mapper.type.polymorphicDiscriminator || getPolymorphicDiscriminatorSafely(serializer, mapper.type.uberParent) || getPolymorphicDiscriminatorSafely(serializer, mapper.type.className)); } function getPolymorphicDiscriminatorSafely(serializer, typeName) { return (typeName && serializer.modelMappers[typeName] && serializer.modelMappers[typeName].type.polymorphicDiscriminator); } /** * Known types of Mappers */ exports.MapperTypeNames = { Base64Url: "Base64Url", Boolean: "Boolean", ByteArray: "ByteArray", Composite: "Composite", Date: "Date", DateTime: "DateTime", DateTimeRfc1123: "DateTimeRfc1123", Dictionary: "Dictionary", Enum: "Enum", Number: "Number", Object: "Object", Sequence: "Sequence", String: "String", Stream: "Stream", TimeSpan: "TimeSpan", UnixTime: "UnixTime", }; //# sourceMappingURL=serializer.js.map /***/ }), /***/ 89544: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ServiceClient = void 0; const core_rest_pipeline_1 = __nccwpck_require__(20778); const pipeline_js_1 = __nccwpck_require__(74136); const utils_js_1 = __nccwpck_require__(31193); const httpClientCache_js_1 = __nccwpck_require__(26323); const operationHelpers_js_1 = __nccwpck_require__(19688); const urlHelpers_js_1 = __nccwpck_require__(61752); const interfaceHelpers_js_1 = __nccwpck_require__(92066); const log_js_1 = __nccwpck_require__(89994); /** * Initializes a new instance of the ServiceClient. 
*/ class ServiceClient { /** * The ServiceClient constructor * @param credential - The credentials used for authentication with the service. * @param options - The service client options that govern the behavior of the client. */ constructor(options = {}) { var _a, _b; this._requestContentType = options.requestContentType; this._endpoint = (_a = options.endpoint) !== null && _a !== void 0 ? _a : options.baseUri; if (options.baseUri) { log_js_1.logger.warning("The baseUri option for SDK Clients has been deprecated, please use endpoint instead."); } this._allowInsecureConnection = options.allowInsecureConnection; this._httpClient = options.httpClient || (0, httpClientCache_js_1.getCachedDefaultHttpClient)(); this.pipeline = options.pipeline || createDefaultPipeline(options); if ((_b = options.additionalPolicies) === null || _b === void 0 ? void 0 : _b.length) { for (const { policy, position } of options.additionalPolicies) { // Sign happens after Retry and is commonly needed to occur // before policies that intercept post-retry. const afterPhase = position === "perRetry" ? "Sign" : undefined; this.pipeline.addPolicy(policy, { afterPhase, }); } } } /** * Send the provided httpRequest. */ async sendRequest(request) { return this.pipeline.sendRequest(this._httpClient, request); } /** * Send an HTTP request that is populated using the provided OperationSpec. * @typeParam T - The typed result of the request, based on the OperationSpec. * @param operationArguments - The arguments that the HTTP request's templated values will be populated from. * @param operationSpec - The OperationSpec to use to populate the httpRequest. */ async sendOperationRequest(operationArguments, operationSpec) { const endpoint = operationSpec.baseUrl || this._endpoint; if (!endpoint) { throw new Error("If operationSpec.baseUrl is not specified, then the ServiceClient must have a endpoint string property that contains the base URL to use."); } // Templatized URLs sometimes reference properties on the ServiceClient child class, // so we have to pass `this` below in order to search these properties if they're // not part of OperationArguments const url = (0, urlHelpers_js_1.getRequestUrl)(endpoint, operationSpec, operationArguments, this); const request = (0, core_rest_pipeline_1.createPipelineRequest)({ url, }); request.method = operationSpec.httpMethod; const operationInfo = (0, operationHelpers_js_1.getOperationRequestInfo)(request); operationInfo.operationSpec = operationSpec; operationInfo.operationArguments = operationArguments; const contentType = operationSpec.contentType || this._requestContentType; if (contentType && operationSpec.requestBody) { request.headers.set("Content-Type", contentType); } const options = operationArguments.options; if (options) { const requestOptions = options.requestOptions; if (requestOptions) { if (requestOptions.timeout) { request.timeout = requestOptions.timeout; } if (requestOptions.onUploadProgress) { request.onUploadProgress = requestOptions.onUploadProgress; } if (requestOptions.onDownloadProgress) { request.onDownloadProgress = requestOptions.onDownloadProgress; } if (requestOptions.shouldDeserialize !== undefined) { operationInfo.shouldDeserialize = requestOptions.shouldDeserialize; } if (requestOptions.allowInsecureConnection) { request.allowInsecureConnection = true; } } if (options.abortSignal) { request.abortSignal = options.abortSignal; } if (options.tracingOptions) { request.tracingOptions = options.tracingOptions; } } if (this._allowInsecureConnection) { 
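// The client-wide allowInsecureConnection opt-in (e.g. for plain http:// endpoints) is propagated onto every outgoing request here.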
request.allowInsecureConnection = true; } if (request.streamResponseStatusCodes === undefined) { request.streamResponseStatusCodes = (0, interfaceHelpers_js_1.getStreamingResponseStatusCodes)(operationSpec); } try { const rawResponse = await this.sendRequest(request); const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[rawResponse.status]); if (options === null || options === void 0 ? void 0 : options.onResponse) { options.onResponse(rawResponse, flatResponse); } return flatResponse; } catch (error) { if (typeof error === "object" && (error === null || error === void 0 ? void 0 : error.response)) { const rawResponse = error.response; const flatResponse = (0, utils_js_1.flattenResponse)(rawResponse, operationSpec.responses[error.statusCode] || operationSpec.responses["default"]); error.details = flatResponse; if (options === null || options === void 0 ? void 0 : options.onResponse) { options.onResponse(rawResponse, flatResponse, error); } } throw error; } } } exports.ServiceClient = ServiceClient; function createDefaultPipeline(options) { const credentialScopes = getCredentialScopes(options); const credentialOptions = options.credential && credentialScopes ? { credentialScopes, credential: options.credential } : undefined; return (0, pipeline_js_1.createClientPipeline)(Object.assign(Object.assign({}, options), { credentialOptions })); } function getCredentialScopes(options) { if (options.credentialScopes) { return options.credentialScopes; } if (options.endpoint) { return `${options.endpoint}/.default`; } if (options.baseUri) { return `${options.baseUri}/.default`; } if (options.credential && !options.credentialScopes) { throw new Error(`When using credentials, the ServiceClientOptions must contain either a endpoint or a credentialScopes. Unable to create a bearerTokenAuthenticationPolicy`); } return undefined; } //# sourceMappingURL=serviceClient.js.map /***/ }), /***/ 33345: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.state = void 0; /** * Holds the singleton operationRequestMap, to be shared across CJS and ESM imports. */ exports.state = { operationRequestMap: new WeakMap(), }; //# sourceMappingURL=state-cjs.cjs.map /***/ }), /***/ 61752: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.appendQueryParams = exports.getRequestUrl = void 0; const operationHelpers_js_1 = __nccwpck_require__(19688); const interfaceHelpers_js_1 = __nccwpck_require__(92066); const CollectionFormatToDelimiterMap = { CSV: ",", SSV: " ", Multi: "Multi", TSV: "\t", Pipes: "|", }; function getRequestUrl(baseUri, operationSpec, operationArguments, fallbackObject) { const urlReplacements = calculateUrlReplacements(operationSpec, operationArguments, fallbackObject); let isAbsolutePath = false; let requestUrl = replaceAll(baseUri, urlReplacements); if (operationSpec.path) { let path = replaceAll(operationSpec.path, urlReplacements); // QUIRK: sometimes we get a path component like /{nextLink} // which may be a fully formed URL with a leading /. 
In that case, we should // remove the leading / if (operationSpec.path === "/{nextLink}" && path.startsWith("/")) { path = path.substring(1); } // QUIRK: sometimes we get a path component like {nextLink} // which may be a fully formed URL. In that case, we should // ignore the baseUri. if (isAbsoluteUrl(path)) { requestUrl = path; isAbsolutePath = true; } else { requestUrl = appendPath(requestUrl, path); } } const { queryParams, sequenceParams } = calculateQueryParameters(operationSpec, operationArguments, fallbackObject); /** * Notice that this call sets the `noOverwrite` parameter to true if the `requestUrl` * is an absolute path. This ensures that existing query parameter values in `requestUrl` * do not get overwritten. On the other hand when `requestUrl` is not absolute path, it * is still being built so there is nothing to overwrite. */ requestUrl = appendQueryParams(requestUrl, queryParams, sequenceParams, isAbsolutePath); return requestUrl; } exports.getRequestUrl = getRequestUrl; function replaceAll(input, replacements) { let result = input; for (const [searchValue, replaceValue] of replacements) { result = result.split(searchValue).join(replaceValue); } return result; } function calculateUrlReplacements(operationSpec, operationArguments, fallbackObject) { var _a; const result = new Map(); if ((_a = operationSpec.urlParameters) === null || _a === void 0 ? void 0 : _a.length) { for (const urlParameter of operationSpec.urlParameters) { let urlParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, urlParameter, fallbackObject); const parameterPathString = (0, interfaceHelpers_js_1.getPathStringFromParameter)(urlParameter); urlParameterValue = operationSpec.serializer.serialize(urlParameter.mapper, urlParameterValue, parameterPathString); if (!urlParameter.skipEncoding) { urlParameterValue = encodeURIComponent(urlParameterValue); } result.set(`{${urlParameter.mapper.serializedName || parameterPathString}}`, urlParameterValue); } } return result; } function isAbsoluteUrl(url) { return url.includes("://"); } function appendPath(url, pathToAppend) { if (!pathToAppend) { return url; } const parsedUrl = new URL(url); let newPath = parsedUrl.pathname; if (!newPath.endsWith("/")) { newPath = `${newPath}/`; } if (pathToAppend.startsWith("/")) { pathToAppend = pathToAppend.substring(1); } const searchStart = pathToAppend.indexOf("?"); if (searchStart !== -1) { const path = pathToAppend.substring(0, searchStart); const search = pathToAppend.substring(searchStart + 1); newPath = newPath + path; if (search) { parsedUrl.search = parsedUrl.search ? `${parsedUrl.search}&${search}` : search; } } else { newPath = newPath + pathToAppend; } parsedUrl.pathname = newPath; return parsedUrl.toString(); } function calculateQueryParameters(operationSpec, operationArguments, fallbackObject) { var _a; const result = new Map(); const sequenceParams = new Set(); if ((_a = operationSpec.queryParameters) === null || _a === void 0 ? 
void 0 : _a.length) { for (const queryParameter of operationSpec.queryParameters) { if (queryParameter.mapper.type.name === "Sequence" && queryParameter.mapper.serializedName) { sequenceParams.add(queryParameter.mapper.serializedName); } let queryParameterValue = (0, operationHelpers_js_1.getOperationArgumentValueFromParameter)(operationArguments, queryParameter, fallbackObject); if ((queryParameterValue !== undefined && queryParameterValue !== null) || queryParameter.mapper.required) { queryParameterValue = operationSpec.serializer.serialize(queryParameter.mapper, queryParameterValue, (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter)); const delimiter = queryParameter.collectionFormat ? CollectionFormatToDelimiterMap[queryParameter.collectionFormat] : ""; if (Array.isArray(queryParameterValue)) { // replace null and undefined queryParameterValue = queryParameterValue.map((item) => { if (item === null || item === undefined) { return ""; } return item; }); } if (queryParameter.collectionFormat === "Multi" && queryParameterValue.length === 0) { continue; } else if (Array.isArray(queryParameterValue) && (queryParameter.collectionFormat === "SSV" || queryParameter.collectionFormat === "TSV")) { queryParameterValue = queryParameterValue.join(delimiter); } if (!queryParameter.skipEncoding) { if (Array.isArray(queryParameterValue)) { queryParameterValue = queryParameterValue.map((item) => { return encodeURIComponent(item); }); } else { queryParameterValue = encodeURIComponent(queryParameterValue); } } // Join pipes and CSV *after* encoding, or the server will be upset. if (Array.isArray(queryParameterValue) && (queryParameter.collectionFormat === "CSV" || queryParameter.collectionFormat === "Pipes")) { queryParameterValue = queryParameterValue.join(delimiter); } result.set(queryParameter.mapper.serializedName || (0, interfaceHelpers_js_1.getPathStringFromParameter)(queryParameter), queryParameterValue); } } } return { queryParams: result, sequenceParams, }; } function simpleParseQueryParams(queryString) { const result = new Map(); if (!queryString || queryString[0] !== "?") { return result; } // remove the leading ? queryString = queryString.slice(1); const pairs = queryString.split("&"); for (const pair of pairs) { const [name, value] = pair.split("=", 2); const existingValue = result.get(name); if (existingValue) { if (Array.isArray(existingValue)) { existingValue.push(value); } else { result.set(name, [existingValue, value]); } } else { result.set(name, value); } } return result; } /** @internal */ function appendQueryParams(url, queryParams, sequenceParams, noOverwrite = false) { if (queryParams.size === 0) { return url; } const parsedUrl = new URL(url); // QUIRK: parsedUrl.searchParams will have their name/value pairs decoded, which // can change their meaning to the server, such as in the case of a SAS signature. 
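// (Decoding and re-encoding does not always round-trip exactly; "%20" can come
// back as "+", for example, so a signature computed over the original query
// string would no longer match.)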
// To avoid accidentally un-encoding a query param, we parse the key/values ourselves const combinedParams = simpleParseQueryParams(parsedUrl.search); for (const [name, value] of queryParams) { const existingValue = combinedParams.get(name); if (Array.isArray(existingValue)) { if (Array.isArray(value)) { existingValue.push(...value); const valueSet = new Set(existingValue); combinedParams.set(name, Array.from(valueSet)); } else { existingValue.push(value); } } else if (existingValue) { if (Array.isArray(value)) { value.unshift(existingValue); } else if (sequenceParams.has(name)) { combinedParams.set(name, [existingValue, value]); } if (!noOverwrite) { combinedParams.set(name, value); } } else { combinedParams.set(name, value); } } const searchPieces = []; for (const [name, value] of combinedParams) { if (typeof value === "string") { searchPieces.push(`${name}=${value}`); } else if (Array.isArray(value)) { // QUIRK: If we get an array of values, include multiple key/value pairs for (const subValue of value) { searchPieces.push(`${name}=${subValue}`); } } else { searchPieces.push(`${name}=${value}`); } } // QUIRK: we have to set search manually as searchParams will encode comma when it shouldn't. parsedUrl.search = searchPieces.length ? `?${searchPieces.join("&")}` : ""; return parsedUrl.toString(); } exports.appendQueryParams = appendQueryParams; //# sourceMappingURL=urlHelpers.js.map /***/ }), /***/ 31193: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.flattenResponse = exports.isValidUuid = exports.isDuration = exports.isPrimitiveBody = void 0; /** * A type guard for a primitive response body. * @param value - Value to test * * @internal */ function isPrimitiveBody(value, mapperTypeName) { return (mapperTypeName !== "Composite" && mapperTypeName !== "Dictionary" && (typeof value === "string" || typeof value === "number" || typeof value === "boolean" || (mapperTypeName === null || mapperTypeName === void 0 ? void 0 : mapperTypeName.match(/^(Date|DateTime|DateTimeRfc1123|UnixTime|ByteArray|Base64Url)$/i)) !== null || value === undefined || value === null)); } exports.isPrimitiveBody = isPrimitiveBody; const validateISODuration = /^(-|\+)?P(?:([-+]?[0-9,.]*)Y)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)W)?(?:([-+]?[0-9,.]*)D)?(?:T(?:([-+]?[0-9,.]*)H)?(?:([-+]?[0-9,.]*)M)?(?:([-+]?[0-9,.]*)S)?)?$/; /** * Returns true if the given string is in ISO 8601 format. * @param value - The value to be validated for ISO 8601 duration format. * @internal */ function isDuration(value) { return validateISODuration.test(value); } exports.isDuration = isDuration; const validUuidRegex = /^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$/i; /** * Returns true if the provided uuid is valid. * * @param uuid - The uuid that needs to be validated. * * @internal */ function isValidUuid(uuid) { return validUuidRegex.test(uuid); } exports.isValidUuid = isValidUuid; /** * Maps the response as follows: * - wraps the response body if needed (typically if its type is primitive). * - returns null if the combination of the headers and the body is empty. * - otherwise, returns the combination of the headers and the body. 
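* For example, a primitive string body together with parsed headers becomes
* `{ ...headers, body: "<value>" }`, while an empty response whose body mapper
* is nullable becomes `null` (or `{ body: null }` when the body is wrapped).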
* * @param responseObject - a representation of the parsed response * @returns the response that will be returned to the user which can be null and/or wrapped * * @internal */ function handleNullableResponseAndWrappableBody(responseObject) { const combinedHeadersAndBody = Object.assign(Object.assign({}, responseObject.headers), responseObject.body); if (responseObject.hasNullableType && Object.getOwnPropertyNames(combinedHeadersAndBody).length === 0) { return responseObject.shouldWrapBody ? { body: null } : null; } else { return responseObject.shouldWrapBody ? Object.assign(Object.assign({}, responseObject.headers), { body: responseObject.body }) : combinedHeadersAndBody; } } /** * Take a `FullOperationResponse` and turn it into a flat * response object to hand back to the consumer. * @param fullResponse - The processed response from the operation request * @param responseSpec - The response map from the OperationSpec * * @internal */ function flattenResponse(fullResponse, responseSpec) { var _a, _b; const parsedHeaders = fullResponse.parsedHeaders; // head methods never have a body, but we return a boolean set to body property // to indicate presence/absence of the resource if (fullResponse.request.method === "HEAD") { return Object.assign(Object.assign({}, parsedHeaders), { body: fullResponse.parsedBody }); } const bodyMapper = responseSpec && responseSpec.bodyMapper; const isNullable = Boolean(bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.nullable); const expectedBodyTypeName = bodyMapper === null || bodyMapper === void 0 ? void 0 : bodyMapper.type.name; /** If the body is asked for, we look at the expected body type to handle it */ if (expectedBodyTypeName === "Stream") { return Object.assign(Object.assign({}, parsedHeaders), { blobBody: fullResponse.blobBody, readableStreamBody: fullResponse.readableStreamBody }); } const modelProperties = (expectedBodyTypeName === "Composite" && bodyMapper.type.modelProperties) || {}; const isPageableResponse = Object.keys(modelProperties).some((k) => modelProperties[k].serializedName === ""); if (expectedBodyTypeName === "Sequence" || isPageableResponse) { const arrayResponse = (_a = fullResponse.parsedBody) !== null && _a !== void 0 ? _a : []; for (const key of Object.keys(modelProperties)) { if (modelProperties[key].serializedName) { arrayResponse[key] = (_b = fullResponse.parsedBody) === null || _b === void 0 ? void 0 : _b[key]; } } if (parsedHeaders) { for (const key of Object.keys(parsedHeaders)) { arrayResponse[key] = parsedHeaders[key]; } } return isNullable && !fullResponse.parsedBody && !parsedHeaders && Object.getOwnPropertyNames(modelProperties).length === 0 ? null : arrayResponse; } return handleNullableResponseAndWrappableBody({ body: fullResponse.parsedBody, headers: parsedHeaders, hasNullableType: isNullable, shouldWrapBody: isPrimitiveBody(fullResponse.parsedBody, expectedBodyTypeName), }); } exports.flattenResponse = flattenResponse; //# sourceMappingURL=utils.js.map /***/ }), /***/ 95492: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
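// This module provides the core-http-compat ExtendedServiceClient, which layers
// core-http era options (keepAliveOptions, redirectOptions) and the legacy
// `_response` property on top of the core-client ServiceClient. A minimal
// sketch of the extra behavior, assuming a hypothetical subclass:
//
//   class MyCompatClient extends ExtendedServiceClient {}
//   const client = new MyCompatClient({ keepAliveOptions: { enable: false } });
//   // every request now carries disableKeepAlive = true, and results of
//   // sendOperationRequest expose the raw response through result._response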
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ExtendedServiceClient = void 0; const disableKeepAlivePolicy_js_1 = __nccwpck_require__(32639); const core_rest_pipeline_1 = __nccwpck_require__(20778); const core_client_1 = __nccwpck_require__(60160); const response_js_1 = __nccwpck_require__(38153); /** * Client to provide compatability between core V1 & V2. */ class ExtendedServiceClient extends core_client_1.ServiceClient { constructor(options) { var _a, _b; super(options); if (((_a = options.keepAliveOptions) === null || _a === void 0 ? void 0 : _a.enable) === false && !(0, disableKeepAlivePolicy_js_1.pipelineContainsDisableKeepAlivePolicy)(this.pipeline)) { this.pipeline.addPolicy((0, disableKeepAlivePolicy_js_1.createDisableKeepAlivePolicy)()); } if (((_b = options.redirectOptions) === null || _b === void 0 ? void 0 : _b.handleRedirects) === false) { this.pipeline.removePolicy({ name: core_rest_pipeline_1.redirectPolicyName, }); } } /** * Compatible send operation request function. * * @param operationArguments - Operation arguments * @param operationSpec - Operation Spec * @returns */ async sendOperationRequest(operationArguments, operationSpec) { var _a; const userProvidedCallBack = (_a = operationArguments === null || operationArguments === void 0 ? void 0 : operationArguments.options) === null || _a === void 0 ? void 0 : _a.onResponse; let lastResponse; function onResponse(rawResponse, flatResponse, error) { lastResponse = rawResponse; if (userProvidedCallBack) { userProvidedCallBack(rawResponse, flatResponse, error); } } operationArguments.options = Object.assign(Object.assign({}, operationArguments.options), { onResponse }); const result = await super.sendOperationRequest(operationArguments, operationSpec); if (lastResponse) { Object.defineProperty(result, "_response", { value: (0, response_js_1.toCompatResponse)(lastResponse), }); } return result; } } exports.ExtendedServiceClient = ExtendedServiceClient; //# sourceMappingURL=extendedClient.js.map /***/ }), /***/ 2078: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.convertHttpClient = void 0; const response_js_1 = __nccwpck_require__(38153); const util_js_1 = __nccwpck_require__(33850); /** * Converts a RequestPolicy based HttpClient to a PipelineRequest based HttpClient. * @param requestPolicyClient - A HttpClient compatible with core-http * @returns A HttpClient compatible with core-rest-pipeline */ function convertHttpClient(requestPolicyClient) { return { sendRequest: async (request) => { const response = await requestPolicyClient.sendRequest((0, util_js_1.toWebResourceLike)(request, { createProxy: true })); return (0, response_js_1.toPipelineResponse)(response); }, }; } exports.convertHttpClient = convertHttpClient; //# sourceMappingURL=httpClientAdapter.js.map /***/ }), /***/ 61584: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.toHttpHeadersLike = exports.convertHttpClient = exports.disableKeepAlivePolicyName = exports.HttpPipelineLogLevel = exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.ExtendedServiceClient = void 0; /** * A Shim Library that provides compatibility between Core V1 & V2 Packages. * * @packageDocumentation */ var extendedClient_js_1 = __nccwpck_require__(95492); Object.defineProperty(exports, "ExtendedServiceClient", ({ enumerable: true, get: function () { return extendedClient_js_1.ExtendedServiceClient; } })); var requestPolicyFactoryPolicy_js_1 = __nccwpck_require__(3466); Object.defineProperty(exports, "requestPolicyFactoryPolicyName", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.requestPolicyFactoryPolicyName; } })); Object.defineProperty(exports, "createRequestPolicyFactoryPolicy", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.createRequestPolicyFactoryPolicy; } })); Object.defineProperty(exports, "HttpPipelineLogLevel", ({ enumerable: true, get: function () { return requestPolicyFactoryPolicy_js_1.HttpPipelineLogLevel; } })); var disableKeepAlivePolicy_js_1 = __nccwpck_require__(32639); Object.defineProperty(exports, "disableKeepAlivePolicyName", ({ enumerable: true, get: function () { return disableKeepAlivePolicy_js_1.disableKeepAlivePolicyName; } })); var httpClientAdapter_js_1 = __nccwpck_require__(2078); Object.defineProperty(exports, "convertHttpClient", ({ enumerable: true, get: function () { return httpClientAdapter_js_1.convertHttpClient; } })); var util_js_1 = __nccwpck_require__(33850); Object.defineProperty(exports, "toHttpHeadersLike", ({ enumerable: true, get: function () { return util_js_1.toHttpHeadersLike; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 32639: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.pipelineContainsDisableKeepAlivePolicy = exports.createDisableKeepAlivePolicy = exports.disableKeepAlivePolicyName = void 0; exports.disableKeepAlivePolicyName = "DisableKeepAlivePolicy"; function createDisableKeepAlivePolicy() { return { name: exports.disableKeepAlivePolicyName, async sendRequest(request, next) { request.disableKeepAlive = true; return next(request); }, }; } exports.createDisableKeepAlivePolicy = createDisableKeepAlivePolicy; /** * @internal */ function pipelineContainsDisableKeepAlivePolicy(pipeline) { return pipeline.getOrderedPolicies().some((policy) => policy.name === exports.disableKeepAlivePolicyName); } exports.pipelineContainsDisableKeepAlivePolicy = pipelineContainsDisableKeepAlivePolicy; //# sourceMappingURL=disableKeepAlivePolicy.js.map /***/ }), /***/ 3466: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createRequestPolicyFactoryPolicy = exports.requestPolicyFactoryPolicyName = exports.HttpPipelineLogLevel = void 0; const util_js_1 = __nccwpck_require__(33850); const response_js_1 = __nccwpck_require__(38153); /** * An enum for compatibility with RequestPolicy */ var HttpPipelineLogLevel; (function (HttpPipelineLogLevel) { HttpPipelineLogLevel[HttpPipelineLogLevel["ERROR"] = 1] = "ERROR"; HttpPipelineLogLevel[HttpPipelineLogLevel["INFO"] = 3] = "INFO"; HttpPipelineLogLevel[HttpPipelineLogLevel["OFF"] = 0] = "OFF"; HttpPipelineLogLevel[HttpPipelineLogLevel["WARNING"] = 2] = "WARNING"; })(HttpPipelineLogLevel || (exports.HttpPipelineLogLevel = HttpPipelineLogLevel = {})); const mockRequestPolicyOptions = { log(_logLevel, _message) { /* do nothing */ }, shouldLog(_logLevel) { return false; }, }; /** * The name of the RequestPolicyFactoryPolicy */ exports.requestPolicyFactoryPolicyName = "RequestPolicyFactoryPolicy"; /** * A policy that wraps policies written for core-http. * @param factories - An array of `RequestPolicyFactory` objects from a core-http pipeline */ function createRequestPolicyFactoryPolicy(factories) { const orderedFactories = factories.slice().reverse(); return { name: exports.requestPolicyFactoryPolicyName, async sendRequest(request, next) { let httpPipeline = { async sendRequest(httpRequest) { const response = await next((0, util_js_1.toPipelineRequest)(httpRequest)); return (0, response_js_1.toCompatResponse)(response, { createProxy: true }); }, }; for (const factory of orderedFactories) { httpPipeline = factory.create(httpPipeline, mockRequestPolicyOptions); } const webResourceLike = (0, util_js_1.toWebResourceLike)(request, { createProxy: true }); const response = await httpPipeline.sendRequest(webResourceLike); return (0, response_js_1.toPipelineResponse)(response); }, }; } exports.createRequestPolicyFactoryPolicy = createRequestPolicyFactoryPolicy; //# sourceMappingURL=requestPolicyFactoryPolicy.js.map /***/ }), /***/ 38153: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.toPipelineResponse = exports.toCompatResponse = void 0; const core_rest_pipeline_1 = __nccwpck_require__(20778); const util_js_1 = __nccwpck_require__(33850); const originalResponse = Symbol("Original FullOperationResponse"); /** * A helper to convert response objects from the new pipeline back to the old one. * @param response - A response object from core-client. * @returns A response compatible with `HttpOperationResponse` from core-http. */ function toCompatResponse(response, options) { let request = (0, util_js_1.toWebResourceLike)(response.request); let headers = (0, util_js_1.toHttpHeadersLike)(response.headers); if (options === null || options === void 0 ? 
void 0 : options.createProxy) { return new Proxy(response, { get(target, prop, receiver) { if (prop === "headers") { return headers; } else if (prop === "request") { return request; } else if (prop === originalResponse) { return response; } return Reflect.get(target, prop, receiver); }, set(target, prop, value, receiver) { if (prop === "headers") { headers = value; } else if (prop === "request") { request = value; } return Reflect.set(target, prop, value, receiver); }, }); } else { return Object.assign(Object.assign({}, response), { request, headers }); } } exports.toCompatResponse = toCompatResponse; /** * A helper to convert back to a PipelineResponse * @param compatResponse - A response compatible with `HttpOperationResponse` from core-http. */ function toPipelineResponse(compatResponse) { const extendedCompatResponse = compatResponse; const response = extendedCompatResponse[originalResponse]; const headers = (0, core_rest_pipeline_1.createHttpHeaders)(compatResponse.headers.toJson({ preserveCase: true })); if (response) { response.headers = headers; return response; } else { return Object.assign(Object.assign({}, compatResponse), { headers, request: (0, util_js_1.toPipelineRequest)(compatResponse.request) }); } } exports.toPipelineResponse = toPipelineResponse; //# sourceMappingURL=response.js.map /***/ }), /***/ 33850: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HttpHeaders = exports.toHttpHeadersLike = exports.toWebResourceLike = exports.toPipelineRequest = void 0; const core_rest_pipeline_1 = __nccwpck_require__(20778); // We use a custom symbol to cache a reference to the original request without // exposing it on the public interface. const originalRequestSymbol = Symbol("Original PipelineRequest"); // Symbol.for() will return the same symbol if it's already been created // This particular one is used in core-client to handle the case of when a request is // cloned but we need to retrieve the OperationSpec and OperationArguments from the // original request. const originalClientRequestSymbol = Symbol.for("@azure/core-client original request"); function toPipelineRequest(webResource, options = {}) { const compatWebResource = webResource; const request = compatWebResource[originalRequestSymbol]; const headers = (0, core_rest_pipeline_1.createHttpHeaders)(webResource.headers.toJson({ preserveCase: true })); if (request) { request.headers = headers; return request; } else { const newRequest = (0, core_rest_pipeline_1.createPipelineRequest)({ url: webResource.url, method: webResource.method, headers, withCredentials: webResource.withCredentials, timeout: webResource.timeout, requestId: webResource.requestId, abortSignal: webResource.abortSignal, body: webResource.body, formData: webResource.formData, disableKeepAlive: !!webResource.keepAlive, onDownloadProgress: webResource.onDownloadProgress, onUploadProgress: webResource.onUploadProgress, proxySettings: webResource.proxySettings, streamResponseStatusCodes: webResource.streamResponseStatusCodes, }); if (options.originalRequest) { newRequest[originalClientRequestSymbol] = options.originalRequest; } return newRequest; } } exports.toPipelineRequest = toPipelineRequest; function toWebResourceLike(request, options) { var _a; const originalRequest = (_a = options === null || options === void 0 ? void 0 : options.originalRequest) !== null && _a !== void 0 ? 
_a : request; const webResource = { url: request.url, method: request.method, headers: toHttpHeadersLike(request.headers), withCredentials: request.withCredentials, timeout: request.timeout, requestId: request.headers.get("x-ms-client-request-id") || request.requestId, abortSignal: request.abortSignal, body: request.body, formData: request.formData, keepAlive: !!request.disableKeepAlive, onDownloadProgress: request.onDownloadProgress, onUploadProgress: request.onUploadProgress, proxySettings: request.proxySettings, streamResponseStatusCodes: request.streamResponseStatusCodes, clone() { throw new Error("Cannot clone a non-proxied WebResourceLike"); }, prepare() { throw new Error("WebResourceLike.prepare() is not supported by @azure/core-http-compat"); }, validateRequestProperties() { /** do nothing */ }, }; if (options === null || options === void 0 ? void 0 : options.createProxy) { return new Proxy(webResource, { get(target, prop, receiver) { if (prop === originalRequestSymbol) { return request; } else if (prop === "clone") { return () => { return toWebResourceLike(toPipelineRequest(webResource, { originalRequest }), { createProxy: true, originalRequest, }); }; } return Reflect.get(target, prop, receiver); }, set(target, prop, value, receiver) { if (prop === "keepAlive") { request.disableKeepAlive = !value; } const passThroughProps = [ "url", "method", "withCredentials", "timeout", "requestId", "abortSignal", "body", "formData", "onDownloadProgress", "onUploadProgress", "proxySettings", "streamResponseStatusCodes", ]; if (typeof prop === "string" && passThroughProps.includes(prop)) { request[prop] = value; } return Reflect.set(target, prop, value, receiver); }, }); } else { return webResource; } } exports.toWebResourceLike = toWebResourceLike; /** * Converts HttpHeaders from core-rest-pipeline to look like * HttpHeaders from core-http. * @param headers - HttpHeaders from core-rest-pipeline * @returns HttpHeaders as they looked in core-http */ function toHttpHeadersLike(headers) { return new HttpHeaders(headers.toJSON({ preserveCase: true })); } exports.toHttpHeadersLike = toHttpHeadersLike; /** * A collection of HttpHeaders that can be sent with a HTTP request. */ function getHeaderKey(headerName) { return headerName.toLowerCase(); } /** * A collection of HTTP header key/value pairs. */ class HttpHeaders { constructor(rawHeaders) { this._headersMap = {}; if (rawHeaders) { for (const headerName in rawHeaders) { this.set(headerName, rawHeaders[headerName]); } } } /** * Set a header in this collection with the provided name and value. The name is * case-insensitive. * @param headerName - The name of the header to set. This value is case-insensitive. * @param headerValue - The value of the header to set. */ set(headerName, headerValue) { this._headersMap[getHeaderKey(headerName)] = { name: headerName, value: headerValue.toString(), }; } /** * Get the header value for the provided header name, or undefined if no header exists in this * collection with the provided name. * @param headerName - The name of the header. */ get(headerName) { const header = this._headersMap[getHeaderKey(headerName)]; return !header ? undefined : header.value; } /** * Get whether or not this header collection contains a header entry for the provided header name. */ contains(headerName) { return !!this._headersMap[getHeaderKey(headerName)]; } /** * Remove the header with the provided headerName. Return whether or not the header existed and * was removed. * @param headerName - The name of the header to remove. 
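* Lookups are case-insensitive, so `remove("content-type")` removes a header
* that was set as `"Content-Type"`; the originally supplied casing is still
* reported by `rawHeaders()` and `toJson({ preserveCase: true })`.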
*/ remove(headerName) { const result = this.contains(headerName); delete this._headersMap[getHeaderKey(headerName)]; return result; } /** * Get the headers that are contained this collection as an object. */ rawHeaders() { return this.toJson({ preserveCase: true }); } /** * Get the headers that are contained in this collection as an array. */ headersArray() { const headers = []; for (const headerKey in this._headersMap) { headers.push(this._headersMap[headerKey]); } return headers; } /** * Get the header names that are contained in this collection. */ headerNames() { const headerNames = []; const headers = this.headersArray(); for (let i = 0; i < headers.length; ++i) { headerNames.push(headers[i].name); } return headerNames; } /** * Get the header values that are contained in this collection. */ headerValues() { const headerValues = []; const headers = this.headersArray(); for (let i = 0; i < headers.length; ++i) { headerValues.push(headers[i].value); } return headerValues; } /** * Get the JSON object representation of this HTTP header collection. */ toJson(options = {}) { const result = {}; if (options.preserveCase) { for (const headerKey in this._headersMap) { const header = this._headersMap[headerKey]; result[header.name] = header.value; } } else { for (const headerKey in this._headersMap) { const header = this._headersMap[headerKey]; result[getHeaderKey(header.name)] = header.value; } } return result; } /** * Get the string representation of this HTTP header collection. */ toString() { return JSON.stringify(this.toJson({ preserveCase: true })); } /** * Create a deep clone/copy of this HttpHeaders collection. */ clone() { const resultPreservingCasing = {}; for (const headerKey in this._headersMap) { const header = this._headersMap[headerKey]; resultPreservingCasing[header.name] = header.value; } return new HttpHeaders(resultPreservingCasing); } } exports.HttpHeaders = HttpHeaders; //# sourceMappingURL=util.js.map /***/ }), /***/ 93878: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.pollHttpOperation = exports.isOperationError = exports.getResourceLocation = exports.getOperationStatus = exports.getOperationLocation = exports.initHttpOperation = exports.getStatusFromInitialResponse = exports.getErrorFromResponse = exports.parseRetryAfter = exports.inferLroMode = void 0; const operation_js_1 = __nccwpck_require__(30736); const logger_js_1 = __nccwpck_require__(50480); function getOperationLocationPollingUrl(inputs) { const { azureAsyncOperation, operationLocation } = inputs; return operationLocation !== null && operationLocation !== void 0 ? operationLocation : azureAsyncOperation; } function getLocationHeader(rawResponse) { return rawResponse.headers["location"]; } function getOperationLocationHeader(rawResponse) { return rawResponse.headers["operation-location"]; } function getAzureAsyncOperationHeader(rawResponse) { return rawResponse.headers["azure-asyncoperation"]; } function findResourceLocation(inputs) { var _a; const { location, requestMethod, requestPath, resourceLocationConfig } = inputs; switch (requestMethod) { case "PUT": { return requestPath; } case "DELETE": { return undefined; } case "PATCH": { return (_a = getDefault()) !== null && _a !== void 0 ? 
_a : requestPath; } default: { return getDefault(); } } function getDefault() { switch (resourceLocationConfig) { case "azure-async-operation": { return undefined; } case "original-uri": { return requestPath; } case "location": default: { return location; } } } } function inferLroMode(inputs) { const { rawResponse, requestMethod, requestPath, resourceLocationConfig } = inputs; const operationLocation = getOperationLocationHeader(rawResponse); const azureAsyncOperation = getAzureAsyncOperationHeader(rawResponse); const pollingUrl = getOperationLocationPollingUrl({ operationLocation, azureAsyncOperation }); const location = getLocationHeader(rawResponse); const normalizedRequestMethod = requestMethod === null || requestMethod === void 0 ? void 0 : requestMethod.toLocaleUpperCase(); if (pollingUrl !== undefined) { return { mode: "OperationLocation", operationLocation: pollingUrl, resourceLocation: findResourceLocation({ requestMethod: normalizedRequestMethod, location, requestPath, resourceLocationConfig, }), }; } else if (location !== undefined) { return { mode: "ResourceLocation", operationLocation: location, }; } else if (normalizedRequestMethod === "PUT" && requestPath) { return { mode: "Body", operationLocation: requestPath, }; } else { return undefined; } } exports.inferLroMode = inferLroMode; function transformStatus(inputs) { const { status, statusCode } = inputs; if (typeof status !== "string" && status !== undefined) { throw new Error(`Polling was unsuccessful. Expected status to have a string value or no value but it has instead: ${status}. This doesn't necessarily indicate the operation has failed. Check your Azure subscription or resource status for more information.`); } switch (status === null || status === void 0 ? void 0 : status.toLocaleLowerCase()) { case undefined: return toOperationStatus(statusCode); case "succeeded": return "succeeded"; case "failed": return "failed"; case "running": case "accepted": case "started": case "canceling": case "cancelling": return "running"; case "canceled": case "cancelled": return "canceled"; default: { logger_js_1.logger.verbose(`LRO: unrecognized operation status: ${status}`); return status; } } } function getStatus(rawResponse) { var _a; const { status } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function getProvisioningState(rawResponse) { var _a, _b; const { properties, provisioningState } = (_a = rawResponse.body) !== null && _a !== void 0 ? _a : {}; const status = (_b = properties === null || properties === void 0 ? void 0 : properties.provisioningState) !== null && _b !== void 0 ? _b : provisioningState; return transformStatus({ status, statusCode: rawResponse.statusCode }); } function toOperationStatus(statusCode) { if (statusCode === 202) { return "running"; } else if (statusCode < 300) { return "succeeded"; } else { return "failed"; } } function parseRetryAfter({ rawResponse }) { const retryAfter = rawResponse.headers["retry-after"]; if (retryAfter !== undefined) { // Retry-After header value is either in HTTP date format, or in seconds const retryAfterInSeconds = parseInt(retryAfter); return isNaN(retryAfterInSeconds) ? 
calculatePollingIntervalFromDate(new Date(retryAfter)) : retryAfterInSeconds * 1000; } return undefined; } exports.parseRetryAfter = parseRetryAfter; function getErrorFromResponse(response) { const error = accessBodyProperty(response, "error"); if (!error) { logger_js_1.logger.warning(`The long-running operation failed but there is no error property in the response's body`); return; } if (!error.code || !error.message) { logger_js_1.logger.warning(`The long-running operation failed but the error property in the response's body doesn't contain code or message`); return; } return error; } exports.getErrorFromResponse = getErrorFromResponse; function calculatePollingIntervalFromDate(retryAfterDate) { const timeNow = Math.floor(new Date().getTime()); const retryAfterTime = retryAfterDate.getTime(); if (timeNow < retryAfterTime) { return retryAfterTime - timeNow; } return undefined; } function getStatusFromInitialResponse(inputs) { const { response, state, operationLocation } = inputs; function helper() { var _a; const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case undefined: return toOperationStatus(response.rawResponse.statusCode); case "Body": return getOperationStatus(response, state); default: return "running"; } } const status = helper(); return status === "running" && operationLocation === undefined ? "succeeded" : status; } exports.getStatusFromInitialResponse = getStatusFromInitialResponse; /** * Initiates the long-running operation. */ async function initHttpOperation(inputs) { const { stateProxy, resourceLocationConfig, processResult, lro, setErrorAsResult } = inputs; return (0, operation_js_1.initOperation)({ init: async () => { const response = await lro.sendInitialRequest(); const config = inferLroMode({ rawResponse: response.rawResponse, requestPath: lro.requestPath, requestMethod: lro.requestMethod, resourceLocationConfig, }); return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {})); }, stateProxy, processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, getOperationStatus: getStatusFromInitialResponse, setErrorAsResult, }); } exports.initHttpOperation = initHttpOperation; function getOperationLocation({ rawResponse }, state) { var _a; const mode = (_a = state.config.metadata) === null || _a === void 0 ? void 0 : _a["mode"]; switch (mode) { case "OperationLocation": { return getOperationLocationPollingUrl({ operationLocation: getOperationLocationHeader(rawResponse), azureAsyncOperation: getAzureAsyncOperationHeader(rawResponse), }); } case "ResourceLocation": { return getLocationHeader(rawResponse); } case "Body": default: { return undefined; } } } exports.getOperationLocation = getOperationLocation; function getOperationStatus({ rawResponse }, state) { var _a; const mode = (_a = state.config.metadata) === null || _a === void 0 ? 
void 0 : _a["mode"]; switch (mode) { case "OperationLocation": { return getStatus(rawResponse); } case "ResourceLocation": { return toOperationStatus(rawResponse.statusCode); } case "Body": { return getProvisioningState(rawResponse); } default: throw new Error(`Internal error: Unexpected operation mode: ${mode}`); } } exports.getOperationStatus = getOperationStatus; function accessBodyProperty({ flatResponse, rawResponse }, prop) { var _a, _b; return (_a = flatResponse === null || flatResponse === void 0 ? void 0 : flatResponse[prop]) !== null && _a !== void 0 ? _a : (_b = rawResponse.body) === null || _b === void 0 ? void 0 : _b[prop]; } function getResourceLocation(res, state) { const loc = accessBodyProperty(res, "resourceLocation"); if (loc && typeof loc === "string") { state.config.resourceLocation = loc; } return state.config.resourceLocation; } exports.getResourceLocation = getResourceLocation; function isOperationError(e) { return e.name === "RestError"; } exports.isOperationError = isOperationError; /** Polls the long-running operation. */ async function pollHttpOperation(inputs) { const { lro, stateProxy, options, processResult, updateState, setDelay, state, setErrorAsResult, } = inputs; return (0, operation_js_1.pollOperation)({ state, stateProxy, setDelay, processResult: processResult ? ({ flatResponse }, inputState) => processResult(flatResponse, inputState) : ({ flatResponse }) => flatResponse, getError: getErrorFromResponse, updateState, getPollingInterval: parseRetryAfter, getOperationLocation, getOperationStatus, isOperationError, getResourceLocation, options, /** * The expansion here is intentional because `lro` could be an object that * references an inner this, so we need to preserve a reference to it. */ poll: async (location, inputOptions) => lro.sendPollRequest(location, inputOptions), setErrorAsResult, }); } exports.pollHttpOperation = pollHttpOperation; //# sourceMappingURL=operation.js.map /***/ }), /***/ 57421: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createHttpPoller = void 0; const operation_js_1 = __nccwpck_require__(93878); const poller_js_1 = __nccwpck_require__(18835); /** * Creates a poller that can be used to poll a long-running operation. 
* @param lro - Description of the long-running operation * @param options - options to configure the poller * @returns an initialized poller */ async function createHttpPoller(lro, options) { const { resourceLocationConfig, intervalInMs, processResult, restoreFrom, updateState, withOperationLocation, resolveOnUnsuccessful = false, } = options || {}; return (0, poller_js_1.buildCreatePoller)({ getStatusFromInitialResponse: operation_js_1.getStatusFromInitialResponse, getStatusFromPollResponse: operation_js_1.getOperationStatus, isOperationError: operation_js_1.isOperationError, getOperationLocation: operation_js_1.getOperationLocation, getResourceLocation: operation_js_1.getResourceLocation, getPollingInterval: operation_js_1.parseRetryAfter, getError: operation_js_1.getErrorFromResponse, resolveOnUnsuccessful, })({ init: async () => { const response = await lro.sendInitialRequest(); const config = (0, operation_js_1.inferLroMode)({ rawResponse: response.rawResponse, requestPath: lro.requestPath, requestMethod: lro.requestMethod, resourceLocationConfig, }); return Object.assign({ response, operationLocation: config === null || config === void 0 ? void 0 : config.operationLocation, resourceLocation: config === null || config === void 0 ? void 0 : config.resourceLocation }, ((config === null || config === void 0 ? void 0 : config.mode) ? { metadata: { mode: config.mode } } : {})); }, poll: lro.sendPollRequest, }, { intervalInMs, withOperationLocation, restoreFrom, updateState, processResult: processResult ? ({ flatResponse }, state) => processResult(flatResponse, state) : ({ flatResponse }) => flatResponse, }); } exports.createHttpPoller = createHttpPoller; //# sourceMappingURL=poller.js.map /***/ }), /***/ 91754: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createHttpPoller = void 0; const tslib_1 = __nccwpck_require__(61860); var poller_js_1 = __nccwpck_require__(57421); Object.defineProperty(exports, "createHttpPoller", ({ enumerable: true, get: function () { return poller_js_1.createHttpPoller; } })); /** * This can be uncommented to expose the protocol-agnostic poller */ // export { // BuildCreatePollerOptions, // Operation, // CreatePollerOptions, // OperationConfig, // RestorableOperationState, // } from "./poller/models"; // export { buildCreatePoller } from "./poller/poller"; /** legacy */ tslib_1.__exportStar(__nccwpck_require__(55454), exports); tslib_1.__exportStar(__nccwpck_require__(36272), exports); tslib_1.__exportStar(__nccwpck_require__(82670), exports); //# sourceMappingURL=index.js.map /***/ }), /***/ 55454: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.LroEngine = void 0; var lroEngine_js_1 = __nccwpck_require__(91607); Object.defineProperty(exports, "LroEngine", ({ enumerable: true, get: function () { return lroEngine_js_1.LroEngine; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 91607: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
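// LroEngine is the legacy poller kept for backward compatibility: it drives a
// GenericPollOperation on a fixed interval (POLL_INTERVAL_IN_MS by default,
// overridable through intervalInMs). A minimal sketch, assuming a hypothetical
// `lro` object with sendInitialRequest/sendPollRequest:
//
//   const engine = new LroEngine(lro, { intervalInMs: 5000 });
//   const result = await engine.pollUntilDone();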
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.LroEngine = void 0; const operation_js_1 = __nccwpck_require__(54267); const constants_js_1 = __nccwpck_require__(55044); const poller_js_1 = __nccwpck_require__(36272); const operation_js_2 = __nccwpck_require__(30736); /** * The LRO Engine, a class that performs polling. */ class LroEngine extends poller_js_1.Poller { constructor(lro, options) { const { intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, resumeFrom, resolveOnUnsuccessful = false, isDone, lroResourceLocationConfig, processResult, updateState, } = options || {}; const state = resumeFrom ? (0, operation_js_2.deserializeState)(resumeFrom) : {}; const operation = new operation_js_1.GenericPollOperation(state, lro, !resolveOnUnsuccessful, lroResourceLocationConfig, processResult, updateState, isDone); super(operation); this.resolveOnUnsuccessful = resolveOnUnsuccessful; this.config = { intervalInMs: intervalInMs }; operation.setPollerConfig(this.config); } /** * The method used by the poller to wait before attempting to update its operation. */ delay() { return new Promise((resolve) => setTimeout(() => resolve(), this.config.intervalInMs)); } } exports.LroEngine = LroEngine; //# sourceMappingURL=lroEngine.js.map /***/ }), /***/ 54267: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.GenericPollOperation = void 0; const operation_js_1 = __nccwpck_require__(93878); const logger_js_1 = __nccwpck_require__(50480); const createStateProxy = () => ({ initState: (config) => ({ config, isStarted: true }), setCanceled: (state) => (state.isCancelled = true), setError: (state, error) => (state.error = error), setResult: (state, result) => (state.result = result), setRunning: (state) => (state.isStarted = true), setSucceeded: (state) => (state.isCompleted = true), setFailed: () => { /** empty body */ }, getError: (state) => state.error, getResult: (state) => state.result, isCanceled: (state) => !!state.isCancelled, isFailed: (state) => !!state.error, isRunning: (state) => !!state.isStarted, isSucceeded: (state) => Boolean(state.isCompleted && !state.isCancelled && !state.error), }); class GenericPollOperation { constructor(state, lro, setErrorAsResult, lroResourceLocationConfig, processResult, updateState, isDone) { this.state = state; this.lro = lro; this.setErrorAsResult = setErrorAsResult; this.lroResourceLocationConfig = lroResourceLocationConfig; this.processResult = processResult; this.updateState = updateState; this.isDone = isDone; } setPollerConfig(pollerConfig) { this.pollerConfig = pollerConfig; } async update(options) { var _a; const stateProxy = createStateProxy(); if (!this.state.isStarted) { this.state = Object.assign(Object.assign({}, this.state), (await (0, operation_js_1.initHttpOperation)({ lro: this.lro, stateProxy, resourceLocationConfig: this.lroResourceLocationConfig, processResult: this.processResult, setErrorAsResult: this.setErrorAsResult, }))); } const updateState = this.updateState; const isDone = this.isDone; if (!this.state.isCompleted && this.state.error === undefined) { await (0, operation_js_1.pollHttpOperation)({ lro: this.lro, state: this.state, stateProxy, processResult: this.processResult, updateState: updateState ? (state, { rawResponse }) => updateState(state, rawResponse) : undefined, isDone: isDone ? 
({ flatResponse }, state) => isDone(flatResponse, state) : undefined, options, setDelay: (intervalInMs) => { this.pollerConfig.intervalInMs = intervalInMs; }, setErrorAsResult: this.setErrorAsResult, }); } (_a = options === null || options === void 0 ? void 0 : options.fireProgress) === null || _a === void 0 ? void 0 : _a.call(options, this.state); return this; } async cancel() { logger_js_1.logger.error("`cancelOperation` is deprecated because it wasn't implemented"); return this; } /** * Serializes the Poller operation. */ toString() { return JSON.stringify({ state: this.state, }); } } exports.GenericPollOperation = GenericPollOperation; //# sourceMappingURL=operation.js.map /***/ }), /***/ 82670: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); //# sourceMappingURL=pollOperation.js.map /***/ }), /***/ 36272: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Poller = exports.PollerCancelledError = exports.PollerStoppedError = void 0; /** * When a poller is manually stopped through the `stopPolling` method, * the poller will be rejected with an instance of the PollerStoppedError. */ class PollerStoppedError extends Error { constructor(message) { super(message); this.name = "PollerStoppedError"; Object.setPrototypeOf(this, PollerStoppedError.prototype); } } exports.PollerStoppedError = PollerStoppedError; /** * When the operation is cancelled, the poller will be rejected with an instance * of the PollerCancelledError. */ class PollerCancelledError extends Error { constructor(message) { super(message); this.name = "PollerCancelledError"; Object.setPrototypeOf(this, PollerCancelledError.prototype); } } exports.PollerCancelledError = PollerCancelledError; /** * A class that represents the definition of a program that polls through consecutive requests * until it reaches a state of completion. * * A poller can be executed manually, by polling request by request by calling to the `poll()` method repeatedly, until its operation is completed. * It also provides a way to wait until the operation completes, by calling `pollUntilDone()` and waiting until the operation finishes. * Pollers can also request the cancellation of the ongoing process to whom is providing the underlying long running operation. * * ```ts * const poller = new MyPoller(); * * // Polling just once: * await poller.poll(); * * // We can try to cancel the request here, by calling: * // * // await poller.cancelOperation(); * // * * // Getting the final result: * const result = await poller.pollUntilDone(); * ``` * * The Poller is defined by two types, a type representing the state of the poller, which * must include a basic set of properties from `PollOperationState`, * and a return type defined by `TResult`, which can be anything. * * The Poller class implements the `PollerLike` interface, which allows poller implementations to avoid having * to export the Poller's class directly, and instead only export the already instantiated poller with the PollerLike type. * * ```ts * class Client { * public async makePoller: PollerLike { * const poller = new MyPoller({}); * // It might be preferred to return the poller after the first request is made, * // so that some information can be obtained right away. 
* await poller.poll(); * return poller; * } * } * * const poller: PollerLike = myClient.makePoller(); * ``` * * A poller can be created through its constructor, then it can be polled until it's completed. * At any point in time, the state of the poller can be obtained without delay through the getOperationState method. * At any point in time, the intermediate forms of the result type can be requested without delay. * Once the underlying operation is marked as completed, the poller will stop and the final value will be returned. * * ```ts * const poller = myClient.makePoller(); * const state: MyOperationState = poller.getOperationState(); * * // The intermediate result can be obtained at any time. * const result: MyResult | undefined = poller.getResult(); * * // The final result can only be obtained after the poller finishes. * const result: MyResult = await poller.pollUntilDone(); * ``` * */ // eslint-disable-next-line no-use-before-define class Poller { /** * A poller needs to be initialized by passing in at least the basic properties of the `PollOperation`. * * When writing an implementation of a Poller, this implementation needs to deal with the initialization * of any custom state beyond the basic definition of the poller. The basic poller assumes that the poller's * operation has already been defined, at least its basic properties. The code below shows how to approach * the definition of the constructor of a new custom poller. * * ```ts * export class MyPoller extends Poller { * constructor({ * // Anything you might need outside of the basics * }) { * let state: MyOperationState = { * privateProperty: private, * publicProperty: public, * }; * * const operation = { * state, * update, * cancel, * toString * } * * // Sending the operation to the parent's constructor. * super(operation); * * // You can assign more local properties here. * } * } * ``` * * Inside of this constructor, a new promise is created. This will be used to * tell the user when the poller finishes (see `pollUntilDone()`). The promise's * resolve and reject methods are also used internally to control when to resolve * or reject anyone waiting for the poller to finish. * * The constructor of a custom implementation of a poller is where any serialized version of * a previous poller's operation should be deserialized into the operation sent to the * base constructor. For example: * * ```ts * export class MyPoller extends Poller { * constructor( * baseOperation: string | undefined * ) { * let state: MyOperationState = {}; * if (baseOperation) { * state = { * ...JSON.parse(baseOperation).state, * ...state * }; * } * const operation = { * state, * // ... * } * super(operation); * } * } * ``` * * @param operation - Must contain the basic properties of `PollOperation`. */ constructor(operation) { /** controls whether to throw an error if the operation failed or was canceled. */ this.resolveOnUnsuccessful = false; this.stopped = true; this.pollProgressCallbacks = []; this.operation = operation; this.promise = new Promise((resolve, reject) => { this.resolve = resolve; this.reject = reject; }); // This prevents the UnhandledPromiseRejectionWarning in node.js from being thrown. // The above warning would get thrown if `poller.poll` is called, it returns an error, // and pullUntilDone did not have a .catch or await try/catch on it's return value. this.promise.catch(() => { /* intentionally blank */ }); } /** * Starts a loop that will break only if the poller is done * or if the poller is stopped. 
*/ async startPolling(pollOptions = {}) { if (this.stopped) { this.stopped = false; } while (!this.isStopped() && !this.isDone()) { await this.poll(pollOptions); await this.delay(); } } /** * pollOnce does one polling, by calling to the update method of the underlying * poll operation to make any relevant change effective. * * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * * @param options - Optional properties passed to the operation's update method. */ async pollOnce(options = {}) { if (!this.isDone()) { this.operation = await this.operation.update({ abortSignal: options.abortSignal, fireProgress: this.fireProgress.bind(this), }); } this.processUpdatedState(); } /** * fireProgress calls the functions passed in via onProgress the method of the poller. * * It loops over all of the callbacks received from onProgress, and executes them, sending them * the current operation state. * * @param state - The current operation state. */ fireProgress(state) { for (const callback of this.pollProgressCallbacks) { callback(state); } } /** * Invokes the underlying operation's cancel method. */ async cancelOnce(options = {}) { this.operation = await this.operation.cancel(options); } /** * Returns a promise that will resolve once a single polling request finishes. * It does this by calling the update method of the Poller's operation. * * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * * @param options - Optional properties passed to the operation's update method. */ poll(options = {}) { if (!this.pollOncePromise) { this.pollOncePromise = this.pollOnce(options); const clearPollOncePromise = () => { this.pollOncePromise = undefined; }; this.pollOncePromise.then(clearPollOncePromise, clearPollOncePromise).catch(this.reject); } return this.pollOncePromise; } processUpdatedState() { if (this.operation.state.error) { this.stopped = true; if (!this.resolveOnUnsuccessful) { this.reject(this.operation.state.error); throw this.operation.state.error; } } if (this.operation.state.isCancelled) { this.stopped = true; if (!this.resolveOnUnsuccessful) { const error = new PollerCancelledError("Operation was canceled"); this.reject(error); throw error; } } if (this.isDone() && this.resolve) { // If the poller has finished polling, this means we now have a result. // However, it can be the case that TResult is instantiated to void, so // we are not expecting a result anyway. To assert that we might not // have a result eventually after finishing polling, we cast the result // to TResult. this.resolve(this.getResult()); } } /** * Returns a promise that will resolve once the underlying operation is completed. */ async pollUntilDone(pollOptions = {}) { if (this.stopped) { this.startPolling(pollOptions).catch(this.reject); } // This is needed because the state could have been updated by // `cancelOperation`, e.g. the operation is canceled or an error occurred. this.processUpdatedState(); return this.promise; } /** * Invokes the provided callback after each polling is completed, * sending the current state of the poller's operation. * * It returns a method that can be used to stop receiving updates on the given callback function. */ onProgress(callback) { this.pollProgressCallbacks.push(callback); return () => { this.pollProgressCallbacks = this.pollProgressCallbacks.filter((c) => c !== callback); }; } /** * Returns true if the poller has finished polling. 
*/ isDone() { const state = this.operation.state; return Boolean(state.isCompleted || state.isCancelled || state.error); } /** * Stops the poller from continuing to poll. */ stopPolling() { if (!this.stopped) { this.stopped = true; if (this.reject) { this.reject(new PollerStoppedError("This poller is already stopped")); } } } /** * Returns true if the poller is stopped. */ isStopped() { return this.stopped; } /** * Attempts to cancel the underlying operation. * * It only optionally receives an object with an abortSignal property, from \@azure/abort-controller's AbortSignalLike. * * If it's called again before it finishes, it will throw an error. * * @param options - Optional properties passed to the operation's update method. */ cancelOperation(options = {}) { if (!this.cancelPromise) { this.cancelPromise = this.cancelOnce(options); } else if (options.abortSignal) { throw new Error("A cancel request is currently pending"); } return this.cancelPromise; } /** * Returns the state of the operation. * * Even though TState will be the same type inside any of the methods of any extension of the Poller class, * implementations of the pollers can customize what's shared with the public by writing their own * version of the `getOperationState` method, and by defining two types, one representing the internal state of the poller * and a public type representing a safe to share subset of the properties of the internal state. * Their definition of getOperationState can then return their public type. * * Example: * * ```ts * // Let's say we have our poller's operation state defined as: * interface MyOperationState extends PollOperationState { * privateProperty?: string; * publicProperty?: string; * } * * // To allow us to have a true separation of public and private state, we have to define another interface: * interface PublicState extends PollOperationState { * publicProperty?: string; * } * * // Then, we define our Poller as follows: * export class MyPoller extends Poller { * // ... More content is needed here ... * * public getOperationState(): PublicState { * const state: PublicState = this.operation.state; * return { * // Properties from PollOperationState * isStarted: state.isStarted, * isCompleted: state.isCompleted, * isCancelled: state.isCancelled, * error: state.error, * result: state.result, * * // The only other property needed by PublicState. * publicProperty: state.publicProperty * } * } * } * ``` * * You can see this in the tests of this repository, go to the file: * `../test/utils/testPoller.ts` * and look for the getOperationState implementation. */ getOperationState() { return this.operation.state; } /** * Returns the result value of the operation, * regardless of the state of the poller. * It can return undefined or an incomplete form of the final TResult value * depending on the implementation. */ getResult() { const state = this.operation.state; return state.result; } /** * Returns a serialized version of the poller's operation * by invoking the operation's toString method. */ toString() { return this.operation.toString(); } } exports.Poller = Poller; //# sourceMappingURL=poller.js.map /***/ }), /***/ 50480: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.logger = void 0; const logger_1 = __nccwpck_require__(26515); /** * The `@azure/logger` configuration for this package. 
* @internal */ exports.logger = (0, logger_1.createClientLogger)("core-lro"); //# sourceMappingURL=logger.js.map /***/ }), /***/ 55044: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.terminalStates = exports.POLL_INTERVAL_IN_MS = void 0; /** * The default time interval to wait before sending the next polling request. */ exports.POLL_INTERVAL_IN_MS = 2000; /** * The closed set of terminal states. */ exports.terminalStates = ["succeeded", "canceled", "failed"]; //# sourceMappingURL=constants.js.map /***/ }), /***/ 30736: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.pollOperation = exports.initOperation = exports.deserializeState = void 0; const logger_js_1 = __nccwpck_require__(50480); const constants_js_1 = __nccwpck_require__(55044); /** * Deserializes the state */ function deserializeState(serializedState) { try { return JSON.parse(serializedState).state; } catch (e) { throw new Error(`Unable to deserialize input state: ${serializedState}`); } } exports.deserializeState = deserializeState; function setStateError(inputs) { const { state, stateProxy, isOperationError } = inputs; return (error) => { if (isOperationError(error)) { stateProxy.setError(state, error); stateProxy.setFailed(state); } throw error; }; } function appendReadableErrorMessage(currentMessage, innerMessage) { let message = currentMessage; if (message.slice(-1) !== ".") { message = message + "."; } return message + " " + innerMessage; } function simplifyError(err) { let message = err.message; let code = err.code; let curErr = err; while (curErr.innererror) { curErr = curErr.innererror; code = curErr.code; message = appendReadableErrorMessage(message, curErr.message); } return { code, message, }; } function processOperationStatus(result) { const { state, stateProxy, status, isDone, processResult, getError, response, setErrorAsResult } = result; switch (status) { case "succeeded": { stateProxy.setSucceeded(state); break; } case "failed": { const err = getError === null || getError === void 0 ? void 0 : getError(response); let postfix = ""; if (err) { const { code, message } = simplifyError(err); postfix = `. ${code}. ${message}`; } const errStr = `The long-running operation has failed${postfix}`; stateProxy.setError(state, new Error(errStr)); stateProxy.setFailed(state); logger_js_1.logger.warning(errStr); break; } case "canceled": { stateProxy.setCanceled(state); break; } } if ((isDone === null || isDone === void 0 ? void 0 : isDone(response, state)) || (isDone === undefined && ["succeeded", "canceled"].concat(setErrorAsResult ? [] : ["failed"]).includes(status))) { stateProxy.setResult(state, buildResult({ response, state, processResult, })); } } function buildResult(inputs) { const { processResult, response, state } = inputs; return processResult ? processResult(response, state) : response; } /** * Initiates the long-running operation. */ async function initOperation(inputs) { const { init, stateProxy, processResult, getOperationStatus, withOperationLocation, setErrorAsResult, } = inputs; const { operationLocation, resourceLocation, metadata, response } = await init(); if (operationLocation) withOperationLocation === null || withOperationLocation === void 0 ? 
void 0 : withOperationLocation(operationLocation, false); const config = { metadata, operationLocation, resourceLocation, }; logger_js_1.logger.verbose(`LRO: Operation description:`, config); const state = stateProxy.initState(config); const status = getOperationStatus({ response, state, operationLocation }); processOperationStatus({ state, status, stateProxy, response, setErrorAsResult, processResult }); return state; } exports.initOperation = initOperation; async function pollOperationHelper(inputs) { const { poll, state, stateProxy, operationLocation, getOperationStatus, getResourceLocation, isOperationError, options, } = inputs; const response = await poll(operationLocation, options).catch(setStateError({ state, stateProxy, isOperationError, })); const status = getOperationStatus(response, state); logger_js_1.logger.verbose(`LRO: Status:\n\tPolling from: ${state.config.operationLocation}\n\tOperation status: ${status}\n\tPolling status: ${constants_js_1.terminalStates.includes(status) ? "Stopped" : "Running"}`); if (status === "succeeded") { const resourceLocation = getResourceLocation(response, state); if (resourceLocation !== undefined) { return { response: await poll(resourceLocation).catch(setStateError({ state, stateProxy, isOperationError })), status, }; } } return { response, status }; } /** Polls the long-running operation. */ async function pollOperation(inputs) { const { poll, state, stateProxy, options, getOperationStatus, getResourceLocation, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, processResult, getError, updateState, setDelay, isDone, setErrorAsResult, } = inputs; const { operationLocation } = state.config; if (operationLocation !== undefined) { const { response, status } = await pollOperationHelper({ poll, getOperationStatus, state, stateProxy, operationLocation, getResourceLocation, isOperationError, options, }); processOperationStatus({ status, response, state, stateProxy, isDone, processResult, getError, setErrorAsResult, }); if (!constants_js_1.terminalStates.includes(status)) { const intervalInMs = getPollingInterval === null || getPollingInterval === void 0 ? void 0 : getPollingInterval(response); if (intervalInMs) setDelay(intervalInMs); const location = getOperationLocation === null || getOperationLocation === void 0 ? void 0 : getOperationLocation(response, state); if (location !== undefined) { const isUpdated = operationLocation !== location; state.config.operationLocation = location; withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(location, isUpdated); } else withOperationLocation === null || withOperationLocation === void 0 ? void 0 : withOperationLocation(operationLocation, false); } updateState === null || updateState === void 0 ? void 0 : updateState(state, response); } } exports.pollOperation = pollOperation; //# sourceMappingURL=operation.js.map /***/ }), /***/ 18835: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.buildCreatePoller = void 0; const operation_js_1 = __nccwpck_require__(30736); const constants_js_1 = __nccwpck_require__(55044); const core_util_1 = __nccwpck_require__(87779); const createStateProxy = () => ({ /** * The state at this point is created to be of type OperationState. 
* It will be updated later to be of type TState when the * customer-provided callback, `updateState`, is called during polling. */ initState: (config) => ({ status: "running", config }), setCanceled: (state) => (state.status = "canceled"), setError: (state, error) => (state.error = error), setResult: (state, result) => (state.result = result), setRunning: (state) => (state.status = "running"), setSucceeded: (state) => (state.status = "succeeded"), setFailed: (state) => (state.status = "failed"), getError: (state) => state.error, getResult: (state) => state.result, isCanceled: (state) => state.status === "canceled", isFailed: (state) => state.status === "failed", isRunning: (state) => state.status === "running", isSucceeded: (state) => state.status === "succeeded", }); /** * Returns a poller factory. */ function buildCreatePoller(inputs) { const { getOperationLocation, getStatusFromInitialResponse, getStatusFromPollResponse, isOperationError, getResourceLocation, getPollingInterval, getError, resolveOnUnsuccessful, } = inputs; return async ({ init, poll }, options) => { const { processResult, updateState, withOperationLocation: withOperationLocationCallback, intervalInMs = constants_js_1.POLL_INTERVAL_IN_MS, restoreFrom, } = options || {}; const stateProxy = createStateProxy(); const withOperationLocation = withOperationLocationCallback ? (() => { let called = false; return (operationLocation, isUpdated) => { if (isUpdated) withOperationLocationCallback(operationLocation); else if (!called) withOperationLocationCallback(operationLocation); called = true; }; })() : undefined; const state = restoreFrom ? (0, operation_js_1.deserializeState)(restoreFrom) : await (0, operation_js_1.initOperation)({ init, stateProxy, processResult, getOperationStatus: getStatusFromInitialResponse, withOperationLocation, setErrorAsResult: !resolveOnUnsuccessful, }); let resultPromise; const abortController = new AbortController(); const handlers = new Map(); const handleProgressEvents = async () => handlers.forEach((h) => h(state)); const cancelErrMsg = "Operation was canceled"; let currentPollIntervalInMs = intervalInMs; const poller = { getOperationState: () => state, getResult: () => state.result, isDone: () => ["succeeded", "failed", "canceled"].includes(state.status), isStopped: () => resultPromise === undefined, stopPolling: () => { abortController.abort(); }, toString: () => JSON.stringify({ state, }), onProgress: (callback) => { const s = Symbol(); handlers.set(s, callback); return () => handlers.delete(s); }, pollUntilDone: (pollOptions) => (resultPromise !== null && resultPromise !== void 0 ? resultPromise : (resultPromise = (async () => { const { abortSignal: inputAbortSignal } = pollOptions || {}; // In the future we can use AbortSignal.any() instead function abortListener() { abortController.abort(); } const abortSignal = abortController.signal; if (inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.aborted) { abortController.abort(); } else if (!abortSignal.aborted) { inputAbortSignal === null || inputAbortSignal === void 0 ? void 0 : inputAbortSignal.addEventListener("abort", abortListener, { once: true }); } try { if (!poller.isDone()) { await poller.poll({ abortSignal }); while (!poller.isDone()) { await (0, core_util_1.delay)(currentPollIntervalInMs, { abortSignal }); await poller.poll({ abortSignal }); } } } finally { inputAbortSignal === null || inputAbortSignal === void 0 ? 
void 0 : inputAbortSignal.removeEventListener("abort", abortListener); } if (resolveOnUnsuccessful) { return poller.getResult(); } else { switch (state.status) { case "succeeded": return poller.getResult(); case "canceled": throw new Error(cancelErrMsg); case "failed": throw state.error; case "notStarted": case "running": throw new Error(`Polling completed without succeeding or failing`); } } })().finally(() => { resultPromise = undefined; }))), async poll(pollOptions) { if (resolveOnUnsuccessful) { if (poller.isDone()) return; } else { switch (state.status) { case "succeeded": return; case "canceled": throw new Error(cancelErrMsg); case "failed": throw state.error; } } await (0, operation_js_1.pollOperation)({ poll, state, stateProxy, getOperationLocation, isOperationError, withOperationLocation, getPollingInterval, getOperationStatus: getStatusFromPollResponse, getResourceLocation, processResult, getError, updateState, options: pollOptions, setDelay: (pollIntervalInMs) => { currentPollIntervalInMs = pollIntervalInMs; }, setErrorAsResult: !resolveOnUnsuccessful, }); await handleProgressEvents(); if (!resolveOnUnsuccessful) { switch (state.status) { case "canceled": throw new Error(cancelErrMsg); case "failed": throw state.error; } } }, }; return poller; }; } exports.buildCreatePoller = buildCreatePoller; //# sourceMappingURL=poller.js.map /***/ }), /***/ 66427: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DEFAULT_RETRY_POLICY_COUNT = exports.SDK_VERSION = void 0; exports.SDK_VERSION = "1.18.1"; exports.DEFAULT_RETRY_POLICY_COUNT = 3; //# sourceMappingURL=constants.js.map /***/ }), /***/ 90862: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createPipelineFromOptions = createPipelineFromOptions; const logPolicy_js_1 = __nccwpck_require__(53253); const pipeline_js_1 = __nccwpck_require__(29590); const redirectPolicy_js_1 = __nccwpck_require__(64087); const userAgentPolicy_js_1 = __nccwpck_require__(32799); const multipartPolicy_js_1 = __nccwpck_require__(45807); const decompressResponsePolicy_js_1 = __nccwpck_require__(39295); const defaultRetryPolicy_js_1 = __nccwpck_require__(48170); const formDataPolicy_js_1 = __nccwpck_require__(75497); const core_util_1 = __nccwpck_require__(87779); const proxyPolicy_js_1 = __nccwpck_require__(32815); const setClientRequestIdPolicy_js_1 = __nccwpck_require__(95686); const tlsPolicy_js_1 = __nccwpck_require__(75798); const tracingPolicy_js_1 = __nccwpck_require__(93237); /** * Create a new pipeline with a default set of customizable policies. * @param options - Options to configure a custom pipeline. 
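 *
 * A minimal usage sketch combining the helpers defined in this bundle (the
 * `retryOptions` value and the URL are illustrative assumptions, not defaults
 * taken from this file):
 *
 * ```ts
 * const pipeline = createPipelineFromOptions({ retryOptions: { maxRetries: 3 } });
 * const httpClient = createDefaultHttpClient();
 * const response = await pipeline.sendRequest(
 *   httpClient,
 *   createPipelineRequest({ url: "https://example.com" })
 * );
 * ```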
*/ function createPipelineFromOptions(options) { var _a; const pipeline = (0, pipeline_js_1.createEmptyPipeline)(); if (core_util_1.isNodeLike) { if (options.tlsOptions) { pipeline.addPolicy((0, tlsPolicy_js_1.tlsPolicy)(options.tlsOptions)); } pipeline.addPolicy((0, proxyPolicy_js_1.proxyPolicy)(options.proxyOptions)); pipeline.addPolicy((0, decompressResponsePolicy_js_1.decompressResponsePolicy)()); } pipeline.addPolicy((0, formDataPolicy_js_1.formDataPolicy)(), { beforePolicies: [multipartPolicy_js_1.multipartPolicyName] }); pipeline.addPolicy((0, userAgentPolicy_js_1.userAgentPolicy)(options.userAgentOptions)); pipeline.addPolicy((0, setClientRequestIdPolicy_js_1.setClientRequestIdPolicy)((_a = options.telemetryOptions) === null || _a === void 0 ? void 0 : _a.clientRequestIdHeaderName)); // The multipart policy is added after policies with no phase, so that // policies can be added between it and formDataPolicy to modify // properties (e.g., making the boundary constant in recorded tests). pipeline.addPolicy((0, multipartPolicy_js_1.multipartPolicy)(), { afterPhase: "Deserialize" }); pipeline.addPolicy((0, defaultRetryPolicy_js_1.defaultRetryPolicy)(options.retryOptions), { phase: "Retry" }); pipeline.addPolicy((0, tracingPolicy_js_1.tracingPolicy)(Object.assign(Object.assign({}, options.userAgentOptions), options.loggingOptions)), { afterPhase: "Retry", }); if (core_util_1.isNodeLike) { // Both XHR and Fetch expect to handle redirects automatically, // so only include this policy when we're in Node. pipeline.addPolicy((0, redirectPolicy_js_1.redirectPolicy)(options.redirectOptions), { afterPhase: "Retry" }); } pipeline.addPolicy((0, logPolicy_js_1.logPolicy)(options.loggingOptions), { afterPhase: "Sign" }); return pipeline; } //# sourceMappingURL=createPipelineFromOptions.js.map /***/ }), /***/ 7960: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createDefaultHttpClient = createDefaultHttpClient; const nodeHttpClient_js_1 = __nccwpck_require__(10195); /** * Create the correct HttpClient for the current environment. */ function createDefaultHttpClient() { return (0, nodeHttpClient_js_1.createNodeHttpClient)(); } //# sourceMappingURL=defaultHttpClient.js.map /***/ }), /***/ 192: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createHttpHeaders = createHttpHeaders; function normalizeName(name) { return name.toLowerCase(); } function* headerIterator(map) { for (const entry of map.values()) { yield [entry.name, entry.value]; } } class HttpHeadersImpl { constructor(rawHeaders) { this._headersMap = new Map(); if (rawHeaders) { for (const headerName of Object.keys(rawHeaders)) { this.set(headerName, rawHeaders[headerName]); } } } /** * Set a header in this collection with the provided name and value. The name is * case-insensitive. * @param name - The name of the header to set. This value is case-insensitive. * @param value - The value of the header to set. */ set(name, value) { this._headersMap.set(normalizeName(name), { name, value: String(value).trim() }); } /** * Get the header value for the provided header name, or undefined if no header exists in this * collection with the provided name. * @param name - The name of the header. 
This value is case-insensitive. */ get(name) { var _a; return (_a = this._headersMap.get(normalizeName(name))) === null || _a === void 0 ? void 0 : _a.value; } /** * Get whether or not this header collection contains a header entry for the provided header name. * @param name - The name of the header to set. This value is case-insensitive. */ has(name) { return this._headersMap.has(normalizeName(name)); } /** * Remove the header with the provided headerName. * @param name - The name of the header to remove. */ delete(name) { this._headersMap.delete(normalizeName(name)); } /** * Get the JSON object representation of this HTTP header collection. */ toJSON(options = {}) { const result = {}; if (options.preserveCase) { for (const entry of this._headersMap.values()) { result[entry.name] = entry.value; } } else { for (const [normalizedName, entry] of this._headersMap) { result[normalizedName] = entry.value; } } return result; } /** * Get the string representation of this HTTP header collection. */ toString() { return JSON.stringify(this.toJSON({ preserveCase: true })); } /** * Iterate over tuples of header [name, value] pairs. */ [Symbol.iterator]() { return headerIterator(this._headersMap); } } /** * Creates an object that satisfies the `HttpHeaders` interface. * @param rawHeaders - A simple object representing initial headers */ function createHttpHeaders(rawHeaders) { return new HttpHeadersImpl(rawHeaders); } //# sourceMappingURL=httpHeaders.js.map /***/ }), /***/ 20778: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createFileFromStream = exports.createFile = exports.auxiliaryAuthenticationHeaderPolicyName = exports.auxiliaryAuthenticationHeaderPolicy = exports.ndJsonPolicyName = exports.ndJsonPolicy = exports.bearerTokenAuthenticationPolicyName = exports.bearerTokenAuthenticationPolicy = exports.formDataPolicyName = exports.formDataPolicy = exports.tlsPolicyName = exports.tlsPolicy = exports.userAgentPolicyName = exports.userAgentPolicy = exports.defaultRetryPolicy = exports.tracingPolicyName = exports.tracingPolicy = exports.retryPolicy = exports.throttlingRetryPolicyName = exports.throttlingRetryPolicy = exports.systemErrorRetryPolicyName = exports.systemErrorRetryPolicy = exports.redirectPolicyName = exports.redirectPolicy = exports.getDefaultProxySettings = exports.proxyPolicyName = exports.proxyPolicy = exports.multipartPolicyName = exports.multipartPolicy = exports.logPolicyName = exports.logPolicy = exports.setClientRequestIdPolicyName = exports.setClientRequestIdPolicy = exports.exponentialRetryPolicyName = exports.exponentialRetryPolicy = exports.decompressResponsePolicyName = exports.decompressResponsePolicy = exports.isRestError = exports.RestError = exports.createPipelineRequest = exports.createHttpHeaders = exports.createDefaultHttpClient = exports.createPipelineFromOptions = exports.createEmptyPipeline = void 0; var pipeline_js_1 = __nccwpck_require__(29590); Object.defineProperty(exports, "createEmptyPipeline", ({ enumerable: true, get: function () { return pipeline_js_1.createEmptyPipeline; } })); var createPipelineFromOptions_js_1 = __nccwpck_require__(90862); Object.defineProperty(exports, "createPipelineFromOptions", ({ enumerable: true, get: function () { return createPipelineFromOptions_js_1.createPipelineFromOptions; } })); var defaultHttpClient_js_1 = __nccwpck_require__(7960); 
Object.defineProperty(exports, "createDefaultHttpClient", ({ enumerable: true, get: function () { return defaultHttpClient_js_1.createDefaultHttpClient; } })); var httpHeaders_js_1 = __nccwpck_require__(192); Object.defineProperty(exports, "createHttpHeaders", ({ enumerable: true, get: function () { return httpHeaders_js_1.createHttpHeaders; } })); var pipelineRequest_js_1 = __nccwpck_require__(95709); Object.defineProperty(exports, "createPipelineRequest", ({ enumerable: true, get: function () { return pipelineRequest_js_1.createPipelineRequest; } })); var restError_js_1 = __nccwpck_require__(8666); Object.defineProperty(exports, "RestError", ({ enumerable: true, get: function () { return restError_js_1.RestError; } })); Object.defineProperty(exports, "isRestError", ({ enumerable: true, get: function () { return restError_js_1.isRestError; } })); var decompressResponsePolicy_js_1 = __nccwpck_require__(39295); Object.defineProperty(exports, "decompressResponsePolicy", ({ enumerable: true, get: function () { return decompressResponsePolicy_js_1.decompressResponsePolicy; } })); Object.defineProperty(exports, "decompressResponsePolicyName", ({ enumerable: true, get: function () { return decompressResponsePolicy_js_1.decompressResponsePolicyName; } })); var exponentialRetryPolicy_js_1 = __nccwpck_require__(16708); Object.defineProperty(exports, "exponentialRetryPolicy", ({ enumerable: true, get: function () { return exponentialRetryPolicy_js_1.exponentialRetryPolicy; } })); Object.defineProperty(exports, "exponentialRetryPolicyName", ({ enumerable: true, get: function () { return exponentialRetryPolicy_js_1.exponentialRetryPolicyName; } })); var setClientRequestIdPolicy_js_1 = __nccwpck_require__(95686); Object.defineProperty(exports, "setClientRequestIdPolicy", ({ enumerable: true, get: function () { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicy; } })); Object.defineProperty(exports, "setClientRequestIdPolicyName", ({ enumerable: true, get: function () { return setClientRequestIdPolicy_js_1.setClientRequestIdPolicyName; } })); var logPolicy_js_1 = __nccwpck_require__(53253); Object.defineProperty(exports, "logPolicy", ({ enumerable: true, get: function () { return logPolicy_js_1.logPolicy; } })); Object.defineProperty(exports, "logPolicyName", ({ enumerable: true, get: function () { return logPolicy_js_1.logPolicyName; } })); var multipartPolicy_js_1 = __nccwpck_require__(45807); Object.defineProperty(exports, "multipartPolicy", ({ enumerable: true, get: function () { return multipartPolicy_js_1.multipartPolicy; } })); Object.defineProperty(exports, "multipartPolicyName", ({ enumerable: true, get: function () { return multipartPolicy_js_1.multipartPolicyName; } })); var proxyPolicy_js_1 = __nccwpck_require__(32815); Object.defineProperty(exports, "proxyPolicy", ({ enumerable: true, get: function () { return proxyPolicy_js_1.proxyPolicy; } })); Object.defineProperty(exports, "proxyPolicyName", ({ enumerable: true, get: function () { return proxyPolicy_js_1.proxyPolicyName; } })); Object.defineProperty(exports, "getDefaultProxySettings", ({ enumerable: true, get: function () { return proxyPolicy_js_1.getDefaultProxySettings; } })); var redirectPolicy_js_1 = __nccwpck_require__(64087); Object.defineProperty(exports, "redirectPolicy", ({ enumerable: true, get: function () { return redirectPolicy_js_1.redirectPolicy; } })); Object.defineProperty(exports, "redirectPolicyName", ({ enumerable: true, get: function () { return redirectPolicy_js_1.redirectPolicyName; } })); var 
systemErrorRetryPolicy_js_1 = __nccwpck_require__(96518); Object.defineProperty(exports, "systemErrorRetryPolicy", ({ enumerable: true, get: function () { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicy; } })); Object.defineProperty(exports, "systemErrorRetryPolicyName", ({ enumerable: true, get: function () { return systemErrorRetryPolicy_js_1.systemErrorRetryPolicyName; } })); var throttlingRetryPolicy_js_1 = __nccwpck_require__(97540); Object.defineProperty(exports, "throttlingRetryPolicy", ({ enumerable: true, get: function () { return throttlingRetryPolicy_js_1.throttlingRetryPolicy; } })); Object.defineProperty(exports, "throttlingRetryPolicyName", ({ enumerable: true, get: function () { return throttlingRetryPolicy_js_1.throttlingRetryPolicyName; } })); var retryPolicy_js_1 = __nccwpck_require__(56085); Object.defineProperty(exports, "retryPolicy", ({ enumerable: true, get: function () { return retryPolicy_js_1.retryPolicy; } })); var tracingPolicy_js_1 = __nccwpck_require__(93237); Object.defineProperty(exports, "tracingPolicy", ({ enumerable: true, get: function () { return tracingPolicy_js_1.tracingPolicy; } })); Object.defineProperty(exports, "tracingPolicyName", ({ enumerable: true, get: function () { return tracingPolicy_js_1.tracingPolicyName; } })); var defaultRetryPolicy_js_1 = __nccwpck_require__(48170); Object.defineProperty(exports, "defaultRetryPolicy", ({ enumerable: true, get: function () { return defaultRetryPolicy_js_1.defaultRetryPolicy; } })); var userAgentPolicy_js_1 = __nccwpck_require__(32799); Object.defineProperty(exports, "userAgentPolicy", ({ enumerable: true, get: function () { return userAgentPolicy_js_1.userAgentPolicy; } })); Object.defineProperty(exports, "userAgentPolicyName", ({ enumerable: true, get: function () { return userAgentPolicy_js_1.userAgentPolicyName; } })); var tlsPolicy_js_1 = __nccwpck_require__(75798); Object.defineProperty(exports, "tlsPolicy", ({ enumerable: true, get: function () { return tlsPolicy_js_1.tlsPolicy; } })); Object.defineProperty(exports, "tlsPolicyName", ({ enumerable: true, get: function () { return tlsPolicy_js_1.tlsPolicyName; } })); var formDataPolicy_js_1 = __nccwpck_require__(75497); Object.defineProperty(exports, "formDataPolicy", ({ enumerable: true, get: function () { return formDataPolicy_js_1.formDataPolicy; } })); Object.defineProperty(exports, "formDataPolicyName", ({ enumerable: true, get: function () { return formDataPolicy_js_1.formDataPolicyName; } })); var bearerTokenAuthenticationPolicy_js_1 = __nccwpck_require__(26925); Object.defineProperty(exports, "bearerTokenAuthenticationPolicy", ({ enumerable: true, get: function () { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicy; } })); Object.defineProperty(exports, "bearerTokenAuthenticationPolicyName", ({ enumerable: true, get: function () { return bearerTokenAuthenticationPolicy_js_1.bearerTokenAuthenticationPolicyName; } })); var ndJsonPolicy_js_1 = __nccwpck_require__(36827); Object.defineProperty(exports, "ndJsonPolicy", ({ enumerable: true, get: function () { return ndJsonPolicy_js_1.ndJsonPolicy; } })); Object.defineProperty(exports, "ndJsonPolicyName", ({ enumerable: true, get: function () { return ndJsonPolicy_js_1.ndJsonPolicyName; } })); var auxiliaryAuthenticationHeaderPolicy_js_1 = __nccwpck_require__(42262); Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicy", ({ enumerable: true, get: function () { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicy; } 
})); Object.defineProperty(exports, "auxiliaryAuthenticationHeaderPolicyName", ({ enumerable: true, get: function () { return auxiliaryAuthenticationHeaderPolicy_js_1.auxiliaryAuthenticationHeaderPolicyName; } })); var file_js_1 = __nccwpck_require__(97073); Object.defineProperty(exports, "createFile", ({ enumerable: true, get: function () { return file_js_1.createFile; } })); Object.defineProperty(exports, "createFileFromStream", ({ enumerable: true, get: function () { return file_js_1.createFileFromStream; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 80544: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.logger = void 0; const logger_1 = __nccwpck_require__(26515); exports.logger = (0, logger_1.createClientLogger)("core-rest-pipeline"); //# sourceMappingURL=log.js.map /***/ }), /***/ 10195: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getBodyLength = getBodyLength; exports.createNodeHttpClient = createNodeHttpClient; const tslib_1 = __nccwpck_require__(61860); const http = tslib_1.__importStar(__nccwpck_require__(37067)); const https = tslib_1.__importStar(__nccwpck_require__(44708)); const zlib = tslib_1.__importStar(__nccwpck_require__(38522)); const node_stream_1 = __nccwpck_require__(57075); const abort_controller_1 = __nccwpck_require__(93287); const httpHeaders_js_1 = __nccwpck_require__(192); const restError_js_1 = __nccwpck_require__(8666); const log_js_1 = __nccwpck_require__(80544); const DEFAULT_TLS_SETTINGS = {}; function isReadableStream(body) { return body && typeof body.pipe === "function"; } function isStreamComplete(stream) { if (stream.readable === false) { return Promise.resolve(); } return new Promise((resolve) => { const handler = () => { resolve(); stream.removeListener("close", handler); stream.removeListener("end", handler); stream.removeListener("error", handler); }; stream.on("close", handler); stream.on("end", handler); stream.on("error", handler); }); } function isArrayBuffer(body) { return body && typeof body.byteLength === "number"; } class ReportTransform extends node_stream_1.Transform { // eslint-disable-next-line @typescript-eslint/no-unsafe-function-type _transform(chunk, _encoding, callback) { this.push(chunk); this.loadedBytes += chunk.length; try { this.progressCallback({ loadedBytes: this.loadedBytes }); callback(); } catch (e) { callback(e); } } constructor(progressCallback) { super(); this.loadedBytes = 0; this.progressCallback = progressCallback; } } /** * A HttpClient implementation that uses Node's "https" module to send HTTPS requests. * @internal */ class NodeHttpClient { constructor() { this.cachedHttpsAgents = new WeakMap(); } /** * Makes a request over an underlying transport layer and returns the response. * @param request - The request to be made. 
*/ async sendRequest(request) { var _a, _b, _c; const abortController = new AbortController(); let abortListener; if (request.abortSignal) { if (request.abortSignal.aborted) { throw new abort_controller_1.AbortError("The operation was aborted."); } abortListener = (event) => { if (event.type === "abort") { abortController.abort(); } }; request.abortSignal.addEventListener("abort", abortListener); } if (request.timeout > 0) { setTimeout(() => { abortController.abort(); }, request.timeout); } const acceptEncoding = request.headers.get("Accept-Encoding"); const shouldDecompress = (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("gzip")) || (acceptEncoding === null || acceptEncoding === void 0 ? void 0 : acceptEncoding.includes("deflate")); let body = typeof request.body === "function" ? request.body() : request.body; if (body && !request.headers.has("Content-Length")) { const bodyLength = getBodyLength(body); if (bodyLength !== null) { request.headers.set("Content-Length", bodyLength); } } let responseStream; try { if (body && request.onUploadProgress) { const onUploadProgress = request.onUploadProgress; const uploadReportStream = new ReportTransform(onUploadProgress); uploadReportStream.on("error", (e) => { log_js_1.logger.error("Error in upload progress", e); }); if (isReadableStream(body)) { body.pipe(uploadReportStream); } else { uploadReportStream.end(body); } body = uploadReportStream; } const res = await this.makeRequest(request, abortController, body); const headers = getResponseHeaders(res); const status = (_a = res.statusCode) !== null && _a !== void 0 ? _a : 0; const response = { status, headers, request, }; // Responses to HEAD must not have a body. // If they do return a body, that body must be ignored. if (request.method === "HEAD") { // call resume() and not destroy() to avoid closing the socket // and losing keep alive res.resume(); return response; } responseStream = shouldDecompress ? getDecodedResponseStream(res, headers) : res; const onDownloadProgress = request.onDownloadProgress; if (onDownloadProgress) { const downloadReportStream = new ReportTransform(onDownloadProgress); downloadReportStream.on("error", (e) => { log_js_1.logger.error("Error in download progress", e); }); responseStream.pipe(downloadReportStream); responseStream = downloadReportStream; } if ( // Value of POSITIVE_INFINITY in streamResponseStatusCodes is considered as any status code ((_b = request.streamResponseStatusCodes) === null || _b === void 0 ? void 0 : _b.has(Number.POSITIVE_INFINITY)) || ((_c = request.streamResponseStatusCodes) === null || _c === void 0 ? void 0 : _c.has(response.status))) { response.readableStreamBody = responseStream; } else { response.bodyAsText = await streamToText(responseStream); } return response; } finally { // clean up event listener if (request.abortSignal && abortListener) { let uploadStreamDone = Promise.resolve(); if (isReadableStream(body)) { uploadStreamDone = isStreamComplete(body); } let downloadStreamDone = Promise.resolve(); if (isReadableStream(responseStream)) { downloadStreamDone = isStreamComplete(responseStream); } Promise.all([uploadStreamDone, downloadStreamDone]) .then(() => { var _a; // eslint-disable-next-line promise/always-return if (abortListener) { (_a = request.abortSignal) === null || _a === void 0 ? 
void 0 : _a.removeEventListener("abort", abortListener); } }) .catch((e) => { log_js_1.logger.warning("Error when cleaning up abortListener on httpRequest", e); }); } } } makeRequest(request, abortController, body) { var _a; const url = new URL(request.url); const isInsecure = url.protocol !== "https:"; if (isInsecure && !request.allowInsecureConnection) { throw new Error(`Cannot connect to ${request.url} while allowInsecureConnection is false.`); } const agent = (_a = request.agent) !== null && _a !== void 0 ? _a : this.getOrCreateAgent(request, isInsecure); const options = { agent, hostname: url.hostname, path: `${url.pathname}${url.search}`, port: url.port, method: request.method, headers: request.headers.toJSON({ preserveCase: true }), }; return new Promise((resolve, reject) => { const req = isInsecure ? http.request(options, resolve) : https.request(options, resolve); req.once("error", (err) => { var _a; reject(new restError_js_1.RestError(err.message, { code: (_a = err.code) !== null && _a !== void 0 ? _a : restError_js_1.RestError.REQUEST_SEND_ERROR, request })); }); abortController.signal.addEventListener("abort", () => { const abortError = new abort_controller_1.AbortError("The operation was aborted."); req.destroy(abortError); reject(abortError); }); if (body && isReadableStream(body)) { body.pipe(req); } else if (body) { if (typeof body === "string" || Buffer.isBuffer(body)) { req.end(body); } else if (isArrayBuffer(body)) { req.end(ArrayBuffer.isView(body) ? Buffer.from(body.buffer) : Buffer.from(body)); } else { log_js_1.logger.error("Unrecognized body type", body); reject(new restError_js_1.RestError("Unrecognized body type")); } } else { // streams don't like "undefined" being passed as data req.end(); } }); } getOrCreateAgent(request, isInsecure) { var _a; const disableKeepAlive = request.disableKeepAlive; // Handle Insecure requests first if (isInsecure) { if (disableKeepAlive) { // keepAlive:false is the default so we don't need a custom Agent return http.globalAgent; } if (!this.cachedHttpAgent) { // If there is no cached agent create a new one and cache it. this.cachedHttpAgent = new http.Agent({ keepAlive: true }); } return this.cachedHttpAgent; } else { if (disableKeepAlive && !request.tlsSettings) { // When there are no tlsSettings and keepAlive is false // we don't need a custom agent return https.globalAgent; } // We use the tlsSettings to index cached clients const tlsSettings = (_a = request.tlsSettings) !== null && _a !== void 0 ? _a : DEFAULT_TLS_SETTINGS; // Get the cached agent or create a new one with the // provided values for keepAlive and tlsSettings let agent = this.cachedHttpsAgents.get(tlsSettings); if (agent && agent.options.keepAlive === !disableKeepAlive) { return agent; } log_js_1.logger.info("No cached TLS Agent exist, creating a new Agent"); agent = new https.Agent(Object.assign({ // keepAlive is true if disableKeepAlive is false. 
keepAlive: !disableKeepAlive }, tlsSettings)); this.cachedHttpsAgents.set(tlsSettings, agent); return agent; } } } function getResponseHeaders(res) { const headers = (0, httpHeaders_js_1.createHttpHeaders)(); for (const header of Object.keys(res.headers)) { const value = res.headers[header]; if (Array.isArray(value)) { if (value.length > 0) { headers.set(header, value[0]); } } else if (value) { headers.set(header, value); } } return headers; } function getDecodedResponseStream(stream, headers) { const contentEncoding = headers.get("Content-Encoding"); if (contentEncoding === "gzip") { const unzip = zlib.createGunzip(); stream.pipe(unzip); return unzip; } else if (contentEncoding === "deflate") { const inflate = zlib.createInflate(); stream.pipe(inflate); return inflate; } return stream; } function streamToText(stream) { return new Promise((resolve, reject) => { const buffer = []; stream.on("data", (chunk) => { if (Buffer.isBuffer(chunk)) { buffer.push(chunk); } else { buffer.push(Buffer.from(chunk)); } }); stream.on("end", () => { resolve(Buffer.concat(buffer).toString("utf8")); }); stream.on("error", (e) => { if (e && (e === null || e === void 0 ? void 0 : e.name) === "AbortError") { reject(e); } else { reject(new restError_js_1.RestError(`Error reading response as text: ${e.message}`, { code: restError_js_1.RestError.PARSE_ERROR, })); } }); }); } /** @internal */ function getBodyLength(body) { if (!body) { return 0; } else if (Buffer.isBuffer(body)) { return body.length; } else if (isReadableStream(body)) { return null; } else if (isArrayBuffer(body)) { return body.byteLength; } else if (typeof body === "string") { return Buffer.from(body).length; } else { return null; } } /** * Create a new HttpClient instance for the NodeJS environment. * @internal */ function createNodeHttpClient() { return new NodeHttpClient(); } //# sourceMappingURL=nodeHttpClient.js.map /***/ }), /***/ 29590: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createEmptyPipeline = createEmptyPipeline; const ValidPhaseNames = new Set(["Deserialize", "Serialize", "Retry", "Sign"]); /** * A private implementation of Pipeline. * Do not export this class from the package. * @internal */ class HttpPipeline { constructor(policies) { var _a; this._policies = []; this._policies = (_a = policies === null || policies === void 0 ? void 0 : policies.slice(0)) !== null && _a !== void 0 ? 
_a : []; this._orderedPolicies = undefined; } addPolicy(policy, options = {}) { if (options.phase && options.afterPhase) { throw new Error("Policies inside a phase cannot specify afterPhase."); } if (options.phase && !ValidPhaseNames.has(options.phase)) { throw new Error(`Invalid phase name: ${options.phase}`); } if (options.afterPhase && !ValidPhaseNames.has(options.afterPhase)) { throw new Error(`Invalid afterPhase name: ${options.afterPhase}`); } this._policies.push({ policy, options, }); this._orderedPolicies = undefined; } removePolicy(options) { const removedPolicies = []; this._policies = this._policies.filter((policyDescriptor) => { if ((options.name && policyDescriptor.policy.name === options.name) || (options.phase && policyDescriptor.options.phase === options.phase)) { removedPolicies.push(policyDescriptor.policy); return false; } else { return true; } }); this._orderedPolicies = undefined; return removedPolicies; } sendRequest(httpClient, request) { const policies = this.getOrderedPolicies(); const pipeline = policies.reduceRight((next, policy) => { return (req) => { return policy.sendRequest(req, next); }; }, (req) => httpClient.sendRequest(req)); return pipeline(request); } getOrderedPolicies() { if (!this._orderedPolicies) { this._orderedPolicies = this.orderPolicies(); } return this._orderedPolicies; } clone() { return new HttpPipeline(this._policies); } static create() { return new HttpPipeline(); } orderPolicies() { /** * The goal of this method is to reliably order pipeline policies * based on their declared requirements when they were added. * * Order is first determined by phase: * * 1. Serialize Phase * 2. Policies not in a phase * 3. Deserialize Phase * 4. Retry Phase * 5. Sign Phase * * Within each phase, policies are executed in the order * they were added unless they were specified to execute * before/after other policies or after a particular phase. * * To determine the final order, we will walk the policy list * in phase order multiple times until all dependencies are * satisfied. * * `afterPolicies` are the set of policies that must be * executed before a given policy. This requirement is * considered satisfied when each of the listed policies * have been scheduled. * * `beforePolicies` are the set of policies that must be * executed after a given policy. Since this dependency * can be expressed by converting it into a equivalent * `afterPolicies` declarations, they are normalized * into that form for simplicity. * * An `afterPhase` dependency is considered satisfied when all * policies in that phase have scheduled. * */ const result = []; // Track all policies we know about. const policyMap = new Map(); function createPhase(name) { return { name, policies: new Set(), hasRun: false, hasAfterPolicies: false, }; } // Track policies for each phase. const serializePhase = createPhase("Serialize"); const noPhase = createPhase("None"); const deserializePhase = createPhase("Deserialize"); const retryPhase = createPhase("Retry"); const signPhase = createPhase("Sign"); // a list of phases in order const orderedPhases = [serializePhase, noPhase, deserializePhase, retryPhase, signPhase]; // Small helper function to map phase name to each Phase function getPhase(phase) { if (phase === "Retry") { return retryPhase; } else if (phase === "Serialize") { return serializePhase; } else if (phase === "Deserialize") { return deserializePhase; } else if (phase === "Sign") { return signPhase; } else { return noPhase; } } // First walk each policy and create a node to track metadata. 
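// Each node tracks the policy itself plus two dependency sets: `dependsOn` (policies
// that must be scheduled before it) and `dependants` (policies waiting on it). A node
// may also carry an `afterPhase` reference when it must wait for an entire phase.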
for (const descriptor of this._policies) { const policy = descriptor.policy; const options = descriptor.options; const policyName = policy.name; if (policyMap.has(policyName)) { throw new Error("Duplicate policy names not allowed in pipeline"); } const node = { policy, dependsOn: new Set(), dependants: new Set(), }; if (options.afterPhase) { node.afterPhase = getPhase(options.afterPhase); node.afterPhase.hasAfterPolicies = true; } policyMap.set(policyName, node); const phase = getPhase(options.phase); phase.policies.add(node); } // Now that each policy has a node, connect dependency references. for (const descriptor of this._policies) { const { policy, options } = descriptor; const policyName = policy.name; const node = policyMap.get(policyName); if (!node) { throw new Error(`Missing node for policy ${policyName}`); } if (options.afterPolicies) { for (const afterPolicyName of options.afterPolicies) { const afterNode = policyMap.get(afterPolicyName); if (afterNode) { // Linking in both directions helps later // when we want to notify dependants. node.dependsOn.add(afterNode); afterNode.dependants.add(node); } } } if (options.beforePolicies) { for (const beforePolicyName of options.beforePolicies) { const beforeNode = policyMap.get(beforePolicyName); if (beforeNode) { // To execute before another node, make it // depend on the current node. beforeNode.dependsOn.add(node); node.dependants.add(beforeNode); } } } } function walkPhase(phase) { phase.hasRun = true; // Sets iterate in insertion order for (const node of phase.policies) { if (node.afterPhase && (!node.afterPhase.hasRun || node.afterPhase.policies.size)) { // If this node is waiting on a phase to complete, // we need to skip it for now. // Even if the phase is empty, we should wait for it // to be walked to avoid re-ordering policies. continue; } if (node.dependsOn.size === 0) { // If there's nothing else we're waiting for, we can // add this policy to the result list. result.push(node.policy); // Notify anything that depends on this policy that // the policy has been scheduled. for (const dependant of node.dependants) { dependant.dependsOn.delete(node); } policyMap.delete(node.policy.name); phase.policies.delete(node); } } } function walkPhases() { for (const phase of orderedPhases) { walkPhase(phase); // if the phase isn't complete if (phase.policies.size > 0 && phase !== noPhase) { if (!noPhase.hasRun) { // Try running noPhase to see if that unblocks this phase next tick. // This can happen if a phase that happens before noPhase // is waiting on a noPhase policy to complete. walkPhase(noPhase); } // Don't proceed to the next phase until this phase finishes. return; } if (phase.hasAfterPolicies) { // Run any policies unblocked by this phase walkPhase(noPhase); } } } // Iterate until we've put every node in the result list. let iteration = 0; while (policyMap.size > 0) { iteration++; const initialResultLength = result.length; // Keep walking each phase in order until we can order every node. walkPhases(); // The result list *should* get at least one larger each time // after the first full pass. // Otherwise, we're going to loop forever. if (result.length <= initialResultLength && iteration > 1) { throw new Error("Cannot satisfy policy dependencies due to requirements cycle."); } } return result; } } /** * Creates a totally empty pipeline. * Useful for testing or creating a custom one. 
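 *
 * A minimal sketch of adding a custom policy (the policy name and its logging behavior
 * are illustrative assumptions; the `name`/`sendRequest` shape and the `phase` option
 * come from the pipeline implementation above):
 *
 * ```ts
 * const pipeline = createEmptyPipeline();
 * pipeline.addPolicy(
 *   {
 *     name: "logUrlPolicy",
 *     sendRequest: async (request, next) => {
 *       console.log(`Sending ${request.method} request to ${request.url}`);
 *       return next(request);
 *     },
 *   },
 *   { phase: "Serialize" }
 * );
 * ```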
*/ function createEmptyPipeline() { return HttpPipeline.create(); } //# sourceMappingURL=pipeline.js.map /***/ }), /***/ 95709: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createPipelineRequest = createPipelineRequest; const httpHeaders_js_1 = __nccwpck_require__(192); const core_util_1 = __nccwpck_require__(87779); class PipelineRequestImpl { constructor(options) { var _a, _b, _c, _d, _e, _f, _g; this.url = options.url; this.body = options.body; this.headers = (_a = options.headers) !== null && _a !== void 0 ? _a : (0, httpHeaders_js_1.createHttpHeaders)(); this.method = (_b = options.method) !== null && _b !== void 0 ? _b : "GET"; this.timeout = (_c = options.timeout) !== null && _c !== void 0 ? _c : 0; this.multipartBody = options.multipartBody; this.formData = options.formData; this.disableKeepAlive = (_d = options.disableKeepAlive) !== null && _d !== void 0 ? _d : false; this.proxySettings = options.proxySettings; this.streamResponseStatusCodes = options.streamResponseStatusCodes; this.withCredentials = (_e = options.withCredentials) !== null && _e !== void 0 ? _e : false; this.abortSignal = options.abortSignal; this.tracingOptions = options.tracingOptions; this.onUploadProgress = options.onUploadProgress; this.onDownloadProgress = options.onDownloadProgress; this.requestId = options.requestId || (0, core_util_1.randomUUID)(); this.allowInsecureConnection = (_f = options.allowInsecureConnection) !== null && _f !== void 0 ? _f : false; this.enableBrowserStreams = (_g = options.enableBrowserStreams) !== null && _g !== void 0 ? _g : false; } } /** * Creates a new pipeline request with the given options. * This method is to allow for the easy setting of default values and not required. * @param options - The options to create the request with. */ function createPipelineRequest(options) { return new PipelineRequestImpl(options); } //# sourceMappingURL=pipelineRequest.js.map /***/ }), /***/ 42262: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.auxiliaryAuthenticationHeaderPolicyName = void 0; exports.auxiliaryAuthenticationHeaderPolicy = auxiliaryAuthenticationHeaderPolicy; const tokenCycler_js_1 = __nccwpck_require__(39202); const log_js_1 = __nccwpck_require__(80544); /** * The programmatic identifier of the auxiliaryAuthenticationHeaderPolicy. */ exports.auxiliaryAuthenticationHeaderPolicyName = "auxiliaryAuthenticationHeaderPolicy"; const AUTHORIZATION_AUXILIARY_HEADER = "x-ms-authorization-auxiliary"; async function sendAuthorizeRequest(options) { var _a, _b; const { scopes, getAccessToken, request } = options; const getTokenOptions = { abortSignal: request.abortSignal, tracingOptions: request.tracingOptions, }; return (_b = (_a = (await getAccessToken(scopes, getTokenOptions))) === null || _a === void 0 ? void 0 : _a.token) !== null && _b !== void 0 ? _b : ""; } /** * A policy for external tokens to `x-ms-authorization-auxiliary` header. * This header will be used when creating a cross-tenant application we may need to handle authentication requests * for resources that are in different tenants. 
* You could see [ARM docs](https://learn.microsoft.com/en-us/azure/azure-resource-manager/management/authenticate-multi-tenant) for a rundown of how this feature works */ function auxiliaryAuthenticationHeaderPolicy(options) { const { credentials, scopes } = options; const logger = options.logger || log_js_1.logger; const tokenCyclerMap = new WeakMap(); return { name: exports.auxiliaryAuthenticationHeaderPolicyName, async sendRequest(request, next) { if (!request.url.toLowerCase().startsWith("https://")) { throw new Error("Bearer token authentication for auxiliary header is not permitted for non-TLS protected (non-https) URLs."); } if (!credentials || credentials.length === 0) { logger.info(`${exports.auxiliaryAuthenticationHeaderPolicyName} header will not be set due to empty credentials.`); return next(request); } const tokenPromises = []; for (const credential of credentials) { let getAccessToken = tokenCyclerMap.get(credential); if (!getAccessToken) { getAccessToken = (0, tokenCycler_js_1.createTokenCycler)(credential); tokenCyclerMap.set(credential, getAccessToken); } tokenPromises.push(sendAuthorizeRequest({ scopes: Array.isArray(scopes) ? scopes : [scopes], request, getAccessToken, logger, })); } const auxiliaryTokens = (await Promise.all(tokenPromises)).filter((token) => Boolean(token)); if (auxiliaryTokens.length === 0) { logger.warning(`None of the auxiliary tokens are valid. ${AUTHORIZATION_AUXILIARY_HEADER} header will not be set.`); return next(request); } request.headers.set(AUTHORIZATION_AUXILIARY_HEADER, auxiliaryTokens.map((token) => `Bearer ${token}`).join(", ")); return next(request); }, }; } //# sourceMappingURL=auxiliaryAuthenticationHeaderPolicy.js.map /***/ }), /***/ 26925: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.bearerTokenAuthenticationPolicyName = void 0; exports.bearerTokenAuthenticationPolicy = bearerTokenAuthenticationPolicy; exports.parseChallenges = parseChallenges; const tokenCycler_js_1 = __nccwpck_require__(39202); const log_js_1 = __nccwpck_require__(80544); const restError_js_1 = __nccwpck_require__(8666); /** * The programmatic identifier of the bearerTokenAuthenticationPolicy. */ exports.bearerTokenAuthenticationPolicyName = "bearerTokenAuthenticationPolicy"; /** * Try to send the given request. * * When a response is received, returns a tuple of the response received and, if the response was received * inside a thrown RestError, the RestError that was thrown. * * Otherwise, if an error was thrown while sending the request that did not provide an underlying response, it * will be rethrown. 
*/ async function trySendRequest(request, next) { try { return [await next(request), undefined]; } catch (e) { if ((0, restError_js_1.isRestError)(e) && e.response) { return [e.response, e]; } else { throw e; } } } /** * Default authorize request handler */ async function defaultAuthorizeRequest(options) { const { scopes, getAccessToken, request } = options; // Enable CAE true by default const getTokenOptions = { abortSignal: request.abortSignal, tracingOptions: request.tracingOptions, enableCae: true, }; const accessToken = await getAccessToken(scopes, getTokenOptions); if (accessToken) { options.request.headers.set("Authorization", `Bearer ${accessToken.token}`); } } /** * We will retrieve the challenge only if the response status code was 401, * and if the response contained the header "WWW-Authenticate" with a non-empty value. */ function isChallengeResponse(response) { return response.status === 401 && response.headers.has("WWW-Authenticate"); } /** * Re-authorize the request for CAE challenge. * The response containing the challenge is `options.response`. * If this method returns true, the underlying request will be sent once again. */ async function authorizeRequestOnCaeChallenge(onChallengeOptions, caeClaims) { var _a; const { scopes } = onChallengeOptions; const accessToken = await onChallengeOptions.getAccessToken(scopes, { enableCae: true, claims: caeClaims, }); if (!accessToken) { return false; } onChallengeOptions.request.headers.set("Authorization", `${(_a = accessToken.tokenType) !== null && _a !== void 0 ? _a : "Bearer"} ${accessToken.token}`); return true; } /** * A policy that can request a token from a TokenCredential implementation and * then apply it to the Authorization header of a request as a Bearer token. */ function bearerTokenAuthenticationPolicy(options) { var _a, _b, _c; const { credential, scopes, challengeCallbacks } = options; const logger = options.logger || log_js_1.logger; const callbacks = { authorizeRequest: (_b = (_a = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequest) === null || _a === void 0 ? void 0 : _a.bind(challengeCallbacks)) !== null && _b !== void 0 ? _b : defaultAuthorizeRequest, authorizeRequestOnChallenge: (_c = challengeCallbacks === null || challengeCallbacks === void 0 ? void 0 : challengeCallbacks.authorizeRequestOnChallenge) === null || _c === void 0 ? void 0 : _c.bind(challengeCallbacks), }; // This function encapsulates the entire process of reliably retrieving the token // The options are left out of the public API until there's demand to configure this. // Remember to extend `BearerTokenAuthenticationPolicyOptions` with `TokenCyclerOptions` // in order to pass through the `options` object. const getAccessToken = credential ? (0, tokenCycler_js_1.createTokenCycler)(credential /* , options */) : () => Promise.resolve(null); return { name: exports.bearerTokenAuthenticationPolicyName, /** * If there's no challenge parameter: * - It will try to retrieve the token using the cache, or the credential's getToken. * - Then it will try the next policy with or without the retrieved token. * * It uses the challenge parameters to: * - Skip a first attempt to get the token from the credential if there's no cached token, * since it expects the token to be retrievable only after the challenge. * - Prepare the outgoing request if the `prepareRequest` method has been provided. * - Send an initial request to receive the challenge if it fails. 
* - Process a challenge if the response contains it. * - Retrieve a token with the challenge information, then re-send the request. */ async sendRequest(request, next) { if (!request.url.toLowerCase().startsWith("https://")) { throw new Error("Bearer token authentication is not permitted for non-TLS protected (non-https) URLs."); } await callbacks.authorizeRequest({ scopes: Array.isArray(scopes) ? scopes : [scopes], request, getAccessToken, logger, }); let response; let error; let shouldSendRequest; [response, error] = await trySendRequest(request, next); if (isChallengeResponse(response)) { let claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); // Handle CAE by default when receive CAE claim if (claims) { let parsedClaim; // Return the response immediately if claims is not a valid base64 encoded string try { parsedClaim = atob(claims); } catch (e) { logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); return response; } shouldSendRequest = await authorizeRequestOnCaeChallenge({ scopes: Array.isArray(scopes) ? scopes : [scopes], response, request, getAccessToken, logger, }, parsedClaim); // Send updated request and handle response for RestError if (shouldSendRequest) { [response, error] = await trySendRequest(request, next); } } else if (callbacks.authorizeRequestOnChallenge) { // Handle custom challenges when client provides custom callback shouldSendRequest = await callbacks.authorizeRequestOnChallenge({ scopes: Array.isArray(scopes) ? scopes : [scopes], request, response, getAccessToken, logger, }); // Send updated request and handle response for RestError if (shouldSendRequest) { [response, error] = await trySendRequest(request, next); } // If we get another CAE Claim, we will handle it by default and return whatever value we receive for this if (isChallengeResponse(response)) { claims = getCaeChallengeClaims(response.headers.get("WWW-Authenticate")); if (claims) { let parsedClaim; try { parsedClaim = atob(claims); } catch (e) { logger.warning(`The WWW-Authenticate header contains "claims" that cannot be parsed. Unable to perform the Continuous Access Evaluation authentication flow. Unparsable claims: ${claims}`); return response; } shouldSendRequest = await authorizeRequestOnCaeChallenge({ scopes: Array.isArray(scopes) ? scopes : [scopes], response, request, getAccessToken, logger, }, parsedClaim); // Send updated request and handle response for RestError if (shouldSendRequest) { [response, error] = await trySendRequest(request, next); } } } } } if (error) { throw error; } else { return response; } }, }; } /** * Converts: `Bearer a="b", c="d", Pop e="f", g="h"`. * Into: `[ { scheme: 'Bearer', params: { a: 'b', c: 'd' } }, { scheme: 'Pop', params: { e: 'f', g: 'h' } } ]`. * * @internal */ function parseChallenges(challenges) { // Challenge regex seperates the string to individual challenges with different schemes in the format `Scheme a="b", c=d` // The challenge regex captures parameteres with either quotes values or unquoted values const challengeRegex = /(\w+)\s+((?:\w+=(?:"[^"]*"|[^,]*),?\s*)+)/g; // Parameter regex captures the claims group removed from the scheme in the format `a="b"` and `c="d"` // CAE challenge always have quoted parameters. 
For more reference, https://learn.microsoft.com/entra/identity-platform/claims-challenge const paramRegex = /(\w+)="([^"]*)"/g; const parsedChallenges = []; let match; // Iterate over each challenge match while ((match = challengeRegex.exec(challenges)) !== null) { const scheme = match[1]; const paramsString = match[2]; const params = {}; let paramMatch; // Iterate over each parameter match while ((paramMatch = paramRegex.exec(paramsString)) !== null) { params[paramMatch[1]] = paramMatch[2]; } parsedChallenges.push({ scheme, params }); } return parsedChallenges; } /** * Parse a pipeline response and look for a CAE challenge with "Bearer" scheme * Return the value in the header without parsing the challenge * @internal */ function getCaeChallengeClaims(challenges) { var _a; if (!challenges) { return; } // Find all challenges present in the header const parsedChallenges = parseChallenges(challenges); return (_a = parsedChallenges.find((x) => x.scheme === "Bearer" && x.params.claims && x.params.error === "insufficient_claims")) === null || _a === void 0 ? void 0 : _a.params.claims; } //# sourceMappingURL=bearerTokenAuthenticationPolicy.js.map /***/ }), /***/ 39295: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.decompressResponsePolicyName = void 0; exports.decompressResponsePolicy = decompressResponsePolicy; /** * The programmatic identifier of the decompressResponsePolicy. */ exports.decompressResponsePolicyName = "decompressResponsePolicy"; /** * A policy to enable response decompression according to Accept-Encoding header * https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Accept-Encoding */ function decompressResponsePolicy() { return { name: exports.decompressResponsePolicyName, async sendRequest(request, next) { // HEAD requests have no body if (request.method !== "HEAD") { request.headers.set("Accept-Encoding", "gzip,deflate"); } return next(request); }, }; } //# sourceMappingURL=decompressResponsePolicy.js.map /***/ }), /***/ 48170: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.defaultRetryPolicyName = void 0; exports.defaultRetryPolicy = defaultRetryPolicy; const exponentialRetryStrategy_js_1 = __nccwpck_require__(50002); const throttlingRetryStrategy_js_1 = __nccwpck_require__(97084); const retryPolicy_js_1 = __nccwpck_require__(56085); const constants_js_1 = __nccwpck_require__(66427); /** * Name of the {@link defaultRetryPolicy} */ exports.defaultRetryPolicyName = "defaultRetryPolicy"; /** * A policy that retries according to three strategies: * - When the server sends a 429 response with a Retry-After header. * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). * - Or otherwise if the outgoing request fails, it will retry with an exponentially increasing delay. */ function defaultRetryPolicy(options = {}) { var _a; return { name: exports.defaultRetryPolicyName, sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)(), (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(options)], { maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? 
_a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, }).sendRequest, }; } //# sourceMappingURL=defaultRetryPolicy.js.map /***/ }), /***/ 16708: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.exponentialRetryPolicyName = void 0; exports.exponentialRetryPolicy = exponentialRetryPolicy; const exponentialRetryStrategy_js_1 = __nccwpck_require__(50002); const retryPolicy_js_1 = __nccwpck_require__(56085); const constants_js_1 = __nccwpck_require__(66427); /** * The programmatic identifier of the exponentialRetryPolicy. */ exports.exponentialRetryPolicyName = "exponentialRetryPolicy"; /** * A policy that attempts to retry requests while introducing an exponentially increasing delay. * @param options - Options that configure retry logic. */ function exponentialRetryPolicy(options = {}) { var _a; return (0, retryPolicy_js_1.retryPolicy)([ (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreSystemErrors: true })), ], { maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, }); } //# sourceMappingURL=exponentialRetryPolicy.js.map /***/ }), /***/ 75497: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.formDataPolicyName = void 0; exports.formDataPolicy = formDataPolicy; const core_util_1 = __nccwpck_require__(87779); const httpHeaders_js_1 = __nccwpck_require__(192); /** * The programmatic identifier of the formDataPolicy. */ exports.formDataPolicyName = "formDataPolicy"; function formDataToFormDataMap(formData) { var _a; const formDataMap = {}; for (const [key, value] of formData.entries()) { (_a = formDataMap[key]) !== null && _a !== void 0 ? _a : (formDataMap[key] = []); formDataMap[key].push(value); } return formDataMap; } /** * A policy that encodes FormData on the request into the body. */ function formDataPolicy() { return { name: exports.formDataPolicyName, async sendRequest(request, next) { if (core_util_1.isNodeLike && typeof FormData !== "undefined" && request.body instanceof FormData) { request.formData = formDataToFormDataMap(request.body); request.body = undefined; } if (request.formData) { const contentType = request.headers.get("Content-Type"); if (contentType && contentType.indexOf("application/x-www-form-urlencoded") !== -1) { request.body = wwwFormUrlEncode(request.formData); } else { await prepareFormData(request.formData, request); } request.formData = undefined; } return next(request); }, }; } function wwwFormUrlEncode(formData) { const urlSearchParams = new URLSearchParams(); for (const [key, value] of Object.entries(formData)) { if (Array.isArray(value)) { for (const subValue of value) { urlSearchParams.append(key, subValue.toString()); } } else { urlSearchParams.append(key, value.toString()); } } return urlSearchParams.toString(); } async function prepareFormData(formData, request) { // validate content type (multipart/form-data) const contentType = request.headers.get("Content-Type"); if (contentType && !contentType.startsWith("multipart/form-data")) { // content type is specified and is not multipart/form-data. Exit. 
return; } request.headers.set("Content-Type", contentType !== null && contentType !== void 0 ? contentType : "multipart/form-data"); // set body to MultipartRequestBody using content from FormDataMap const parts = []; for (const [fieldName, values] of Object.entries(formData)) { for (const value of Array.isArray(values) ? values : [values]) { if (typeof value === "string") { parts.push({ headers: (0, httpHeaders_js_1.createHttpHeaders)({ "Content-Disposition": `form-data; name="${fieldName}"`, }), body: (0, core_util_1.stringToUint8Array)(value, "utf-8"), }); } else if (value === undefined || value === null || typeof value !== "object") { throw new Error(`Unexpected value for key ${fieldName}: ${value}. Value should be serialized to string first.`); } else { // using || instead of ?? here since if value.name is empty we should create a file name const fileName = value.name || "blob"; const headers = (0, httpHeaders_js_1.createHttpHeaders)(); headers.set("Content-Disposition", `form-data; name="${fieldName}"; filename="${fileName}"`); // again, || is used since an empty value.type means the content type is unset headers.set("Content-Type", value.type || "application/octet-stream"); parts.push({ headers, body: value, }); } } } request.multipartBody = { parts }; } //# sourceMappingURL=formDataPolicy.js.map /***/ }), /***/ 53253: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.logPolicyName = void 0; exports.logPolicy = logPolicy; const log_js_1 = __nccwpck_require__(80544); const sanitizer_js_1 = __nccwpck_require__(35204); /** * The programmatic identifier of the logPolicy. */ exports.logPolicyName = "logPolicy"; /** * A policy that logs all requests and responses. * @param options - Options to configure logPolicy. */ function logPolicy(options = {}) { var _a; const logger = (_a = options.logger) !== null && _a !== void 0 ? _a : log_js_1.logger.info; const sanitizer = new sanitizer_js_1.Sanitizer({ additionalAllowedHeaderNames: options.additionalAllowedHeaderNames, additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, }); return { name: exports.logPolicyName, async sendRequest(request, next) { if (!logger.enabled) { return next(request); } logger(`Request: ${sanitizer.sanitize(request)}`); const response = await next(request); logger(`Response status code: ${response.status}`); logger(`Headers: ${sanitizer.sanitize(response.headers)}`); return response; }, }; } //# sourceMappingURL=logPolicy.js.map /***/ }), /***/ 45807: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
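/*
 * A minimal usage sketch for the logPolicy defined just above, assuming the
 * HttpPipeline returned by createEmptyPipeline (earlier in this bundle) exposes
 * addPolicy/sendRequest as in @azure/core-rest-pipeline and that an httpClient
 * instance is available; the extra header name below is purely illustrative:
 *
 *   const pipeline = createEmptyPipeline();
 *   pipeline.addPolicy(logPolicy({ additionalAllowedHeaderNames: ["x-ms-example"] }));
 *   const request = createPipelineRequest({ url: "https://example.com", method: "GET" });
 *   // const response = await pipeline.sendRequest(httpClient, request);
 */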
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.multipartPolicyName = void 0; exports.multipartPolicy = multipartPolicy; const core_util_1 = __nccwpck_require__(87779); const concat_js_1 = __nccwpck_require__(62471); const typeGuards_js_1 = __nccwpck_require__(22621); function generateBoundary() { return `----AzSDKFormBoundary${(0, core_util_1.randomUUID)()}`; } function encodeHeaders(headers) { let result = ""; for (const [key, value] of headers) { result += `${key}: ${value}\r\n`; } return result; } function getLength(source) { if (source instanceof Uint8Array) { return source.byteLength; } else if ((0, typeGuards_js_1.isBlob)(source)) { // if was created using createFile then -1 means we have an unknown size return source.size === -1 ? undefined : source.size; } else { return undefined; } } function getTotalLength(sources) { let total = 0; for (const source of sources) { const partLength = getLength(source); if (partLength === undefined) { return undefined; } else { total += partLength; } } return total; } async function buildRequestBody(request, parts, boundary) { const sources = [ (0, core_util_1.stringToUint8Array)(`--${boundary}`, "utf-8"), ...parts.flatMap((part) => [ (0, core_util_1.stringToUint8Array)("\r\n", "utf-8"), (0, core_util_1.stringToUint8Array)(encodeHeaders(part.headers), "utf-8"), (0, core_util_1.stringToUint8Array)("\r\n", "utf-8"), part.body, (0, core_util_1.stringToUint8Array)(`\r\n--${boundary}`, "utf-8"), ]), (0, core_util_1.stringToUint8Array)("--\r\n\r\n", "utf-8"), ]; const contentLength = getTotalLength(sources); if (contentLength) { request.headers.set("Content-Length", contentLength); } request.body = await (0, concat_js_1.concat)(sources); } /** * Name of multipart policy */ exports.multipartPolicyName = "multipartPolicy"; const maxBoundaryLength = 70; const validBoundaryCharacters = new Set(`abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789'()+,-./:=?`); function assertValidBoundary(boundary) { if (boundary.length > maxBoundaryLength) { throw new Error(`Multipart boundary "${boundary}" exceeds maximum length of 70 characters`); } if (Array.from(boundary).some((x) => !validBoundaryCharacters.has(x))) { throw new Error(`Multipart boundary "${boundary}" contains invalid characters`); } } /** * Pipeline policy for multipart requests */ function multipartPolicy() { return { name: exports.multipartPolicyName, async sendRequest(request, next) { var _a; if (!request.multipartBody) { return next(request); } if (request.body) { throw new Error("multipartBody and regular body cannot be set at the same time"); } let boundary = request.multipartBody.boundary; const contentTypeHeader = (_a = request.headers.get("Content-Type")) !== null && _a !== void 0 ? _a : "multipart/mixed"; const parsedHeader = contentTypeHeader.match(/^(multipart\/[^ ;]+)(?:; *boundary=(.+))?$/); if (!parsedHeader) { throw new Error(`Got multipart request body, but content-type header was not multipart: ${contentTypeHeader}`); } const [, contentType, parsedBoundary] = parsedHeader; if (parsedBoundary && boundary && parsedBoundary !== boundary) { throw new Error(`Multipart boundary was specified as ${parsedBoundary} in the header, but got ${boundary} in the request body`); } boundary !== null && boundary !== void 0 ? 
boundary : (boundary = parsedBoundary); if (boundary) { assertValidBoundary(boundary); } else { boundary = generateBoundary(); } request.headers.set("Content-Type", `${contentType}; boundary=${boundary}`); await buildRequestBody(request, request.multipartBody.parts, boundary); request.multipartBody = undefined; return next(request); }, }; } //# sourceMappingURL=multipartPolicy.js.map /***/ }), /***/ 36827: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ndJsonPolicyName = void 0; exports.ndJsonPolicy = ndJsonPolicy; /** * The programmatic identifier of the ndJsonPolicy. */ exports.ndJsonPolicyName = "ndJsonPolicy"; /** * ndJsonPolicy is a policy used to control keep alive settings for every request. */ function ndJsonPolicy() { return { name: exports.ndJsonPolicyName, async sendRequest(request, next) { // There currently isn't a good way to bypass the serializer if (typeof request.body === "string" && request.body.startsWith("[")) { const body = JSON.parse(request.body); if (Array.isArray(body)) { request.body = body.map((item) => JSON.stringify(item) + "\n").join(""); } } return next(request); }, }; } //# sourceMappingURL=ndJsonPolicy.js.map /***/ }), /***/ 32815: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.globalNoProxyList = exports.proxyPolicyName = void 0; exports.loadNoProxy = loadNoProxy; exports.getDefaultProxySettings = getDefaultProxySettings; exports.proxyPolicy = proxyPolicy; const https_proxy_agent_1 = __nccwpck_require__(3669); const http_proxy_agent_1 = __nccwpck_require__(81970); const log_js_1 = __nccwpck_require__(80544); const HTTPS_PROXY = "HTTPS_PROXY"; const HTTP_PROXY = "HTTP_PROXY"; const ALL_PROXY = "ALL_PROXY"; const NO_PROXY = "NO_PROXY"; /** * The programmatic identifier of the proxyPolicy. */ exports.proxyPolicyName = "proxyPolicy"; /** * Stores the patterns specified in NO_PROXY environment variable. * @internal */ exports.globalNoProxyList = []; let noProxyListLoaded = false; /** A cache of whether a host should bypass the proxy. */ const globalBypassedMap = new Map(); function getEnvironmentValue(name) { if (process.env[name]) { return process.env[name]; } else if (process.env[name.toLowerCase()]) { return process.env[name.toLowerCase()]; } return undefined; } function loadEnvironmentProxyValue() { if (!process) { return undefined; } const httpsProxy = getEnvironmentValue(HTTPS_PROXY); const allProxy = getEnvironmentValue(ALL_PROXY); const httpProxy = getEnvironmentValue(HTTP_PROXY); return httpsProxy || allProxy || httpProxy; } /** * Check whether the host of a given `uri` matches any pattern in the no proxy list. * If there's a match, any request sent to the same host shouldn't have the proxy settings set. * This implementation is a port of https://github.com/Azure/azure-sdk-for-net/blob/8cca811371159e527159c7eb65602477898683e2/sdk/core/Azure.Core/src/Pipeline/Internal/HttpEnvironmentProxy.cs#L210 */ function isBypassed(uri, noProxyList, bypassedMap) { if (noProxyList.length === 0) { return false; } const host = new URL(uri).hostname; if (bypassedMap === null || bypassedMap === void 0 ? 
void 0 : bypassedMap.has(host)) { return bypassedMap.get(host); } let isBypassedFlag = false; for (const pattern of noProxyList) { if (pattern[0] === ".") { // This should match either domain it self or any subdomain or host // .foo.com will match foo.com it self or *.foo.com if (host.endsWith(pattern)) { isBypassedFlag = true; } else { if (host.length === pattern.length - 1 && host === pattern.slice(1)) { isBypassedFlag = true; } } } else { if (host === pattern) { isBypassedFlag = true; } } } bypassedMap === null || bypassedMap === void 0 ? void 0 : bypassedMap.set(host, isBypassedFlag); return isBypassedFlag; } function loadNoProxy() { const noProxy = getEnvironmentValue(NO_PROXY); noProxyListLoaded = true; if (noProxy) { return noProxy .split(",") .map((item) => item.trim()) .filter((item) => item.length); } return []; } /** * This method converts a proxy url into `ProxySettings` for use with ProxyPolicy. * If no argument is given, it attempts to parse a proxy URL from the environment * variables `HTTPS_PROXY` or `HTTP_PROXY`. * @param proxyUrl - The url of the proxy to use. May contain authentication information. * @deprecated - Internally this method is no longer necessary when setting proxy information. */ function getDefaultProxySettings(proxyUrl) { if (!proxyUrl) { proxyUrl = loadEnvironmentProxyValue(); if (!proxyUrl) { return undefined; } } const parsedUrl = new URL(proxyUrl); const schema = parsedUrl.protocol ? parsedUrl.protocol + "//" : ""; return { host: schema + parsedUrl.hostname, port: Number.parseInt(parsedUrl.port || "80"), username: parsedUrl.username, password: parsedUrl.password, }; } /** * This method attempts to parse a proxy URL from the environment * variables `HTTPS_PROXY` or `HTTP_PROXY`. */ function getDefaultProxySettingsInternal() { const envProxy = loadEnvironmentProxyValue(); return envProxy ? new URL(envProxy) : undefined; } function getUrlFromProxySettings(settings) { let parsedProxyUrl; try { parsedProxyUrl = new URL(settings.host); } catch (_a) { throw new Error(`Expecting a valid host string in proxy settings, but found "${settings.host}".`); } parsedProxyUrl.port = String(settings.port); if (settings.username) { parsedProxyUrl.username = settings.username; } if (settings.password) { parsedProxyUrl.password = settings.password; } return parsedProxyUrl; } function setProxyAgentOnRequest(request, cachedAgents, proxyUrl) { // Custom Agent should take precedence so if one is present // we should skip to avoid overwriting it. if (request.agent) { return; } const url = new URL(request.url); const isInsecure = url.protocol !== "https:"; if (request.tlsSettings) { log_js_1.logger.warning("TLS settings are not supported in combination with custom Proxy, certificates provided to the client will be ignored."); } const headers = request.headers.toJSON(); if (isInsecure) { if (!cachedAgents.httpProxyAgent) { cachedAgents.httpProxyAgent = new http_proxy_agent_1.HttpProxyAgent(proxyUrl, { headers }); } request.agent = cachedAgents.httpProxyAgent; } else { if (!cachedAgents.httpsProxyAgent) { cachedAgents.httpsProxyAgent = new https_proxy_agent_1.HttpsProxyAgent(proxyUrl, { headers }); } request.agent = cachedAgents.httpsProxyAgent; } } /** * A policy that allows one to apply proxy settings to all requests. * If not passed static settings, they will be retrieved from the HTTPS_PROXY * or HTTP_PROXY environment variables. * @param proxySettings - ProxySettings to use on each request. 
* @param options - additional settings, for example, custom NO_PROXY patterns */ function proxyPolicy(proxySettings, options) { if (!noProxyListLoaded) { exports.globalNoProxyList.push(...loadNoProxy()); } const defaultProxy = proxySettings ? getUrlFromProxySettings(proxySettings) : getDefaultProxySettingsInternal(); const cachedAgents = {}; return { name: exports.proxyPolicyName, async sendRequest(request, next) { var _a; if (!request.proxySettings && defaultProxy && !isBypassed(request.url, (_a = options === null || options === void 0 ? void 0 : options.customNoProxyList) !== null && _a !== void 0 ? _a : exports.globalNoProxyList, (options === null || options === void 0 ? void 0 : options.customNoProxyList) ? undefined : globalBypassedMap)) { setProxyAgentOnRequest(request, cachedAgents, defaultProxy); } else if (request.proxySettings) { setProxyAgentOnRequest(request, cachedAgents, getUrlFromProxySettings(request.proxySettings)); } return next(request); }, }; } //# sourceMappingURL=proxyPolicy.js.map /***/ }), /***/ 64087: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.redirectPolicyName = void 0; exports.redirectPolicy = redirectPolicy; /** * The programmatic identifier of the redirectPolicy. */ exports.redirectPolicyName = "redirectPolicy"; /** * Methods that are allowed to follow redirects 301 and 302 */ const allowedRedirect = ["GET", "HEAD"]; /** * A policy to follow Location headers from the server in order * to support server-side redirection. * In the browser, this policy is not used. * @param options - Options to control policy behavior. */ function redirectPolicy(options = {}) { const { maxRetries = 20 } = options; return { name: exports.redirectPolicyName, async sendRequest(request, next) { const response = await next(request); return handleRedirect(next, response, maxRetries); }, }; } async function handleRedirect(next, response, maxRetries, currentRetries = 0) { const { request, status, headers } = response; const locationHeader = headers.get("location"); if (locationHeader && (status === 300 || (status === 301 && allowedRedirect.includes(request.method)) || (status === 302 && allowedRedirect.includes(request.method)) || (status === 303 && request.method === "POST") || status === 307) && currentRetries < maxRetries) { const url = new URL(locationHeader, request.url); request.url = url.toString(); // POST request with Status code 303 should be converted into a // redirected GET request if the redirect url is present in the location header if (status === 303) { request.method = "GET"; request.headers.delete("Content-Length"); delete request.body; } request.headers.delete("Authorization"); const res = await next(request); return handleRedirect(next, res, maxRetries, currentRetries + 1); } return response; } //# sourceMappingURL=redirectPolicy.js.map /***/ }), /***/ 56085: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
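/*
 * A short sketch of the NO_PROXY matching implemented by isBypassed above
 * (hostnames and patterns here are illustrative):
 *
 *   NO_PROXY=".example.com,localhost"
 *     https://example.com      -> bypassed (".example.com" also matches the bare domain)
 *     https://api.example.com  -> bypassed (subdomain of ".example.com")
 *     http://localhost:8080    -> bypassed (exact hostname match; port is ignored)
 *     https://example.org      -> proxied
 *
 * And getDefaultProxySettings (deprecated) splits a proxy URL into parts, e.g.:
 *   getDefaultProxySettings("http://user:pass@127.0.0.1:3128")
 *     -> { host: "http://127.0.0.1", port: 3128, username: "user", password: "pass" }
 */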
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.retryPolicy = retryPolicy; const helpers_js_1 = __nccwpck_require__(653); const logger_1 = __nccwpck_require__(26515); const abort_controller_1 = __nccwpck_require__(93287); const constants_js_1 = __nccwpck_require__(66427); const retryPolicyLogger = (0, logger_1.createClientLogger)("core-rest-pipeline retryPolicy"); /** * The programmatic identifier of the retryPolicy. */ const retryPolicyName = "retryPolicy"; /** * retryPolicy is a generic policy to enable retrying requests when certain conditions are met */ function retryPolicy(strategies, options = { maxRetries: constants_js_1.DEFAULT_RETRY_POLICY_COUNT }) { const logger = options.logger || retryPolicyLogger; return { name: retryPolicyName, async sendRequest(request, next) { var _a, _b; let response; let responseError; let retryCount = -1; // eslint-disable-next-line no-constant-condition retryRequest: while (true) { retryCount += 1; response = undefined; responseError = undefined; try { logger.info(`Retry ${retryCount}: Attempting to send request`, request.requestId); response = await next(request); logger.info(`Retry ${retryCount}: Received a response from request`, request.requestId); } catch (e) { logger.error(`Retry ${retryCount}: Received an error from request`, request.requestId); // RestErrors are valid targets for the retry strategies. // If none of the retry strategies can work with them, they will be thrown later in this policy. // If the received error is not a RestError, it is immediately thrown. responseError = e; if (!e || responseError.name !== "RestError") { throw e; } response = responseError.response; } if ((_a = request.abortSignal) === null || _a === void 0 ? void 0 : _a.aborted) { logger.error(`Retry ${retryCount}: Request aborted.`); const abortError = new abort_controller_1.AbortError(); throw abortError; } if (retryCount >= ((_b = options.maxRetries) !== null && _b !== void 0 ? _b : constants_js_1.DEFAULT_RETRY_POLICY_COUNT)) { logger.info(`Retry ${retryCount}: Maximum retries reached. 
Returning the last received response, or throwing the last received error.`); if (responseError) { throw responseError; } else if (response) { return response; } else { throw new Error("Maximum retries reached with no response or error to throw"); } } logger.info(`Retry ${retryCount}: Processing ${strategies.length} retry strategies.`); strategiesLoop: for (const strategy of strategies) { const strategyLogger = strategy.logger || retryPolicyLogger; strategyLogger.info(`Retry ${retryCount}: Processing retry strategy ${strategy.name}.`); const modifiers = strategy.retry({ retryCount, response, responseError, }); if (modifiers.skipStrategy) { strategyLogger.info(`Retry ${retryCount}: Skipped.`); continue strategiesLoop; } const { errorToThrow, retryAfterInMs, redirectTo } = modifiers; if (errorToThrow) { strategyLogger.error(`Retry ${retryCount}: Retry strategy ${strategy.name} throws error:`, errorToThrow); throw errorToThrow; } if (retryAfterInMs || retryAfterInMs === 0) { strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} retries after ${retryAfterInMs}`); await (0, helpers_js_1.delay)(retryAfterInMs, undefined, { abortSignal: request.abortSignal }); continue retryRequest; } if (redirectTo) { strategyLogger.info(`Retry ${retryCount}: Retry strategy ${strategy.name} redirects to ${redirectTo}`); request.url = redirectTo; continue retryRequest; } } if (responseError) { logger.info(`None of the retry strategies could work with the received error. Throwing it.`); throw responseError; } if (response) { logger.info(`None of the retry strategies could work with the received response. Returning it.`); return response; } // If all the retries skip and there's no response, // we're still in the retry loop, so a new request will be sent // until `maxRetries` is reached. } }, }; } //# sourceMappingURL=retryPolicy.js.map /***/ }), /***/ 95686: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.setClientRequestIdPolicyName = void 0; exports.setClientRequestIdPolicy = setClientRequestIdPolicy; /** * The programmatic identifier of the setClientRequestIdPolicy. */ exports.setClientRequestIdPolicyName = "setClientRequestIdPolicy"; /** * Each PipelineRequest gets a unique id upon creation. * This policy passes that unique id along via an HTTP header to enable better * telemetry and tracing. * @param requestIdHeaderName - The name of the header to pass the request ID to. */ function setClientRequestIdPolicy(requestIdHeaderName = "x-ms-client-request-id") { return { name: exports.setClientRequestIdPolicyName, async sendRequest(request, next) { if (!request.headers.has(requestIdHeaderName)) { request.headers.set(requestIdHeaderName, request.requestId); } return next(request); }, }; } //# sourceMappingURL=setClientRequestIdPolicy.js.map /***/ }), /***/ 96518: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
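/*
 * The generic retryPolicy above drives a list of "strategies": each strategy's
 * retry() callback inspects { retryCount, response, responseError } and returns
 * one of { skipStrategy }, { errorToThrow }, { retryAfterInMs } or { redirectTo }.
 * A hand-rolled strategy, shown only as a hedged sketch (the 503-only condition
 * and the fixed 500 ms delay are illustrative choices, not part of this bundle):
 *
 *   const retryOn503Once = {
 *     name: "retryOn503Once",
 *     retry({ retryCount, response }) {
 *       if (response && response.status === 503 && retryCount < 1) {
 *         return { retryAfterInMs: 500 }; // wait, then the policy resends the request
 *       }
 *       return { skipStrategy: true };    // defer to the remaining strategies
 *     },
 *   };
 *   // Used as: retryPolicy([retryOn503Once], { maxRetries: 3 })
 */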
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.systemErrorRetryPolicyName = void 0; exports.systemErrorRetryPolicy = systemErrorRetryPolicy; const exponentialRetryStrategy_js_1 = __nccwpck_require__(50002); const retryPolicy_js_1 = __nccwpck_require__(56085); const constants_js_1 = __nccwpck_require__(66427); /** * Name of the {@link systemErrorRetryPolicy} */ exports.systemErrorRetryPolicyName = "systemErrorRetryPolicy"; /** * A retry policy that specifically seeks to handle errors in the * underlying transport layer (e.g. DNS lookup failures) rather than * retryable error codes from the server itself. * @param options - Options that customize the policy. */ function systemErrorRetryPolicy(options = {}) { var _a; return { name: exports.systemErrorRetryPolicyName, sendRequest: (0, retryPolicy_js_1.retryPolicy)([ (0, exponentialRetryStrategy_js_1.exponentialRetryStrategy)(Object.assign(Object.assign({}, options), { ignoreHttpStatusCodes: true })), ], { maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, }).sendRequest, }; } //# sourceMappingURL=systemErrorRetryPolicy.js.map /***/ }), /***/ 97540: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.throttlingRetryPolicyName = void 0; exports.throttlingRetryPolicy = throttlingRetryPolicy; const throttlingRetryStrategy_js_1 = __nccwpck_require__(97084); const retryPolicy_js_1 = __nccwpck_require__(56085); const constants_js_1 = __nccwpck_require__(66427); /** * Name of the {@link throttlingRetryPolicy} */ exports.throttlingRetryPolicyName = "throttlingRetryPolicy"; /** * A policy that retries when the server sends a 429 response with a Retry-After header. * * To learn more, please refer to * https://docs.microsoft.com/en-us/azure/azure-resource-manager/resource-manager-request-limits, * https://docs.microsoft.com/en-us/azure/azure-subscription-service-limits and * https://docs.microsoft.com/en-us/azure/virtual-machines/troubleshooting/troubleshooting-throttling-errors * * @param options - Options that configure retry logic. */ function throttlingRetryPolicy(options = {}) { var _a; return { name: exports.throttlingRetryPolicyName, sendRequest: (0, retryPolicy_js_1.retryPolicy)([(0, throttlingRetryStrategy_js_1.throttlingRetryStrategy)()], { maxRetries: (_a = options.maxRetries) !== null && _a !== void 0 ? _a : constants_js_1.DEFAULT_RETRY_POLICY_COUNT, }).sendRequest, }; } //# sourceMappingURL=throttlingRetryPolicy.js.map /***/ }), /***/ 75798: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.tlsPolicyName = void 0; exports.tlsPolicy = tlsPolicy; /** * Name of the TLS Policy */ exports.tlsPolicyName = "tlsPolicy"; /** * Gets a pipeline policy that adds the client certificate to the HttpClient agent for authentication. 
*/ function tlsPolicy(tlsSettings) { return { name: exports.tlsPolicyName, sendRequest: async (req, next) => { // Users may define a request tlsSettings, honor those over the client level one if (!req.tlsSettings) { req.tlsSettings = tlsSettings; } return next(req); }, }; } //# sourceMappingURL=tlsPolicy.js.map /***/ }), /***/ 93237: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.tracingPolicyName = void 0; exports.tracingPolicy = tracingPolicy; const core_tracing_1 = __nccwpck_require__(20623); const constants_js_1 = __nccwpck_require__(66427); const userAgent_js_1 = __nccwpck_require__(28431); const log_js_1 = __nccwpck_require__(80544); const core_util_1 = __nccwpck_require__(87779); const restError_js_1 = __nccwpck_require__(8666); const sanitizer_js_1 = __nccwpck_require__(35204); /** * The programmatic identifier of the tracingPolicy. */ exports.tracingPolicyName = "tracingPolicy"; /** * A simple policy to create OpenTelemetry Spans for each request made by the pipeline * that has SpanOptions with a parent. * Requests made without a parent Span will not be recorded. * @param options - Options to configure the telemetry logged by the tracing policy. */ function tracingPolicy(options = {}) { const userAgentPromise = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); const sanitizer = new sanitizer_js_1.Sanitizer({ additionalAllowedQueryParameters: options.additionalAllowedQueryParameters, }); const tracingClient = tryCreateTracingClient(); return { name: exports.tracingPolicyName, async sendRequest(request, next) { var _a; if (!tracingClient) { return next(request); } const userAgent = await userAgentPromise; const spanAttributes = { "http.url": sanitizer.sanitizeUrl(request.url), "http.method": request.method, "http.user_agent": userAgent, requestId: request.requestId, }; if (userAgent) { spanAttributes["http.user_agent"] = userAgent; } const { span, tracingContext } = (_a = tryCreateSpan(tracingClient, request, spanAttributes)) !== null && _a !== void 0 ? _a : {}; if (!span || !tracingContext) { return next(request); } try { const response = await tracingClient.withContext(tracingContext, next, request); tryProcessResponse(span, response); return response; } catch (err) { tryProcessError(span, err); throw err; } }, }; } function tryCreateTracingClient() { try { return (0, core_tracing_1.createTracingClient)({ namespace: "", packageName: "@azure/core-rest-pipeline", packageVersion: constants_js_1.SDK_VERSION, }); } catch (e) { log_js_1.logger.warning(`Error when creating the TracingClient: ${(0, core_util_1.getErrorMessage)(e)}`); return undefined; } } function tryCreateSpan(tracingClient, request, spanAttributes) { try { // As per spec, we do not need to differentiate between HTTP and HTTPS in span name. const { span, updatedOptions } = tracingClient.startSpan(`HTTP ${request.method}`, { tracingOptions: request.tracingOptions }, { spanKind: "client", spanAttributes, }); // If the span is not recording, don't do any more work. 
if (!span.isRecording()) { span.end(); return undefined; } // set headers const headers = tracingClient.createRequestHeaders(updatedOptions.tracingOptions.tracingContext); for (const [key, value] of Object.entries(headers)) { request.headers.set(key, value); } return { span, tracingContext: updatedOptions.tracingOptions.tracingContext }; } catch (e) { log_js_1.logger.warning(`Skipping creating a tracing span due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); return undefined; } } function tryProcessError(span, error) { try { span.setStatus({ status: "error", error: (0, core_util_1.isError)(error) ? error : undefined, }); if ((0, restError_js_1.isRestError)(error) && error.statusCode) { span.setAttribute("http.status_code", error.statusCode); } span.end(); } catch (e) { log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); } } function tryProcessResponse(span, response) { try { span.setAttribute("http.status_code", response.status); const serviceRequestId = response.headers.get("x-ms-request-id"); if (serviceRequestId) { span.setAttribute("serviceRequestId", serviceRequestId); } span.setStatus({ status: "success", }); span.end(); } catch (e) { log_js_1.logger.warning(`Skipping tracing span processing due to an error: ${(0, core_util_1.getErrorMessage)(e)}`); } } //# sourceMappingURL=tracingPolicy.js.map /***/ }), /***/ 32799: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.userAgentPolicyName = void 0; exports.userAgentPolicy = userAgentPolicy; const userAgent_js_1 = __nccwpck_require__(28431); const UserAgentHeaderName = (0, userAgent_js_1.getUserAgentHeaderName)(); /** * The programmatic identifier of the userAgentPolicy. */ exports.userAgentPolicyName = "userAgentPolicy"; /** * A policy that sets the User-Agent header (or equivalent) to reflect * the library version. * @param options - Options to customize the user agent value. */ function userAgentPolicy(options = {}) { const userAgentValue = (0, userAgent_js_1.getUserAgentValue)(options.userAgentPrefix); return { name: exports.userAgentPolicyName, async sendRequest(request, next) { if (!request.headers.has(UserAgentHeaderName)) { request.headers.set(UserAgentHeaderName, await userAgentValue); } return next(request); }, }; } //# sourceMappingURL=userAgentPolicy.js.map /***/ }), /***/ 8666: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.RestError = void 0; exports.isRestError = isRestError; const core_util_1 = __nccwpck_require__(87779); const inspect_js_1 = __nccwpck_require__(90995); const sanitizer_js_1 = __nccwpck_require__(35204); const errorSanitizer = new sanitizer_js_1.Sanitizer(); /** * A custom error type for failed pipeline requests. */ class RestError extends Error { constructor(message, options = {}) { super(message); this.name = "RestError"; this.code = options.code; this.statusCode = options.statusCode; // The request and response may contain sensitive information in the headers or body. // To help prevent this sensitive information being accidentally logged, the request and response // properties are marked as non-enumerable here. 
This prevents them showing up in the output of // JSON.stringify and console.log. Object.defineProperty(this, "request", { value: options.request, enumerable: false }); Object.defineProperty(this, "response", { value: options.response, enumerable: false }); Object.setPrototypeOf(this, RestError.prototype); } /** * Logging method for util.inspect in Node */ [inspect_js_1.custom]() { // Extract non-enumerable properties and add them back. This is OK since in this output the request and // response get sanitized. return `RestError: ${this.message} \n ${errorSanitizer.sanitize(Object.assign(Object.assign({}, this), { request: this.request, response: this.response }))}`; } } exports.RestError = RestError; /** * Something went wrong when making the request. * This means the actual request failed for some reason, * such as a DNS issue or the connection being lost. */ RestError.REQUEST_SEND_ERROR = "REQUEST_SEND_ERROR"; /** * This means that parsing the response from the server failed. * It may have been malformed. */ RestError.PARSE_ERROR = "PARSE_ERROR"; /** * Typeguard for RestError * @param e - Something caught by a catch clause. */ function isRestError(e) { if (e instanceof RestError) { return true; } return (0, core_util_1.isError)(e) && e.name === "RestError"; } //# sourceMappingURL=restError.js.map /***/ }), /***/ 50002: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.exponentialRetryStrategy = exponentialRetryStrategy; exports.isExponentialRetryResponse = isExponentialRetryResponse; exports.isSystemError = isSystemError; const core_util_1 = __nccwpck_require__(87779); const throttlingRetryStrategy_js_1 = __nccwpck_require__(97084); // intervals are in milliseconds const DEFAULT_CLIENT_RETRY_INTERVAL = 1000; const DEFAULT_CLIENT_MAX_RETRY_INTERVAL = 1000 * 64; /** * A retry strategy that retries with an exponentially increasing delay in these two cases: * - When there are errors in the underlying transport layer (e.g. DNS lookup failures). * - Or otherwise if the outgoing request fails (408, greater or equal than 500, except for 501 and 505). */ function exponentialRetryStrategy(options = {}) { var _a, _b; const retryInterval = (_a = options.retryDelayInMs) !== null && _a !== void 0 ? _a : DEFAULT_CLIENT_RETRY_INTERVAL; const maxRetryInterval = (_b = options.maxRetryDelayInMs) !== null && _b !== void 0 ? _b : DEFAULT_CLIENT_MAX_RETRY_INTERVAL; return { name: "exponentialRetryStrategy", retry({ retryCount, response, responseError }) { const matchedSystemError = isSystemError(responseError); const ignoreSystemErrors = matchedSystemError && options.ignoreSystemErrors; const isExponential = isExponentialRetryResponse(response); const ignoreExponentialResponse = isExponential && options.ignoreHttpStatusCodes; const unknownResponse = response && ((0, throttlingRetryStrategy_js_1.isThrottlingRetryResponse)(response) || !isExponential); if (unknownResponse || ignoreExponentialResponse || ignoreSystemErrors) { return { skipStrategy: true }; } if (responseError && !matchedSystemError && !isExponential) { return { errorToThrow: responseError }; } return (0, core_util_1.calculateRetryDelay)(retryCount, { retryDelayInMs: retryInterval, maxRetryDelayInMs: maxRetryInterval, }); }, }; } /** * A response is a retry response if it has status codes: * - 408, or * - Greater or equal than 500, except for 501 and 505. 
*/ function isExponentialRetryResponse(response) { return Boolean(response && response.status !== undefined && (response.status >= 500 || response.status === 408) && response.status !== 501 && response.status !== 505); } /** * Determines whether an error from a pipeline response was triggered in the network layer. */ function isSystemError(err) { if (!err) { return false; } return (err.code === "ETIMEDOUT" || err.code === "ESOCKETTIMEDOUT" || err.code === "ECONNREFUSED" || err.code === "ECONNRESET" || err.code === "ENOENT" || err.code === "ENOTFOUND"); } //# sourceMappingURL=exponentialRetryStrategy.js.map /***/ }), /***/ 97084: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isThrottlingRetryResponse = isThrottlingRetryResponse; exports.throttlingRetryStrategy = throttlingRetryStrategy; const helpers_js_1 = __nccwpck_require__(653); /** * The header that comes back from Azure services representing * the amount of time (minimum) to wait to retry (in seconds or timestamp after which we can retry). */ const RetryAfterHeader = "Retry-After"; /** * The headers that come back from Azure services representing * the amount of time (minimum) to wait to retry. * * "retry-after-ms", "x-ms-retry-after-ms" : milliseconds * "Retry-After" : seconds or timestamp */ const AllRetryAfterHeaders = ["retry-after-ms", "x-ms-retry-after-ms", RetryAfterHeader]; /** * A response is a throttling retry response if it has a throttling status code (429 or 503), * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. * * Returns the `retryAfterInMs` value if the response is a throttling retry response. * If not throttling retry response, returns `undefined`. * * @internal */ function getRetryAfterInMs(response) { if (!(response && [429, 503].includes(response.status))) return undefined; try { // Headers: "retry-after-ms", "x-ms-retry-after-ms", "Retry-After" for (const header of AllRetryAfterHeaders) { const retryAfterValue = (0, helpers_js_1.parseHeaderValueAsNumber)(response, header); if (retryAfterValue === 0 || retryAfterValue) { // "Retry-After" header ==> seconds // "retry-after-ms", "x-ms-retry-after-ms" headers ==> milli-seconds const multiplyingFactor = header === RetryAfterHeader ? 1000 : 1; return retryAfterValue * multiplyingFactor; // in milli-seconds } } // RetryAfterHeader ("Retry-After") has a special case where it might be formatted as a date instead of a number of seconds const retryAfterHeader = response.headers.get(RetryAfterHeader); if (!retryAfterHeader) return; const date = Date.parse(retryAfterHeader); const diff = date - Date.now(); // negative diff would mean a date in the past, so retry asap with 0 milliseconds return Number.isFinite(diff) ? Math.max(0, diff) : undefined; } catch (_a) { return undefined; } } /** * A response is a retry response if it has a throttling status code (429 or 503), * as long as one of the [ "Retry-After" or "retry-after-ms" or "x-ms-retry-after-ms" ] headers has a valid value. 
*/ function isThrottlingRetryResponse(response) { return Number.isFinite(getRetryAfterInMs(response)); } function throttlingRetryStrategy() { return { name: "throttlingRetryStrategy", retry({ response }) { const retryAfterInMs = getRetryAfterInMs(response); if (!Number.isFinite(retryAfterInMs)) { return { skipStrategy: true }; } return { retryAfterInMs, }; }, }; } //# sourceMappingURL=throttlingRetryStrategy.js.map /***/ }), /***/ 62471: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.concat = concat; const tslib_1 = __nccwpck_require__(61860); const node_stream_1 = __nccwpck_require__(57075); const typeGuards_js_1 = __nccwpck_require__(22621); const file_js_1 = __nccwpck_require__(97073); function streamAsyncIterator() { return tslib_1.__asyncGenerator(this, arguments, function* streamAsyncIterator_1() { const reader = this.getReader(); try { while (true) { const { done, value } = yield tslib_1.__await(reader.read()); if (done) { return yield tslib_1.__await(void 0); } yield yield tslib_1.__await(value); } } finally { reader.releaseLock(); } }); } function makeAsyncIterable(webStream) { if (!webStream[Symbol.asyncIterator]) { webStream[Symbol.asyncIterator] = streamAsyncIterator.bind(webStream); } if (!webStream.values) { webStream.values = streamAsyncIterator.bind(webStream); } } function ensureNodeStream(stream) { if (stream instanceof ReadableStream) { makeAsyncIterable(stream); return node_stream_1.Readable.fromWeb(stream); } else { return stream; } } function toStream(source) { if (source instanceof Uint8Array) { return node_stream_1.Readable.from(Buffer.from(source)); } else if ((0, typeGuards_js_1.isBlob)(source)) { return toStream((0, file_js_1.getRawContent)(source)); } else { return ensureNodeStream(source); } } /** * Utility function that concatenates a set of binary inputs into one combined output. * * @param sources - array of sources for the concatenation * @returns - in Node, a (() =\> NodeJS.ReadableStream) which, when read, produces a concatenation of all the inputs. * In browser, returns a `Blob` representing all the concatenated inputs. * * @internal */ async function concat(sources) { return function () { const streams = sources.map((x) => (typeof x === "function" ? x() : x)).map(toStream); return node_stream_1.Readable.from((function () { return tslib_1.__asyncGenerator(this, arguments, function* () { var _a, e_1, _b, _c; for (const stream of streams) { try { for (var _d = true, stream_1 = (e_1 = void 0, tslib_1.__asyncValues(stream)), stream_1_1; stream_1_1 = yield tslib_1.__await(stream_1.next()), _a = stream_1_1.done, !_a; _d = true) { _c = stream_1_1.value; _d = false; const chunk = _c; yield yield tslib_1.__await(chunk); } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { if (!_d && !_a && (_b = stream_1.return)) yield tslib_1.__await(_b.call(stream_1)); } finally { if (e_1) throw e_1.error; } } } }); })()); }; } //# sourceMappingURL=concat.js.map /***/ }), /***/ 97073: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
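/*
 * A worked sketch of the Retry-After handling in getRetryAfterInMs above
 * (status codes and header values are illustrative):
 *
 *   429 with "retry-after-ms: 250"       -> 250 ms   (already in milliseconds)
 *   503 with "Retry-After: 5"            -> 5000 ms  (seconds * 1000)
 *   429 with "Retry-After: <HTTP-date>"  -> max(0, date - Date.now()) ms
 *   200 with any Retry-After header      -> undefined (only 429/503 count as throttling)
 */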
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getRawContent = getRawContent; exports.createFileFromStream = createFileFromStream; exports.createFile = createFile; const core_util_1 = __nccwpck_require__(87779); const typeGuards_js_1 = __nccwpck_require__(22621); const unimplementedMethods = { arrayBuffer: () => { throw new Error("Not implemented"); }, slice: () => { throw new Error("Not implemented"); }, text: () => { throw new Error("Not implemented"); }, }; /** * Private symbol used as key on objects created using createFile containing the * original source of the file object. * * This is used in Node to access the original Node stream without using Blob#stream, which * returns a web stream. This is done to avoid a couple of bugs to do with Blob#stream and * Readable#to/fromWeb in Node versions we support: * - https://github.com/nodejs/node/issues/42694 (fixed in Node 18.14) * - https://github.com/nodejs/node/issues/48916 (fixed in Node 20.6) * * Once these versions are no longer supported, we may be able to stop doing this. * * @internal */ const rawContent = Symbol("rawContent"); function hasRawContent(x) { return typeof x[rawContent] === "function"; } /** * Extract the raw content from a given blob-like object. If the input was created using createFile * or createFileFromStream, the exact content passed into createFile/createFileFromStream will be used. * For true instances of Blob and File, returns the blob's content as a Web ReadableStream. * * @internal */ function getRawContent(blob) { if (hasRawContent(blob)) { return blob[rawContent](); } else { return blob.stream(); } } /** * Create an object that implements the File interface. This object is intended to be * passed into RequestBodyType.formData, and is not guaranteed to work as expected in * other situations. * * Use this function to: * - Create a File object for use in RequestBodyType.formData in environments where the * global File object is unavailable. * - Create a File-like object from a readable stream without reading the stream into memory. * * @param stream - the content of the file as a callback returning a stream. When a File object made using createFile is * passed in a request's form data map, the stream will not be read into memory * and instead will be streamed when the request is made. In the event of a retry, the * stream needs to be read again, so this callback SHOULD return a fresh stream if possible. * @param name - the name of the file. * @param options - optional metadata about the file, e.g. file name, file size, MIME type. */ function createFileFromStream(stream, name, options = {}) { var _a, _b, _c, _d; return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: (_d = options.size) !== null && _d !== void 0 ? _d : -1, name, stream: () => { const s = stream(); if ((0, typeGuards_js_1.isNodeReadableStream)(s)) { throw new Error("Not supported: a Node stream was provided as input to createFileFromStream."); } return s; }, [rawContent]: stream }); } /** * Create an object that implements the File interface. This object is intended to be * passed into RequestBodyType.formData, and is not guaranteed to work as expected in * other situations. 
* * Use this function create a File object for use in RequestBodyType.formData in environments where the global File object is unavailable. * * @param content - the content of the file as a Uint8Array in memory. * @param name - the name of the file. * @param options - optional metadata about the file, e.g. file name, file size, MIME type. */ function createFile(content, name, options = {}) { var _a, _b, _c; if (core_util_1.isNodeLike) { return Object.assign(Object.assign({}, unimplementedMethods), { type: (_a = options.type) !== null && _a !== void 0 ? _a : "", lastModified: (_b = options.lastModified) !== null && _b !== void 0 ? _b : new Date().getTime(), webkitRelativePath: (_c = options.webkitRelativePath) !== null && _c !== void 0 ? _c : "", size: content.byteLength, name, arrayBuffer: async () => content.buffer, stream: () => new Blob([content]).stream(), [rawContent]: () => content }); } else { return new File([content], name, options); } } //# sourceMappingURL=file.js.map /***/ }), /***/ 653: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.delay = delay; exports.parseHeaderValueAsNumber = parseHeaderValueAsNumber; const abort_controller_1 = __nccwpck_require__(93287); const StandardAbortMessage = "The operation was aborted."; /** * A wrapper for setTimeout that resolves a promise after delayInMs milliseconds. * @param delayInMs - The number of milliseconds to be delayed. * @param value - The value to be resolved with after a timeout of t milliseconds. * @param options - The options for delay - currently abort options * - abortSignal - The abortSignal associated with containing operation. * - abortErrorMsg - The abort error message associated with containing operation. * @returns Resolved promise */ function delay(delayInMs, value, options) { return new Promise((resolve, reject) => { let timer = undefined; let onAborted = undefined; const rejectOnAbort = () => { return reject(new abort_controller_1.AbortError((options === null || options === void 0 ? void 0 : options.abortErrorMsg) ? options === null || options === void 0 ? void 0 : options.abortErrorMsg : StandardAbortMessage)); }; const removeListeners = () => { if ((options === null || options === void 0 ? void 0 : options.abortSignal) && onAborted) { options.abortSignal.removeEventListener("abort", onAborted); } }; onAborted = () => { if (timer) { clearTimeout(timer); } removeListeners(); return rejectOnAbort(); }; if ((options === null || options === void 0 ? void 0 : options.abortSignal) && options.abortSignal.aborted) { return rejectOnAbort(); } timer = setTimeout(() => { removeListeners(); resolve(value); }, delayInMs); if (options === null || options === void 0 ? void 0 : options.abortSignal) { options.abortSignal.addEventListener("abort", onAborted); } }); } /** * @internal * @returns the parsed value or undefined if the parsed value is invalid. */ function parseHeaderValueAsNumber(response, headerName) { const value = response.headers.get(headerName); if (!value) return; const valueAsNum = Number(value); if (Number.isNaN(valueAsNum)) return; return valueAsNum; } //# sourceMappingURL=helpers.js.map /***/ }), /***/ 90995: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
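// A minimal sketch of the `delay` helper from the preceding module (helpers.js) used with an
// AbortSignal; the timeout values are arbitrary.
//
//   const controller = new AbortController();
//   setTimeout(() => controller.abort(), 100);
//   delay(5000, "done", { abortSignal: controller.signal, abortErrorMsg: "cancelled" })
//     .then((value) => console.log(value))       // "done" if the timer wins
//     .catch((err) => console.log(err.message)); // "cancelled" if the abort wins (here it does)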
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.custom = void 0; const node_util_1 = __nccwpck_require__(57975); exports.custom = node_util_1.inspect.custom; //# sourceMappingURL=inspect.js.map /***/ }), /***/ 35204: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Sanitizer = void 0; const core_util_1 = __nccwpck_require__(87779); const RedactedString = "REDACTED"; // Make sure this list is up-to-date with the one under core/logger/Readme#Keyconcepts const defaultAllowedHeaderNames = [ "x-ms-client-request-id", "x-ms-return-client-request-id", "x-ms-useragent", "x-ms-correlation-request-id", "x-ms-request-id", "client-request-id", "ms-cv", "return-client-request-id", "traceparent", "Access-Control-Allow-Credentials", "Access-Control-Allow-Headers", "Access-Control-Allow-Methods", "Access-Control-Allow-Origin", "Access-Control-Expose-Headers", "Access-Control-Max-Age", "Access-Control-Request-Headers", "Access-Control-Request-Method", "Origin", "Accept", "Accept-Encoding", "Cache-Control", "Connection", "Content-Length", "Content-Type", "Date", "ETag", "Expires", "If-Match", "If-Modified-Since", "If-None-Match", "If-Unmodified-Since", "Last-Modified", "Pragma", "Request-Id", "Retry-After", "Server", "Transfer-Encoding", "User-Agent", "WWW-Authenticate", ]; const defaultAllowedQueryParameters = ["api-version"]; /** * @internal */ class Sanitizer { constructor({ additionalAllowedHeaderNames: allowedHeaderNames = [], additionalAllowedQueryParameters: allowedQueryParameters = [], } = {}) { allowedHeaderNames = defaultAllowedHeaderNames.concat(allowedHeaderNames); allowedQueryParameters = defaultAllowedQueryParameters.concat(allowedQueryParameters); this.allowedHeaderNames = new Set(allowedHeaderNames.map((n) => n.toLowerCase())); this.allowedQueryParameters = new Set(allowedQueryParameters.map((p) => p.toLowerCase())); } sanitize(obj) { const seen = new Set(); return JSON.stringify(obj, (key, value) => { // Ensure Errors include their interesting non-enumerable members if (value instanceof Error) { return Object.assign(Object.assign({}, value), { name: value.name, message: value.message }); } if (key === "headers") { return this.sanitizeHeaders(value); } else if (key === "url") { return this.sanitizeUrl(value); } else if (key === "query") { return this.sanitizeQuery(value); } else if (key === "body") { // Don't log the request body return undefined; } else if (key === "response") { // Don't log response again return undefined; } else if (key === "operationSpec") { // When using sendOperationRequest, the request carries a massive // field with the autorest spec. No need to log it. 
return undefined; } else if (Array.isArray(value) || (0, core_util_1.isObject)(value)) { if (seen.has(value)) { return "[Circular]"; } seen.add(value); } return value; }, 2); } sanitizeUrl(value) { if (typeof value !== "string" || value === null || value === "") { return value; } const url = new URL(value); if (!url.search) { return value; } for (const [key] of url.searchParams) { if (!this.allowedQueryParameters.has(key.toLowerCase())) { url.searchParams.set(key, RedactedString); } } return url.toString(); } sanitizeHeaders(obj) { const sanitized = {}; for (const key of Object.keys(obj)) { if (this.allowedHeaderNames.has(key.toLowerCase())) { sanitized[key] = obj[key]; } else { sanitized[key] = RedactedString; } } return sanitized; } sanitizeQuery(value) { if (typeof value !== "object" || value === null) { return value; } const sanitized = {}; for (const k of Object.keys(value)) { if (this.allowedQueryParameters.has(k.toLowerCase())) { sanitized[k] = value[k]; } else { sanitized[k] = RedactedString; } } return sanitized; } } exports.Sanitizer = Sanitizer; //# sourceMappingURL=sanitizer.js.map /***/ }), /***/ 39202: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.DEFAULT_CYCLER_OPTIONS = void 0; exports.createTokenCycler = createTokenCycler; const helpers_js_1 = __nccwpck_require__(653); // Default options for the cycler if none are provided exports.DEFAULT_CYCLER_OPTIONS = { forcedRefreshWindowInMs: 1000, // Force waiting for a refresh 1s before the token expires retryIntervalInMs: 3000, // Allow refresh attempts every 3s refreshWindowInMs: 1000 * 60 * 2, // Start refreshing 2m before expiry }; /** * Converts an an unreliable access token getter (which may resolve with null) * into an AccessTokenGetter by retrying the unreliable getter in a regular * interval. * * @param getAccessToken - A function that produces a promise of an access token that may fail by returning null. * @param retryIntervalInMs - The time (in milliseconds) to wait between retry attempts. * @param refreshTimeout - The timestamp after which the refresh attempt will fail, throwing an exception. * @returns - A promise that, if it resolves, will resolve with an access token. */ async function beginRefresh(getAccessToken, retryIntervalInMs, refreshTimeout) { // This wrapper handles exceptions gracefully as long as we haven't exceeded // the timeout. async function tryGetAccessToken() { if (Date.now() < refreshTimeout) { try { return await getAccessToken(); } catch (_a) { return null; } } else { const finalToken = await getAccessToken(); // Timeout is up, so throw if it's still null if (finalToken === null) { throw new Error("Failed to refresh access token."); } return finalToken; } } let token = await tryGetAccessToken(); while (token === null) { await (0, helpers_js_1.delay)(retryIntervalInMs); token = await tryGetAccessToken(); } return token; } /** * Creates a token cycler from a credential, scopes, and optional settings. * * A token cycler represents a way to reliably retrieve a valid access token * from a TokenCredential. It will handle initializing the token, refreshing it * when it nears expiration, and synchronizes refresh attempts to avoid * concurrency hazards. 
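*
* @example
* A rough sketch of the intended call pattern; the credential object and the scope string are
* assumptions for illustration:
* ```ts
* const getAccessToken = createTokenCycler(credential, { retryIntervalInMs: 3000 });
* const token = await getAccessToken(["https://example.service/.default"], {});
* ```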
* * @param credential - the underlying TokenCredential that provides the access * token * @param tokenCyclerOptions - optionally override default settings for the cycler * * @returns - a function that reliably produces a valid access token */ function createTokenCycler(credential, tokenCyclerOptions) { let refreshWorker = null; let token = null; let tenantId; const options = Object.assign(Object.assign({}, exports.DEFAULT_CYCLER_OPTIONS), tokenCyclerOptions); /** * This little holder defines several predicates that we use to construct * the rules of refreshing the token. */ const cycler = { /** * Produces true if a refresh job is currently in progress. */ get isRefreshing() { return refreshWorker !== null; }, /** * Produces true if the cycler SHOULD refresh (we are within the refresh * window and not already refreshing) */ get shouldRefresh() { var _a; if (cycler.isRefreshing) { return false; } if ((token === null || token === void 0 ? void 0 : token.refreshAfterTimestamp) && token.refreshAfterTimestamp < Date.now()) { return true; } return ((_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : 0) - options.refreshWindowInMs < Date.now(); }, /** * Produces true if the cycler MUST refresh (null or nearly-expired * token). */ get mustRefresh() { return (token === null || token.expiresOnTimestamp - options.forcedRefreshWindowInMs < Date.now()); }, }; /** * Starts a refresh job or returns the existing job if one is already * running. */ function refresh(scopes, getTokenOptions) { var _a; if (!cycler.isRefreshing) { // We bind `scopes` here to avoid passing it around a lot const tryGetAccessToken = () => credential.getToken(scopes, getTokenOptions); // Take advantage of promise chaining to insert an assignment to `token` // before the refresh can be considered done. refreshWorker = beginRefresh(tryGetAccessToken, options.retryIntervalInMs, // If we don't have a token, then we should timeout immediately (_a = token === null || token === void 0 ? void 0 : token.expiresOnTimestamp) !== null && _a !== void 0 ? _a : Date.now()) .then((_token) => { refreshWorker = null; token = _token; tenantId = getTokenOptions.tenantId; return token; }) .catch((reason) => { // We also should reset the refresher if we enter a failed state. All // existing awaiters will throw, but subsequent requests will start a // new retry chain. refreshWorker = null; token = null; tenantId = undefined; throw reason; }); } return refreshWorker; } return async (scopes, tokenOptions) => { // // Simple rules: // - If we MUST refresh, then return the refresh task, blocking // the pipeline until a token is available. // - If we SHOULD refresh, then run refresh but don't return it // (we can still use the cached token). // - Return the token, since it's fine if we didn't return in // step 1. // const hasClaimChallenge = Boolean(tokenOptions.claims); const tenantIdChanged = tenantId !== tokenOptions.tenantId; if (hasClaimChallenge) { // If we've received a claim, we know the existing token isn't valid // We want to clear it so that that refresh worker won't use the old expiration time as a timeout token = null; } // If the tenantId passed in token options is different to the one we have // Or if we are in claim challenge and the token was rejected and a new access token need to be issued, we need to // refresh the token with the new tenantId or token. 
const mustRefresh = tenantIdChanged || hasClaimChallenge || cycler.mustRefresh; if (mustRefresh) { return refresh(scopes, tokenOptions); } if (cycler.shouldRefresh) { refresh(scopes, tokenOptions); } return token; }; } //# sourceMappingURL=tokenCycler.js.map /***/ }), /***/ 22621: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isNodeReadableStream = isNodeReadableStream; exports.isWebReadableStream = isWebReadableStream; exports.isReadableStream = isReadableStream; exports.isBlob = isBlob; function isNodeReadableStream(x) { return Boolean(x && typeof x["pipe"] === "function"); } function isWebReadableStream(x) { return Boolean(x && typeof x.getReader === "function" && typeof x.tee === "function"); } function isReadableStream(x) { return isNodeReadableStream(x) || isWebReadableStream(x); } function isBlob(x) { return typeof x.stream === "function"; } //# sourceMappingURL=typeGuards.js.map /***/ }), /***/ 28431: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getUserAgentHeaderName = getUserAgentHeaderName; exports.getUserAgentValue = getUserAgentValue; const userAgentPlatform_js_1 = __nccwpck_require__(31848); const constants_js_1 = __nccwpck_require__(66427); function getUserAgentString(telemetryInfo) { const parts = []; for (const [key, value] of telemetryInfo) { const token = value ? `${key}/${value}` : key; parts.push(token); } return parts.join(" "); } /** * @internal */ function getUserAgentHeaderName() { return (0, userAgentPlatform_js_1.getHeaderName)(); } /** * @internal */ async function getUserAgentValue(prefix) { const runtimeInfo = new Map(); runtimeInfo.set("core-rest-pipeline", constants_js_1.SDK_VERSION); await (0, userAgentPlatform_js_1.setPlatformSpecificData)(runtimeInfo); const defaultAgent = getUserAgentString(runtimeInfo); const userAgentValue = prefix ? `${prefix} ${defaultAgent}` : defaultAgent; return userAgentValue; } //# sourceMappingURL=userAgent.js.map /***/ }), /***/ 31848: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getHeaderName = getHeaderName; exports.setPlatformSpecificData = setPlatformSpecificData; const tslib_1 = __nccwpck_require__(61860); const os = tslib_1.__importStar(__nccwpck_require__(48161)); const process = tslib_1.__importStar(__nccwpck_require__(1708)); /** * @internal */ function getHeaderName() { return "User-Agent"; } /** * @internal */ async function setPlatformSpecificData(map) { if (process && process.versions) { const versions = process.versions; if (versions.bun) { map.set("Bun", versions.bun); } else if (versions.deno) { map.set("Deno", versions.deno); } else if (versions.node) { map.set("Node", versions.node); } } map.set("OS", `(${os.arch()}-${os.type()}-${os.release()})`); } //# sourceMappingURL=userAgentPlatform.js.map /***/ }), /***/ 45455: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
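// A brief sketch of what `setPlatformSpecificData` from the preceding module
// (userAgentPlatform.js) adds to the runtime-info map on Node.js (inside an async function);
// the concrete version and OS values depend on the environment.
//
//   const runtimeInfo = new Map();
//   await setPlatformSpecificData(runtimeInfo);
//   // e.g. Map { "Node" => "20.11.1", "OS" => "(x64-Linux-5.15.0)" }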
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AbortError = void 0; /** * This error is thrown when an asynchronous operation has been aborted. * Check for this error by testing the `name` that the name property of the * error matches `"AbortError"`. * * @example * ```ts * const controller = new AbortController(); * controller.abort(); * try { * doAsyncWork(controller.signal) * } catch (e) { * if (e.name === 'AbortError') { * // handle abort error here. * } * } * ``` */ class AbortError extends Error { constructor(message) { super(message); this.name = "AbortError"; } } exports.AbortError = AbortError; //# sourceMappingURL=AbortError.js.map /***/ }), /***/ 93287: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AbortError = void 0; var AbortError_js_1 = __nccwpck_require__(45455); Object.defineProperty(exports, "AbortError", ({ enumerable: true, get: function () { return AbortError_js_1.AbortError; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 20623: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createTracingClient = exports.useInstrumenter = void 0; var instrumenter_js_1 = __nccwpck_require__(48729); Object.defineProperty(exports, "useInstrumenter", ({ enumerable: true, get: function () { return instrumenter_js_1.useInstrumenter; } })); var tracingClient_js_1 = __nccwpck_require__(71057); Object.defineProperty(exports, "createTracingClient", ({ enumerable: true, get: function () { return tracingClient_js_1.createTracingClient; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 48729: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createDefaultTracingSpan = createDefaultTracingSpan; exports.createDefaultInstrumenter = createDefaultInstrumenter; exports.useInstrumenter = useInstrumenter; exports.getInstrumenter = getInstrumenter; const tracingContext_js_1 = __nccwpck_require__(79186); const state_js_1 = __nccwpck_require__(38914); function createDefaultTracingSpan() { return { end: () => { // noop }, isRecording: () => false, recordException: () => { // noop }, setAttribute: () => { // noop }, setStatus: () => { // noop }, addEvent: () => { // noop }, }; } function createDefaultInstrumenter() { return { createRequestHeaders: () => { return {}; }, parseTraceparentHeader: () => { return undefined; }, startSpan: (_name, spanOptions) => { return { span: createDefaultTracingSpan(), tracingContext: (0, tracingContext_js_1.createTracingContext)({ parentContext: spanOptions.tracingContext }), }; }, withContext(_context, callback, ...callbackArgs) { return callback(...callbackArgs); }, }; } /** * Extends the Azure SDK with support for a given instrumenter implementation. * * @param instrumenter - The instrumenter implementation to use. */ function useInstrumenter(instrumenter) { state_js_1.state.instrumenterImplementation = instrumenter; } /** * Gets the currently set instrumenter, a No-Op instrumenter by default. 
* * @returns The currently set instrumenter */ function getInstrumenter() { if (!state_js_1.state.instrumenterImplementation) { state_js_1.state.instrumenterImplementation = createDefaultInstrumenter(); } return state_js_1.state.instrumenterImplementation; } //# sourceMappingURL=instrumenter.js.map /***/ }), /***/ 38914: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.state = void 0; /** * @internal * * Holds the singleton instrumenter, to be shared across CJS and ESM imports. */ exports.state = { instrumenterImplementation: undefined, }; //# sourceMappingURL=state-cjs.cjs.map /***/ }), /***/ 71057: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createTracingClient = createTracingClient; const instrumenter_js_1 = __nccwpck_require__(48729); const tracingContext_js_1 = __nccwpck_require__(79186); /** * Creates a new tracing client. * * @param options - Options used to configure the tracing client. * @returns - An instance of {@link TracingClient}. */ function createTracingClient(options) { const { namespace, packageName, packageVersion } = options; function startSpan(name, operationOptions, spanOptions) { var _a; const startSpanResult = (0, instrumenter_js_1.getInstrumenter)().startSpan(name, Object.assign(Object.assign({}, spanOptions), { packageName: packageName, packageVersion: packageVersion, tracingContext: (_a = operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions) === null || _a === void 0 ? void 0 : _a.tracingContext })); let tracingContext = startSpanResult.tracingContext; const span = startSpanResult.span; if (!tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)) { tracingContext = tracingContext.setValue(tracingContext_js_1.knownContextKeys.namespace, namespace); } span.setAttribute("az.namespace", tracingContext.getValue(tracingContext_js_1.knownContextKeys.namespace)); const updatedOptions = Object.assign({}, operationOptions, { tracingOptions: Object.assign(Object.assign({}, operationOptions === null || operationOptions === void 0 ? void 0 : operationOptions.tracingOptions), { tracingContext }), }); return { span, updatedOptions, }; } async function withSpan(name, operationOptions, callback, spanOptions) { const { span, updatedOptions } = startSpan(name, operationOptions, spanOptions); try { const result = await withContext(updatedOptions.tracingOptions.tracingContext, () => Promise.resolve(callback(updatedOptions, span))); span.setStatus({ status: "success" }); return result; } catch (err) { span.setStatus({ status: "error", error: err }); throw err; } finally { span.end(); } } function withContext(context, callback, ...callbackArgs) { return (0, instrumenter_js_1.getInstrumenter)().withContext(context, callback, ...callbackArgs); } /** * Parses a traceparent header value into a span identifier. * * @param traceparentHeader - The traceparent header to parse. * @returns An implementation-specific identifier for the span. */ function parseTraceparentHeader(traceparentHeader) { return (0, instrumenter_js_1.getInstrumenter)().parseTraceparentHeader(traceparentHeader); } /** * Creates a set of request headers to propagate tracing information to a backend. 
* * @param tracingContext - The context containing the span to serialize. * @returns The set of headers to add to a request. */ function createRequestHeaders(tracingContext) { return (0, instrumenter_js_1.getInstrumenter)().createRequestHeaders(tracingContext); } return { startSpan, withSpan, withContext, parseTraceparentHeader, createRequestHeaders, }; } //# sourceMappingURL=tracingClient.js.map /***/ }), /***/ 79186: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.TracingContextImpl = exports.knownContextKeys = void 0; exports.createTracingContext = createTracingContext; /** @internal */ exports.knownContextKeys = { span: Symbol.for("@azure/core-tracing span"), namespace: Symbol.for("@azure/core-tracing namespace"), }; /** * Creates a new {@link TracingContext} with the given options. * @param options - A set of known keys that may be set on the context. * @returns A new {@link TracingContext} with the given options. * * @internal */ function createTracingContext(options = {}) { let context = new TracingContextImpl(options.parentContext); if (options.span) { context = context.setValue(exports.knownContextKeys.span, options.span); } if (options.namespace) { context = context.setValue(exports.knownContextKeys.namespace, options.namespace); } return context; } /** @internal */ class TracingContextImpl { constructor(initialContext) { this._contextMap = initialContext instanceof TracingContextImpl ? new Map(initialContext._contextMap) : new Map(); } setValue(key, value) { const newContext = new TracingContextImpl(this); newContext._contextMap.set(key, value); return newContext; } getValue(key) { return this._contextMap.get(key); } deleteValue(key) { const newContext = new TracingContextImpl(this); newContext._contextMap.delete(key); return newContext; } } exports.TracingContextImpl = TracingContextImpl; //# sourceMappingURL=tracingContext.js.map /***/ }), /***/ 95209: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.cancelablePromiseRace = cancelablePromiseRace; /** * promise.race() wrapper that aborts rest of promises as soon as the first promise settles. */ async function cancelablePromiseRace(abortablePromiseBuilders, options) { var _a, _b; const aborter = new AbortController(); function abortHandler() { aborter.abort(); } (_a = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _a === void 0 ? void 0 : _a.addEventListener("abort", abortHandler); try { return await Promise.race(abortablePromiseBuilders.map((p) => p({ abortSignal: aborter.signal }))); } finally { aborter.abort(); (_b = options === null || options === void 0 ? void 0 : options.abortSignal) === null || _b === void 0 ? void 0 : _b.removeEventListener("abort", abortHandler); } } //# sourceMappingURL=aborterUtils.js.map /***/ }), /***/ 2741: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
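// A minimal sketch of `cancelablePromiseRace` from the preceding module (aborterUtils.js),
// run inside an async function; both builder functions are assumptions for illustration.
//
//   const quick = ({ abortSignal }) =>
//     new Promise((resolve) => setTimeout(() => resolve("quick"), 10));
//   const pending = ({ abortSignal }) =>
//     new Promise((resolve) => abortSignal.addEventListener("abort", () => resolve("aborted")));
//   const winner = await cancelablePromiseRace([quick, pending]); // "quick"
//   // After the race settles, the shared AbortController is aborted, waking `pending`.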
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.uint8ArrayToString = uint8ArrayToString; exports.stringToUint8Array = stringToUint8Array; /** * The helper that transforms bytes with specific character encoding into string * @param bytes - the uint8array bytes * @param format - the format we use to encode the byte * @returns a string of the encoded string */ function uint8ArrayToString(bytes, format) { return Buffer.from(bytes).toString(format); } /** * The helper that transforms string to specific character encoded bytes array. * @param value - the string to be converted * @param format - the format we use to decode the value * @returns a uint8array */ function stringToUint8Array(value, format) { return Buffer.from(value, format); } //# sourceMappingURL=bytesEncoding.js.map /***/ }), /***/ 8162: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. var _a, _b, _c, _d; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isReactNative = exports.isNodeRuntime = exports.isNode = exports.isNodeLike = exports.isBun = exports.isDeno = exports.isWebWorker = exports.isBrowser = void 0; /** * A constant that indicates whether the environment the code is running is a Web Browser. */ // eslint-disable-next-line @azure/azure-sdk/ts-no-window exports.isBrowser = typeof window !== "undefined" && typeof window.document !== "undefined"; /** * A constant that indicates whether the environment the code is running is a Web Worker. */ exports.isWebWorker = typeof self === "object" && typeof (self === null || self === void 0 ? void 0 : self.importScripts) === "function" && (((_a = self.constructor) === null || _a === void 0 ? void 0 : _a.name) === "DedicatedWorkerGlobalScope" || ((_b = self.constructor) === null || _b === void 0 ? void 0 : _b.name) === "ServiceWorkerGlobalScope" || ((_c = self.constructor) === null || _c === void 0 ? void 0 : _c.name) === "SharedWorkerGlobalScope"); /** * A constant that indicates whether the environment the code is running is Deno. */ exports.isDeno = typeof Deno !== "undefined" && typeof Deno.version !== "undefined" && typeof Deno.version.deno !== "undefined"; /** * A constant that indicates whether the environment the code is running is Bun.sh. */ exports.isBun = typeof Bun !== "undefined" && typeof Bun.version !== "undefined"; /** * A constant that indicates whether the environment the code is running is a Node.js compatible environment. */ exports.isNodeLike = typeof globalThis.process !== "undefined" && Boolean(globalThis.process.version) && Boolean((_d = globalThis.process.versions) === null || _d === void 0 ? void 0 : _d.node); /** * A constant that indicates whether the environment the code is running is a Node.js compatible environment. * @deprecated Use `isNodeLike` instead. */ exports.isNode = exports.isNodeLike; /** * A constant that indicates whether the environment the code is running is Node.JS. */ exports.isNodeRuntime = exports.isNodeLike && !exports.isBun && !exports.isDeno; /** * A constant that indicates whether the environment the code is running is in React-Native. */ // https://github.com/facebook/react-native/blob/main/packages/react-native/Libraries/Core/setUpNavigator.js exports.isReactNative = typeof navigator !== "undefined" && (navigator === null || navigator === void 0 ? 
void 0 : navigator.product) === "ReactNative"; //# sourceMappingURL=checkEnvironment.js.map /***/ }), /***/ 63128: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.createAbortablePromise = createAbortablePromise; const abort_controller_1 = __nccwpck_require__(16492); /** * Creates an abortable promise. * @param buildPromise - A function that takes the resolve and reject functions as parameters. * @param options - The options for the abortable promise. * @returns A promise that can be aborted. */ function createAbortablePromise(buildPromise, options) { const { cleanupBeforeAbort, abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; return new Promise((resolve, reject) => { function rejectOnAbort() { reject(new abort_controller_1.AbortError(abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : "The operation was aborted.")); } function removeListeners() { abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.removeEventListener("abort", onAbort); } function onAbort() { cleanupBeforeAbort === null || cleanupBeforeAbort === void 0 ? void 0 : cleanupBeforeAbort(); removeListeners(); rejectOnAbort(); } if (abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.aborted) { return rejectOnAbort(); } try { buildPromise((x) => { removeListeners(); resolve(x); }, (x) => { removeListeners(); reject(x); }); } catch (err) { reject(err); } abortSignal === null || abortSignal === void 0 ? void 0 : abortSignal.addEventListener("abort", onAbort); }); } //# sourceMappingURL=createAbortablePromise.js.map /***/ }), /***/ 90636: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.delay = delay; exports.calculateRetryDelay = calculateRetryDelay; const createAbortablePromise_js_1 = __nccwpck_require__(63128); const random_js_1 = __nccwpck_require__(64196); const StandardAbortMessage = "The delay was aborted."; /** * A wrapper for setTimeout that resolves a promise after timeInMs milliseconds. * @param timeInMs - The number of milliseconds to be delayed. * @param options - The options for delay - currently abort options * @returns Promise that is resolved after timeInMs */ function delay(timeInMs, options) { let token; const { abortSignal, abortErrorMsg } = options !== null && options !== void 0 ? options : {}; return (0, createAbortablePromise_js_1.createAbortablePromise)((resolve) => { token = setTimeout(resolve, timeInMs); }, { cleanupBeforeAbort: () => clearTimeout(token), abortSignal, abortErrorMsg: abortErrorMsg !== null && abortErrorMsg !== void 0 ? abortErrorMsg : StandardAbortMessage, }); } /** * Calculates the delay interval for retry attempts using exponential delay with jitter. * @param retryAttempt - The current retry attempt number. * @param config - The exponential retry configuration. * @returns An object containing the calculated retry delay. 
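*
* @example
* A worked sketch for `retryAttempt = 2` with an assumed config of
* `{ retryDelayInMs: 1000, maxRetryDelayInMs: 30000 }`:
* ```ts
* // exponentialDelay = 1000 * 2 ** 2    = 4000
* // clampedDelay     = min(30000, 4000) = 4000
* // retryAfterInMs   = 2000 + jitter in [0, 2000], i.e. between 2000 and 4000 ms
* const { retryAfterInMs } = calculateRetryDelay(2, { retryDelayInMs: 1000, maxRetryDelayInMs: 30000 });
* ```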
*/ function calculateRetryDelay(retryAttempt, config) { // Exponentially increase the delay each time const exponentialDelay = config.retryDelayInMs * Math.pow(2, retryAttempt); // Don't let the delay exceed the maximum const clampedDelay = Math.min(config.maxRetryDelayInMs, exponentialDelay); // Allow the final value to have some "jitter" (within 50% of the delay size) so // that retries across multiple clients don't occur simultaneously. const retryAfterInMs = clampedDelay / 2 + (0, random_js_1.getRandomIntegerInclusive)(0, clampedDelay / 2); return { retryAfterInMs }; } //# sourceMappingURL=delay.js.map /***/ }), /***/ 99945: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isError = isError; exports.getErrorMessage = getErrorMessage; const object_js_1 = __nccwpck_require__(17756); /** * Typeguard for an error object shape (has name and message) * @param e - Something caught by a catch clause. */ function isError(e) { if ((0, object_js_1.isObject)(e)) { const hasName = typeof e.name === "string"; const hasMessage = typeof e.message === "string"; return hasName && hasMessage; } return false; } /** * Given what is thought to be an error object, return the message if possible. * If the message is missing, returns a stringified version of the input. * @param e - Something thrown from a try block * @returns The error message or a string of the input */ function getErrorMessage(e) { if (isError(e)) { return e.message; } else { let stringified; try { if (typeof e === "object" && e) { stringified = JSON.stringify(e); } else { stringified = String(e); } } catch (err) { stringified = "[unable to stringify input]"; } return `Unknown error ${stringified}`; } } //# sourceMappingURL=error.js.map /***/ }), /***/ 87779: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. 
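// A small sketch of how `getErrorMessage` from the preceding module (error.js) normalizes
// different thrown values; the inputs are arbitrary examples.
//
//   getErrorMessage(new Error("boom"));  // "boom"
//   getErrorMessage({ code: 42 });       // 'Unknown error {"code":42}'
//   getErrorMessage("just a string");    // "Unknown error just a string"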
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.stringToUint8Array = exports.uint8ArrayToString = exports.isWebWorker = exports.isReactNative = exports.isDeno = exports.isNodeRuntime = exports.isNodeLike = exports.isNode = exports.isBun = exports.isBrowser = exports.randomUUID = exports.objectHasProperty = exports.isObjectWithProperties = exports.isDefined = exports.computeSha256Hmac = exports.computeSha256Hash = exports.getErrorMessage = exports.isError = exports.isObject = exports.getRandomIntegerInclusive = exports.createAbortablePromise = exports.cancelablePromiseRace = exports.calculateRetryDelay = exports.delay = void 0; var delay_js_1 = __nccwpck_require__(90636); Object.defineProperty(exports, "delay", ({ enumerable: true, get: function () { return delay_js_1.delay; } })); Object.defineProperty(exports, "calculateRetryDelay", ({ enumerable: true, get: function () { return delay_js_1.calculateRetryDelay; } })); var aborterUtils_js_1 = __nccwpck_require__(95209); Object.defineProperty(exports, "cancelablePromiseRace", ({ enumerable: true, get: function () { return aborterUtils_js_1.cancelablePromiseRace; } })); var createAbortablePromise_js_1 = __nccwpck_require__(63128); Object.defineProperty(exports, "createAbortablePromise", ({ enumerable: true, get: function () { return createAbortablePromise_js_1.createAbortablePromise; } })); var random_js_1 = __nccwpck_require__(64196); Object.defineProperty(exports, "getRandomIntegerInclusive", ({ enumerable: true, get: function () { return random_js_1.getRandomIntegerInclusive; } })); var object_js_1 = __nccwpck_require__(17756); Object.defineProperty(exports, "isObject", ({ enumerable: true, get: function () { return object_js_1.isObject; } })); var error_js_1 = __nccwpck_require__(99945); Object.defineProperty(exports, "isError", ({ enumerable: true, get: function () { return error_js_1.isError; } })); Object.defineProperty(exports, "getErrorMessage", ({ enumerable: true, get: function () { return error_js_1.getErrorMessage; } })); var sha256_js_1 = __nccwpck_require__(19732); Object.defineProperty(exports, "computeSha256Hash", ({ enumerable: true, get: function () { return sha256_js_1.computeSha256Hash; } })); Object.defineProperty(exports, "computeSha256Hmac", ({ enumerable: true, get: function () { return sha256_js_1.computeSha256Hmac; } })); var typeGuards_js_1 = __nccwpck_require__(66277); Object.defineProperty(exports, "isDefined", ({ enumerable: true, get: function () { return typeGuards_js_1.isDefined; } })); Object.defineProperty(exports, "isObjectWithProperties", ({ enumerable: true, get: function () { return typeGuards_js_1.isObjectWithProperties; } })); Object.defineProperty(exports, "objectHasProperty", ({ enumerable: true, get: function () { return typeGuards_js_1.objectHasProperty; } })); var uuidUtils_js_1 = __nccwpck_require__(98795); Object.defineProperty(exports, "randomUUID", ({ enumerable: true, get: function () { return uuidUtils_js_1.randomUUID; } })); var checkEnvironment_js_1 = __nccwpck_require__(8162); Object.defineProperty(exports, "isBrowser", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isBrowser; } })); Object.defineProperty(exports, "isBun", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isBun; } })); Object.defineProperty(exports, "isNode", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isNode; } })); Object.defineProperty(exports, "isNodeLike", ({ enumerable: true, get: function () { return 
checkEnvironment_js_1.isNodeLike; } })); Object.defineProperty(exports, "isNodeRuntime", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isNodeRuntime; } })); Object.defineProperty(exports, "isDeno", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isDeno; } })); Object.defineProperty(exports, "isReactNative", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isReactNative; } })); Object.defineProperty(exports, "isWebWorker", ({ enumerable: true, get: function () { return checkEnvironment_js_1.isWebWorker; } })); var bytesEncoding_js_1 = __nccwpck_require__(2741); Object.defineProperty(exports, "uint8ArrayToString", ({ enumerable: true, get: function () { return bytesEncoding_js_1.uint8ArrayToString; } })); Object.defineProperty(exports, "stringToUint8Array", ({ enumerable: true, get: function () { return bytesEncoding_js_1.stringToUint8Array; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 17756: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isObject = isObject; /** * Helper to determine when an input is a generic JS object. * @returns true when input is an object type that is not null, Array, RegExp, or Date. */ function isObject(input) { return (typeof input === "object" && input !== null && !Array.isArray(input) && !(input instanceof RegExp) && !(input instanceof Date)); } //# sourceMappingURL=object.js.map /***/ }), /***/ 64196: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.getRandomIntegerInclusive = getRandomIntegerInclusive; /** * Returns a random integer value between a lower and upper bound, * inclusive of both bounds. * Note that this uses Math.random and isn't secure. If you need to use * this for any kind of security purpose, find a better source of random. * @param min - The smallest integer value allowed. * @param max - The largest integer value allowed. */ function getRandomIntegerInclusive(min, max) { // Make sure inputs are integers. min = Math.ceil(min); max = Math.floor(max); // Pick a random offset from zero to the size of the range. // Since Math.random() can never return 1, we have to make the range one larger // in order to be inclusive of the maximum value after we take the floor. const offset = Math.floor(Math.random() * (max - min + 1)); return offset + min; } //# sourceMappingURL=random.js.map /***/ }), /***/ 19732: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.computeSha256Hmac = computeSha256Hmac; exports.computeSha256Hash = computeSha256Hash; const crypto_1 = __nccwpck_require__(76982); /** * Generates a SHA-256 HMAC signature. * @param key - The HMAC key represented as a base64 string, used to generate the cryptographic HMAC hash. * @param stringToSign - The data to be signed. * @param encoding - The textual encoding to use for the returned HMAC digest. */ async function computeSha256Hmac(key, stringToSign, encoding) { const decodedKey = Buffer.from(key, "base64"); return (0, crypto_1.createHmac)("sha256", decodedKey).update(stringToSign).digest(encoding); } /** * Generates a SHA-256 hash. 
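*
* For example, `await computeSha256Hash("hello", "hex")` returns the well-known digest
* `"2cf24dba5fb0a30e26e83b2ac5b9e29e1b161e5c1fa7425e73043362938b9824"`.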
* @param content - The data to be included in the hash. * @param encoding - The textual encoding to use for the returned hash. */ async function computeSha256Hash(content, encoding) { return (0, crypto_1.createHash)("sha256").update(content).digest(encoding); } //# sourceMappingURL=sha256.js.map /***/ }), /***/ 66277: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.isDefined = isDefined; exports.isObjectWithProperties = isObjectWithProperties; exports.objectHasProperty = objectHasProperty; /** * Helper TypeGuard that checks if something is defined or not. * @param thing - Anything */ function isDefined(thing) { return typeof thing !== "undefined" && thing !== null; } /** * Helper TypeGuard that checks if the input is an object with the specified properties. * @param thing - Anything. * @param properties - The name of the properties that should appear in the object. */ function isObjectWithProperties(thing, properties) { if (!isDefined(thing) || typeof thing !== "object") { return false; } for (const property of properties) { if (!objectHasProperty(thing, property)) { return false; } } return true; } /** * Helper TypeGuard that checks if the input is an object with the specified property. * @param thing - Any object. * @param property - The name of the property that should appear in the object. */ function objectHasProperty(thing, property) { return (isDefined(thing) && typeof thing === "object" && property in thing); } //# sourceMappingURL=typeGuards.js.map /***/ }), /***/ 98795: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. var _a; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.randomUUID = randomUUID; const crypto_1 = __nccwpck_require__(76982); // NOTE: This is a workaround until we can use `globalThis.crypto.randomUUID` in Node.js 19+. const uuidFunction = typeof ((_a = globalThis === null || globalThis === void 0 ? void 0 : globalThis.crypto) === null || _a === void 0 ? void 0 : _a.randomUUID) === "function" ? globalThis.crypto.randomUUID.bind(globalThis.crypto) : crypto_1.randomUUID; /** * Generated Universally Unique Identifier * * @returns RFC4122 v4 UUID. */ function randomUUID() { return uuidFunction(); } //# sourceMappingURL=uuidUtils.js.map /***/ }), /***/ 31658: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AbortError = void 0; /** * This error is thrown when an asynchronous operation has been aborted. * Check for this error by testing the `name` that the name property of the * error matches `"AbortError"`. * * @example * ```ts * const controller = new AbortController(); * controller.abort(); * try { * doAsyncWork(controller.signal) * } catch (e) { * if (e.name === 'AbortError') { * // handle abort error here. * } * } * ``` */ class AbortError extends Error { constructor(message) { super(message); this.name = "AbortError"; } } exports.AbortError = AbortError; //# sourceMappingURL=AbortError.js.map /***/ }), /***/ 16492: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
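// A short sketch of the type-guard helpers from the typeGuards.js module above; the sample
// object is an arbitrary example.
//
//   const pet = { name: "rex", kind: "dog" };
//   isObjectWithProperties(pet, ["name", "kind"]); // true
//   objectHasProperty(pet, "age");                 // false
//   isDefined(null);                               // false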
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AbortError = void 0; var AbortError_js_1 = __nccwpck_require__(31658); Object.defineProperty(exports, "AbortError", ({ enumerable: true, get: function () { return AbortError_js_1.AbortError; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 78756: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.XML_CHARKEY = exports.XML_ATTRKEY = exports.parseXML = exports.stringifyXML = void 0; var xml_js_1 = __nccwpck_require__(48133); Object.defineProperty(exports, "stringifyXML", ({ enumerable: true, get: function () { return xml_js_1.stringifyXML; } })); Object.defineProperty(exports, "parseXML", ({ enumerable: true, get: function () { return xml_js_1.parseXML; } })); var xml_common_js_1 = __nccwpck_require__(93406); Object.defineProperty(exports, "XML_ATTRKEY", ({ enumerable: true, get: function () { return xml_common_js_1.XML_ATTRKEY; } })); Object.defineProperty(exports, "XML_CHARKEY", ({ enumerable: true, get: function () { return xml_common_js_1.XML_CHARKEY; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 93406: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.XML_CHARKEY = exports.XML_ATTRKEY = void 0; /** * Default key used to access the XML attributes. */ exports.XML_ATTRKEY = "$"; /** * Default key used to access the XML value content. */ exports.XML_CHARKEY = "_"; //# sourceMappingURL=xml.common.js.map /***/ }), /***/ 48133: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.stringifyXML = stringifyXML; exports.parseXML = parseXML; const fast_xml_parser_1 = __nccwpck_require__(39741); const xml_common_js_1 = __nccwpck_require__(93406); function getCommonOptions(options) { var _a; return { attributesGroupName: xml_common_js_1.XML_ATTRKEY, textNodeName: (_a = options.xmlCharKey) !== null && _a !== void 0 ? _a : xml_common_js_1.XML_CHARKEY, ignoreAttributes: false, suppressBooleanAttributes: false, }; } function getSerializerOptions(options = {}) { var _a, _b; return Object.assign(Object.assign({}, getCommonOptions(options)), { attributeNamePrefix: "@_", format: true, suppressEmptyNode: true, indentBy: "", rootNodeName: (_a = options.rootName) !== null && _a !== void 0 ? _a : "root", cdataPropName: (_b = options.cdataPropName) !== null && _b !== void 0 ? 
_b : "__cdata" }); } function getParserOptions(options = {}) { return Object.assign(Object.assign({}, getCommonOptions(options)), { parseAttributeValue: false, parseTagValue: false, attributeNamePrefix: "", stopNodes: options.stopNodes, processEntities: true }); } /** * Converts given JSON object to XML string * @param obj - JSON object to be converted into XML string * @param opts - Options that govern the XML building of given JSON object * `rootName` indicates the name of the root element in the resulting XML */ function stringifyXML(obj, opts = {}) { const parserOptions = getSerializerOptions(opts); const j2x = new fast_xml_parser_1.XMLBuilder(parserOptions); const node = { [parserOptions.rootNodeName]: obj }; const xmlData = j2x.build(node); return `${xmlData}`.replace(/\n/g, ""); } /** * Converts given XML string into JSON * @param str - String containing the XML content to be parsed into JSON * @param opts - Options that govern the parsing of given xml string * `includeRoot` indicates whether the root element is to be included or not in the output */ async function parseXML(str, opts = {}) { if (!str) { throw new Error("Document is empty"); } const validation = fast_xml_parser_1.XMLValidator.validate(str); if (validation !== true) { throw validation; } const parser = new fast_xml_parser_1.XMLParser(getParserOptions(opts)); const parsedXml = parser.parse(str); // Remove the node. // This is a change in behavior on fxp v4. Issue #424 if (parsedXml["?xml"]) { delete parsedXml["?xml"]; } if (!opts.includeRoot) { for (const key of Object.keys(parsedXml)) { const value = parsedXml[key]; return typeof value === "object" ? Object.assign({}, value) : value; } } return parsedXml; } //# sourceMappingURL=xml.js.map /***/ }), /***/ 1676: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. 
Object.defineProperty(exports, "__esModule", ({ value: true })); const log_js_1 = __nccwpck_require__(36757); const debugEnvVariable = (typeof process !== "undefined" && process.env && process.env.DEBUG) || undefined; let enabledString; let enabledNamespaces = []; let skippedNamespaces = []; const debuggers = []; if (debugEnvVariable) { enable(debugEnvVariable); } const debugObj = Object.assign((namespace) => { return createDebugger(namespace); }, { enable, enabled, disable, log: log_js_1.log, }); function enable(namespaces) { enabledString = namespaces; enabledNamespaces = []; skippedNamespaces = []; const wildcard = /\*/g; const namespaceList = namespaces.split(",").map((ns) => ns.trim().replace(wildcard, ".*?")); for (const ns of namespaceList) { if (ns.startsWith("-")) { skippedNamespaces.push(new RegExp(`^${ns.substr(1)}$`)); } else { enabledNamespaces.push(new RegExp(`^${ns}$`)); } } for (const instance of debuggers) { instance.enabled = enabled(instance.namespace); } } function enabled(namespace) { if (namespace.endsWith("*")) { return true; } for (const skipped of skippedNamespaces) { if (skipped.test(namespace)) { return false; } } for (const enabledNamespace of enabledNamespaces) { if (enabledNamespace.test(namespace)) { return true; } } return false; } function disable() { const result = enabledString || ""; enable(""); return result; } function createDebugger(namespace) { const newDebugger = Object.assign(debug, { enabled: enabled(namespace), destroy, log: debugObj.log, namespace, extend, }); function debug(...args) { if (!newDebugger.enabled) { return; } if (args.length > 0) { args[0] = `${namespace} ${args[0]}`; } newDebugger.log(...args); } debuggers.push(newDebugger); return newDebugger; } function destroy() { const index = debuggers.indexOf(this); if (index >= 0) { debuggers.splice(index, 1); return true; } return false; } function extend(namespace) { const newDebugger = createDebugger(`${this.namespace}:${namespace}`); newDebugger.log = this.log; return newDebugger; } exports["default"] = debugObj; //# sourceMappingURL=debug.js.map /***/ }), /***/ 26515: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AzureLogger = void 0; exports.setLogLevel = setLogLevel; exports.getLogLevel = getLogLevel; exports.createClientLogger = createClientLogger; const tslib_1 = __nccwpck_require__(61860); const debug_js_1 = tslib_1.__importDefault(__nccwpck_require__(1676)); const registeredLoggers = new Set(); const logLevelFromEnv = (typeof process !== "undefined" && process.env && process.env.AZURE_LOG_LEVEL) || undefined; let azureLogLevel; /** * The AzureLogger provides a mechanism for overriding where logs are output to. * By default, logs are sent to stderr. * Override the `log` method to redirect logs to another location. */ exports.AzureLogger = (0, debug_js_1.default)("azure"); exports.AzureLogger.log = (...args) => { debug_js_1.default.log(...args); }; const AZURE_LOG_LEVELS = ["verbose", "info", "warning", "error"]; if (logLevelFromEnv) { // avoid calling setLogLevel because we don't want a mis-set environment variable to crash if (isAzureLogLevel(logLevelFromEnv)) { setLogLevel(logLevelFromEnv); } else { console.error(`AZURE_LOG_LEVEL set to unknown log level '${logLevelFromEnv}'; logging is not enabled. 
Acceptable values: ${AZURE_LOG_LEVELS.join(", ")}.`); } } /** * Immediately enables logging at the specified log level. If no level is specified, logging is disabled. * @param level - The log level to enable for logging. * Options from most verbose to least verbose are: * - verbose * - info * - warning * - error */ function setLogLevel(level) { if (level && !isAzureLogLevel(level)) { throw new Error(`Unknown log level '${level}'. Acceptable values: ${AZURE_LOG_LEVELS.join(",")}`); } azureLogLevel = level; const enabledNamespaces = []; for (const logger of registeredLoggers) { if (shouldEnable(logger)) { enabledNamespaces.push(logger.namespace); } } debug_js_1.default.enable(enabledNamespaces.join(",")); } /** * Retrieves the currently specified log level. */ function getLogLevel() { return azureLogLevel; } const levelMap = { verbose: 400, info: 300, warning: 200, error: 100, }; /** * Creates a logger for use by the Azure SDKs that inherits from `AzureLogger`. * @param namespace - The name of the SDK package. * @hidden */ function createClientLogger(namespace) { const clientRootLogger = exports.AzureLogger.extend(namespace); patchLogMethod(exports.AzureLogger, clientRootLogger); return { error: createLogger(clientRootLogger, "error"), warning: createLogger(clientRootLogger, "warning"), info: createLogger(clientRootLogger, "info"), verbose: createLogger(clientRootLogger, "verbose"), }; } function patchLogMethod(parent, child) { child.log = (...args) => { parent.log(...args); }; } function createLogger(parent, level) { const logger = Object.assign(parent.extend(level), { level, }); patchLogMethod(parent, logger); if (shouldEnable(logger)) { const enabledNamespaces = debug_js_1.default.disable(); debug_js_1.default.enable(enabledNamespaces + "," + logger.namespace); } registeredLoggers.add(logger); return logger; } function shouldEnable(logger) { return Boolean(azureLogLevel && levelMap[logger.level] <= levelMap[azureLogLevel]); } function isAzureLogLevel(logLevel) { return AZURE_LOG_LEVELS.includes(logLevel); } //# sourceMappingURL=index.js.map /***/ }), /***/ 36757: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.log = log; const tslib_1 = __nccwpck_require__(61860); const node_os_1 = __nccwpck_require__(48161); const node_util_1 = tslib_1.__importDefault(__nccwpck_require__(57975)); const process = tslib_1.__importStar(__nccwpck_require__(1708)); function log(message, ...args) { process.stderr.write(`${node_util_1.default.format(message, ...args)}${node_os_1.EOL}`); } //# sourceMappingURL=log.js.map /***/ }), /***/ 24841: /***/ ((__unused_webpack_module, exports) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AbortError = void 0; /** * This error is thrown when an asynchronous operation has been aborted. * Check for this error by testing that the `name` property of the * error matches `"AbortError"`. * * @example * ```ts * const controller = new AbortController(); * controller.abort(); * try { * doAsyncWork(controller.signal) * } catch (e) { * if (e.name === 'AbortError') { * // handle abort error here.
* } * } * ``` */ class AbortError extends Error { constructor(message) { super(message); this.name = "AbortError"; } } exports.AbortError = AbortError; //# sourceMappingURL=AbortError.js.map /***/ }), /***/ 24517: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright (c) Microsoft Corporation. // Licensed under the MIT license. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AbortError = void 0; var AbortError_js_1 = __nccwpck_require__(24841); Object.defineProperty(exports, "AbortError", ({ enumerable: true, get: function () { return AbortError_js_1.AbortError; } })); //# sourceMappingURL=index.js.map /***/ }), /***/ 27182: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const WritableStream = (__nccwpck_require__(57075).Writable) const inherits = (__nccwpck_require__(57975).inherits) const StreamSearch = __nccwpck_require__(84136) const PartStream = __nccwpck_require__(50612) const HeaderParser = __nccwpck_require__(62271) const DASH = 45 const B_ONEDASH = Buffer.from('-') const B_CRLF = Buffer.from('\r\n') const EMPTY_FN = function () {} function Dicer (cfg) { if (!(this instanceof Dicer)) { return new Dicer(cfg) } WritableStream.call(this, cfg) if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } this._headerFirst = cfg.headerFirst this._dashes = 0 this._parts = 0 this._finished = false this._realFinish = false this._isPreamble = true this._justMatched = false this._firstWrite = true this._inHeader = true this._part = undefined this._cb = undefined this._ignoreData = false this._partOpts = { highWaterMark: cfg.partHwm } this._pause = false const self = this this._hparser = new HeaderParser(cfg) this._hparser.on('header', function (header) { self._inHeader = false self._part.emit('header', header) }) } inherits(Dicer, WritableStream) Dicer.prototype.emit = function (ev) { if (ev === 'finish' && !this._realFinish) { if (!this._finished) { const self = this process.nextTick(function () { self.emit('error', new Error('Unexpected end of multipart data')) if (self._part && !self._ignoreData) { const type = (self._isPreamble ? 'Preamble' : 'Part') self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) self._part.push(null) process.nextTick(function () { self._realFinish = true self.emit('finish') self._realFinish = false }) return } self._realFinish = true self.emit('finish') self._realFinish = false }) } } else { WritableStream.prototype.emit.apply(this, arguments) } } Dicer.prototype._write = function (data, encoding, cb) { // ignore unexpected data (e.g. 
extra trailer data after finished) if (!this._hparser && !this._bparser) { return cb() } if (this._headerFirst && this._isPreamble) { if (!this._part) { this._part = new PartStream(this._partOpts) if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() } } const r = this._hparser.push(data) if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } } // allows for "easier" testing if (this._firstWrite) { this._bparser.push(B_CRLF) this._firstWrite = false } this._bparser.push(data) if (this._pause) { this._cb = cb } else { cb() } } Dicer.prototype.reset = function () { this._part = undefined this._bparser = undefined this._hparser = undefined } Dicer.prototype.setBoundary = function (boundary) { const self = this this._bparser = new StreamSearch('\r\n--' + boundary) this._bparser.on('info', function (isMatch, data, start, end) { self._oninfo(isMatch, data, start, end) }) } Dicer.prototype._ignore = function () { if (this._part && !this._ignoreData) { this._ignoreData = true this._part.on('error', EMPTY_FN) // we must perform some kind of read on the stream even though we are // ignoring the data, otherwise node's Readable stream will not emit 'end' // after pushing null to the stream this._part.resume() } } Dicer.prototype._oninfo = function (isMatch, data, start, end) { let buf; const self = this; let i = 0; let r; let shouldWriteMore = true if (!this._part && this._justMatched && data) { while (this._dashes < 2 && (start + i) < end) { if (data[start + i] === DASH) { ++i ++this._dashes } else { if (this._dashes) { buf = B_ONEDASH } this._dashes = 0 break } } if (this._dashes === 2) { if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) } this.reset() this._finished = true // no more parts will be added if (self._parts === 0) { self._realFinish = true self.emit('finish') self._realFinish = false } } if (this._dashes) { return } } if (this._justMatched) { this._justMatched = false } if (!this._part) { this._part = new PartStream(this._partOpts) this._part._read = function (n) { self._unpause() } if (this._isPreamble && this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) { this.emit('part', this._part) } else { this._ignore() } if (!this._isPreamble) { this._inHeader = true } } if (data && start < end && !this._ignoreData) { if (this._isPreamble || !this._inHeader) { if (buf) { shouldWriteMore = this._part.push(buf) } shouldWriteMore = this._part.push(data.slice(start, end)) if (!shouldWriteMore) { this._pause = true } } else if (!this._isPreamble && this._inHeader) { if (buf) { this._hparser.push(buf) } r = this._hparser.push(data.slice(start, end)) if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } } } if (isMatch) { this._hparser.reset() if (this._isPreamble) { this._isPreamble = false } else { if (start !== end) { ++this._parts this._part.on('end', function () { if (--self._parts === 0) { if (self._finished) { self._realFinish = true self.emit('finish') self._realFinish = false } else { self._unpause() } } }) } } this._part.push(null) this._part = undefined this._ignoreData = false this._justMatched = true this._dashes = 0 } } Dicer.prototype._unpause = function () { if (!this._pause) { return } this._pause = false if (this._cb) { const cb = this._cb this._cb = undefined cb() } } module.exports = 
Dicer /***/ }), /***/ 62271: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const EventEmitter = (__nccwpck_require__(78474).EventEmitter) const inherits = (__nccwpck_require__(57975).inherits) const getLimit = __nccwpck_require__(22393) const StreamSearch = __nccwpck_require__(84136) const B_DCRLF = Buffer.from('\r\n\r\n') const RE_CRLF = /\r\n/g const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex function HeaderParser (cfg) { EventEmitter.call(this) cfg = cfg || {} const self = this this.nread = 0 this.maxed = false this.npairs = 0 this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) this.buffer = '' this.header = {} this.finished = false this.ss = new StreamSearch(B_DCRLF) this.ss.on('info', function (isMatch, data, start, end) { if (data && !self.maxed) { if (self.nread + end - start >= self.maxHeaderSize) { end = self.maxHeaderSize - self.nread + start self.nread = self.maxHeaderSize self.maxed = true } else { self.nread += (end - start) } self.buffer += data.toString('binary', start, end) } if (isMatch) { self._finish() } }) } inherits(HeaderParser, EventEmitter) HeaderParser.prototype.push = function (data) { const r = this.ss.push(data) if (this.finished) { return r } } HeaderParser.prototype.reset = function () { this.finished = false this.buffer = '' this.header = {} this.ss.reset() } HeaderParser.prototype._finish = function () { if (this.buffer) { this._parseHeader() } this.ss.matches = this.ss.maxMatches const header = this.header this.header = {} this.buffer = '' this.finished = true this.nread = this.npairs = 0 this.maxed = false this.emit('header', header) } HeaderParser.prototype._parseHeader = function () { if (this.npairs === this.maxHeaderPairs) { return } const lines = this.buffer.split(RE_CRLF) const len = lines.length let m, h for (var i = 0; i < len; ++i) { // eslint-disable-line no-var if (lines[i].length === 0) { continue } if (lines[i][0] === '\t' || lines[i][0] === ' ') { // folded header content // RFC2822 says to just remove the CRLF and not the whitespace following // it, so we follow the RFC and include the leading whitespace ... if (h) { this.header[h][this.header[h].length - 1] += lines[i] continue } } const posColon = lines[i].indexOf(':') if ( posColon === -1 || posColon === 0 ) { return } m = RE_HDR.exec(lines[i]) h = m[1].toLowerCase() this.header[h] = this.header[h] || [] this.header[h].push((m[2] || '')) if (++this.npairs === this.maxHeaderPairs) { break } } } module.exports = HeaderParser /***/ }), /***/ 50612: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const inherits = (__nccwpck_require__(57975).inherits) const ReadableStream = (__nccwpck_require__(57075).Readable) function PartStream (opts) { ReadableStream.call(this, opts) } inherits(PartStream, ReadableStream) PartStream.prototype._read = function (n) {} module.exports = PartStream /***/ }), /***/ 84136: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /** * Copyright Brian White. All rights reserved. 
* * @see https://github.com/mscdex/streamsearch * * Permission is hereby granted, free of charge, to any person obtaining a copy * of this software and associated documentation files (the "Software"), to * deal in the Software without restriction, including without limitation the * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or * sell copies of the Software, and to permit persons to whom the Software is * furnished to do so, subject to the following conditions: * * The above copyright notice and this permission notice shall be included in * all copies or substantial portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS * IN THE SOFTWARE. * * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool */ const EventEmitter = (__nccwpck_require__(78474).EventEmitter) const inherits = (__nccwpck_require__(57975).inherits) function SBMH (needle) { if (typeof needle === 'string') { needle = Buffer.from(needle) } if (!Buffer.isBuffer(needle)) { throw new TypeError('The needle has to be a String or a Buffer.') } const needleLength = needle.length if (needleLength === 0) { throw new Error('The needle cannot be an empty String/Buffer.') } if (needleLength > 256) { throw new Error('The needle cannot have a length bigger than 256.') } this.maxMatches = Infinity this.matches = 0 this._occ = new Array(256) .fill(needleLength) // Initialize occurrence table. this._lookbehind_size = 0 this._needle = needle this._bufpos = 0 this._lookbehind = Buffer.alloc(needleLength) // Populate occurrence table with analysis of the needle, // ignoring last letter. for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var this._occ[needle[i]] = needleLength - 1 - i } } inherits(SBMH, EventEmitter) SBMH.prototype.reset = function () { this._lookbehind_size = 0 this.matches = 0 this._bufpos = 0 } SBMH.prototype.push = function (chunk, pos) { if (!Buffer.isBuffer(chunk)) { chunk = Buffer.from(chunk, 'binary') } const chlen = chunk.length this._bufpos = pos || 0 let r while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } return r } SBMH.prototype._sbmh_feed = function (data) { const len = data.length const needle = this._needle const needleLength = needle.length const lastNeedleChar = needle[needleLength - 1] // Positive: points to a position in `data` // pos == 3 points to data[3] // Negative: points to a position in the lookbehind buffer // pos == -2 points to lookbehind[lookbehind_size - 2] let pos = -this._lookbehind_size let ch if (pos < 0) { // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool // search with character lookup code that considers both the // lookbehind buffer and the current round's haystack data. // // Loop until // there is a match. // or until // we've moved past the position that requires the // lookbehind buffer. In this case we switch to the // optimized loop. // or until // the character to look at lies outside the haystack. 
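// Editor's note (added commentary, inferred from the code below): negative
// `pos` values index into the lookbehind buffer, e.g. with
// _lookbehind_size === 3, pos === -2 reads _lookbehind[1], while pos >= 0
// reads data[pos] (see _sbmh_lookup_char). On a mismatch we advance by
// _occ[ch], the Boyer-Moore-Horspool shift for the haystack byte that was
// aligned with the last needle byte. For example, assuming the needle "--ab"
// (length 4): _occ defaults to 4, then _occ['-'] = 2 and _occ['a'] = 1, while
// 'b' (the final byte) keeps the default, so a byte that does not occur
// earlier in the needle lets the search skip a full needle length.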
while (pos < 0 && pos <= len - needleLength) { ch = this._sbmh_lookup_char(data, pos + needleLength - 1) if ( ch === lastNeedleChar && this._sbmh_memcmp(data, pos, needleLength - 1) ) { this._lookbehind_size = 0 ++this.matches this.emit('info', true) return (this._bufpos = pos + needleLength) } pos += this._occ[ch] } // No match. if (pos < 0) { // There's too few data for Boyer-Moore-Horspool to run, // so let's use a different algorithm to skip as much as // we can. // Forward pos until // the trailing part of lookbehind + data // looks like the beginning of the needle // or until // pos == 0 while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } } if (pos >= 0) { // Discard lookbehind buffer. this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) this._lookbehind_size = 0 } else { // Cut off part of the lookbehind buffer that has // been processed and append the entire haystack // into it. const bytesToCutOff = this._lookbehind_size + pos if (bytesToCutOff > 0) { // The cut off data is guaranteed not to contain the needle. this.emit('info', false, this._lookbehind, 0, bytesToCutOff) } this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, this._lookbehind_size - bytesToCutOff) this._lookbehind_size -= bytesToCutOff data.copy(this._lookbehind, this._lookbehind_size) this._lookbehind_size += len this._bufpos = len return len } } pos += (pos >= 0) * this._bufpos // Lookbehind buffer is now empty. We only need to check if the // needle is in the haystack. if (data.indexOf(needle, pos) !== -1) { pos = data.indexOf(needle, pos) ++this.matches if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } return (this._bufpos = pos + needleLength) } else { pos = len - needleLength } // There was no match. If there's trailing haystack data that we cannot // match yet using the Boyer-Moore-Horspool algorithm (because the trailing // data is less than the needle size) then match using a modified // algorithm that starts matching from the beginning instead of the end. // Whatever trailing data is left after running this algorithm is added to // the lookbehind buffer. while ( pos < len && ( data[pos] !== needle[0] || ( (Buffer.compare( data.subarray(pos, pos + len - pos), needle.subarray(0, len - pos) ) !== 0) ) ) ) { ++pos } if (pos < len) { data.copy(this._lookbehind, 0, pos, pos + (len - pos)) this._lookbehind_size = len - pos } // Everything until pos is guaranteed not to contain needle data. if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } this._bufpos = len return len } SBMH.prototype._sbmh_lookup_char = function (data, pos) { return (pos < 0) ? 
this._lookbehind[this._lookbehind_size + pos] : data[pos] } SBMH.prototype._sbmh_memcmp = function (data, pos, len) { for (var i = 0; i < len; ++i) { // eslint-disable-line no-var if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } } return true } module.exports = SBMH /***/ }), /***/ 89581: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const WritableStream = (__nccwpck_require__(57075).Writable) const { inherits } = __nccwpck_require__(57975) const Dicer = __nccwpck_require__(27182) const MultipartParser = __nccwpck_require__(41192) const UrlencodedParser = __nccwpck_require__(80855) const parseParams = __nccwpck_require__(8929) function Busboy (opts) { if (!(this instanceof Busboy)) { return new Busboy(opts) } if (typeof opts !== 'object') { throw new TypeError('Busboy expected an options-Object.') } if (typeof opts.headers !== 'object') { throw new TypeError('Busboy expected an options-Object with headers-attribute.') } if (typeof opts.headers['content-type'] !== 'string') { throw new TypeError('Missing Content-Type-header.') } const { headers, ...streamOptions } = opts this.opts = { autoDestroy: false, ...streamOptions } WritableStream.call(this, this.opts) this._done = false this._parser = this.getParserByHeaders(headers) this._finished = false } inherits(Busboy, WritableStream) Busboy.prototype.emit = function (ev) { if (ev === 'finish') { if (!this._done) { this._parser?.end() return } else if (this._finished) { return } this._finished = true } WritableStream.prototype.emit.apply(this, arguments) } Busboy.prototype.getParserByHeaders = function (headers) { const parsed = parseParams(headers['content-type']) const cfg = { defCharset: this.opts.defCharset, fileHwm: this.opts.fileHwm, headers, highWaterMark: this.opts.highWaterMark, isPartAFile: this.opts.isPartAFile, limits: this.opts.limits, parsedConType: parsed, preservePath: this.opts.preservePath } if (MultipartParser.detect.test(parsed[0])) { return new MultipartParser(this, cfg) } if (UrlencodedParser.detect.test(parsed[0])) { return new UrlencodedParser(this, cfg) } throw new Error('Unsupported Content-Type.') } Busboy.prototype._write = function (chunk, encoding, cb) { this._parser.write(chunk, cb) } module.exports = Busboy module.exports["default"] = Busboy module.exports.Busboy = Busboy module.exports.Dicer = Dicer /***/ }), /***/ 41192: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; // TODO: // * support 1 nested multipart level // (see second multipart example here: // http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) // * support limits.fieldNameSize // -- this will require modifications to utils.parseParams const { Readable } = __nccwpck_require__(57075) const { inherits } = __nccwpck_require__(57975) const Dicer = __nccwpck_require__(27182) const parseParams = __nccwpck_require__(8929) const decodeText = __nccwpck_require__(72747) const basename = __nccwpck_require__(20692) const getLimit = __nccwpck_require__(22393) const RE_BOUNDARY = /^boundary$/i const RE_FIELD = /^form-data$/i const RE_CHARSET = /^charset$/i const RE_FILENAME = /^filename$/i const RE_NAME = /^name$/i Multipart.detect = /^multipart\/form-data/i function Multipart (boy, cfg) { let i let len const self = this let boundary const limits = cfg.limits const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) const parsedConType = 
cfg.parsedConType || [] const defCharset = cfg.defCharset || 'utf8' const preservePath = cfg.preservePath const fileOpts = { highWaterMark: cfg.fileHwm } for (i = 0, len = parsedConType.length; i < len; ++i) { if (Array.isArray(parsedConType[i]) && RE_BOUNDARY.test(parsedConType[i][0])) { boundary = parsedConType[i][1] break } } function checkFinished () { if (nends === 0 && finished && !boy._done) { finished = false self.end() } } if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) const filesLimit = getLimit(limits, 'files', Infinity) const fieldsLimit = getLimit(limits, 'fields', Infinity) const partsLimit = getLimit(limits, 'parts', Infinity) const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) let nfiles = 0 let nfields = 0 let nends = 0 let curFile let curField let finished = false this._needDrain = false this._pause = false this._cb = undefined this._nparts = 0 this._boy = boy const parserCfg = { boundary, maxHeaderPairs: headerPairsLimit, maxHeaderSize: headerSizeLimit, partHwm: fileOpts.highWaterMark, highWaterMark: cfg.highWaterMark } this.parser = new Dicer(parserCfg) this.parser.on('drain', function () { self._needDrain = false if (self._cb && !self._pause) { const cb = self._cb self._cb = undefined cb() } }).on('part', function onPart (part) { if (++self._nparts > partsLimit) { self.parser.removeListener('part', onPart) self.parser.on('part', skipPart) boy.hitPartsLimit = true boy.emit('partsLimit') return skipPart(part) } // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let // us emit 'end' early since we know the part has ended if we are already // seeing the next part if (curField) { const field = curField field.emit('end') field.removeAllListeners('end') } part.on('header', function (header) { let contype let fieldname let parsed let charset let encoding let filename let nsize = 0 if (header['content-type']) { parsed = parseParams(header['content-type'][0]) if (parsed[0]) { contype = parsed[0].toLowerCase() for (i = 0, len = parsed.length; i < len; ++i) { if (RE_CHARSET.test(parsed[i][0])) { charset = parsed[i][1].toLowerCase() break } } } } if (contype === undefined) { contype = 'text/plain' } if (charset === undefined) { charset = defCharset } if (header['content-disposition']) { parsed = parseParams(header['content-disposition'][0]) if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } for (i = 0, len = parsed.length; i < len; ++i) { if (RE_NAME.test(parsed[i][0])) { fieldname = parsed[i][1] } else if (RE_FILENAME.test(parsed[i][0])) { filename = parsed[i][1] if (!preservePath) { filename = basename(filename) } } } } else { return skipPart(part) } if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } let onData, onEnd if (isPartAFile(fieldname, contype, filename)) { // file/binary field if (nfiles === filesLimit) { if (!boy.hitFilesLimit) { boy.hitFilesLimit = true boy.emit('filesLimit') } return skipPart(part) } ++nfiles if (boy.listenerCount('file') === 0) { self.parser._ignore() return } ++nends const file = new FileStream(fileOpts) curFile = file file.on('end', function () { --nends self._pause = false checkFinished() if (self._cb && !self._needDrain) { const cb = self._cb self._cb = undefined cb() } }) file._read = 
function (n) { if (!self._pause) { return } self._pause = false if (self._cb && !self._needDrain) { const cb = self._cb self._cb = undefined cb() } } boy.emit('file', fieldname, file, filename, encoding, contype) onData = function (data) { if ((nsize += data.length) > fileSizeLimit) { const extralen = fileSizeLimit - nsize + data.length if (extralen > 0) { file.push(data.slice(0, extralen)) } file.truncated = true file.bytesRead = fileSizeLimit part.removeAllListeners('data') file.emit('limit') return } else if (!file.push(data)) { self._pause = true } file.bytesRead = nsize } onEnd = function () { curFile = undefined file.push(null) } } else { // non-file field if (nfields === fieldsLimit) { if (!boy.hitFieldsLimit) { boy.hitFieldsLimit = true boy.emit('fieldsLimit') } return skipPart(part) } ++nfields ++nends let buffer = '' let truncated = false curField = part onData = function (data) { if ((nsize += data.length) > fieldSizeLimit) { const extralen = (fieldSizeLimit - (nsize - data.length)) buffer += data.toString('binary', 0, extralen) truncated = true part.removeAllListeners('data') } else { buffer += data.toString('binary') } } onEnd = function () { curField = undefined if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) --nends checkFinished() } } /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become broken. Streams2/streams3 is a huge black box of confusion, but somehow overriding the sync state seems to fix things again (and still seems to work for previous node versions). */ part._readableState.sync = false part.on('data', onData) part.on('end', onEnd) }).on('error', function (err) { if (curFile) { curFile.emit('error', err) } }) }).on('error', function (err) { boy.emit('error', err) }).on('finish', function () { finished = true checkFinished() }) } Multipart.prototype.write = function (chunk, cb) { const r = this.parser.write(chunk) if (r && !this._pause) { cb() } else { this._needDrain = !r this._cb = cb } } Multipart.prototype.end = function () { const self = this if (self.parser.writable) { self.parser.end() } else if (!self._boy._done) { process.nextTick(function () { self._boy._done = true self._boy.emit('finish') }) } } function skipPart (part) { part.resume() } function FileStream (opts) { Readable.call(this, opts) this.bytesRead = 0 this.truncated = false } inherits(FileStream, Readable) FileStream.prototype._read = function (n) {} module.exports = Multipart /***/ }), /***/ 80855: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; const Decoder = __nccwpck_require__(11496) const decodeText = __nccwpck_require__(72747) const getLimit = __nccwpck_require__(22393) const RE_CHARSET = /^charset$/i UrlEncoded.detect = /^application\/x-www-form-urlencoded/i function UrlEncoded (boy, cfg) { const limits = cfg.limits const parsedConType = cfg.parsedConType this.boy = boy this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) this.fieldsLimit = getLimit(limits, 'fields', Infinity) let charset for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var if (Array.isArray(parsedConType[i]) && RE_CHARSET.test(parsedConType[i][0])) { charset = parsedConType[i][1].toLowerCase() break } } if (charset === undefined) { charset = cfg.defCharset || 'utf8' } this.decoder = new Decoder() this.charset = charset this._fields = 0 this._state = 'key' 
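// Editor's note (descriptive comments inferred from write()/end() below):
//   _state              -> which side of '=' is being decoded: 'key' or 'val'
//   _checkingBytes      -> still counting raw bytes against the fieldNameSize /
//                          fieldSize limits; cleared once a limit is confirmed
//   _bytesKey/_bytesVal -> byte counters for the current key/value
//   _key/_val           -> percent-decoded text accumulated so far
//   _keyTrunc/_valTrunc -> whether the emitted key/value was truncated
//   _hitLimit           -> tentative flag set when a byte counter reaches its
//                          limit; re-checked against the decoded length, since
//                          escapes such as "%41" shrink when decoded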
this._checkingBytes = true this._bytesKey = 0 this._bytesVal = 0 this._key = '' this._val = '' this._keyTrunc = false this._valTrunc = false this._hitLimit = false } UrlEncoded.prototype.write = function (data, cb) { if (this._fields === this.fieldsLimit) { if (!this.boy.hitFieldsLimit) { this.boy.hitFieldsLimit = true this.boy.emit('fieldsLimit') } return cb() } let idxeq; let idxamp; let i; let p = 0; const len = data.length while (p < len) { if (this._state === 'key') { idxeq = idxamp = undefined for (i = p; i < len; ++i) { if (!this._checkingBytes) { ++p } if (data[i] === 0x3D/* = */) { idxeq = i break } else if (data[i] === 0x26/* & */) { idxamp = i break } if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { this._hitLimit = true break } else if (this._checkingBytes) { ++this._bytesKey } } if (idxeq !== undefined) { // key with assignment if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } this._state = 'val' this._hitLimit = false this._checkingBytes = true this._val = '' this._bytesVal = 0 this._valTrunc = false this.decoder.reset() p = idxeq + 1 } else if (idxamp !== undefined) { // key with no assignment ++this._fields let key; const keyTrunc = this._keyTrunc if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } this._hitLimit = false this._checkingBytes = true this._key = '' this._bytesKey = 0 this._keyTrunc = false this.decoder.reset() if (key.length) { this.boy.emit('field', decodeText(key, 'binary', this.charset), '', keyTrunc, false) } p = idxamp + 1 if (this._fields === this.fieldsLimit) { return cb() } } else if (this._hitLimit) { // we may not have hit the actual limit if there are encoded bytes... if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } p = i if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { // yep, we actually did hit the limit this._checkingBytes = false this._keyTrunc = true } } else { if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } p = len } } else { idxamp = undefined for (i = p; i < len; ++i) { if (!this._checkingBytes) { ++p } if (data[i] === 0x26/* & */) { idxamp = i break } if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { this._hitLimit = true break } else if (this._checkingBytes) { ++this._bytesVal } } if (idxamp !== undefined) { ++this._fields if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } this.boy.emit('field', decodeText(this._key, 'binary', this.charset), decodeText(this._val, 'binary', this.charset), this._keyTrunc, this._valTrunc) this._state = 'key' this._hitLimit = false this._checkingBytes = true this._key = '' this._bytesKey = 0 this._keyTrunc = false this.decoder.reset() p = idxamp + 1 if (this._fields === this.fieldsLimit) { return cb() } } else if (this._hitLimit) { // we may not have hit the actual limit if there are encoded bytes... 
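// Editor's note (illustrative example, not from the original source): with a
// fieldSize limit of 5, the five raw bytes "ab%41" trip the counter above but
// decode to "abA" (3 chars), so the limit is only confirmed after re-measuring
// this._val.length below.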
if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } p = i if ((this._val === '' && this.fieldSizeLimit === 0) || (this._bytesVal = this._val.length) === this.fieldSizeLimit) { // yep, we actually did hit the limit this._checkingBytes = false this._valTrunc = true } } else { if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } p = len } } } cb() } UrlEncoded.prototype.end = function () { if (this.boy._done) { return } if (this._state === 'key' && this._key.length > 0) { this.boy.emit('field', decodeText(this._key, 'binary', this.charset), '', this._keyTrunc, false) } else if (this._state === 'val') { this.boy.emit('field', decodeText(this._key, 'binary', this.charset), decodeText(this._val, 'binary', this.charset), this._keyTrunc, this._valTrunc) } this.boy._done = true this.boy.emit('finish') } module.exports = UrlEncoded /***/ }), /***/ 11496: /***/ ((module) => { "use strict"; const RE_PLUS = /\+/g const HEX = [ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 ] function Decoder () { this.buffer = undefined } Decoder.prototype.write = function (str) { // Replace '+' with ' ' before decoding str = str.replace(RE_PLUS, ' ') let res = '' let i = 0; let p = 0; const len = str.length for (; i < len; ++i) { if (this.buffer !== undefined) { if (!HEX[str.charCodeAt(i)]) { res += '%' + this.buffer this.buffer = undefined --i // retry character } else { this.buffer += str[i] ++p if (this.buffer.length === 2) { res += String.fromCharCode(parseInt(this.buffer, 16)) this.buffer = undefined } } } else if (str[i] === '%') { if (i > p) { res += str.substring(p, i) p = i } this.buffer = '' ++p } } if (p < len && this.buffer === undefined) { res += str.substring(p) } return res } Decoder.prototype.reset = function () { this.buffer = undefined } module.exports = Decoder /***/ }), /***/ 20692: /***/ ((module) => { "use strict"; module.exports = function basename (path) { if (typeof path !== 'string') { return '' } for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var switch (path.charCodeAt(i)) { case 0x2F: // '/' case 0x5C: // '\' path = path.slice(i + 1) return (path === '..' || path === '.' ? '' : path) } } return (path === '..' || path === '.' ? '' : path) } /***/ }), /***/ 72747: /***/ (function(module) { "use strict"; // Node has always utf-8 const utf8Decoder = new TextDecoder('utf-8') const textDecoders = new Map([ ['utf-8', utf8Decoder], ['utf8', utf8Decoder] ]) function getDecoder (charset) { let lc while (true) { switch (charset) { case 'utf-8': case 'utf8': return decoders.utf8 case 'latin1': case 'ascii': // TODO: Make these a separate, strict decoder? 
case 'us-ascii': case 'iso-8859-1': case 'iso8859-1': case 'iso88591': case 'iso_8859-1': case 'windows-1252': case 'iso_8859-1:1987': case 'cp1252': case 'x-cp1252': return decoders.latin1 case 'utf16le': case 'utf-16le': case 'ucs2': case 'ucs-2': return decoders.utf16le case 'base64': return decoders.base64 default: if (lc === undefined) { lc = true charset = charset.toLowerCase() continue } return decoders.other.bind(charset) } } } const decoders = { utf8: (data, sourceEncoding) => { if (data.length === 0) { return '' } if (typeof data === 'string') { data = Buffer.from(data, sourceEncoding) } return data.utf8Slice(0, data.length) }, latin1: (data, sourceEncoding) => { if (data.length === 0) { return '' } if (typeof data === 'string') { return data } return data.latin1Slice(0, data.length) }, utf16le: (data, sourceEncoding) => { if (data.length === 0) { return '' } if (typeof data === 'string') { data = Buffer.from(data, sourceEncoding) } return data.ucs2Slice(0, data.length) }, base64: (data, sourceEncoding) => { if (data.length === 0) { return '' } if (typeof data === 'string') { data = Buffer.from(data, sourceEncoding) } return data.base64Slice(0, data.length) }, other: (data, sourceEncoding) => { if (data.length === 0) { return '' } if (typeof data === 'string') { data = Buffer.from(data, sourceEncoding) } if (textDecoders.has(this.toString())) { try { return textDecoders.get(this).decode(data) } catch {} } return typeof data === 'string' ? data : data.toString() } } function decodeText (text, sourceEncoding, destEncoding) { if (text) { return getDecoder(destEncoding)(text, sourceEncoding) } return text } module.exports = decodeText /***/ }), /***/ 22393: /***/ ((module) => { "use strict"; module.exports = function getLimit (limits, name, defaultLimit) { if ( !limits || limits[name] === undefined || limits[name] === null ) { return defaultLimit } if ( typeof limits[name] !== 'number' || isNaN(limits[name]) ) { throw new TypeError('Limit ' + name + ' is not a valid number') } return limits[name] } /***/ }), /***/ 8929: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /* eslint-disable object-property-newline */ const decodeText = __nccwpck_require__(72747) const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g const EncodedLookup = { '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f', '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', 
'%3D': '\x3d', '%3e': '\x3e', '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d', '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70', '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a', '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', 
'%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb', '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd', '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef', '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' } function encodedReplacer (match) { return EncodedLookup[match] } const STATE_KEY = 0 const STATE_VALUE = 1 const STATE_CHARSET = 2 const STATE_LANG = 3 function parseParams (str) { const res = [] let state = STATE_KEY let charset = '' let inquote = false let escaping = false let p = 0 let tmp = '' const len = str.length for (var i = 0; i < len; ++i) { // eslint-disable-line no-var const char = str[i] if (char === '\\' && inquote) { if (escaping) { escaping = false } else { escaping = true continue } } else if (char === '"') { if (!escaping) { if (inquote) { inquote = false state = STATE_KEY } else { inquote = true } continue } else { escaping = false } } else { if (escaping && inquote) { tmp += '\\' } escaping = false if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { if (state === STATE_CHARSET) { state = STATE_LANG charset = tmp.substring(1) } else { state = STATE_VALUE } tmp = '' continue } else if (state === STATE_KEY && (char === '*' || char === '=') && res.length) { state = char === '*' ? 
STATE_CHARSET : STATE_VALUE res[p] = [tmp, undefined] tmp = '' continue } else if (!inquote && char === ';') { state = STATE_KEY if (charset) { if (tmp.length) { tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), 'binary', charset) } charset = '' } else if (tmp.length) { tmp = decodeText(tmp, 'binary', 'utf8') } if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } tmp = '' ++p continue } else if (!inquote && (char === ' ' || char === '\t')) { continue } } tmp += char } if (charset && tmp.length) { tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), 'binary', charset) } else if (tmp) { tmp = decodeText(tmp, 'binary', 'utf8') } if (res[p] === undefined) { if (tmp) { res[p] = tmp } } else { res[p][1] = tmp } return res } module.exports = parseParams /***/ }), /***/ 76637: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.AclRoleAccessorMethods = exports.Acl = void 0; const promisify_1 = __nccwpck_require__(60206); /** * Attach functionality to a {@link Storage.acl} instance. This will add an * object for each role group (owners, readers, and writers), with each object * containing methods to add or delete a type of entity. * * As an example, here are a few methods that are created. * * myBucket.acl.readers.deleteGroup('groupId', function(err) {}); * * myBucket.acl.owners.addUser('email@example.com', function(err, acl) {}); * * myBucket.acl.writers.addDomain('example.com', function(err, acl) {}); * * @private */ class AclRoleAccessorMethods { constructor() { this.owners = {}; this.readers = {}; this.writers = {}; /** * An object of convenience methods to add or delete owner ACL permissions * for a given entity. * * The supported methods include: * * - `myFile.acl.owners.addAllAuthenticatedUsers` * - `myFile.acl.owners.deleteAllAuthenticatedUsers` * - `myFile.acl.owners.addAllUsers` * - `myFile.acl.owners.deleteAllUsers` * - `myFile.acl.owners.addDomain` * - `myFile.acl.owners.deleteDomain` * - `myFile.acl.owners.addGroup` * - `myFile.acl.owners.deleteGroup` * - `myFile.acl.owners.addProject` * - `myFile.acl.owners.deleteProject` * - `myFile.acl.owners.addUser` * - `myFile.acl.owners.deleteUser` * * @name Acl#owners * * @example * ``` * const storage = require('@google-cloud/storage')(); * const myBucket = storage.bucket('my-bucket'); * const myFile = myBucket.file('my-file'); * * //- * // Add a user as an owner of a file. * //- * const myBucket = gcs.bucket('my-bucket'); * const myFile = myBucket.file('my-file'); * myFile.acl.owners.addUser('email@example.com', function(err, aclObject) * {}); * * //- * // For reference, the above command is the same as running the following. * //- * myFile.acl.add({ * entity: 'user-email@example.com', * role: gcs.acl.OWNER_ROLE * }, function(err, aclObject) {}); * * //- * // If the callback is omitted, we'll return a Promise. 
* //- * myFile.acl.owners.addUser('email@example.com').then(function(data) { * const aclObject = data[0]; * const apiResponse = data[1]; * }); * ``` */ this.owners = {}; /** * An object of convenience methods to add or delete reader ACL permissions * for a given entity. * * The supported methods include: * * - `myFile.acl.readers.addAllAuthenticatedUsers` * - `myFile.acl.readers.deleteAllAuthenticatedUsers` * - `myFile.acl.readers.addAllUsers` * - `myFile.acl.readers.deleteAllUsers` * - `myFile.acl.readers.addDomain` * - `myFile.acl.readers.deleteDomain` * - `myFile.acl.readers.addGroup` * - `myFile.acl.readers.deleteGroup` * - `myFile.acl.readers.addProject` * - `myFile.acl.readers.deleteProject` * - `myFile.acl.readers.addUser` * - `myFile.acl.readers.deleteUser` * * @name Acl#readers * * @example * ``` * const storage = require('@google-cloud/storage')(); * const myBucket = storage.bucket('my-bucket'); * const myFile = myBucket.file('my-file'); * * //- * // Add a user as a reader of a file. * //- * myFile.acl.readers.addUser('email@example.com', function(err, aclObject) * {}); * * //- * // For reference, the above command is the same as running the following. * //- * myFile.acl.add({ * entity: 'user-email@example.com', * role: gcs.acl.READER_ROLE * }, function(err, aclObject) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * myFile.acl.readers.addUser('email@example.com').then(function(data) { * const aclObject = data[0]; * const apiResponse = data[1]; * }); * ``` */ this.readers = {}; /** * An object of convenience methods to add or delete writer ACL permissions * for a given entity. * * The supported methods include: * * - `myFile.acl.writers.addAllAuthenticatedUsers` * - `myFile.acl.writers.deleteAllAuthenticatedUsers` * - `myFile.acl.writers.addAllUsers` * - `myFile.acl.writers.deleteAllUsers` * - `myFile.acl.writers.addDomain` * - `myFile.acl.writers.deleteDomain` * - `myFile.acl.writers.addGroup` * - `myFile.acl.writers.deleteGroup` * - `myFile.acl.writers.addProject` * - `myFile.acl.writers.deleteProject` * - `myFile.acl.writers.addUser` * - `myFile.acl.writers.deleteUser` * * @name Acl#writers * * @example * ``` * const storage = require('@google-cloud/storage')(); * const myBucket = storage.bucket('my-bucket'); * const myFile = myBucket.file('my-file'); * * //- * // Add a user as a writer of a file. * //- * myFile.acl.writers.addUser('email@example.com', function(err, aclObject) * {}); * * //- * // For reference, the above command is the same as running the following. * //- * myFile.acl.add({ * entity: 'user-email@example.com', * role: gcs.acl.WRITER_ROLE * }, function(err, aclObject) {}); * * //- * // If the callback is omitted, we'll return a Promise. 
* //- * myFile.acl.writers.addUser('email@example.com').then(function(data) { * const aclObject = data[0]; * const apiResponse = data[1]; * }); * ``` */ this.writers = {}; AclRoleAccessorMethods.roles.forEach(this._assignAccessMethods.bind(this)); } _assignAccessMethods(role) { const accessMethods = AclRoleAccessorMethods.accessMethods; const entities = AclRoleAccessorMethods.entities; const roleGroup = role.toLowerCase() + 's'; // eslint-disable-next-line @typescript-eslint/no-explicit-any this[roleGroup] = entities.reduce((acc, entity) => { const isPrefix = entity.charAt(entity.length - 1) === '-'; accessMethods.forEach(accessMethod => { let method = accessMethod + entity[0].toUpperCase() + entity.substring(1); if (isPrefix) { method = method.replace('-', ''); } // Wrap the parent accessor method (e.g. `add` or `delete`) to avoid the // more complex API of specifying an `entity` and `role`. // eslint-disable-next-line @typescript-eslint/no-explicit-any acc[method] = (entityId, options, callback) => { let apiEntity; if (typeof options === 'function') { callback = options; options = {}; } if (isPrefix) { apiEntity = entity + entityId; } else { // If the entity is not a prefix, it is a special entity group // that does not require further details. The accessor methods // only accept a callback. apiEntity = entity; callback = entityId; } options = Object.assign({ entity: apiEntity, role, }, options); const args = [options]; if (typeof callback === 'function') { args.push(callback); } // eslint-disable-next-line @typescript-eslint/no-explicit-any return this[accessMethod].apply(this, args); }; }); return acc; }, {}); } } exports.AclRoleAccessorMethods = AclRoleAccessorMethods; AclRoleAccessorMethods.accessMethods = ['add', 'delete']; AclRoleAccessorMethods.entities = [ // Special entity groups that do not require further specification. 'allAuthenticatedUsers', 'allUsers', // Entity groups that require specification, e.g. `user-email@example.com`. 'domain-', 'group-', 'project-', 'user-', ]; AclRoleAccessorMethods.roles = ['OWNER', 'READER', 'WRITER']; /** * Cloud Storage uses access control lists (ACLs) to manage object and * bucket access. ACLs are the mechanism you use to share objects with other * users and allow other users to access your buckets and objects. * * An ACL consists of one or more entries, where each entry grants permissions * to an entity. Permissions define the actions that can be performed against an * object or bucket (for example, `READ` or `WRITE`); the entity defines who the * permission applies to (for example, a specific user or group of users). * * Where an `entity` value is accepted, we follow the format the Cloud Storage * API expects. * * Refer to * https://cloud.google.com/storage/docs/json_api/v1/defaultObjectAccessControls * for the most up-to-date values. * * - `user-userId` * - `user-email` * - `group-groupId` * - `group-email` * - `domain-domain` * - `project-team-projectId` * - `allUsers` * - `allAuthenticatedUsers` * * Examples: * * - The user "liz@example.com" would be `user-liz@example.com`. * - The group "example@googlegroups.com" would be * `group-example@googlegroups.com`. * - To refer to all members of the Google Apps for Business domain * "example.com", the entity would be `domain-example.com`. * * For more detailed information, see * {@link http://goo.gl/6qBBPO| About Access Control Lists}. * * @constructor Acl * @mixin * @param {object} options Configuration options. 
*/ class Acl extends AclRoleAccessorMethods { constructor(options) { super(); this.pathPrefix = options.pathPrefix; this.request_ = options.request; } /** * @typedef {array} AddAclResponse * @property {object} 0 The Acl Objects. * @property {object} 1 The full API response. */ /** * @callback AddAclCallback * @param {?Error} err Request error, if any. * @param {object} acl The Acl Objects. * @param {object} apiResponse The full API response. */ /** * Add access controls on a {@link Bucket} or {@link File}. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/insert| BucketAccessControls: insert API Documentation} * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} * * @param {object} options Configuration options. * @param {string} options.entity Whose permissions will be added. * @param {string} options.role Permissions allowed for the defined entity. * See {@link https://cloud.google.com/storage/docs/access-control Access * Control}. * @param {number} [options.generation] **File Objects Only** Select a specific * revision of this file (as opposed to the latest version, the default). * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {AddAclCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const storage = require('@google-cloud/storage')(); * const myBucket = storage.bucket('my-bucket'); * const myFile = myBucket.file('my-file'); * * const options = { * entity: 'user-useremail@example.com', * role: gcs.acl.OWNER_ROLE * }; * * myBucket.acl.add(options, function(err, aclObject, apiResponse) {}); * * //- * // For file ACL operations, you can also specify a `generation` property. * // Here is how you would grant ownership permissions to a user on a * specific * // revision of a file. * //- * myFile.acl.add({ * entity: 'user-useremail@example.com', * role: gcs.acl.OWNER_ROLE, * generation: 1 * }, function(err, aclObject, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * myBucket.acl.add(options).then(function(data) { * const aclObject = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/acl.js * region_tag:storage_add_file_owner * Example of adding an owner to a file: * * @example include:samples/acl.js * region_tag:storage_add_bucket_owner * Example of adding an owner to a bucket: * * @example include:samples/acl.js * region_tag:storage_add_bucket_default_owner * Example of adding a default owner to a bucket: */ add(options, callback) { const query = {}; if (options.generation) { query.generation = options.generation; } if (options.userProject) { query.userProject = options.userProject; } this.request({ method: 'POST', uri: '', qs: query, maxRetries: 0, //explicitly set this value since this is a non-idempotent function json: { entity: options.entity, role: options.role.toUpperCase(), }, }, (err, resp) => { if (err) { callback(err, null, resp); return; } callback(null, this.makeAclObject_(resp), resp); }); } /** * @typedef {array} RemoveAclResponse * @property {object} 0 The full API response. */ /** * @callback RemoveAclCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** * Delete access controls on a {@link Bucket} or {@link File}. 
* * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/delete| BucketAccessControls: delete API Documentation} * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/delete| ObjectAccessControls: delete API Documentation} * * @param {object} options Configuration object. * @param {string} options.entity Whose permissions will be revoked. * @param {int} [options.generation] **File Objects Only** Select a specific * revision of this file (as opposed to the latest version, the default). * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {RemoveAclCallback} callback The callback function. * @returns {Promise} * * @example * ``` * const storage = require('@google-cloud/storage')(); * const myBucket = storage.bucket('my-bucket'); * const myFile = myBucket.file('my-file'); * * myBucket.acl.delete({ * entity: 'user-useremail@example.com' * }, function(err, apiResponse) {}); * * //- * // For file ACL operations, you can also specify a `generation` property. * //- * myFile.acl.delete({ * entity: 'user-useremail@example.com', * generation: 1 * }, function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * myFile.acl.delete().then(function(data) { * const apiResponse = data[0]; * }); * * ``` * @example include:samples/acl.js * region_tag:storage_remove_bucket_owner * Example of removing an owner from a bucket: * * @example include:samples/acl.js * region_tag:storage_remove_bucket_default_owner * Example of removing a default owner from a bucket: * * @example include:samples/acl.js * region_tag:storage_remove_file_owner * Example of removing an owner from a bucket: */ delete(options, callback) { const query = {}; if (options.generation) { query.generation = options.generation; } if (options.userProject) { query.userProject = options.userProject; } this.request({ method: 'DELETE', uri: '/' + encodeURIComponent(options.entity), qs: query, }, (err, resp) => { callback(err, resp); }); } /** * @typedef {array} GetAclResponse * @property {object|object[]} 0 Single or array of Acl Objects. * @property {object} 1 The full API response. */ /** * @callback GetAclCallback * @param {?Error} err Request error, if any. * @param {object|object[]} acl Single or array of Acl Objects. * @param {object} apiResponse The full API response. */ /** * Get access controls on a {@link Bucket} or {@link File}. If * an entity is omitted, you will receive an array of all applicable access * controls. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/get| BucketAccessControls: get API Documentation} * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/get| ObjectAccessControls: get API Documentation} * * @param {object|function} [options] Configuration options. If you want to * receive a list of all access controls, pass the callback function as * the only argument. * @param {string} options.entity Whose permissions will be fetched. * @param {number} [options.generation] **File Objects Only** Select a specific * revision of this file (as opposed to the latest version, the default). * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {GetAclCallback} [callback] Callback function. 
* @returns {Promise} * * @example * ``` * const storage = require('@google-cloud/storage')(); * const myBucket = storage.bucket('my-bucket'); * const myFile = myBucket.file('my-file'); * * myBucket.acl.get({ * entity: 'user-useremail@example.com' * }, function(err, aclObject, apiResponse) {}); * * //- * // Get all access controls. * //- * myBucket.acl.get(function(err, aclObjects, apiResponse) { * // aclObjects = [ * // { * // entity: 'user-useremail@example.com', * // role: 'owner' * // } * // ] * }); * * //- * // For file ACL operations, you can also specify a `generation` property. * //- * myFile.acl.get({ * entity: 'user-useremail@example.com', * generation: 1 * }, function(err, aclObject, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * myBucket.acl.get().then(function(data) { * const aclObject = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/acl.js * region_tag:storage_print_file_acl * Example of printing a file's ACL: * * @example include:samples/acl.js * region_tag:storage_print_file_acl_for_user * Example of printing a file's ACL for a specific user: * * @example include:samples/acl.js * region_tag:storage_print_bucket_acl * Example of printing a bucket's ACL: * * @example include:samples/acl.js * region_tag:storage_print_bucket_acl_for_user * Example of printing a bucket's ACL for a specific user: */ get(optionsOrCallback, cb) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : null; const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; let path = ''; const query = {}; if (options) { path = '/' + encodeURIComponent(options.entity); if (options.generation) { query.generation = options.generation; } if (options.userProject) { query.userProject = options.userProject; } } this.request({ uri: path, qs: query, }, (err, resp) => { if (err) { callback(err, null, resp); return; } let results; if (resp.items) { results = resp.items.map(this.makeAclObject_); } else { results = this.makeAclObject_(resp); } callback(null, results, resp); }); } /** * @typedef {array} UpdateAclResponse * @property {object} 0 The updated Acl Objects. * @property {object} 1 The full API response. */ /** * @callback UpdateAclCallback * @param {?Error} err Request error, if any. * @param {object} acl The updated Acl Objects. * @param {object} apiResponse The full API response. */ /** * Update access controls on a {@link Bucket} or {@link File}. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/bucketAccessControls/update| BucketAccessControls: update API Documentation} * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/update| ObjectAccessControls: update API Documentation} * * @param {object} options Configuration options. * @param {string} options.entity Whose permissions will be updated. * @param {string} options.role Permissions allowed for the defined entity. * See {@link Storage.acl}. * @param {number} [options.generation] **File Objects Only** Select a specific * revision of this file (as opposed to the latest version, the default). * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {UpdateAclCallback} [callback] Callback function. 
* @returns {Promise} * * @example * ``` * const storage = require('@google-cloud/storage')(); * const myBucket = storage.bucket('my-bucket'); * const myFile = myBucket.file('my-file'); * * const options = { * entity: 'user-useremail@example.com', * role: gcs.acl.WRITER_ROLE * }; * * myBucket.acl.update(options, function(err, aclObject, apiResponse) {}); * * //- * // For file ACL operations, you can also specify a `generation` property. * //- * myFile.acl.update({ * entity: 'user-useremail@example.com', * role: gcs.acl.WRITER_ROLE, * generation: 1 * }, function(err, aclObject, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * myFile.acl.update(options).then(function(data) { * const aclObject = data[0]; * const apiResponse = data[1]; * }); * ``` */ update(options, callback) { const query = {}; if (options.generation) { query.generation = options.generation; } if (options.userProject) { query.userProject = options.userProject; } this.request({ method: 'PUT', uri: '/' + encodeURIComponent(options.entity), qs: query, json: { role: options.role.toUpperCase(), }, }, (err, resp) => { if (err) { callback(err, null, resp); return; } callback(null, this.makeAclObject_(resp), resp); }); } /** * Transform API responses to a consistent object format. * * @private */ makeAclObject_(accessControlObject) { const obj = { entity: accessControlObject.entity, role: accessControlObject.role, }; if (accessControlObject.projectTeam) { obj.projectTeam = accessControlObject.projectTeam; } return obj; } /** * Patch requests up to the bucket's request object. * * @private * * @param {string} method Action. * @param {string} path Request path. * @param {*} query Request query object. * @param {*} body Request body contents. * @param {function} callback Callback function. */ request(reqOpts, callback) { reqOpts.uri = this.pathPrefix + reqOpts.uri; this.request_(reqOpts, callback); } } exports.Acl = Acl; /*! Developer Documentation * * All async methods (except for streams) will return a Promise in the event * that a callback is omitted. */ (0, promisify_1.promisifyAll)(Acl, { exclude: ['request'], }); /***/ }), /***/ 82887: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Bucket = exports.BucketExceptionMessages = exports.AvailableServiceObjectMethods = exports.BucketActionToHTTPMethod = void 0; const index_js_1 = __nccwpck_require__(11325); const paginator_1 = __nccwpck_require__(99469); const promisify_1 = __nccwpck_require__(60206); const fs = __importStar(__nccwpck_require__(79896)); const mime_1 = __importDefault(__nccwpck_require__(94900)); const path = __importStar(__nccwpck_require__(16928)); const p_limit_1 = __importDefault(__nccwpck_require__(58890)); const util_1 = __nccwpck_require__(39023); const async_retry_1 = __importDefault(__nccwpck_require__(45195)); const util_js_1 = __nccwpck_require__(74557); const acl_js_1 = __nccwpck_require__(76637); const file_js_1 = __nccwpck_require__(69975); const iam_js_1 = __nccwpck_require__(52880); const notification_js_1 = __nccwpck_require__(18598); const storage_js_1 = __nccwpck_require__(92848); const signer_js_1 = __nccwpck_require__(7319); const stream_1 = __nccwpck_require__(2203); const url_1 = __nccwpck_require__(87016); var BucketActionToHTTPMethod; (function (BucketActionToHTTPMethod) { BucketActionToHTTPMethod["list"] = "GET"; })(BucketActionToHTTPMethod || (exports.BucketActionToHTTPMethod = BucketActionToHTTPMethod = {})); var AvailableServiceObjectMethods; (function (AvailableServiceObjectMethods) { AvailableServiceObjectMethods[AvailableServiceObjectMethods["setMetadata"] = 0] = "setMetadata"; AvailableServiceObjectMethods[AvailableServiceObjectMethods["delete"] = 1] = "delete"; })(AvailableServiceObjectMethods || (exports.AvailableServiceObjectMethods = AvailableServiceObjectMethods = {})); var BucketExceptionMessages; (function (BucketExceptionMessages) { BucketExceptionMessages["PROVIDE_SOURCE_FILE"] = "You must provide at least one source file."; BucketExceptionMessages["DESTINATION_FILE_NOT_SPECIFIED"] = "A destination file must be specified."; BucketExceptionMessages["CHANNEL_ID_REQUIRED"] = "An ID is required to create a channel."; BucketExceptionMessages["TOPIC_NAME_REQUIRED"] = "A valid topic name is required."; BucketExceptionMessages["CONFIGURATION_OBJECT_PREFIX_REQUIRED"] = "A configuration object with a prefix is required."; BucketExceptionMessages["SPECIFY_FILE_NAME"] = "A file name must be specified."; BucketExceptionMessages["METAGENERATION_NOT_PROVIDED"] = "A metageneration must be provided."; BucketExceptionMessages["SUPPLY_NOTIFICATION_ID"] = "You must supply a notification ID."; })(BucketExceptionMessages || (exports.BucketExceptionMessages = BucketExceptionMessages = {})); /** * @callback Crc32cGeneratorToStringCallback * A method returning the CRC32C as a base64-encoded string. 
* * @returns {string} * * @example * Hashing the string 'data' should return 'rth90Q==' * * ```js * const buffer = Buffer.from('data'); * crc32c.update(buffer); * crc32c.toString(); // 'rth90Q==' * ``` **/ /** * @callback Crc32cGeneratorValidateCallback * A method validating a base64-encoded CRC32C string. * * @param {string} [value] base64-encoded CRC32C string to validate * @returns {boolean} * * @example * Should return `true` if the value matches, `false` otherwise * * ```js * const buffer = Buffer.from('data'); * crc32c.update(buffer); * crc32c.validate('DkjKuA=='); // false * crc32c.validate('rth90Q=='); // true * ``` **/ /** * @callback Crc32cGeneratorUpdateCallback * A method for passing `Buffer`s for CRC32C generation. * * @param {Buffer} [data] data to update CRC32C value with * @returns {undefined} * * @example * Hashing buffers from 'some ' and 'text\n' * * ```js * const buffer1 = Buffer.from('some '); * crc32c.update(buffer1); * * const buffer2 = Buffer.from('text\n'); * crc32c.update(buffer2); * * crc32c.toString(); // 'DkjKuA==' * ``` **/ /** * @typedef {object} CRC32CValidator * @property {Crc32cGeneratorToStringCallback} * @property {Crc32cGeneratorValidateCallback} * @property {Crc32cGeneratorUpdateCallback} */ /** * A function that generates a CRC32C Validator. Defaults to {@link CRC32C} * * @name Bucket#crc32cGenerator * @type {CRC32CValidator} */ /** * Get and set IAM policies for your bucket. * * @name Bucket#iam * @mixes Iam * * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * //- * // Get the IAM policy for your bucket. * //- * bucket.iam.getPolicy(function(err, policy) { * console.log(policy); * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.iam.getPolicy().then(function(data) { * const policy = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/iam.js * region_tag:storage_view_bucket_iam_members * Example of retrieving a bucket's IAM policy: * * @example include:samples/iam.js * region_tag:storage_add_bucket_iam_member * Example of adding to a bucket's IAM policy: * * @example include:samples/iam.js * region_tag:storage_remove_bucket_iam_member * Example of removing from a bucket's IAM policy: */ /** * Cloud Storage uses access control lists (ACLs) to manage object and * bucket access. ACLs are the mechanism you use to share objects with other * users and allow other users to access your buckets and objects. * * An ACL consists of one or more entries, where each entry grants permissions * to an entity. Permissions define the actions that can be performed against * an object or bucket (for example, `READ` or `WRITE`); the entity defines * who the permission applies to (for example, a specific user or group of * users). * * The `acl` object on a Bucket instance provides methods to get you a list of * the ACLs defined on your bucket, as well as set, update, and delete them. * * Buckets also have * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} * for all created files. 
Default ACLs specify permissions that all new * objects added to the bucket will inherit by default. You can add, delete, * get, and update entities and permissions for these as well with * {@link Bucket#acl.default}. * * See {@link http://goo.gl/6qBBPO| About Access Control Lists} * See {@link https://cloud.google.com/storage/docs/access-control/lists#default| Default ACLs} * * @name Bucket#acl * @mixes Acl * @property {Acl} default Cloud Storage Buckets have * {@link https://cloud.google.com/storage/docs/access-control/lists#default| default ACLs} * for all created files. You can add, delete, get, and update entities and * permissions for these as well. The method signatures and examples are all * the same, after only prefixing the method call with `default`. * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * * //- * // Make a bucket's contents publicly readable. * //- * const myBucket = storage.bucket('my-bucket'); * * const options = { * entity: 'allUsers', * role: storage.acl.READER_ROLE * }; * * myBucket.acl.add(options, function(err, aclObject) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * myBucket.acl.add(options).then(function(data) { * const aclObject = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/acl.js * region_tag:storage_print_bucket_acl * Example of printing a bucket's ACL: * * @example include:samples/acl.js * region_tag:storage_print_bucket_acl_for_user * Example of printing a bucket's ACL for a specific user: * * @example include:samples/acl.js * region_tag:storage_add_bucket_owner * Example of adding an owner to a bucket: * * @example include:samples/acl.js * region_tag:storage_remove_bucket_owner * Example of removing an owner from a bucket: * * @example include:samples/acl.js * region_tag:storage_add_bucket_default_owner * Example of adding a default owner to a bucket: * * @example include:samples/acl.js * region_tag:storage_remove_bucket_default_owner * Example of removing a default owner from a bucket: */ /** * The API-formatted resource description of the bucket. * * Note: This is not guaranteed to be up-to-date when accessed. To get the * latest record, call the `getMetadata()` method. * * @name Bucket#metadata * @type {object} */ /** * The bucket's name. * @name Bucket#name * @type {string} */ /** * Get {@link File} objects for the files currently in the bucket as a * readable object stream. * * @method Bucket#getFilesStream * @param {GetFilesOptions} [query] Query object for listing files. * @returns {ReadableStream} A readable stream that emits {@link File} instances. * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * bucket.getFilesStream() * .on('error', console.error) * .on('data', function(file) { * // file is a File object. * }) * .on('end', function() { * // All files retrieved. * }); * * //- * // If you anticipate many results, you can end a stream early to prevent * // unnecessary processing and API requests. * //- * bucket.getFilesStream() * .on('data', function(file) { * this.end(); * }); * * //- * // If you're filtering files with a delimiter, you should use * // {@link Bucket#getFiles} and set `autoPaginate: false` in order to * // preserve the `apiResponse` argument. 
* //- * const prefixes = []; * * function callback(err, files, nextQuery, apiResponse) { * prefixes = prefixes.concat(apiResponse.prefixes); * * if (nextQuery) { * bucket.getFiles(nextQuery, callback); * } else { * // prefixes = The finished array of prefixes. * } * } * * bucket.getFiles({ * autoPaginate: false, * delimiter: '/' * }, callback); * ``` */ /** * Create a Bucket object to interact with a Cloud Storage bucket. * * @class * @hideconstructor * * @param {Storage} storage A {@link Storage} instance. * @param {string} name The name of the bucket. * @param {object} [options] Configuration object. * @param {string} [options.userProject] User project. * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * ``` */ class Bucket extends index_js_1.ServiceObject { // eslint-disable-next-line @typescript-eslint/no-unused-vars getFilesStream(query) { // placeholder body, overwritten in constructor return new stream_1.Readable(); } constructor(storage, name, options) { var _a, _b, _c, _d; options = options || {}; // Allow for "gs://"-style input, and strip any trailing slashes. name = name.replace(/^gs:\/\//, '').replace(/\/+$/, ''); const requestQueryObject = {}; if ((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { requestQueryObject.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; } if ((_b = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { requestQueryObject.ifGenerationNotMatch = options.preconditionOpts.ifGenerationNotMatch; } if ((_c = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { requestQueryObject.ifMetagenerationMatch = options.preconditionOpts.ifMetagenerationMatch; } if ((_d = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { requestQueryObject.ifMetagenerationNotMatch = options.preconditionOpts.ifMetagenerationNotMatch; } const userProject = options.userProject; if (typeof userProject === 'string') { requestQueryObject.userProject = userProject; } const methods = { /** * Create a bucket. * * @method Bucket#create * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. * @param {CreateBucketCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * bucket.create(function(err, bucket, apiResponse) { * if (!err) { * // The bucket was created successfully. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.create().then(function(data) { * const bucket = data[0]; * const apiResponse = data[1]; * }); * ``` */ create: { reqOpts: { qs: requestQueryObject, }, }, /** * IamDeleteBucketOptions Configuration options. * @property {boolean} [ignoreNotFound = false] Ignore an error if * the bucket does not exist. * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @typedef {array} DeleteBucketResponse * @property {object} 0 The full API response. */ /** * @callback DeleteBucketCallback * @param {?Error} err Request error, if any. 
* @param {object} apiResponse The full API response. */ /** * Delete the bucket. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/delete| Buckets: delete API Documentation} * * @method Bucket#delete * @param {DeleteBucketOptions} [options] Configuration options. * @param {boolean} [options.ignoreNotFound = false] Ignore an error if * the bucket does not exist. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {DeleteBucketCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * bucket.delete(function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.delete().then(function(data) { * const apiResponse = data[0]; * }); * * ``` * @example include:samples/buckets.js * region_tag:storage_delete_bucket * Another example: */ delete: { reqOpts: { qs: requestQueryObject, }, }, /** * @typedef {object} BucketExistsOptions Configuration options for Bucket#exists(). * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @typedef {array} BucketExistsResponse * @property {boolean} 0 Whether the {@link Bucket} exists. */ /** * @callback BucketExistsCallback * @param {?Error} err Request error, if any. * @param {boolean} exists Whether the {@link Bucket} exists. */ /** * Check if the bucket exists. * * @method Bucket#exists * @param {BucketExistsOptions} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {BucketExistsCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * bucket.exists(function(err, exists) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.exists().then(function(data) { * const exists = data[0]; * }); * ``` */ exists: { reqOpts: { qs: requestQueryObject, }, }, /** * @typedef {object} [GetBucketOptions] Configuration options for Bucket#get() * @property {boolean} [autoCreate] Automatically create the object if * it does not exist. Default: `false` * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @typedef {array} GetBucketResponse * @property {Bucket} 0 The {@link Bucket}. * @property {object} 1 The full API response. */ /** * @callback GetBucketCallback * @param {?Error} err Request error, if any. * @param {Bucket} bucket The {@link Bucket}. * @param {object} apiResponse The full API response. */ /** * Get a bucket if it exists. * * You may optionally use this to "get or create" an object by providing * an object with `autoCreate` set to `true`. Any extra configuration that * is normally required for the `create` method must be contained within * this object as well. * * @method Bucket#get * @param {GetBucketOptions} [options] Configuration options. * @param {boolean} [options.autoCreate] Automatically create the object if * it does not exist. Default: `false` * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {GetBucketCallback} [callback] Callback function. 
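 * A minimal sketch of the "get or create" behavior described above; the
 * `autoCreate` flag and any extra create options are passed in the same
 * object:
 * ```
 * bucket.get({
 *   autoCreate: true
 * }).then(function(data) {
 *   const bucket = data[0];
 *   const apiResponse = data[1];
 * });
 * ```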
* @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * bucket.get(function(err, bucket, apiResponse) { * // `bucket.metadata` has been populated. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.get().then(function(data) { * const bucket = data[0]; * const apiResponse = data[1]; * }); * ``` */ get: { reqOpts: { qs: requestQueryObject, }, }, /** * @typedef {array} GetBucketMetadataResponse * @property {object} 0 The bucket metadata. * @property {object} 1 The full API response. */ /** * @callback GetBucketMetadataCallback * @param {?Error} err Request error, if any. * @param {object} metadata The bucket metadata. * @param {object} apiResponse The full API response. */ /** * @typedef {object} GetBucketMetadataOptions Configuration options for Bucket#getMetadata(). * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * Get the bucket's metadata. * * To set metadata, see {@link Bucket#setMetadata}. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/get| Buckets: get API Documentation} * * @method Bucket#getMetadata * @param {GetBucketMetadataOptions} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {GetBucketMetadataCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * bucket.getMetadata(function(err, metadata, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.getMetadata().then(function(data) { * const metadata = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/requesterPays.js * region_tag:storage_get_requester_pays_status * Example of retrieving the requester pays status of a bucket: */ getMetadata: { reqOpts: { qs: requestQueryObject, }, }, /** * @typedef {object} SetBucketMetadataOptions Configuration options for Bucket#setMetadata(). * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @typedef {array} SetBucketMetadataResponse * @property {object} apiResponse The full API response. */ /** * @callback SetBucketMetadataCallback * @param {?Error} err Request error, if any. * @param {object} metadata The bucket metadata. */ /** * Set the bucket's metadata. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} * * @method Bucket#setMetadata * @param {object} metadata The metadata you wish to set. * @param {SetBucketMetadataOptions} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {SetBucketMetadataCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * //- * // Set website metadata field on the bucket. * //- * const metadata = { * website: { * mainPageSuffix: 'http://example.com', * notFoundPage: 'http://example.com/404.html' * } * }; * * bucket.setMetadata(metadata, function(err, apiResponse) {}); * * //- * // Enable versioning for your bucket. 
* //- * bucket.setMetadata({ * versioning: { * enabled: true * } * }, function(err, apiResponse) {}); * * //- * // Enable KMS encryption for objects within this bucket. * //- * bucket.setMetadata({ * encryption: { * defaultKmsKeyName: 'projects/grape-spaceship-123/...' * } * }, function(err, apiResponse) {}); * * //- * // Set the default event-based hold value for new objects in this * // bucket. * //- * bucket.setMetadata({ * defaultEventBasedHold: true * }, function(err, apiResponse) {}); * * //- * // Remove object lifecycle rules. * //- * bucket.setMetadata({ * lifecycle: null * }, function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.setMetadata(metadata).then(function(data) { * const apiResponse = data[0]; * }); * ``` */ setMetadata: { reqOpts: { qs: requestQueryObject, }, }, }; super({ parent: storage, baseUrl: '/b', id: name, createMethod: storage.createBucket.bind(storage), methods, }); this.name = name; this.storage = storage; this.userProject = options.userProject; this.acl = new acl_js_1.Acl({ request: this.request.bind(this), pathPrefix: '/acl', }); this.acl.default = new acl_js_1.Acl({ request: this.request.bind(this), pathPrefix: '/defaultObjectAcl', }); this.crc32cGenerator = options.crc32cGenerator || this.storage.crc32cGenerator; this.iam = new iam_js_1.Iam(this); this.getFilesStream = paginator_1.paginator.streamify('getFiles'); this.instanceRetryValue = storage.retryOptions.autoRetry; this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; } /** * The bucket's Cloud Storage URI (`gs://`) * * @example * ```ts * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * * // `gs://my-bucket` * const href = bucket.cloudStorageURI.href; * ``` */ get cloudStorageURI() { const uri = new url_1.URL('gs://'); uri.host = this.name; return uri; } /** * @typedef {object} AddLifecycleRuleOptions Configuration options for Bucket#addLifecycleRule(). * @property {boolean} [append=true] The new rules will be appended to any * pre-existing rules. */ /** * * @typedef {object} LifecycleRule The new lifecycle rule to be added to objects * in this bucket. * @property {string|object} action The action to be taken upon matching of * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. * **Note**: For configuring a raw-formatted rule object to be passed as `action` * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. * @property {object} condition Condition a bucket must meet before the * action occurs on the bucket. Refer to following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. * @property {string} [storageClass] When using the `setStorageClass` * action, provide this option to dictate which storage class the object * should update to. Please see * [SetStorageClass option documentation]{@link https://cloud.google.com/storage/docs/lifecycle#setstorageclass} for supported transitions. */ /** * Add an object lifecycle management rule to the bucket. * * By default, an Object Lifecycle Management rule provided to this method * will be included to the existing policy. To replace all existing rules, * supply the `options` argument, setting `append` to `false`. * * To add multiple rules, pass a list to the `rule` parameter. 
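 * For instance, a minimal sketch passing an array of rules in one call (the
 * conditions here are placeholders):
 * ```
 * bucket.addLifecycleRule([
 *   {action: 'delete', condition: {age: 30}},
 *   {action: 'setStorageClass', storageClass: 'COLDLINE', condition: {age: 365}}
 * ], function(err, apiResponse) {});
 * ```
 *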
Calling this * function multiple times asynchronously does not guarantee that all rules * are added correctly. * * See {@link https://cloud.google.com/storage/docs/lifecycle| Object Lifecycle Management} * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} * * @param {LifecycleRule|LifecycleRule[]} rule The new lifecycle rule or rules to be added to objects * in this bucket. * @param {string|object} rule.action The action to be taken upon matching of * all the conditions 'delete', 'setStorageClass', or 'AbortIncompleteMultipartUpload'. * **Note**: For configuring a raw-formatted rule object to be passed as `action` * please refer to the [examples]{@link https://cloud.google.com/storage/docs/managing-lifecycles#configexamples}. * @param {object} rule.condition Condition a bucket must meet before the * action occurs on the bucket. Refer to the following supported [conditions]{@link https://cloud.google.com/storage/docs/lifecycle#conditions}. * @param {string} [rule.storageClass] When using the `setStorageClass` * action, provide this option to dictate which storage class the object * should update to. * @param {AddLifecycleRuleOptions} [options] Configuration object. * @param {boolean} [options.append=true] Append the new rule to the existing * policy. * @param {SetBucketMetadataCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * //- * // Automatically have an object deleted from this bucket once it is 3 years * // of age. * //- * bucket.addLifecycleRule({ * action: 'delete', * condition: { * age: 365 * 3 // Specified in days. * } * }, function(err, apiResponse) { * if (err) { * // Error handling omitted. * } * * const lifecycleRules = bucket.metadata.lifecycle.rule; * * // Iterate over the Object Lifecycle Management rules on this bucket. * lifecycleRules.forEach(lifecycleRule => {}); * }); * * //- * // By default, the rule you provide will be added to the existing policy. * // Optionally, you can disable this behavior to replace all of the * // pre-existing rules. * //- * const options = { * append: false * }; * * bucket.addLifecycleRule({ * action: 'delete', * condition: { * age: 365 * 3 // Specified in days. * } * }, options, function(err, apiResponse) { * if (err) { * // Error handling omitted. * } * * // All rules have been replaced with the new "delete" rule. * * // Iterate over the Object Lifecycle Management rules on this bucket. * lifecycleRules.forEach(lifecycleRule => {}); * }); * * //- * // For objects created before 2018, "downgrade" the storage class. * //- * bucket.addLifecycleRule({ * action: 'setStorageClass', * storageClass: 'COLDLINE', * condition: { * createdBefore: new Date('2018') * } * }, function(err, apiResponse) {}); * * //- * // Delete objects created before 2016 which have the Coldline storage * // class. * //- * bucket.addLifecycleRule({ * action: 'delete', * condition: { * matchesStorageClass: [ * 'COLDLINE' * ], * createdBefore: new Date('2016') * } * }, function(err, apiResponse) {}); * * //- * // Delete object that has a noncurrent timestamp that is at least 100 days old. * //- * bucket.addLifecycleRule({ * action: 'delete', * condition: { * daysSinceNoncurrentTime: 100 * } * }, function(err, apiResponse) {}); * * //- * // Delete object that has a noncurrent timestamp before 2020-01-01.
* //- * bucket.addLifecycleRule({ * action: 'delete', * condition: { * noncurrentTimeBefore: new Date('2020-01-01') * } * }, function(err, apiResponse) {}); * * //- * // Delete object that has a customTime that is at least 100 days old. * //- * bucket.addLifecycleRule({ * action: 'delete', * condition: { * daysSinceCustomTime: 100 * } * }, function(err, apiResponse) {}); * * //- * // Delete object that has a customTime before 2020-01-01. * //- * bucket.addLifecycleRule({ * action: 'delete', * condition: { * customTimeBefore: new Date('2020-01-01') * } * }, function(err, apiResponse) {}); * ``` */ addLifecycleRule(rule, optionsOrCallback, callback) { let options = {}; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else if (optionsOrCallback) { options = optionsOrCallback; } options = options || {}; const rules = Array.isArray(rule) ? rule : [rule]; for (const curRule of rules) { if (curRule.condition.createdBefore instanceof Date) { curRule.condition.createdBefore = curRule.condition.createdBefore .toISOString() .replace(/T.+$/, ''); } if (curRule.condition.customTimeBefore instanceof Date) { curRule.condition.customTimeBefore = curRule.condition.customTimeBefore .toISOString() .replace(/T.+$/, ''); } if (curRule.condition.noncurrentTimeBefore instanceof Date) { curRule.condition.noncurrentTimeBefore = curRule.condition.noncurrentTimeBefore .toISOString() .replace(/T.+$/, ''); } } if (options.append === false) { this.setMetadata({ lifecycle: { rule: rules } }, options, callback); return; } // The default behavior appends the new rules passed in by the user to // the previously-defined lifecycle rules. this.getMetadata((err, metadata) => { var _a, _b; if (err) { callback(err); return; } const currentLifecycleRules = Array.isArray((_a = metadata.lifecycle) === null || _a === void 0 ? void 0 : _a.rule) ? (_b = metadata.lifecycle) === null || _b === void 0 ? void 0 : _b.rule : []; this.setMetadata({ lifecycle: { rule: currentLifecycleRules.concat(rules) }, }, options, callback); }); } /** * @typedef {object} CombineOptions * @property {string} [kmsKeyName] Resource name of the Cloud KMS key, of * the form * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, * that will be used to encrypt the object. Overwrites the object * metadata's `kms_key_name` value, if any. * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @callback CombineCallback * @param {?Error} err Request error, if any. * @param {File} newFile The new {@link File}. * @param {object} apiResponse The full API response. */ /** * @typedef {array} CombineResponse * @property {File} 0 The new {@link File}. * @property {object} 1 The full API response. */ /** * Combine multiple files into one new file. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/compose| Objects: compose API Documentation} * * @throws {Error} if a non-array is provided as sources argument. * @throws {Error} if no sources are provided. * @throws {Error} if no destination is provided. * * @param {string[]|File[]} sources The source files that will be * combined. * @param {string|File} destination The file you would like the * source files combined into. * @param {CombineOptions} [options] Configuration options. * @param {string} [options.kmsKeyName] Resource name of the Cloud KMS key, of * the form * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, * that will be used to encrypt the object.
Overwrites the object * metadata's `kms_key_name` value, if any. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {CombineCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const logBucket = storage.bucket('log-bucket'); * * const sources = [ * logBucket.file('2013-logs.txt'), * logBucket.file('2014-logs.txt') * ]; * * const allLogs = logBucket.file('all-logs.txt'); * * logBucket.combine(sources, allLogs, function(err, newFile, apiResponse) { * // newFile === allLogs * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * logBucket.combine(sources, allLogs).then(function(data) { * const newFile = data[0]; * const apiResponse = data[1]; * }); * ``` */ combine(sources, destination, optionsOrCallback, callback) { var _a; if (!Array.isArray(sources) || sources.length === 0) { throw new Error(BucketExceptionMessages.PROVIDE_SOURCE_FILE); } if (!destination) { throw new Error(BucketExceptionMessages.DESTINATION_FILE_NOT_SPECIFIED); } let options = {}; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else if (optionsOrCallback) { options = optionsOrCallback; } this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, // Not relevant but param is required AvailableServiceObjectMethods.setMetadata, // Same as above options); const convertToFile = (file) => { if (file instanceof file_js_1.File) { return file; } return this.file(file); }; // eslint-disable-next-line @typescript-eslint/no-explicit-any sources = sources.map(convertToFile); const destinationFile = convertToFile(destination); callback = callback || index_js_1.util.noop; if (!destinationFile.metadata.contentType) { const destinationContentType = mime_1.default.getType(destinationFile.name) || undefined; if (destinationContentType) { destinationFile.metadata.contentType = destinationContentType; } } let maxRetries = this.storage.retryOptions.maxRetries; if ((((_a = destinationFile === null || destinationFile === void 0 ? void 0 : destinationFile.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && options.ifGenerationMatch === undefined && this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryConditional) || this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryNever) { maxRetries = 0; } if (options.ifGenerationMatch === undefined) { Object.assign(options, destinationFile.instancePreconditionOpts, options); } // Make the request from the destination File object. destinationFile.request({ method: 'POST', uri: '/compose', maxRetries, json: { destination: { contentType: destinationFile.metadata.contentType, contentEncoding: destinationFile.metadata.contentEncoding, }, sourceObjects: sources.map(source => { const sourceObject = { name: source.name, }; if (source.metadata && source.metadata.generation) { sourceObject.generation = parseInt(source.metadata.generation.toString()); } return sourceObject; }), }, qs: options, }, (err, resp) => { this.storage.retryOptions.autoRetry = this.instanceRetryValue; if (err) { callback(err, null, resp); return; } callback(null, destinationFile, resp); }); } /** * See a {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll request body}. * * @typedef {object} CreateChannelConfig * @property {string} address The address where notifications are * delivered for this channel. 
* @property {string} [delimiter] Returns results in a directory-like mode. * @property {number} [maxResults] Maximum number of `items` plus `prefixes` * to return in a single page of responses. * @property {string} [pageToken] A previously-returned page token * representing part of the larger set of results to view. * @property {string} [prefix] Filter results to objects whose names begin * with this prefix. * @property {string} [projection=noAcl] Set of properties to return. * @property {string} [userProject] The ID of the project which will be * billed for the request. * @property {boolean} [versions=false] If `true`, lists all versions of an object * as distinct results. */ /** * @typedef {object} CreateChannelOptions * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @typedef {array} CreateChannelResponse * @property {Channel} 0 The new {@link Channel}. * @property {object} 1 The full API response. */ /** * @callback CreateChannelCallback * @param {?Error} err Request error, if any. * @param {Channel} channel The new {@link Channel}. * @param {object} apiResponse The full API response. */ /** * Create a channel that will be notified when objects in this bucket changes. * * @throws {Error} If an ID is not provided. * @throws {Error} If an address is not provided. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/watchAll| Objects: watchAll API Documentation} * * @param {string} id The ID of the channel to create. * @param {CreateChannelConfig} config Configuration for creating channel. * @param {string} config.address The address where notifications are * delivered for this channel. * @param {string} [config.delimiter] Returns results in a directory-like mode. * @param {number} [config.maxResults] Maximum number of `items` plus `prefixes` * to return in a single page of responses. * @param {string} [config.pageToken] A previously-returned page token * representing part of the larger set of results to view. * @param {string} [config.prefix] Filter results to objects whose names begin * with this prefix. * @param {string} [config.projection=noAcl] Set of properties to return. * @param {string} [config.userProject] The ID of the project which will be * billed for the request. * @param {boolean} [config.versions=false] If `true`, lists all versions of an object * as distinct results. * @param {CreateChannelOptions} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {CreateChannelCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * const id = 'new-channel-id'; * * const config = { * address: 'https://...' * }; * * bucket.createChannel(id, config, function(err, channel, apiResponse) { * if (!err) { * // Channel created successfully. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. 
* //- * bucket.createChannel(id, config).then(function(data) { * const channel = data[0]; * const apiResponse = data[1]; * }); * ``` */ createChannel(id, config, optionsOrCallback, callback) { if (typeof id !== 'string') { throw new Error(BucketExceptionMessages.CHANNEL_ID_REQUIRED); } let options = {}; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else if (optionsOrCallback) { options = optionsOrCallback; } this.request({ method: 'POST', uri: '/o/watch', json: Object.assign({ id, type: 'web_hook', }, config), qs: options, }, (err, apiResponse) => { if (err) { callback(err, null, apiResponse); return; } const resourceId = apiResponse.resourceId; const channel = this.storage.channel(id, resourceId); channel.metadata = apiResponse; callback(null, channel, apiResponse); }); } /** * Metadata to set for the Notification. * * @typedef {object} CreateNotificationOptions * @property {object} [customAttributes] An optional list of additional * attributes to attach to each Cloud PubSub message published for this * notification subscription. * @property {string[]} [eventTypes] If present, only send notifications about * listed event types. If empty, sent notifications for all event types. * @property {string} [objectNamePrefix] If present, only apply this * notification configuration to object names that begin with this prefix. * @property {string} [payloadFormat] The desired content of the Payload. * Defaults to `JSON_API_V1`. * * Acceptable values are: * - `JSON_API_V1` * * - `NONE` * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @callback CreateNotificationCallback * @param {?Error} err Request error, if any. * @param {Notification} notification The new {@link Notification}. * @param {object} apiResponse The full API response. */ /** * @typedef {array} CreateNotificationResponse * @property {Notification} 0 The new {@link Notification}. * @property {object} 1 The full API response. */ /** * Creates a notification subscription for the bucket. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} * * @param {Topic|string} topic The Cloud PubSub topic to which this * subscription publishes. If the project ID is omitted, the current * project ID will be used. * * Acceptable formats are: * - `projects/grape-spaceship-123/topics/my-topic` * * - `my-topic` * @param {CreateNotificationOptions} [options] Metadata to set for the * notification. * @param {object} [options.customAttributes] An optional list of additional * attributes to attach to each Cloud PubSub message published for this * notification subscription. * @param {string[]} [options.eventTypes] If present, only send notifications about * listed event types. If empty, sent notifications for all event types. * @param {string} [options.objectNamePrefix] If present, only apply this * notification configuration to object names that begin with this prefix. * @param {string} [options.payloadFormat] The desired content of the Payload. * Defaults to `JSON_API_V1`. * * Acceptable values are: * - `JSON_API_V1` * * - `NONE` * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {CreateNotificationCallback} [callback] Callback function. * @returns {Promise} * @throws {Error} If a valid topic is not provided. 
* @see Notification#create * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const callback = function(err, notification, apiResponse) { * if (!err) { * // The notification was created successfully. * } * }; * * myBucket.createNotification('my-topic', callback); * * //- * // Configure the notification by providing Notification metadata. * //- * const metadata = { * objectNamePrefix: 'prefix-' * }; * * myBucket.createNotification('my-topic', metadata, callback); * * //- * // If the callback is omitted, we'll return a Promise. * //- * myBucket.createNotification('my-topic').then(function(data) { * const notification = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/createNotification.js * region_tag:storage_create_bucket_notifications * Another example: */ createNotification(topic, optionsOrCallback, callback) { let options = {}; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else if (optionsOrCallback) { options = optionsOrCallback; } const topicIsObject = topic !== null && typeof topic === 'object'; if (topicIsObject && index_js_1.util.isCustomType(topic, 'pubsub/topic')) { // eslint-disable-next-line @typescript-eslint/no-explicit-any topic = topic.name; } if (typeof topic !== 'string') { throw new Error(BucketExceptionMessages.TOPIC_NAME_REQUIRED); } const body = Object.assign({ topic }, options); if (body.topic.indexOf('projects') !== 0) { body.topic = 'projects/{{projectId}}/topics/' + body.topic; } body.topic = `//pubsub.${this.storage.universeDomain}/` + body.topic; if (!body.payloadFormat) { body.payloadFormat = 'JSON_API_V1'; } const query = {}; if (body.userProject) { query.userProject = body.userProject; delete body.userProject; } this.request({ method: 'POST', uri: '/notificationConfigs', json: (0, util_js_1.convertObjKeysToSnakeCase)(body), qs: query, maxRetries: 0, //explicitly set this value since this is a non-idempotent function }, (err, apiResponse) => { if (err) { callback(err, null, apiResponse); return; } const notification = this.notification(apiResponse.id); notification.metadata = apiResponse; callback(null, notification, apiResponse); }); } /** * @typedef {object} DeleteFilesOptions Query object. See {@link Bucket#getFiles} * for all of the supported properties. * @property {boolean} [force] Suppress errors until all files have been * processed. */ /** * @callback DeleteFilesCallback * @param {?Error|?Error[]} err Request error, if any, or array of errors from * files that were not able to be deleted. * @param {object} [apiResponse] The full API response. */ /** * Iterate over the bucket's files, calling `file.delete()` on each. * * This is not an atomic request. A delete attempt will be * made for each file individually. Any one can fail, in which case only a * portion of the files you intended to be deleted would have been. * * Operations are performed in parallel, up to 10 at once. The first error * breaks the loop and will execute the provided callback with it. Specify * `{ force: true }` to suppress the errors until all files have had a chance * to be processed. * * File preconditions cannot be passed to this function. It will not retry unless * the idempotency strategy is set to retry always. * * The `query` object passed as the first argument will also be passed to * {@link Bucket#getFiles}.
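 *
 * As a sketch of how `force: true` behaves in the promise form (the prefix
 * here is a placeholder), the returned promise rejects with the collected
 * array of per-file errors only after every file has been attempted:
 * ```
 * bucket.deleteFiles({prefix: 'images/', force: true}).catch(function(errors) {
 *   // `errors` is an array of the individual deletion errors, if any occurred.
 * });
 * ```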
* * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} * * @param {DeleteFilesOptions} [query] Query object. See {@link Bucket#getFiles} * @param {boolean} [query.force] Suppress errors until all files have been * processed. * @param {DeleteFilesCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * //- * // Delete all of the files in the bucket. * //- * bucket.deleteFiles(function(err) {}); * * //- * // By default, if a file cannot be deleted, this method will stop deleting * // files from your bucket. You can override this setting with `force: * // true`. * //- * bucket.deleteFiles({ * force: true * }, function(errors) { * // `errors`: * // Array of errors if any occurred, otherwise null. * }); * * //- * // The first argument to this method acts as a query to * // {@link Bucket#getFiles}. As an example, you can delete files * // which match a prefix. * //- * bucket.deleteFiles({ * prefix: 'images/' * }, function(err) { * if (!err) { * // All files in the `images` directory have been deleted. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.deleteFiles().then(function() {}); * ``` */ deleteFiles(queryOrCallback, callback) { let query = {}; if (typeof queryOrCallback === 'function') { callback = queryOrCallback; } else if (queryOrCallback) { query = queryOrCallback; } const MAX_PARALLEL_LIMIT = 10; const MAX_QUEUE_SIZE = 1000; const errors = []; const deleteFile = (file) => { return file.delete(query).catch(err => { if (!query.force) { throw err; } errors.push(err); }); }; (async () => { try { let promises = []; const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); const filesStream = this.getFilesStream(query); for await (const curFile of filesStream) { if (promises.length >= MAX_QUEUE_SIZE) { await Promise.all(promises); promises = []; } promises.push(limit(() => deleteFile(curFile)).catch(e => { filesStream.destroy(); throw e; })); } await Promise.all(promises); callback(errors.length > 0 ? errors : null); } catch (e) { callback(e); return; } })(); } /** * @deprecated * @typedef {array} DeleteLabelsResponse * @property {object} 0 The full API response. */ /** * @deprecated * @callback DeleteLabelsCallback * @param {?Error} err Request error, if any. * @param {object} metadata Bucket's metadata. */ /** * @deprecated Use setMetadata directly * Delete one or more labels from this bucket. * * @param {string|string[]} [labels] The labels to delete. If no labels are * provided, all of the labels are removed. * @param {DeleteLabelsCallback} [callback] Callback function. * @param {DeleteLabelsOptions} [options] Options, including precondition options * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * //- * // Delete all of the labels from this bucket. * //- * bucket.deleteLabels(function(err, apiResponse) {}); * * //- * // Delete a single label. * //- * bucket.deleteLabels('labelone', function(err, apiResponse) {}); * * //- * // Delete a specific set of labels. * //- * bucket.deleteLabels([ * 'labelone', * 'labeltwo' * ], function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. 
* //- * bucket.deleteLabels().then(function(data) { * const apiResponse = data[0]; * }); * ``` */ deleteLabels(labelsOrCallbackOrOptions, optionsOrCallback, callback) { let labels = new Array(); let options = {}; if (typeof labelsOrCallbackOrOptions === 'function') { callback = labelsOrCallbackOrOptions; } else if (typeof labelsOrCallbackOrOptions === 'string') { labels = [labelsOrCallbackOrOptions]; } else if (Array.isArray(labelsOrCallbackOrOptions)) { labels = labelsOrCallbackOrOptions; } else if (labelsOrCallbackOrOptions) { options = labelsOrCallbackOrOptions; } if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else if (optionsOrCallback) { options = optionsOrCallback; } const deleteLabels = (labels) => { const nullLabelMap = labels.reduce((nullLabelMap, labelKey) => { nullLabelMap[labelKey] = null; return nullLabelMap; }, {}); if ((options === null || options === void 0 ? void 0 : options.ifMetagenerationMatch) !== undefined) { this.setLabels(nullLabelMap, options, callback); } else { this.setLabels(nullLabelMap, callback); } }; if (labels.length === 0) { this.getLabels((err, labels) => { if (err) { callback(err); return; } deleteLabels(Object.keys(labels)); }); } else { deleteLabels(labels); } } /** * @typedef {array} DisableRequesterPaysResponse * @property {object} 0 The full API response. */ /** * @callback DisableRequesterPaysCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** *

* Early Access Testers Only
*
* This feature is not yet widely-available.
* * Disable `requesterPays` functionality from this bucket. * * @param {DisableRequesterPaysCallback} [callback] Callback function. * @param {DisableRequesterPaysOptions} [options] Options, including precondition options * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * bucket.disableRequesterPays(function(err, apiResponse) { * if (!err) { * // requesterPays functionality disabled successfully. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.disableRequesterPays().then(function(data) { * const apiResponse = data[0]; * }); * * ``` * @example include:samples/requesterPays.js * region_tag:storage_disable_requester_pays * Example of disabling requester pays: */ disableRequesterPays(optionsOrCallback, callback) { let options = {}; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else if (optionsOrCallback) { options = optionsOrCallback; } this.setMetadata({ billing: { requesterPays: false, }, }, options, callback); } /** * Configuration object for enabling logging. * * @typedef {object} EnableLoggingOptions * @property {string|Bucket} [bucket] The bucket for the log entries. By * default, the current bucket is used. * @property {string} prefix A unique prefix for log object names. */ /** * Enable logging functionality for this bucket. This will make two API * requests, first to grant Cloud Storage WRITE permission to the bucket, then * to set the appropriate configuration on the Bucket's metadata. * * @param {EnableLoggingOptions} config Configuration options. * @param {string|Bucket} [config.bucket] The bucket for the log entries. By * default, the current bucket is used. * @param {string} config.prefix A unique prefix for log object names. * @param {SetBucketMetadataCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * const config = { * prefix: 'log' * }; * * bucket.enableLogging(config, function(err, apiResponse) { * if (!err) { * // Logging functionality enabled successfully. * } * }); * * ``` * @example * Optionally, provide a destination bucket. * ``` * const config = { * prefix: 'log', * bucket: 'destination-bucket' * }; * * bucket.enableLogging(config, function(err, apiResponse) {}); * ``` * * @example * If the callback is omitted, we'll return a Promise. * ``` * bucket.enableLogging(config).then(function(data) { * const apiResponse = data[0]; * }); * ``` */ enableLogging(config, callback) { if (!config || typeof config === 'function' || typeof config.prefix === 'undefined') { throw new Error(BucketExceptionMessages.CONFIGURATION_OBJECT_PREFIX_REQUIRED); } let logBucket = this.id; if (config.bucket && config.bucket instanceof Bucket) { logBucket = config.bucket.id; } else if (config.bucket && typeof config.bucket === 'string') { logBucket = config.bucket; } const options = {}; if (config === null || config === void 0 ? void 0 : config.ifMetagenerationMatch) { options.ifMetagenerationMatch = config.ifMetagenerationMatch; } if (config === null || config === void 0 ? 
void 0 : config.ifMetagenerationNotMatch) { options.ifMetagenerationNotMatch = config.ifMetagenerationNotMatch; } (async () => { try { const [policy] = await this.iam.getPolicy(); policy.bindings.push({ members: ['group:cloud-storage-analytics@google.com'], role: 'roles/storage.objectCreator', }); await this.iam.setPolicy(policy); this.setMetadata({ logging: { logBucket, logObjectPrefix: config.prefix, }, }, options, callback); } catch (e) { callback(e); return; } })(); } /** * @typedef {array} EnableRequesterPaysResponse * @property {object} 0 The full API response. */ /** * @callback EnableRequesterPaysCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** *
* Early Access Testers Only
*
* This feature is not yet widely-available.
* * Enable `requesterPays` functionality for this bucket. This enables you, the * bucket owner, to have the requesting user assume the charges for the access * to your bucket and its contents. * * @param {EnableRequesterPaysCallback | EnableRequesterPaysOptions} [optionsOrCallback] * Callback function or precondition options. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * bucket.enableRequesterPays(function(err, apiResponse) { * if (!err) { * // requesterPays functionality enabled successfully. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.enableRequesterPays().then(function(data) { * const apiResponse = data[0]; * }); * * ``` * @example include:samples/requesterPays.js * region_tag:storage_enable_requester_pays * Example of enabling requester pays: */ enableRequesterPays(optionsOrCallback, cb) { let options = {}; if (typeof optionsOrCallback === 'function') { cb = optionsOrCallback; } else if (optionsOrCallback) { options = optionsOrCallback; } this.setMetadata({ billing: { requesterPays: true, }, }, options, cb); } /** * Create a {@link File} object. See {@link File} to see how to handle * the different use cases you may have. * * @param {string} name The name of the file in this bucket. * @param {FileOptions} [options] Configuration options. * @param {string|number} [options.generation] Only use a specific revision of * this file. * @param {string} [options.encryptionKey] A custom encryption key. See * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will * be used to encrypt the object. Must be in the format: * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. * KMS key ring must use the same location as the bucket. * @param {string} [options.userProject] The ID of the project which will be * billed for all requests made from File object. * @returns {File} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * const file = bucket.file('my-existing-file.png'); * ``` */ file(name, options) { if (!name) { throw Error(BucketExceptionMessages.SPECIFY_FILE_NAME); } return new file_js_1.File(this, name, options); } /** * @typedef {array} GetFilesResponse * @property {File[]} 0 Array of {@link File} instances. * @param {object} nextQuery 1 A query object to receive more results. * @param {object} apiResponse 2 The full API response. */ /** * @callback GetFilesCallback * @param {?Error} err Request error, if any. * @param {File[]} files Array of {@link File} instances. * @param {object} nextQuery A query object to receive more results. * @param {object} apiResponse The full API response. */ /** * Query object for listing files. * * @typedef {object} GetFilesOptions * @property {boolean} [autoPaginate=true] Have pagination handled * automatically. * @property {string} [delimiter] Results will contain only objects whose * names, aside from the prefix, do not contain delimiter. Objects whose * names, aside from the prefix, contain delimiter will have their name * truncated after the delimiter, returned in `apiResponse.prefixes`. * Duplicate prefixes are omitted. * @property {string} [endOffset] Filter results to objects whose names are * lexicographically before endOffset. 
If startOffset is also set, the objects * listed have names between startOffset (inclusive) and endOffset (exclusive). * @property {boolean} [includeFoldersAsPrefixes] If true, includes folders and * managed folders in the set of prefixes returned by the query. Only applicable if * delimiter is set to / and autoPaginate is set to false. * See: https://cloud.google.com/storage/docs/managed-folders * @property {boolean} [includeTrailingDelimiter] If true, objects that end in * exactly one instance of delimiter have their metadata included in items[] * in addition to the relevant part of the object name appearing in prefixes[]. * @property {string} [prefix] Filter results to objects whose names begin * with this prefix. * @property {string} [matchGlob] A glob pattern used to filter results, * for example foo*bar * @property {number} [maxApiCalls] Maximum number of API calls to make. * @property {number} [maxResults] Maximum number of items plus prefixes to * return per call. * Note: By default will handle pagination automatically * if more than 1 page worth of results are requested per call. * When `autoPaginate` is set to `false` the smaller of `maxResults` * or 1 page of results will be returned per call. * @property {string} [pageToken] A previously-returned page token * representing part of the larger set of results to view. * @property {boolean} [softDeleted] If true, only soft-deleted object versions will be * listed as distinct results in order of generation number. Note `soft_deleted` and * `versions` cannot be set to true simultaneously. * @property {string} [startOffset] Filter results to objects whose names are * lexicographically equal to or after startOffset. If endOffset is also set, * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). * @property {string} [userProject] The ID of the project which will be * billed for the request. * @property {boolean} [versions] If true, returns File objects scoped to * their versions. */ /** * Get {@link File} objects for the files currently in the bucket. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/list| Objects: list API Documentation} * * @param {GetFilesOptions} [query] Query object for listing files. * @param {boolean} [query.autoPaginate=true] Have pagination handled * automatically. * @param {string} [query.delimiter] Results will contain only objects whose * names, aside from the prefix, do not contain delimiter. Objects whose * names, aside from the prefix, contain delimiter will have their name * truncated after the delimiter, returned in `apiResponse.prefixes`. * Duplicate prefixes are omitted. * @param {string} [query.endOffset] Filter results to objects whose names are * lexicographically before endOffset. If startOffset is also set, the objects * listed have names between startOffset (inclusive) and endOffset (exclusive). * @param {boolean} [query.includeFoldersAsPrefixes] If true, includes folders and * managed folders in the set of prefixes returned by the query. Only applicable if * delimiter is set to / and autoPaginate is set to false. * See: https://cloud.google.com/storage/docs/managed-folders * @param {boolean} [query.includeTrailingDelimiter] If true, objects that end in * exactly one instance of delimiter have their metadata included in items[] * in addition to the relevant part of the object name appearing in prefixes[]. * @param {string} [query.prefix] Filter results to objects whose names begin * with this prefix. 
* @param {number} [query.maxApiCalls] Maximum number of API calls to make. * @param {number} [query.maxResults] Maximum number of items plus prefixes to * return per call. * Note: By default will handle pagination automatically * if more than 1 page worth of results are requested per call. * When `autoPaginate` is set to `false` the smaller of `maxResults` * or 1 page of results will be returned per call. * @param {string} [query.pageToken] A previously-returned page token * representing part of the larger set of results to view. * @param {boolean} [query.softDeleted] If true, only soft-deleted object versions will be * listed as distinct results in order of generation number. Note `soft_deleted` and * `versions` cannot be set to true simultaneously. * @param {string} [query.startOffset] Filter results to objects whose names are * lexicographically equal to or after startOffset. If endOffset is also set, * the objects listed have names between startOffset (inclusive) and endOffset (exclusive). * @param {string} [query.userProject] The ID of the project which will be * billed for the request. * @param {boolean} [query.versions] If true, returns File objects scoped to * their versions. * @param {GetFilesCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * bucket.getFiles(function(err, files) { * if (!err) { * // files is an array of File objects. * } * }); * * //- * // If your bucket has versioning enabled, you can get all of your files * // scoped to their generation. * //- * bucket.getFiles({ * versions: true * }, function(err, files) { * // Each file is scoped to its generation. * }); * * //- * // To control how many API requests are made and page through the results * // manually, set `autoPaginate` to `false`. * //- * const callback = function(err, files, nextQuery, apiResponse) { * if (nextQuery) { * // More results exist. * bucket.getFiles(nextQuery, callback); * } * * // The `metadata` property is populated for you with the metadata at the * // time of fetching. * files[0].metadata; * * // However, in cases where you are concerned the metadata could have * // changed, use the `getMetadata` method. * files[0].getMetadata(function(err, metadata) {}); * }; * * bucket.getFiles({ * autoPaginate: false * }, callback); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.getFiles().then(function(data) { * const files = data[0]; * }); * * ``` * @example *
* Simulating a File System
*
* With `autoPaginate: false`, it's possible to iterate over files which incorporate a common structure using a delimiter.
*
* Consider the following remote objects:
*
*   1. "a"
*   2. "a/b/c/d"
*   3. "b/d/e"
*
* Using a delimiter of `/` will return a single file, "a".
*
* `apiResponse.prefixes` will return the "sub-directories" that were found:
*
*   1. "a/"
*   2. "b/"
* ``` * bucket.getFiles({ * autoPaginate: false, * delimiter: '/' * }, function(err, files, nextQuery, apiResponse) { * // files = [ * // {File} // File object for file "a" * // ] * * // apiResponse.prefixes = [ * // 'a/', * // 'b/' * // ] * }); * ``` * * @example * Using prefixes, it's now possible to simulate a file system with follow-up requests. * ``` * bucket.getFiles({ * autoPaginate: false, * delimiter: '/', * prefix: 'a/' * }, function(err, files, nextQuery, apiResponse) { * // No files found within "directory" a. * // files = [] * * // However, a "sub-directory" was found. * // This prefix can be used to continue traversing the "file system". * // apiResponse.prefixes = [ * // 'a/b/' * // ] * }); * ``` * * @example include:samples/files.js * region_tag:storage_list_files * Another example: * * @example include:samples/files.js * region_tag:storage_list_files_with_prefix * Example of listing files, filtered by a prefix: */ getFiles(queryOrCallback, callback) { let query = typeof queryOrCallback === 'object' ? queryOrCallback : {}; if (!callback) { callback = queryOrCallback; } query = Object.assign({}, query); if (query.fields && query.autoPaginate && !query.fields.includes('nextPageToken')) { query.fields = `${query.fields},nextPageToken`; } this.request({ uri: '/o', qs: query, }, (err, resp) => { if (err) { // eslint-disable-next-line @typescript-eslint/no-explicit-any callback(err, null, null, resp); return; } const itemsArray = resp.items ? resp.items : []; const files = itemsArray.map((file) => { const options = {}; if (query.fields) { const fileInstance = file; return fileInstance; } if (query.versions) { options.generation = file.generation; } if (file.kmsKeyName) { options.kmsKeyName = file.kmsKeyName; } const fileInstance = this.file(file.name, options); fileInstance.metadata = file; return fileInstance; }); let nextQuery = null; if (resp.nextPageToken) { nextQuery = Object.assign({}, query, { pageToken: resp.nextPageToken, }); } // eslint-disable-next-line @typescript-eslint/no-explicit-any callback(null, files, nextQuery, resp); }); } /** * @deprecated * @typedef {object} GetLabelsOptions Configuration options for Bucket#getLabels(). * @param {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @deprecated * @typedef {array} GetLabelsResponse * @property {object} 0 Object of labels currently set on this bucket. */ /** * @deprecated * @callback GetLabelsCallback * @param {?Error} err Request error, if any. * @param {object} labels Object of labels currently set on this bucket. */ /** * @deprecated Use getMetadata directly. * Get the labels currently set on this bucket. * * @param {object} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {GetLabelsCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * bucket.getLabels(function(err, labels) { * if (err) { * // Error handling omitted. * } * * // labels = { * // label: 'labelValue', * // ... * // } * }); * * //- * // If the callback is omitted, we'll return a Promise. 
* //- * bucket.getLabels().then(function(data) { * const labels = data[0]; * }); * ``` */ getLabels(optionsOrCallback, callback) { let options = {}; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else if (optionsOrCallback) { options = optionsOrCallback; } this.getMetadata(options, (err, metadata) => { if (err) { callback(err, null); return; } callback(null, (metadata === null || metadata === void 0 ? void 0 : metadata.labels) || {}); }); } /** * @typedef {object} GetNotificationsOptions Configuration options for Bucket#getNotification(). * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @callback GetNotificationsCallback * @param {?Error} err Request error, if any. * @param {Notification[]} notifications Array of {@link Notification} * instances. * @param {object} apiResponse The full API response. */ /** * @typedef {array} GetNotificationsResponse * @property {Notification[]} 0 Array of {@link Notification} instances. * @property {object} 1 The full API response. */ /** * Retrieves a list of notification subscriptions for a given bucket. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/list| Notifications: list} * * @param {GetNotificationsOptions} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {GetNotificationsCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * * bucket.getNotifications(function(err, notifications, apiResponse) { * if (!err) { * // notifications is an array of Notification objects. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.getNotifications().then(function(data) { * const notifications = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/listNotifications.js * region_tag:storage_list_bucket_notifications * Another example: */ getNotifications(optionsOrCallback, callback) { let options = {}; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else if (optionsOrCallback) { options = optionsOrCallback; } this.request({ uri: '/notificationConfigs', qs: options, }, (err, resp) => { if (err) { callback(err, null, resp); return; } const itemsArray = resp.items ? resp.items : []; const notifications = itemsArray.map((notification) => { const notificationInstance = this.notification(notification.id); notificationInstance.metadata = notification; return notificationInstance; }); callback(null, notifications, resp); }); } /** * @typedef {array} GetSignedUrlResponse * @property {object} 0 The signed URL. */ /** * @callback GetSignedUrlCallback * @param {?Error} err Request error, if any. * @param {object} url The signed URL. */ /** * @typedef {object} GetBucketSignedUrlConfig * @property {string} action Only listing objects within a bucket (HTTP: GET) is supported for bucket-level signed URLs. * @property {*} expires A timestamp when this link will expire. Any value * given is passed to `new Date()`. * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. * @property {string} [version='v2'] The signing version to use, either * 'v2' or 'v4'. 
* @property {boolean} [virtualHostedStyle=false] Use virtual hosted-style * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs * should generally be preferred instead of path-style URLs. * Currently defaults to `false` for path-style, although this may change in a * future major-version release. * @property {string} [cname] The cname for this bucket, i.e., * "https://cdn.example.com". * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} * @property {object} [extensionHeaders] If these headers are used, the * server will check to make sure that the client provides matching * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} * for the requirements of this feature, most notably: * - The header name must be prefixed with `x-goog-` * - The header name must be all lowercase * * Note: Multi-valued header passed as an array in the extensionHeaders * object is converted into a string, delimited by `,` with * no space. Requests made using the signed URL will need to * delimit multi-valued headers using a single `,` as well, or * else the server will report a mismatched signature. * @property {object} [queryParams] Additional query parameters to include * in the signed URL. */ /** * Get a signed URL to allow limited time access to a bucket. * * In Google Cloud Platform environments, such as Cloud Functions and App * Engine, you usually don't provide a `keyFilename` or `credentials` during * instantiation. In those environments, we call the * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} * to create a signed URL. That API requires either the * `https://www.googleapis.com/auth/iam` or * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are * enabled. * * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} * * @throws {Error} if an expiration timestamp from the past is given. * * @param {GetBucketSignedUrlConfig} config Configuration object. * @param {string} config.action Currently only supports "list" (HTTP: GET). * @param {*} config.expires A timestamp when this link will expire. Any value * given is passed to `new Date()`. * Note: 'v4' supports maximum duration of 7 days (604800 seconds) from now. * @param {string} [config.version='v2'] The signing version to use, either * 'v2' or 'v4'. * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs * should generally be preferred instead of path-style URLs. * Currently defaults to `false` for path-style, although this may change in a * future major-version release. * @param {string} [config.cname] The cname for this bucket, i.e., * "https://cdn.example.com". * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} * @param {object} [config.extensionHeaders] If these headers are used, the * server will check to make sure that the client provides matching * values. 
See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} * for the requirements of this feature, most notably: * - The header name must be prefixed with `x-goog-` * - The header name must be all lowercase * * Note: Multi-valued header passed as an array in the extensionHeaders * object is converted into a string, delimited by `,` with * no space. Requests made using the signed URL will need to * delimit multi-valued headers using a single `,` as well, or * else the server will report a mismatched signature. * @property {object} [config.queryParams] Additional query parameters to include * in the signed URL. * @param {GetSignedUrlCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * //- * // Generate a URL that allows temporary access to list files in a bucket. * //- * const request = require('request'); * * const config = { * action: 'list', * expires: '03-17-2025' * }; * * bucket.getSignedUrl(config, function(err, url) { * if (err) { * console.error(err); * return; * } * * // The bucket is now available to be listed from this URL. * request(url, function(err, resp) { * // resp.statusCode = 200 * }); * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.getSignedUrl(config).then(function(data) { * const url = data[0]; * }); * ``` */ getSignedUrl(cfg, callback) { const method = BucketActionToHTTPMethod[cfg.action]; const signConfig = { method, expires: cfg.expires, version: cfg.version, cname: cfg.cname, extensionHeaders: cfg.extensionHeaders || {}, queryParams: cfg.queryParams || {}, host: cfg.host, signingEndpoint: cfg.signingEndpoint, }; if (!this.signer) { this.signer = new signer_js_1.URLSigner(this.storage.authClient, this, undefined, this.storage); } this.signer .getSignedUrl(signConfig) .then(signedUrl => callback(null, signedUrl), callback); } /** * @callback BucketLockCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** * Lock a previously-defined retention policy. This will prevent changes to * the policy. * * @throws {Error} if a metageneration is not provided. * * @param {number|string} metageneration The bucket's metageneration. This is * accesssible from calling {@link File#getMetadata}. * @param {BucketLockCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const storage = require('@google-cloud/storage')(); * const bucket = storage.bucket('albums'); * * const metageneration = 2; * * bucket.lock(metageneration, function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.lock(metageneration).then(function(data) { * const apiResponse = data[0]; * }); * ``` */ lock(metageneration, callback) { const metatype = typeof metageneration; if (metatype !== 'number' && metatype !== 'string') { throw new Error(BucketExceptionMessages.METAGENERATION_NOT_PROVIDED); } this.request({ method: 'POST', uri: '/lockRetentionPolicy', qs: { ifMetagenerationMatch: metageneration, }, }, callback); } /** * @typedef {object} RestoreOptions Options for Bucket#restore(). See an * {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/restore#resource| Object resource}. * @param {number} [generation] If present, selects a specific revision of this object. 
* @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. */ /** * Restores a soft-deleted bucket * @param {RestoreOptions} options Restore options. * @returns {Promise} */ async restore(options) { const [bucket] = await this.request({ method: 'POST', uri: '/restore', qs: options, }); return bucket; } /** * @typedef {array} MakeBucketPrivateResponse * @property {File[]} 0 List of files made private. */ /** * @callback MakeBucketPrivateCallback * @param {?Error} err Request error, if any. * @param {File[]} files List of files made private. */ /** * @typedef {object} MakeBucketPrivateOptions * @property {boolean} [includeFiles=false] Make each file in the bucket * private. * @property {Metadata} [metadata] Define custom metadata properties to define * along with the operation. * @property {boolean} [force] Queue errors occurred while making files * private until all files have been processed. * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * Make the bucket listing private. * * You may also choose to make the contents of the bucket private by * specifying `includeFiles: true`. This will automatically run * {@link File#makePrivate} for every file in the bucket. * * When specifying `includeFiles: true`, use `force: true` to delay execution * of your callback until all files have been processed. By default, the * callback is executed after the first error. Use `force` to queue such * errors until all files have been processed, after which they will be * returned as an array as the first argument to your callback. * * NOTE: This may cause the process to be long-running and use a high number * of requests. Use with caution. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} * * @param {MakeBucketPrivateOptions} [options] Configuration options. * @param {boolean} [options.includeFiles=false] Make each file in the bucket * private. * @param {Metadata} [options.metadata] Define custom metadata properties to define * along with the operation. * @param {boolean} [options.force] Queue errors occurred while making files * private until all files have been processed. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {MakeBucketPrivateCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * //- * // Make the bucket private. * //- * bucket.makePrivate(function(err) {}); * * //- * // Make the bucket and its contents private. * //- * const opts = { * includeFiles: true * }; * * bucket.makePrivate(opts, function(err, files) { * // `err`: * // The first error to occur, otherwise null. * // * // `files`: * // Array of files successfully made private in the bucket. * }); * * //- * // Make the bucket and its contents private, using force to suppress errors * // until all files have been processed. * //- * const opts = { * includeFiles: true, * force: true * }; * * bucket.makePrivate(opts, function(errors, files) { * // `errors`: * // Array of errors if any occurred, otherwise null. * // * // `files`: * // Array of files successfully made private in the bucket. * }); * * //- * // If the callback is omitted, we'll return a Promise. 
* //- * bucket.makePrivate(opts).then(function(data) { * const files = data[0]; * }); * ``` */ makePrivate(optionsOrCallback, callback) { var _a, _b, _c, _d; const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; options.private = true; const query = { predefinedAcl: 'projectPrivate', }; if (options.userProject) { query.userProject = options.userProject; } if ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) { query.ifGenerationMatch = options.preconditionOpts.ifGenerationMatch; } if ((_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationNotMatch) { query.ifGenerationNotMatch = options.preconditionOpts.ifGenerationNotMatch; } if ((_c = options.preconditionOpts) === null || _c === void 0 ? void 0 : _c.ifMetagenerationMatch) { query.ifMetagenerationMatch = options.preconditionOpts.ifMetagenerationMatch; } if ((_d = options.preconditionOpts) === null || _d === void 0 ? void 0 : _d.ifMetagenerationNotMatch) { query.ifMetagenerationNotMatch = options.preconditionOpts.ifMetagenerationNotMatch; } // You aren't allowed to set both predefinedAcl & acl properties on a bucket // so acl must explicitly be nullified. const metadata = { ...options.metadata, acl: null }; this.setMetadata(metadata, query, (err) => { if (err) { callback(err); } const internalCall = () => { if (options.includeFiles) { return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, options); } return Promise.resolve([]); }; internalCall() .then(files => callback(null, files)) .catch(callback); }); } /** * @typedef {object} MakeBucketPublicOptions * @property {boolean} [includeFiles=false] Make each file in the bucket * private. * @property {boolean} [force] Queue errors occurred while making files * private until all files have been processed. */ /** * @callback MakeBucketPublicCallback * @param {?Error} err Request error, if any. * @param {File[]} files List of files made public. */ /** * @typedef {array} MakeBucketPublicResponse * @property {File[]} 0 List of files made public. */ /** * Make the bucket publicly readable. * * You may also choose to make the contents of the bucket publicly readable by * specifying `includeFiles: true`. This will automatically run * {@link File#makePublic} for every file in the bucket. * * When specifying `includeFiles: true`, use `force: true` to delay execution * of your callback until all files have been processed. By default, the * callback is executed after the first error. Use `force` to queue such * errors until all files have been processed, after which they will be * returned as an array as the first argument to your callback. * * NOTE: This may cause the process to be long-running and use a high number * of requests. Use with caution. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/patch| Buckets: patch API Documentation} * * @param {MakeBucketPublicOptions} [options] Configuration options. * @param {boolean} [options.includeFiles=false] Make each file in the bucket * private. * @param {boolean} [options.force] Queue errors occurred while making files * private until all files have been processed. * @param {MakeBucketPublicCallback} [callback] Callback function. 
* @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * //- * // Make the bucket publicly readable. * //- * bucket.makePublic(function(err) {}); * * //- * // Make the bucket and its contents publicly readable. * //- * const opts = { * includeFiles: true * }; * * bucket.makePublic(opts, function(err, files) { * // `err`: * // The first error to occur, otherwise null. * // * // `files`: * // Array of files successfully made public in the bucket. * }); * * //- * // Make the bucket and its contents publicly readable, using force to * // suppress errors until all files have been processed. * //- * const opts = { * includeFiles: true, * force: true * }; * * bucket.makePublic(opts, function(errors, files) { * // `errors`: * // Array of errors if any occurred, otherwise null. * // * // `files`: * // Array of files successfully made public in the bucket. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.makePublic(opts).then(function(data) { * const files = data[0]; * }); * ``` */ makePublic(optionsOrCallback, callback) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; const req = { public: true, ...options }; this.acl .add({ entity: 'allUsers', role: 'READER', }) .then(() => { return this.acl.default.add({ entity: 'allUsers', role: 'READER', }); }) .then(() => { if (req.includeFiles) { return (0, util_1.promisify)(this.makeAllFilesPublicPrivate_).call(this, req); } return []; }) .then(files => callback(null, files), callback); } /** * Get a reference to a Cloud Pub/Sub Notification. * * @param {string} id ID of notification. * @returns {Notification} * @see Notification * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * const notification = bucket.notification('1'); * ``` */ notification(id) { if (!id) { throw new Error(BucketExceptionMessages.SUPPLY_NOTIFICATION_ID); } return new notification_js_1.Notification(this, id); } /** * Remove an already-existing retention policy from this bucket, if it is not * locked. * * @param {SetBucketMetadataCallback} [callback] Callback function. * @param {SetBucketMetadataOptions} [options] Options, including precondition options * @returns {Promise} * * @example * ``` * const storage = require('@google-cloud/storage')(); * const bucket = storage.bucket('albums'); * * bucket.removeRetentionPeriod(function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.removeRetentionPeriod().then(function(data) { * const apiResponse = data[0]; * }); * ``` */ removeRetentionPeriod(optionsOrCallback, callback) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; this.setMetadata({ retentionPolicy: null, }, options, callback); } /** * Makes request and applies userProject query parameter if necessary. * * @private * * @param {object} reqOpts - The request options. * @param {function} callback - The callback function. 
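*
* A minimal sketch (illustrative only; the `images/` prefix is a placeholder):
* ```
* bucket.setUserProject('grape-spaceship-123');
*
* // The request options below carry no `userProject`, so it is merged into
* // the query string before the call is forwarded to the parent ServiceObject.
* bucket.request({uri: '/o', qs: {prefix: 'images/'}}, (err, resp) => {});
* // effective qs: {prefix: 'images/', userProject: 'grape-spaceship-123'}
* ```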
*/ request(reqOpts, callback) { if (this.userProject && (!reqOpts.qs || !reqOpts.qs.userProject)) { reqOpts.qs = { ...reqOpts.qs, userProject: this.userProject }; } return super.request(reqOpts, callback); } /** * @deprecated * @typedef {array} SetLabelsResponse * @property {object} 0 The bucket metadata. */ /** * @deprecated * @callback SetLabelsCallback * @param {?Error} err Request error, if any. * @param {object} metadata The bucket metadata. */ /** * @deprecated * @typedef {object} SetLabelsOptions Configuration options for Bucket#setLabels(). * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @deprecated Use setMetadata directly. * Set labels on the bucket. * * This makes an underlying call to {@link Bucket#setMetadata}, which * is a PATCH request. This means an individual label can be overwritten, but * unmentioned labels will not be touched. * * @param {object} labels Labels to set on the bucket. * @param {SetLabelsOptions} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {SetLabelsCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * const labels = { * labelone: 'labelonevalue', * labeltwo: 'labeltwovalue' * }; * * bucket.setLabels(labels, function(err, metadata) { * if (!err) { * // Labels set successfully. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.setLabels(labels).then(function(data) { * const metadata = data[0]; * }); * ``` */ setLabels(labels, optionsOrCallback, callback) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; callback = callback || index_js_1.util.noop; this.setMetadata({ labels }, options, callback); } setMetadata(metadata, optionsOrCallback, cb) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, AvailableServiceObjectMethods.setMetadata, options); super .setMetadata(metadata, options) .then(resp => cb(null, ...resp)) .catch(cb) .finally(() => { this.storage.retryOptions.autoRetry = this.instanceRetryValue; }); } /** * Lock all objects contained in the bucket, based on their creation time. Any * attempt to overwrite or delete objects younger than the retention period * will result in a `PERMISSION_DENIED` error. * * An unlocked retention policy can be modified or removed from the bucket via * {@link File#removeRetentionPeriod} and {@link File#setRetentionPeriod}. A * locked retention policy cannot be removed or shortened in duration for the * lifetime of the bucket. Attempting to remove or decrease period of a locked * retention policy will result in a `PERMISSION_DENIED` error. You can still * increase the policy. * * @param {*} duration In seconds, the minimum retention time for all objects * contained in this bucket. * @param {SetBucketMetadataCallback} [callback] Callback function. * @param {SetBucketMetadataCallback} [options] Options, including precondition options. 
* @returns {Promise} * * @example * ``` * const storage = require('@google-cloud/storage')(); * const bucket = storage.bucket('albums'); * * const DURATION_SECONDS = 15780000; // 6 months. * * //- * // Lock the objects in this bucket for 6 months. * //- * bucket.setRetentionPeriod(DURATION_SECONDS, function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.setRetentionPeriod(DURATION_SECONDS).then(function(data) { * const apiResponse = data[0]; * }); * ``` */ setRetentionPeriod(duration, optionsOrCallback, callback) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; this.setMetadata({ retentionPolicy: { retentionPeriod: duration.toString(), }, }, options, callback); } /** * * @typedef {object} Cors * @property {number} [maxAgeSeconds] The number of seconds the browser is * allowed to make requests before it must repeat the preflight request. * @property {string[]} [method] HTTP method allowed for cross origin resource * sharing with this bucket. * @property {string[]} [origin] an origin allowed for cross origin resource * sharing with this bucket. * @property {string[]} [responseHeader] A header allowed for cross origin * resource sharing with this bucket. */ /** * This can be used to set the CORS configuration on the bucket. * * The configuration will be overwritten with the value passed into this. * * @param {Cors[]} corsConfiguration The new CORS configuration to set * @param {number} [corsConfiguration.maxAgeSeconds] The number of seconds the browser is * allowed to make requests before it must repeat the preflight request. * @param {string[]} [corsConfiguration.method] HTTP method allowed for cross origin resource * sharing with this bucket. * @param {string[]} [corsConfiguration.origin] an origin allowed for cross origin resource * sharing with this bucket. * @param {string[]} [corsConfiguration.responseHeader] A header allowed for cross origin * resource sharing with this bucket. * @param {SetBucketMetadataCallback} [callback] Callback function. * @param {SetBucketMetadataOptions} [options] Options, including precondition options. * @returns {Promise} * * @example * ``` * const storage = require('@google-cloud/storage')(); * const bucket = storage.bucket('albums'); * * const corsConfiguration = [{maxAgeSeconds: 3600}]; // 1 hour * bucket.setCorsConfiguration(corsConfiguration); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.setCorsConfiguration(corsConfiguration).then(function(data) { * const apiResponse = data[0]; * }); * ``` */ setCorsConfiguration(corsConfiguration, optionsOrCallback, callback) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; this.setMetadata({ cors: corsConfiguration, }, options, callback); } /** * @typedef {object} SetBucketStorageClassOptions * @property {string} [userProject] - The ID of the project which will be * billed for the request. */ /** * @callback SetBucketStorageClassCallback * @param {?Error} err Request error, if any. */ /** * Set the default storage class for new files in this bucket. * * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} * * @param {string} storageClass The new storage class. (`standard`, * `nearline`, `coldline`, or `archive`). 
* **Note:** The storage classes `multi_regional`, `regional`, and * `durable_reduced_availability` are now legacy and will be deprecated in * the future. * @param {object} [options] Configuration options. * @param {string} [options.userProject] - The ID of the project which will be * billed for the request. * @param {SetStorageClassCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * bucket.setStorageClass('nearline', function(err, apiResponse) { * if (err) { * // Error handling omitted. * } * * // The storage class was updated successfully. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.setStorageClass('nearline').then(function() {}); * ``` */ setStorageClass(storageClass, optionsOrCallback, callback) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; // In case we get input like `storageClass`, convert to `storage_class`. storageClass = storageClass .replace(/-/g, '_') .replace(/([a-z])([A-Z])/g, (_, low, up) => { return low + '_' + up; }) .toUpperCase(); this.setMetadata({ storageClass }, options, callback); } /** * Set a user project to be billed for all requests made from this Bucket * object and any files referenced from this Bucket object. * * @param {string} userProject The user project. * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * bucket.setUserProject('grape-spaceship-123'); * ``` */ setUserProject(userProject) { this.userProject = userProject; const methods = [ 'create', 'delete', 'exists', 'get', 'getMetadata', 'setMetadata', ]; methods.forEach(method => { const methodConfig = this.methods[method]; if (typeof methodConfig === 'object') { if (typeof methodConfig.reqOpts === 'object') { Object.assign(methodConfig.reqOpts.qs, { userProject }); } else { methodConfig.reqOpts = { qs: { userProject }, }; } } }); } /** * @typedef {object} UploadOptions Configuration options for Bucket#upload(). * @property {string|File} [destination] The place to save * your file. If given a string, the file will be uploaded to the bucket * using the string as a filename. When given a File object, your local * file will be uploaded to the File object's bucket and under the File * object's name. Lastly, when this argument is omitted, the file is uploaded * to your bucket using the name of the local file. * @property {string} [encryptionKey] A custom encryption key. See * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. * @property {boolean} [gzip] Automatically gzip the file. This will set * `options.metadata.contentEncoding` to `gzip`. * @property {string} [kmsKeyName] The name of the Cloud KMS key that will * be used to encrypt the object. Must be in the format: * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. * @property {object} [metadata] See an * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. * @property {string} [offset] The starting byte of the upload stream, for * resuming an interrupted upload. Defaults to 0. * @property {string} [predefinedAcl] Apply a predefined set of access * controls to this object. 
* * Acceptable values are: * - **`authenticatedRead`** - Object owner gets `OWNER` access, and * `allAuthenticatedUsers` get `READER` access. * * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and * project team owners get `OWNER` access. * * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project * team owners get `READER` access. * * - **`private`** - Object owner gets `OWNER` access. * * - **`projectPrivate`** - Object owner gets `OWNER` access, and project * team members get access according to their roles. * * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` * get `READER` access. * @property {boolean} [private] Make the uploaded file private. (Alias for * `options.predefinedAcl = 'private'`) * @property {boolean} [public] Make the uploaded file public. (Alias for * `options.predefinedAcl = 'publicRead'`) * @property {boolean} [resumable=true] Resumable uploads are automatically * enabled and must be shut off explicitly by setting to false. * @property {number} [timeout=60000] Set the HTTP request timeout in * milliseconds. This option is not available for resumable uploads. * Default: `60000` * @property {string} [uri] The URI for an already-created resumable * upload. See {@link File#createResumableUpload}. * @property {string} [userProject] The ID of the project which will be * billed for the request. * @property {string|boolean} [validation] Possible values: `"md5"`, * `"crc32c"`, or `false`. By default, data integrity is validated with an * MD5 checksum for maximum reliability. CRC32c will provide better * performance with less reliability. You may also choose to skip * validation completely, however this is **not recommended**. */ /** * @typedef {array} UploadResponse * @property {object} 0 The uploaded {@link File}. * @property {object} 1 The full API response. */ /** * @callback UploadCallback * @param {?Error} err Request error, if any. * @param {object} file The uploaded {@link File}. * @param {object} apiResponse The full API response. */ /** * Upload a file to the bucket. This is a convenience method that wraps * {@link File#createWriteStream}. * * Resumable uploads are enabled by default * * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload#uploads| Upload Options (Simple or Resumable)} * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert| Objects: insert API Documentation} * * @param {string} pathString The fully qualified path to the file you * wish to upload to your bucket. * @param {UploadOptions} [options] Configuration options. * @param {string|File} [options.destination] The place to save * your file. If given a string, the file will be uploaded to the bucket * using the string as a filename. When given a File object, your local * file will be uploaded to the File object's bucket and under the File * object's name. Lastly, when this argument is omitted, the file is uploaded * to your bucket using the name of the local file. * @param {string} [options.encryptionKey] A custom encryption key. See * {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys}. * @param {boolean} [options.gzip] Automatically gzip the file. This will set * `options.metadata.contentEncoding` to `gzip`. * @param {string} [options.kmsKeyName] The name of the Cloud KMS key that will * be used to encrypt the object. Must be in the format: * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`. 
* @param {object} [options.metadata] See an * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body}. * @param {string} [options.offset] The starting byte of the upload stream, for * resuming an interrupted upload. Defaults to 0. * @param {string} [options.predefinedAcl] Apply a predefined set of access * controls to this object. * Acceptable values are: * - **`authenticatedRead`** - Object owner gets `OWNER` access, and * `allAuthenticatedUsers` get `READER` access. * * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and * project team owners get `OWNER` access. * * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project * team owners get `READER` access. * * - **`private`** - Object owner gets `OWNER` access. * * - **`projectPrivate`** - Object owner gets `OWNER` access, and project * team members get access according to their roles. * * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` * get `READER` access. * @param {boolean} [options.private] Make the uploaded file private. (Alias for * `options.predefinedAcl = 'private'`) * @param {boolean} [options.public] Make the uploaded file public. (Alias for * `options.predefinedAcl = 'publicRead'`) * @param {boolean} [options.resumable=true] Resumable uploads are automatically * enabled and must be shut off explicitly by setting to false. * @param {number} [options.timeout=60000] Set the HTTP request timeout in * milliseconds. This option is not available for resumable uploads. * Default: `60000` * @param {string} [options.uri] The URI for an already-created resumable * upload. See {@link File#createResumableUpload}. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {string|boolean} [options.validation] Possible values: `"md5"`, * `"crc32c"`, or `false`. By default, data integrity is validated with an * MD5 checksum for maximum reliability. CRC32c will provide better * performance with less reliability. You may also choose to skip * validation completely, however this is **not recommended**. * @param {UploadCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * * //- * // Upload a file from a local path. * //- * bucket.upload('/local/path/image.png', function(err, file, apiResponse) { * // Your bucket now contains: * // - "image.png" (with the contents of `/local/path/image.png') * * // `file` is an instance of a File object that refers to your new file. * }); * * * //- * // It's not always that easy. You will likely want to specify the filename * // used when your new file lands in your bucket. * // * // You may also want to set metadata or customize other options. * //- * const options = { * destination: 'new-image.png', * validation: 'crc32c', * metadata: { * metadata: { * event: 'Fall trip to the zoo' * } * } * }; * * bucket.upload('local-image.png', options, function(err, file) { * // Your bucket now contains: * // - "new-image.png" (with the contents of `local-image.png') * * // `file` is an instance of a File object that refers to your new file. * }); * * //- * // You can also have a file gzip'd on the fly. 
* //- * bucket.upload('index.html', { gzip: true }, function(err, file) { * // Your bucket now contains: * // - "index.html" (automatically compressed with gzip) * * // Downloading the file with `file.download` will automatically decode * the * // file. * }); * * //- * // You may also re-use a File object, {File}, that references * // the file you wish to create or overwrite. * //- * const options = { * destination: bucket.file('existing-file.png'), * resumable: false * }; * * bucket.upload('local-img.png', options, function(err, newFile) { * // Your bucket now contains: * // - "existing-file.png" (with the contents of `local-img.png') * * // Note: * // The `newFile` parameter is equal to `file`. * }); * * //- * // To use * // * // Customer-supplied Encryption Keys, provide the `encryptionKey` * option. * //- * const crypto = require('crypto'); * const encryptionKey = crypto.randomBytes(32); * * bucket.upload('img.png', { * encryptionKey: encryptionKey * }, function(err, newFile) { * // `img.png` was uploaded with your custom encryption key. * * // `newFile` is already configured to use the encryption key when making * // operations on the remote object. * * // However, to use your encryption key later, you must create a `File` * // instance with the `key` supplied: * const file = bucket.file('img.png', { * encryptionKey: encryptionKey * }); * * // Or with `file#setEncryptionKey`: * const file = bucket.file('img.png'); * file.setEncryptionKey(encryptionKey); * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.upload('local-image.png').then(function(data) { * const file = data[0]; * }); * * To upload a file from a URL, use {@link File#createWriteStream}. * * ``` * @example include:samples/files.js * region_tag:storage_upload_file * Another example: * * @example include:samples/encryption.js * region_tag:storage_upload_encrypted_file * Example of uploading an encrypted file: */ upload(pathString, optionsOrCallback, callback) { var _a, _b; const upload = (numberOfRetries) => { const returnValue = (0, async_retry_1.default)(async (bail) => { await new Promise((resolve, reject) => { var _a, _b; if (numberOfRetries === 0 && ((_b = (_a = newFile === null || newFile === void 0 ? void 0 : newFile.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry)) { newFile.storage.retryOptions.autoRetry = false; } const writable = newFile.createWriteStream(options); if (options.onUploadProgress) { writable.on('progress', options.onUploadProgress); } fs.createReadStream(pathString) .on('error', bail) .pipe(writable) .on('error', err => { if (this.storage.retryOptions.autoRetry && this.storage.retryOptions.retryableErrorFn(err)) { return reject(err); } else { return bail(err); } }) .on('finish', () => { return resolve(); }); }); }, { retries: numberOfRetries, factor: this.storage.retryOptions.retryDelayMultiplier, maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds }); if (!callback) { return returnValue; } else { return returnValue .then(() => { if (callback) { return callback(null, newFile, newFile.metadata); } }) .catch(callback); } }; // eslint-disable-next-line @typescript-eslint/no-explicit-any if (global['GCLOUD_SANDBOX_ENV']) { return; } let options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : callback; options = Object.assign({ metadata: {}, }, options); // Do not retry if precondition option ifGenerationMatch is not set // because this is a file operation let maxRetries = this.storage.retryOptions.maxRetries; if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryConditional) || this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryNever) { maxRetries = 0; } let newFile; if (options.destination instanceof file_js_1.File) { newFile = options.destination; } else if (options.destination !== null && typeof options.destination === 'string') { // Use the string as the name of the file. newFile = this.file(options.destination, { encryptionKey: options.encryptionKey, kmsKeyName: options.kmsKeyName, preconditionOpts: this.instancePreconditionOpts, }); } else { // Resort to using the name of the incoming file. const destination = path.basename(pathString); newFile = this.file(destination, { encryptionKey: options.encryptionKey, kmsKeyName: options.kmsKeyName, preconditionOpts: this.instancePreconditionOpts, }); } upload(maxRetries); } /** * @private * * @typedef {object} MakeAllFilesPublicPrivateOptions * @property {boolean} [force] Suppress errors until all files have been * processed. * @property {boolean} [private] Make files private. * @property {boolean} [public] Make files public. * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @private * * @callback SetBucketMetadataCallback * @param {?Error} err Request error, if any. * @param {File[]} files Files that were updated. */ /** * @typedef {array} MakeAllFilesPublicPrivateResponse * @property {File[]} 0 List of files affected. */ /** * Iterate over all of a bucket's files, calling `file.makePublic()` (public) * or `file.makePrivate()` (private) on each. * * Operations are performed in parallel, up to 10 at once. The first error * breaks the loop, and will execute the provided callback with it. Specify * `{ force: true }` to suppress the errors. * * @private * * @param {MakeAllFilesPublicPrivateOptions} [options] Configuration options. * @param {boolean} [options.force] Suppress errors until all files have been * processed. * @param {boolean} [options.private] Make files private. * @param {boolean} [options.public] Make files public. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {MakeAllFilesPublicPrivateCallback} callback Callback function. * * @return {Promise} */ makeAllFilesPublicPrivate_(optionsOrCallback, callback) { const MAX_PARALLEL_LIMIT = 10; const errors = []; const updatedFiles = []; const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; const processFile = async (file) => { try { await (options.public ? 
file.makePublic() : file.makePrivate(options)); updatedFiles.push(file); } catch (e) { if (!options.force) { throw e; } errors.push(e); } }; this.getFiles(options) .then(([files]) => { const limit = (0, p_limit_1.default)(MAX_PARALLEL_LIMIT); const promises = files.map(file => { return limit(() => processFile(file)); }); return Promise.all(promises); }) .then(() => callback(errors.length > 0 ? errors : null, updatedFiles), err => callback(err, updatedFiles)); } getId() { return this.id; } disableAutoRetryConditionallyIdempotent_( // eslint-disable-next-line @typescript-eslint/no-explicit-any coreOpts, // eslint-disable-next-line @typescript-eslint/no-explicit-any methodType, localPreconditionOptions) { var _a, _b; if (typeof coreOpts === 'object' && ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch) === undefined && (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && (methodType === AvailableServiceObjectMethods.setMetadata || methodType === AvailableServiceObjectMethods.delete) && this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryConditional) { this.storage.retryOptions.autoRetry = false; } else if (this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryNever) { this.storage.retryOptions.autoRetry = false; } } } exports.Bucket = Bucket; /*! Developer Documentation * * These methods can be auto-paginated. */ paginator_1.paginator.extend(Bucket, 'getFiles'); /*! Developer Documentation * * All async methods (except for streams) will return a Promise in the event * that a callback is omitted. */ (0, promisify_1.promisifyAll)(Bucket, { exclude: ['cloudStorageURI', 'request', 'file', 'notification', 'restore'], }); /***/ }), /***/ 12658: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Channel = void 0; const index_js_1 = __nccwpck_require__(11325); const promisify_1 = __nccwpck_require__(60206); /** * Create a channel object to interact with a Cloud Storage channel. * * See {@link https://cloud.google.com/storage/docs/object-change-notification| Object Change Notification} * * @class * * @param {string} id The ID of the channel. * @param {string} resourceId The resource ID of the channel. * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const channel = storage.channel('id', 'resource-id'); * ``` */ class Channel extends index_js_1.ServiceObject { constructor(storage, id, resourceId) { const config = { parent: storage, baseUrl: '/channels', // An ID shouldn't be included in the API requests. 
// RE: // https://github.com/GoogleCloudPlatform/google-cloud-node/issues/1145 id: '', methods: { // Only need `request`. }, }; super(config); this.metadata.id = id; this.metadata.resourceId = resourceId; } /** * @typedef {array} StopResponse * @property {object} 0 The full API response. */ /** * @callback StopCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** * Stop this channel. * * @param {StopCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const channel = storage.channel('id', 'resource-id'); * channel.stop(function(err, apiResponse) { * if (!err) { * // Channel stopped successfully. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * channel.stop().then(function(data) { * const apiResponse = data[0]; * }); * ``` */ stop(callback) { callback = callback || index_js_1.util.noop; this.request({ method: 'POST', uri: '/stop', json: this.metadata, }, (err, apiResponse) => { callback(err, apiResponse); }); } } exports.Channel = Channel; /*! Developer Documentation * * All async methods (except for streams) will return a Promise in the event * that a callback is omitted. */ (0, promisify_1.promisifyAll)(Channel); /***/ }), /***/ 34317: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; }; var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? 
f.value : state.get(receiver); }; var _CRC32C_crc32c; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.CRC32C_EXTENSION_TABLE = exports.CRC32C_EXTENSIONS = exports.CRC32C_EXCEPTION_MESSAGES = exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = exports.CRC32C = void 0; const fs_1 = __nccwpck_require__(79896); /** * Ported from {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc#L16-L59 github.com/google/crc32c} */ const CRC32C_EXTENSIONS = [ 0x00000000, 0xf26b8303, 0xe13b70f7, 0x1350f3f4, 0xc79a971f, 0x35f1141c, 0x26a1e7e8, 0xd4ca64eb, 0x8ad958cf, 0x78b2dbcc, 0x6be22838, 0x9989ab3b, 0x4d43cfd0, 0xbf284cd3, 0xac78bf27, 0x5e133c24, 0x105ec76f, 0xe235446c, 0xf165b798, 0x030e349b, 0xd7c45070, 0x25afd373, 0x36ff2087, 0xc494a384, 0x9a879fa0, 0x68ec1ca3, 0x7bbcef57, 0x89d76c54, 0x5d1d08bf, 0xaf768bbc, 0xbc267848, 0x4e4dfb4b, 0x20bd8ede, 0xd2d60ddd, 0xc186fe29, 0x33ed7d2a, 0xe72719c1, 0x154c9ac2, 0x061c6936, 0xf477ea35, 0xaa64d611, 0x580f5512, 0x4b5fa6e6, 0xb93425e5, 0x6dfe410e, 0x9f95c20d, 0x8cc531f9, 0x7eaeb2fa, 0x30e349b1, 0xc288cab2, 0xd1d83946, 0x23b3ba45, 0xf779deae, 0x05125dad, 0x1642ae59, 0xe4292d5a, 0xba3a117e, 0x4851927d, 0x5b016189, 0xa96ae28a, 0x7da08661, 0x8fcb0562, 0x9c9bf696, 0x6ef07595, 0x417b1dbc, 0xb3109ebf, 0xa0406d4b, 0x522bee48, 0x86e18aa3, 0x748a09a0, 0x67dafa54, 0x95b17957, 0xcba24573, 0x39c9c670, 0x2a993584, 0xd8f2b687, 0x0c38d26c, 0xfe53516f, 0xed03a29b, 0x1f682198, 0x5125dad3, 0xa34e59d0, 0xb01eaa24, 0x42752927, 0x96bf4dcc, 0x64d4cecf, 0x77843d3b, 0x85efbe38, 0xdbfc821c, 0x2997011f, 0x3ac7f2eb, 0xc8ac71e8, 0x1c661503, 0xee0d9600, 0xfd5d65f4, 0x0f36e6f7, 0x61c69362, 0x93ad1061, 0x80fde395, 0x72966096, 0xa65c047d, 0x5437877e, 0x4767748a, 0xb50cf789, 0xeb1fcbad, 0x197448ae, 0x0a24bb5a, 0xf84f3859, 0x2c855cb2, 0xdeeedfb1, 0xcdbe2c45, 0x3fd5af46, 0x7198540d, 0x83f3d70e, 0x90a324fa, 0x62c8a7f9, 0xb602c312, 0x44694011, 0x5739b3e5, 0xa55230e6, 0xfb410cc2, 0x092a8fc1, 0x1a7a7c35, 0xe811ff36, 0x3cdb9bdd, 0xceb018de, 0xdde0eb2a, 0x2f8b6829, 0x82f63b78, 0x709db87b, 0x63cd4b8f, 0x91a6c88c, 0x456cac67, 0xb7072f64, 0xa457dc90, 0x563c5f93, 0x082f63b7, 0xfa44e0b4, 0xe9141340, 0x1b7f9043, 0xcfb5f4a8, 0x3dde77ab, 0x2e8e845f, 0xdce5075c, 0x92a8fc17, 0x60c37f14, 0x73938ce0, 0x81f80fe3, 0x55326b08, 0xa759e80b, 0xb4091bff, 0x466298fc, 0x1871a4d8, 0xea1a27db, 0xf94ad42f, 0x0b21572c, 0xdfeb33c7, 0x2d80b0c4, 0x3ed04330, 0xccbbc033, 0xa24bb5a6, 0x502036a5, 0x4370c551, 0xb11b4652, 0x65d122b9, 0x97baa1ba, 0x84ea524e, 0x7681d14d, 0x2892ed69, 0xdaf96e6a, 0xc9a99d9e, 0x3bc21e9d, 0xef087a76, 0x1d63f975, 0x0e330a81, 0xfc588982, 0xb21572c9, 0x407ef1ca, 0x532e023e, 0xa145813d, 0x758fe5d6, 0x87e466d5, 0x94b49521, 0x66df1622, 0x38cc2a06, 0xcaa7a905, 0xd9f75af1, 0x2b9cd9f2, 0xff56bd19, 0x0d3d3e1a, 0x1e6dcdee, 0xec064eed, 0xc38d26c4, 0x31e6a5c7, 0x22b65633, 0xd0ddd530, 0x0417b1db, 0xf67c32d8, 0xe52cc12c, 0x1747422f, 0x49547e0b, 0xbb3ffd08, 0xa86f0efc, 0x5a048dff, 0x8ecee914, 0x7ca56a17, 0x6ff599e3, 0x9d9e1ae0, 0xd3d3e1ab, 0x21b862a8, 0x32e8915c, 0xc083125f, 0x144976b4, 0xe622f5b7, 0xf5720643, 0x07198540, 0x590ab964, 0xab613a67, 0xb831c993, 0x4a5a4a90, 0x9e902e7b, 0x6cfbad78, 0x7fab5e8c, 0x8dc0dd8f, 0xe330a81a, 0x115b2b19, 0x020bd8ed, 0xf0605bee, 0x24aa3f05, 0xd6c1bc06, 0xc5914ff2, 0x37faccf1, 0x69e9f0d5, 0x9b8273d6, 0x88d28022, 0x7ab90321, 0xae7367ca, 0x5c18e4c9, 0x4f48173d, 0xbd23943e, 0xf36e6f75, 0x0105ec76, 0x12551f82, 0xe03e9c81, 0x34f4f86a, 0xc69f7b69, 0xd5cf889d, 0x27a40b9e, 0x79b737ba, 0x8bdcb4b9, 0x988c474d, 0x6ae7c44e, 0xbe2da0a5, 
0x4c4623a6, 0x5f16d052, 0xad7d5351, ]; exports.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; const CRC32C_EXTENSION_TABLE = new Int32Array(CRC32C_EXTENSIONS); exports.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; const CRC32C_DEFAULT_VALIDATOR_GENERATOR = () => new CRC32C(); exports.CRC32C_DEFAULT_VALIDATOR_GENERATOR = CRC32C_DEFAULT_VALIDATOR_GENERATOR; const CRC32C_EXCEPTION_MESSAGES = { INVALID_INIT_BASE64_RANGE: (l) => `base64-encoded data expected to equal 4 bytes, not ${l}`, INVALID_INIT_BUFFER_LENGTH: (l) => `Buffer expected to equal 4 bytes, not ${l}`, INVALID_INIT_INTEGER: (l) => `Number expected to be a safe, unsigned 32-bit integer, not ${l}`, }; exports.CRC32C_EXCEPTION_MESSAGES = CRC32C_EXCEPTION_MESSAGES; class CRC32C { /** * Constructs a new `CRC32C` object. * * Reconstruction is recommended via the `CRC32C.from` static method. * * @param initialValue An initial CRC32C value - a signed 32-bit integer. */ constructor(initialValue = 0) { /** Current CRC32C value */ _CRC32C_crc32c.set(this, 0); __classPrivateFieldSet(this, _CRC32C_crc32c, initialValue, "f"); } /** * Calculates a CRC32C from a provided buffer. * * Implementation inspired from: * - {@link https://github.com/google/crc32c/blob/21fc8ef30415a635e7351ffa0e5d5367943d4a94/src/crc32c_portable.cc github.com/google/crc32c} * - {@link https://github.com/googleapis/python-crc32c/blob/a595e758c08df445a99c3bf132ee8e80a3ec4308/src/google_crc32c/python.py github.com/googleapis/python-crc32c} * - {@link https://github.com/googleapis/java-storage/pull/1376/files github.com/googleapis/java-storage} * * @param data The `Buffer` to generate the CRC32C from */ update(data) { let current = __classPrivateFieldGet(this, _CRC32C_crc32c, "f") ^ 0xffffffff; for (const d of data) { const tablePoly = CRC32C.CRC32C_EXTENSION_TABLE[(d ^ current) & 0xff]; current = tablePoly ^ (current >>> 8); } __classPrivateFieldSet(this, _CRC32C_crc32c, current ^ 0xffffffff, "f"); } /** * Validates a provided input to the current CRC32C value. * * @param input A Buffer, `CRC32C`-compatible object, base64-encoded data (string), or signed 32-bit integer */ validate(input) { if (typeof input === 'number') { return input === __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); } else if (typeof input === 'string') { return input === this.toString(); } else if (Buffer.isBuffer(input)) { return Buffer.compare(input, this.toBuffer()) === 0; } else { // `CRC32C`-like object return input.toString() === this.toString(); } } /** * Returns a `Buffer` representation of the CRC32C value */ toBuffer() { const buffer = Buffer.alloc(4); buffer.writeInt32BE(__classPrivateFieldGet(this, _CRC32C_crc32c, "f")); return buffer; } /** * Returns a JSON-compatible, base64-encoded representation of the CRC32C value. * * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/JSON/stringify `JSON#stringify`} */ toJSON() { return this.toString(); } /** * Returns a base64-encoded representation of the CRC32C value. * * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/toString `Object#toString`} */ toString() { return this.toBuffer().toString('base64'); } /** * Returns the `number` representation of the CRC32C value as a signed 32-bit integer * * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Object/valueOf `Object#valueOf`} */ valueOf() { return __classPrivateFieldGet(this, _CRC32C_crc32c, "f"); } /** * Generates a `CRC32C` from a compatible buffer format. 
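 *
 * A minimal usage sketch (assuming `CRC32C` is re-exported from the package
 * entry point, as in recent versions; the byte values below are purely
 * illustrative): the 4 bytes are read as a big-endian, signed 32-bit integer.
 *
 * ```js
 * const {CRC32C} = require('@google-cloud/storage');
 *
 * // 0x00000001 big-endian -> CRC32C value 1
 * const crc32c = CRC32C.fromBuffer(Buffer.from([0x00, 0x00, 0x00, 0x01]));
 * crc32c.valueOf();  // 1
 * crc32c.toString(); // 'AAAAAQ==' (base64 of the same 4 bytes)
 * ```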
* * @param value 4-byte `ArrayBufferView`/`Buffer`/`TypedArray` */ static fromBuffer(value) { let buffer; if (Buffer.isBuffer(value)) { buffer = value; } else if ('buffer' in value) { // `ArrayBufferView` buffer = Buffer.from(value.buffer); } else { // `ArrayBuffer` buffer = Buffer.from(value); } if (buffer.byteLength !== 4) { throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BUFFER_LENGTH(buffer.byteLength)); } return new CRC32C(buffer.readInt32BE()); } static async fromFile(file) { const crc32c = new CRC32C(); await new Promise((resolve, reject) => { (0, fs_1.createReadStream)(file) .on('data', (d) => { if (typeof d === 'string') { crc32c.update(Buffer.from(d)); } else { crc32c.update(d); } }) .on('end', () => resolve()) .on('error', reject); }); return crc32c; } /** * Generates a `CRC32C` from 4-byte base64-encoded data (string). * * @param value 4-byte base64-encoded data (string) */ static fromString(value) { const buffer = Buffer.from(value, 'base64'); if (buffer.byteLength !== 4) { throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_BASE64_RANGE(buffer.byteLength)); } return this.fromBuffer(buffer); } /** * Generates a `CRC32C` from a safe, unsigned 32-bit integer. * * @param value an unsigned 32-bit integer */ static fromNumber(value) { if (!Number.isSafeInteger(value) || value > 2 ** 32 || value < -(2 ** 32)) { throw new RangeError(CRC32C_EXCEPTION_MESSAGES.INVALID_INIT_INTEGER(value)); } return new CRC32C(value); } /** * Generates a `CRC32C` from a variety of compatable types. * Note: strings are treated as input, not as file paths to read from. * * @param value A number, 4-byte `ArrayBufferView`/`Buffer`/`TypedArray`, or 4-byte base64-encoded data (string) */ static from(value) { if (typeof value === 'number') { return this.fromNumber(value); } else if (typeof value === 'string') { return this.fromString(value); } else if ('byteLength' in value) { // `ArrayBuffer` | `Buffer` | `ArrayBufferView` return this.fromBuffer(value); } else { // `CRC32CValidator`/`CRC32C`-like return this.fromString(value.toString()); } } } exports.CRC32C = CRC32C; _CRC32C_crc32c = new WeakMap(); CRC32C.CRC32C_EXTENSIONS = CRC32C_EXTENSIONS; CRC32C.CRC32C_EXTENSION_TABLE = CRC32C_EXTENSION_TABLE; /***/ }), /***/ 69975: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; var _File_instances, _File_validateIntegrity; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.File = exports.FileExceptionMessages = exports.RequestError = exports.STORAGE_POST_POLICY_BASE_URL = exports.ActionToHTTPMethod = void 0; const index_js_1 = __nccwpck_require__(11325); const promisify_1 = __nccwpck_require__(60206); const crypto = __importStar(__nccwpck_require__(76982)); const fs = __importStar(__nccwpck_require__(79896)); const mime_1 = __importDefault(__nccwpck_require__(94900)); const resumableUpload = __importStar(__nccwpck_require__(88043)); const stream_1 = __nccwpck_require__(2203); const zlib = __importStar(__nccwpck_require__(43106)); const storage_js_1 = __nccwpck_require__(92848); const bucket_js_1 = __nccwpck_require__(82887); const acl_js_1 = __nccwpck_require__(76637); const signer_js_1 = __nccwpck_require__(7319); const util_js_1 = __nccwpck_require__(37325); const duplexify_1 = __importDefault(__nccwpck_require__(29112)); const util_js_2 = __nccwpck_require__(74557); const crc32c_js_1 = __nccwpck_require__(34317); const hash_stream_validator_js_1 = __nccwpck_require__(14873); const async_retry_1 = __importDefault(__nccwpck_require__(45195)); var ActionToHTTPMethod; (function (ActionToHTTPMethod) { ActionToHTTPMethod["read"] = "GET"; ActionToHTTPMethod["write"] = "PUT"; ActionToHTTPMethod["delete"] = "DELETE"; ActionToHTTPMethod["resumable"] = "POST"; })(ActionToHTTPMethod || (exports.ActionToHTTPMethod = ActionToHTTPMethod = {})); /** * @deprecated - no longer used */ exports.STORAGE_POST_POLICY_BASE_URL = 'https://storage.googleapis.com'; /** * @private */ const GS_URL_REGEXP = /^gs:\/\/([a-z0-9_.-]+)\/(.+)$/; /** * @private * This regex will match compressible content types. These are primarily text/*, +json, +text, +xml content types. * This was based off of mime-db and may periodically need to be updated if new compressible content types become * standards. 
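 *
 * For illustration only (a sketch of the intended classification; the constant
 * is module-private, so assume it is in scope as defined just below):
 *
 * ```js
 * COMPRESSIBLE_MIME_REGEX.test('text/html');        // true  (text/*)
 * COMPRESSIBLE_MIME_REGEX.test('application/json'); // true
 * COMPRESSIBLE_MIME_REGEX.test('image/svg+xml');    // true  (+xml)
 * COMPRESSIBLE_MIME_REGEX.test('image/png');        // false
 * ```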
*/ const COMPRESSIBLE_MIME_REGEX = new RegExp([ /^text\/|application\/ecmascript|application\/javascript|application\/json/, /|application\/postscript|application\/rtf|application\/toml|application\/vnd.dart/, /|application\/vnd.ms-fontobject|application\/wasm|application\/x-httpd-php|application\/x-ns-proxy-autoconfig/, /|application\/x-sh(?!ockwave-flash)|application\/x-tar|application\/x-virtualbox-hdd|application\/x-virtualbox-ova|application\/x-virtualbox-ovf/, /|^application\/x-virtualbox-vbox$|application\/x-virtualbox-vdi|application\/x-virtualbox-vhd|application\/x-virtualbox-vmdk/, /|application\/xml|application\/xml-dtd|font\/otf|font\/ttf|image\/bmp|image\/vnd.adobe.photoshop|image\/vnd.microsoft.icon/, /|image\/vnd.ms-dds|image\/x-icon|image\/x-ms-bmp|message\/rfc822|model\/gltf-binary|\+json|\+text|\+xml|\+yaml/, ] .map(r => r.source) .join(''), 'i'); class RequestError extends Error { } exports.RequestError = RequestError; const SEVEN_DAYS = 7 * 24 * 60 * 60; const GS_UTIL_URL_REGEX = /(gs):\/\/([a-z0-9_.-]+)\/(.+)/g; const HTTPS_PUBLIC_URL_REGEX = /(https):\/\/(storage\.googleapis\.com)\/([a-z0-9_.-]+)\/(.+)/g; var FileExceptionMessages; (function (FileExceptionMessages) { FileExceptionMessages["EXPIRATION_TIME_NA"] = "An expiration time is not available."; FileExceptionMessages["DESTINATION_NO_NAME"] = "Destination file should have a name."; FileExceptionMessages["INVALID_VALIDATION_FILE_RANGE"] = "Cannot use validation with file ranges (start/end)."; FileExceptionMessages["MD5_NOT_AVAILABLE"] = "MD5 verification was specified, but is not available for the requested object. MD5 is not available for composite objects."; FileExceptionMessages["EQUALS_CONDITION_TWO_ELEMENTS"] = "Equals condition must be an array of 2 elements."; FileExceptionMessages["STARTS_WITH_TWO_ELEMENTS"] = "StartsWith condition must be an array of 2 elements."; FileExceptionMessages["CONTENT_LENGTH_RANGE_MIN_MAX"] = "ContentLengthRange must have numeric min & max fields."; FileExceptionMessages["DOWNLOAD_MISMATCH"] = "The downloaded data did not match the data from the server. To be sure the content is the same, you should download the file again."; FileExceptionMessages["UPLOAD_MISMATCH_DELETE_FAIL"] = "The uploaded data did not match the data from the server.\n As a precaution, we attempted to delete the file, but it was not successful.\n To be sure the content is the same, you should try removing the file manually,\n then uploading the file again.\n \n\nThe delete attempt failed with this message:\n\n "; FileExceptionMessages["UPLOAD_MISMATCH"] = "The uploaded data did not match the data from the server.\n As a precaution, the file has been deleted.\n To be sure the content is the same, you should try uploading the file again."; FileExceptionMessages["MD5_RESUMED_UPLOAD"] = "MD5 cannot be used with a continued resumable upload as MD5 cannot be extended from an existing value"; FileExceptionMessages["MISSING_RESUME_CRC32C_FINAL_UPLOAD"] = "The CRC32C is missing for the final portion of a resumed upload, which is required for validation. Please provide `resumeCRC32C` if validation is required, or disable `validation`."; })(FileExceptionMessages || (exports.FileExceptionMessages = FileExceptionMessages = {})); /** * A File object is created from your {@link Bucket} object using * {@link Bucket#file}. * * @class */ class File extends index_js_1.ServiceObject { /** * Cloud Storage uses access control lists (ACLs) to manage object and * bucket access. 
ACLs are the mechanism you use to share objects with other * users and allow other users to access your buckets and objects. * * An ACL consists of one or more entries, where each entry grants permissions * to an entity. Permissions define the actions that can be performed against * an object or bucket (for example, `READ` or `WRITE`); the entity defines * who the permission applies to (for example, a specific user or group of * users). * * The `acl` object on a File instance provides methods to get you a list of * the ACLs defined on your bucket, as well as set, update, and delete them. * * See {@link http://goo.gl/6qBBPO| About Access Control lists} * * @name File#acl * @mixes Acl * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * //- * // Make a file publicly readable. * //- * const options = { * entity: 'allUsers', * role: storage.acl.READER_ROLE * }; * * file.acl.add(options, function(err, aclObject) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.acl.add(options).then(function(data) { * const aclObject = data[0]; * const apiResponse = data[1]; * }); * ``` */ /** * The API-formatted resource description of the file. * * Note: This is not guaranteed to be up-to-date when accessed. To get the * latest record, call the `getMetadata()` method. * * @name File#metadata * @type {object} */ /** * The file's name. * @name File#name * @type {string} */ /** * @callback Crc32cGeneratorToStringCallback * A method returning the CRC32C as a base64-encoded string. * * @returns {string} * * @example * Hashing the string 'data' should return 'rth90Q==' * * ```js * const buffer = Buffer.from('data'); * crc32c.update(buffer); * crc32c.toString(); // 'rth90Q==' * ``` **/ /** * @callback Crc32cGeneratorValidateCallback * A method validating a base64-encoded CRC32C string. * * @param {string} [value] base64-encoded CRC32C string to validate * @returns {boolean} * * @example * Should return `true` if the value matches, `false` otherwise * * ```js * const buffer = Buffer.from('data'); * crc32c.update(buffer); * crc32c.validate('DkjKuA=='); // false * crc32c.validate('rth90Q=='); // true * ``` **/ /** * @callback Crc32cGeneratorUpdateCallback * A method for passing `Buffer`s for CRC32C generation. * * @param {Buffer} [data] data to update CRC32C value with * @returns {undefined} * * @example * Hashing buffers from 'some ' and 'text\n' * * ```js * const buffer1 = Buffer.from('some '); * crc32c.update(buffer1); * * const buffer2 = Buffer.from('text\n'); * crc32c.update(buffer2); * * crc32c.toString(); // 'DkjKuA==' * ``` **/ /** * @typedef {object} CRC32CValidator * @property {Crc32cGeneratorToStringCallback} * @property {Crc32cGeneratorValidateCallback} * @property {Crc32cGeneratorUpdateCallback} */ /** * @callback Crc32cGeneratorCallback * @returns {CRC32CValidator} */ /** * @typedef {object} FileOptions Options passed to the File constructor. * @property {string} [encryptionKey] A custom encryption key. * @property {number} [generation] Generation to scope the file to. * @property {string} [kmsKeyName] Cloud KMS Key used to encrypt this * object, if the object is encrypted by such a key. Limited availability; * usable only by enabled projects. * @property {string} [userProject] The ID of the project which will be * billed for all requests made from File object. 
* @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} */ /** * Constructs a file object. * * @param {Bucket} bucket The Bucket instance this file is * attached to. * @param {string} name The name of the remote file. * @param {FileOptions} [options] Configuration options. * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * ``` */ constructor(bucket, name, options = {}) { var _a, _b; const requestQueryObject = {}; let generation; if (options.generation !== null) { if (typeof options.generation === 'string') { generation = Number(options.generation); } else { generation = options.generation; } if (!isNaN(generation)) { requestQueryObject.generation = generation; } } Object.assign(requestQueryObject, options.preconditionOpts); const userProject = options.userProject || bucket.userProject; if (typeof userProject === 'string') { requestQueryObject.userProject = userProject; } const methods = { /** * @typedef {array} DeleteFileResponse * @property {object} 0 The full API response. */ /** * @callback DeleteFileCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** * Delete the file. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/delete| Objects: delete API Documentation} * * @method File#delete * @param {object} [options] Configuration options. * @param {boolean} [options.ignoreNotFound = false] Ignore an error if * the file does not exist. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {DeleteFileCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * file.delete(function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.delete().then(function(data) { * const apiResponse = data[0]; * }); * * ``` * @example include:samples/files.js * region_tag:storage_delete_file * Another example: */ delete: { reqOpts: { qs: requestQueryObject, }, }, /** * @typedef {array} FileExistsResponse * @property {boolean} 0 Whether the {@link File} exists. */ /** * @callback FileExistsCallback * @param {?Error} err Request error, if any. * @param {boolean} exists Whether the {@link File} exists. */ /** * Check if the file exists. * * @method File#exists * @param {options} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {FileExistsCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * file.exists(function(err, exists) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.exists().then(function(data) { * const exists = data[0]; * }); * ``` */ exists: { reqOpts: { qs: requestQueryObject, }, }, /** * @typedef {array} GetFileResponse * @property {File} 0 The {@link File}. * @property {object} 1 The full API response. */ /** * @callback GetFileCallback * @param {?Error} err Request error, if any. 
* @param {File} file The {@link File}. * @param {object} apiResponse The full API response. */ /** * Get a file object and its metadata if it exists. * * @method File#get * @param {options} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {number} [options.generation] The generation number to get * @param {string} [options.restoreToken] If this is a soft-deleted object in an HNS-enabled bucket, returns the restore token which will * be necessary to restore it if there's a name conflict with another object. * @param {boolean} [options.softDeleted] If true, returns the soft-deleted object. Object `generation` is required if `softDeleted` is set to True. * @param {GetFileCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * file.get(function(err, file, apiResponse) { * // file.metadata` has been populated. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.get().then(function(data) { * const file = data[0]; * const apiResponse = data[1]; * }); * ``` */ get: { reqOpts: { qs: requestQueryObject, }, }, /** * @typedef {array} GetFileMetadataResponse * @property {object} 0 The {@link File} metadata. * @property {object} 1 The full API response. */ /** * @callback GetFileMetadataCallback * @param {?Error} err Request error, if any. * @param {object} metadata The {@link File} metadata. * @param {object} apiResponse The full API response. */ /** * Get the file's metadata. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/get| Objects: get API Documentation} * * @method File#getMetadata * @param {object} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {GetFileMetadataCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * file.getMetadata(function(err, metadata, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.getMetadata().then(function(data) { * const metadata = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/files.js * region_tag:storage_get_metadata * Another example: */ getMetadata: { reqOpts: { qs: requestQueryObject, }, }, /** * @typedef {object} SetFileMetadataOptions Configuration options for File#setMetadata(). * @param {string} [userProject] The ID of the project which will be billed for the request. */ /** * @callback SetFileMetadataCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** * @typedef {array} SetFileMetadataResponse * @property {object} 0 The full API response. */ /** * Merge the given metadata with the current remote file's metadata. This * will set metadata if it was previously unset or update previously set * metadata. To unset previously set metadata, set its value to null. * * You can set custom key/value pairs in the metadata key of the given * object, however the other properties outside of this object must adhere * to the {@link https://goo.gl/BOnnCK| official API documentation}. 
* * * See the examples below for more information. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} * * @method File#setMetadata * @param {object} [metadata] The metadata you wish to update. * @param {SetFileMetadataOptions} [options] Configuration options. * @param {SetFileMetadataCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * const metadata = { * contentType: 'application/x-font-ttf', * metadata: { * my: 'custom', * properties: 'go here' * } * }; * * file.setMetadata(metadata, function(err, apiResponse) {}); * * // Assuming current metadata = { hello: 'world', unsetMe: 'will do' } * file.setMetadata({ * metadata: { * abc: '123', // will be set. * unsetMe: null, // will be unset (deleted). * hello: 'goodbye' // will be updated from 'world' to 'goodbye'. * } * }, function(err, apiResponse) { * // metadata should now be { abc: '123', hello: 'goodbye' } * }); * * //- * // Set a temporary hold on this file from its bucket's retention period * // configuration. * // * file.setMetadata({ * temporaryHold: true * }, function(err, apiResponse) {}); * * //- * // Alternatively, you may set a temporary hold. This will follow the * // same behavior as an event-based hold, with the exception that the * // bucket's retention policy will not renew for this file from the time * // the hold is released. * //- * file.setMetadata({ * eventBasedHold: true * }, function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.setMetadata(metadata).then(function(data) { * const apiResponse = data[0]; * }); * ``` */ setMetadata: { reqOpts: { qs: requestQueryObject, }, }, }; super({ parent: bucket, baseUrl: '/o', id: encodeURIComponent(name), methods, }); _File_instances.add(this); this.bucket = bucket; // eslint-disable-next-line @typescript-eslint/no-explicit-any this.storage = bucket.parent; // @TODO Can this duplicate code from above be avoided? if (options.generation !== null) { let generation; if (typeof options.generation === 'string') { generation = Number(options.generation); } else { generation = options.generation; } if (!isNaN(generation)) { this.generation = generation; } } this.kmsKeyName = options.kmsKeyName; this.userProject = userProject; this.name = name; if (options.encryptionKey) { this.setEncryptionKey(options.encryptionKey); } this.acl = new acl_js_1.Acl({ request: this.request.bind(this), pathPrefix: '/acl', }); this.crc32cGenerator = options.crc32cGenerator || this.bucket.crc32cGenerator; this.instanceRetryValue = (_b = (_a = this.storage) === null || _a === void 0 ? void 0 : _a.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry; this.instancePreconditionOpts = options === null || options === void 0 ? void 0 : options.preconditionOpts; } /** * The object's Cloud Storage URI (`gs://`) * * @example * ```ts * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * const file = bucket.file('image.png'); * * // `gs://my-bucket/image.png` * const href = file.cloudStorageURI.href; * ``` */ get cloudStorageURI() { const uri = this.bucket.cloudStorageURI; uri.pathname = this.name; return uri; } /** * A helper method for determining if a request should be retried based on preconditions. 
* This should only be used for methods where the idempotency is determined by * `ifGenerationMatch` * @private * * A request should not be retried under the following conditions: * - if precondition option `ifGenerationMatch` is not set OR * - if `idempotencyStrategy` is set to `RetryNever` */ shouldRetryBasedOnPreconditionAndIdempotencyStrat(options) { var _a; return !(((options === null || options === void 0 ? void 0 : options.ifGenerationMatch) === undefined && ((_a = this.instancePreconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryConditional) || this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryNever); } /** * @typedef {array} CopyResponse * @property {File} 0 The copied {@link File}. * @property {object} 1 The full API response. */ /** * @callback CopyCallback * @param {?Error} err Request error, if any. * @param {File} copiedFile The copied {@link File}. * @param {object} apiResponse The full API response. */ /** * @typedef {object} CopyOptions Configuration options for File#copy(). See an * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. * @property {string} [cacheControl] The cacheControl setting for the new file. * @property {string} [contentEncoding] The contentEncoding setting for the new file. * @property {string} [contentType] The contentType setting for the new file. * @property {string} [destinationKmsKeyName] Resource name of the Cloud * KMS key, of the form * `projects/my-project/locations/location/keyRings/my-kr/cryptoKeys/my-key`, * that will be used to encrypt the object. Overwrites the object * metadata's `kms_key_name` value, if any. * @property {Metadata} [metadata] Metadata to specify on the copied file. * @property {string} [predefinedAcl] Set the ACL for the new file. * @property {string} [token] A previously-returned `rewriteToken` from an * unfinished rewrite request. * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * Copy this file to another file. By default, this will copy the file to the * same bucket, but you can choose to copy it to another Bucket by providing * a Bucket or File object or a URL starting with "gs://". * The generation of the file will not be preserved. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/rewrite| Objects: rewrite API Documentation} * * @throws {Error} If the destination file is not provided. * * @param {string|Bucket|File} destination Destination file. * @param {CopyOptions} [options] Configuration options. See an * @param {CopyCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * * //- * // You can pass in a variety of types for the destination. * // * // For all of the below examples, assume we are working with the following * // Bucket and File objects. * //- * const bucket = storage.bucket('my-bucket'); * const file = bucket.file('my-image.png'); * * //- * // If you pass in a string for the destination, the file is copied to its * // current bucket, under the new name provided. * //- * file.copy('my-image-copy.png', function(err, copiedFile, apiResponse) { * // `my-bucket` now contains: * // - "my-image.png" * // - "my-image-copy.png" * * // `copiedFile` is an instance of a File object that refers to your new * // file. 
* }); * * //- * // If you pass in a string starting with "gs://" for the destination, the * // file is copied to the other bucket and under the new name provided. * //- * const newLocation = 'gs://another-bucket/my-image-copy.png'; * file.copy(newLocation, function(err, copiedFile, apiResponse) { * // `my-bucket` still contains: * // - "my-image.png" * // * // `another-bucket` now contains: * // - "my-image-copy.png" * * // `copiedFile` is an instance of a File object that refers to your new * // file. * }); * * //- * // If you pass in a Bucket object, the file will be copied to that bucket * // using the same name. * //- * const anotherBucket = storage.bucket('another-bucket'); * file.copy(anotherBucket, function(err, copiedFile, apiResponse) { * // `my-bucket` still contains: * // - "my-image.png" * // * // `another-bucket` now contains: * // - "my-image.png" * * // `copiedFile` is an instance of a File object that refers to your new * // file. * }); * * //- * // If you pass in a File object, you have complete control over the new * // bucket and filename. * //- * const anotherFile = anotherBucket.file('my-awesome-image.png'); * file.copy(anotherFile, function(err, copiedFile, apiResponse) { * // `my-bucket` still contains: * // - "my-image.png" * // * // `another-bucket` now contains: * // - "my-awesome-image.png" * * // Note: * // The `copiedFile` parameter is equal to `anotherFile`. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.copy(newLocation).then(function(data) { * const newFile = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/files.js * region_tag:storage_copy_file * Another example: */ copy(destination, optionsOrCallback, callback) { var _a, _b; const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); if (!destination) { throw noDestinationError; } let options = {}; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else if (optionsOrCallback) { options = { ...optionsOrCallback }; } callback = callback || index_js_1.util.noop; let destBucket; let destName; let newFile; if (typeof destination === 'string') { const parsedDestination = GS_URL_REGEXP.exec(destination); if (parsedDestination !== null && parsedDestination.length === 3) { destBucket = this.storage.bucket(parsedDestination[1]); destName = parsedDestination[2]; } else { destBucket = this.bucket; destName = destination; } } else if (destination instanceof bucket_js_1.Bucket) { destBucket = destination; destName = this.name; } else if (destination instanceof File) { destBucket = destination.bucket; destName = destination.name; newFile = destination; } else { throw noDestinationError; } const query = {}; if (this.generation !== undefined) { query.sourceGeneration = this.generation; } if (options.token !== undefined) { query.rewriteToken = options.token; } if (options.userProject !== undefined) { query.userProject = options.userProject; delete options.userProject; } if (options.predefinedAcl !== undefined) { query.destinationPredefinedAcl = options.predefinedAcl; delete options.predefinedAcl; } newFile = newFile || destBucket.file(destName); const headers = {}; if (this.encryptionKey !== undefined) { headers['x-goog-copy-source-encryption-algorithm'] = 'AES256'; headers['x-goog-copy-source-encryption-key'] = this.encryptionKeyBase64; headers['x-goog-copy-source-encryption-key-sha256'] = this.encryptionKeyHash; } if (newFile.encryptionKey !== undefined) { this.setEncryptionKey(newFile.encryptionKey); } 
else if (options.destinationKmsKeyName !== undefined) { query.destinationKmsKeyName = options.destinationKmsKeyName; delete options.destinationKmsKeyName; } else if (newFile.kmsKeyName !== undefined) { query.destinationKmsKeyName = newFile.kmsKeyName; } if (query.destinationKmsKeyName) { this.kmsKeyName = query.destinationKmsKeyName; const keyIndex = this.interceptors.indexOf(this.encryptionKeyInterceptor); if (keyIndex > -1) { this.interceptors.splice(keyIndex, 1); } } if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { this.storage.retryOptions.autoRetry = false; } if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch; delete options.preconditionOpts; } this.request({ method: 'POST', uri: `/rewriteTo/b/${destBucket.name}/o/${encodeURIComponent(newFile.name)}`, qs: query, json: options, headers, }, (err, resp) => { this.storage.retryOptions.autoRetry = this.instanceRetryValue; if (err) { callback(err, null, resp); return; } if (resp.rewriteToken) { const options = { token: resp.rewriteToken, }; if (query.userProject) { options.userProject = query.userProject; } if (query.destinationKmsKeyName) { options.destinationKmsKeyName = query.destinationKmsKeyName; } this.copy(newFile, options, callback); return; } callback(null, newFile, resp); }); } /** * @typedef {object} CreateReadStreamOptions Configuration options for File#createReadStream. * @property {string} [userProject] The ID of the project which will be * billed for the request. * @property {string|boolean} [validation] Possible values: `"md5"`, * `"crc32c"`, or `false`. By default, data integrity is validated with a * CRC32c checksum. You may use MD5 if preferred, but that hash is not * supported for composite objects. An error will be raised if MD5 is * specified but is not available. You may also choose to skip validation * completely, however this is **not recommended**. * @property {number} [start] A byte offset to begin the file's download * from. Default is 0. NOTE: Byte ranges are inclusive; that is, * `options.start = 0` and `options.end = 999` represent the first 1000 * bytes in a file or object. NOTE: when specifying a byte range, data * integrity is not available. * @property {number} [end] A byte offset to stop reading the file at. * NOTE: Byte ranges are inclusive; that is, `options.start = 0` and * `options.end = 999` represent the first 1000 bytes in a file or object. * NOTE: when specifying a byte range, data integrity is not available. * @property {boolean} [decompress=true] Disable auto decompression of the * received data. By default this option is set to `true`. * Applicable in cases where the data was uploaded with * `gzip: true` option. See {@link File#createWriteStream}. */ /** * Create a readable stream to read the contents of the remote file. It can be * piped to a writable stream or listened to for 'data' events to read a * file's contents. * * In the unlikely event there is a mismatch between what you downloaded and * the version in your Bucket, your error handler will receive an error with * code "CONTENT_DOWNLOAD_MISMATCH". If you receive this error, the best * recourse is to try downloading the file again. * * NOTE: Readable streams will emit the `end` event when the file is fully * downloaded. 
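 *
 * For example (a sketch; the bucket and file names here are illustrative, not
 * part of this module), the mismatch case can be handled by checking the error
 * code and simply downloading again:
 *
 * ```js
 * const {Storage} = require('@google-cloud/storage');
 * const fs = require('fs');
 *
 * const file = new Storage().bucket('my-bucket').file('my-file');
 *
 * file.createReadStream()
 *   .on('error', err => {
 *     if (err.code === 'CONTENT_DOWNLOAD_MISMATCH') {
 *       // Integrity check failed; the safest recourse is to download again.
 *     }
 *   })
 *   .pipe(fs.createWriteStream('/tmp/my-file'));
 * ```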
* * @param {CreateReadStreamOptions} [options] Configuration options. * @returns {ReadableStream} * * @example * ``` * //- * //
Downloading a File
* // * // The example below demonstrates how we can reference a remote file, then * // pipe its contents to a local file. This is effectively creating a local * // backup of your remote data. * //- * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * * const fs = require('fs'); * const remoteFile = bucket.file('image.png'); * const localFilename = '/Users/stephen/Photos/image.png'; * * remoteFile.createReadStream() * .on('error', function(err) {}) * .on('response', function(response) { * // Server connected and responded with the specified status and headers. * }) * .on('end', function() { * // The file is fully downloaded. * }) * .pipe(fs.createWriteStream(localFilename)); * * //- * // To limit the downloaded data to only a byte range, pass an options * // object. * //- * const logFile = myBucket.file('access_log'); * logFile.createReadStream({ * start: 10000, * end: 20000 * }) * .on('error', function(err) {}) * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); * * //- * // To read a tail byte range, specify only `options.end` as a negative * // number. * //- * const logFile = myBucket.file('access_log'); * logFile.createReadStream({ * end: -100 * }) * .on('error', function(err) {}) * .pipe(fs.createWriteStream('/Users/stephen/logfile.txt')); * ``` */ createReadStream(options = {}) { options = Object.assign({ decompress: true }, options); const rangeRequest = typeof options.start === 'number' || typeof options.end === 'number'; const tailRequest = options.end < 0; let validateStream = undefined; let request = undefined; const throughStream = new util_js_2.PassThroughShim(); let crc32c = true; let md5 = false; if (typeof options.validation === 'string') { const value = options.validation.toLowerCase().trim(); crc32c = value === 'crc32c'; md5 = value === 'md5'; } else if (options.validation === false) { crc32c = false; } const shouldRunValidation = !rangeRequest && (crc32c || md5); if (rangeRequest) { if (typeof options.validation === 'string' || options.validation === true) { throw new Error(FileExceptionMessages.INVALID_VALIDATION_FILE_RANGE); } // Range requests can't receive data integrity checks. crc32c = false; md5 = false; } const onComplete = (err) => { if (err) { // There is an issue with node-fetch 2.x that if the stream errors the underlying socket connection is not closed. // This causes a memory leak, so cleanup the sockets manually here by destroying the agent. if (request === null || request === void 0 ? void 0 : request.agent) { request.agent.destroy(); } throughStream.destroy(err); } }; // We listen to the response event from the request stream so that we // can... // // 1) Intercept any data from going to the user if an error occurred. // 2) Calculate the hashes from the http.IncomingMessage response // stream, // which will return the bytes from the source without decompressing // gzip'd content. We then send it through decompressed, if // applicable, to the user. const onResponse = (err, _body, rawResponseStream) => { if (err) { // Get error message from the body. this.getBufferFromReadable(rawResponseStream).then(body => { err.message = body.toString('utf8'); throughStream.destroy(err); }); return; } request = rawResponseStream.request; const headers = rawResponseStream.toJSON().headers; const isCompressed = headers['content-encoding'] === 'gzip'; const hashes = {}; // The object is safe to validate if: // 1. It was stored gzip and returned to us gzip OR // 2. 
It was never stored as gzip const safeToValidate = (headers['x-goog-stored-content-encoding'] === 'gzip' && isCompressed) || headers['x-goog-stored-content-encoding'] === 'identity'; const transformStreams = []; if (shouldRunValidation) { // The x-goog-hash header should be set with a crc32c and md5 hash. // ex: headers['x-goog-hash'] = 'crc32c=xxxx,md5=xxxx' if (typeof headers['x-goog-hash'] === 'string') { headers['x-goog-hash'] .split(',') .forEach((hashKeyValPair) => { const delimiterIndex = hashKeyValPair.indexOf('='); const hashType = hashKeyValPair.substring(0, delimiterIndex); const hashValue = hashKeyValPair.substring(delimiterIndex + 1); hashes[hashType] = hashValue; }); } validateStream = new hash_stream_validator_js_1.HashStreamValidator({ crc32c, md5, crc32cGenerator: this.crc32cGenerator, crc32cExpected: hashes.crc32c, md5Expected: hashes.md5, }); } if (md5 && !hashes.md5) { const hashError = new RequestError(FileExceptionMessages.MD5_NOT_AVAILABLE); hashError.code = 'MD5_NOT_AVAILABLE'; throughStream.destroy(hashError); return; } if (safeToValidate && shouldRunValidation && validateStream) { transformStreams.push(validateStream); } if (isCompressed && options.decompress) { transformStreams.push(zlib.createGunzip()); } (0, stream_1.pipeline)(rawResponseStream, ...transformStreams, throughStream, onComplete); }; // Authenticate the request, then pipe the remote API request to the stream // returned to the user. const makeRequest = () => { const query = { alt: 'media' }; if (this.generation) { query.generation = this.generation; } if (options.userProject) { query.userProject = options.userProject; } const headers = { 'Accept-Encoding': 'gzip', 'Cache-Control': 'no-store', }; if (rangeRequest) { const start = typeof options.start === 'number' ? options.start : '0'; const end = typeof options.end === 'number' ? options.end : ''; headers.Range = `bytes=${tailRequest ? end : `${start}-${end}`}`; } const reqOpts = { uri: '', headers, qs: query, }; if (options[util_js_1.GCCL_GCS_CMD_KEY]) { reqOpts[util_js_1.GCCL_GCS_CMD_KEY] = options[util_js_1.GCCL_GCS_CMD_KEY]; } this.requestStream(reqOpts) .on('error', err => { throughStream.destroy(err); }) .on('response', res => { throughStream.emit('response', res); index_js_1.util.handleResp(null, res, null, onResponse); }) .resume(); }; throughStream.on('reading', makeRequest); return throughStream; } /** * @callback CreateResumableUploadCallback * @param {?Error} err Request error, if any. * @param {string} uri The resumable upload's unique session URI. */ /** * @typedef {array} CreateResumableUploadResponse * @property {string} 0 The resumable upload's unique session URI. */ /** * @typedef {object} CreateResumableUploadOptions * @property {object} [metadata] Metadata to set on the file. * @property {number} [offset] The starting byte of the upload stream for resuming an interrupted upload. * @property {string} [origin] Origin header to set for the upload. * @property {string} [predefinedAcl] Apply a predefined set of access * controls to this object. * * Acceptable values are: * - **`authenticatedRead`** - Object owner gets `OWNER` access, and * `allAuthenticatedUsers` get `READER` access. * * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and * project team owners get `OWNER` access. * * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project * team owners get `READER` access. * * - **`private`** - Object owner gets `OWNER` access. 
* * - **`projectPrivate`** - Object owner gets `OWNER` access, and project * team members get access according to their roles. * * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` * get `READER` access. * @property {boolean} [private] Make the uploaded file private. (Alias for * `options.predefinedAcl = 'private'`) * @property {boolean} [public] Make the uploaded file public. (Alias for * `options.predefinedAcl = 'publicRead'`) * @property {string} [userProject] The ID of the project which will be * billed for the request. * @property {string} [chunkSize] Create a separate request per chunk. This * value is in bytes and should be a multiple of 256 KiB (2^18). * {@link https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload| We recommend using at least 8 MiB for the chunk size.} */ /** * Create a unique resumable upload session URI. This is the first step when * performing a resumable upload. * * See the {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} * for more on how the entire process works. * *
Note:
* * If you are just looking to perform a resumable upload without worrying * about any of the details, see {@link File#createWriteStream}. Resumable * uploads are performed by default. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/resumable-upload| Resumable upload guide} * * @param {CreateResumableUploadOptions} [options] Configuration options. * @param {CreateResumableUploadCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * file.createResumableUpload(function(err, uri) { * if (!err) { * // `uri` can be used to PUT data to. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.createResumableUpload().then(function(data) { * const uri = data[0]; * }); * ``` */ createResumableUpload(optionsOrCallback, callback) { var _a, _b; const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; const retryOptions = this.storage.retryOptions; if ((((_a = options === null || options === void 0 ? void 0 : options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) === undefined && ((_b = this.instancePreconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryConditional) || this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryNever) { retryOptions.autoRetry = false; } resumableUpload.createURI({ authClient: this.storage.authClient, apiEndpoint: this.storage.apiEndpoint, bucket: this.bucket.name, customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), file: this.name, generation: this.generation, key: this.encryptionKey, kmsKeyName: this.kmsKeyName, metadata: options.metadata, offset: options.offset, origin: options.origin, predefinedAcl: options.predefinedAcl, private: options.private, public: options.public, userProject: options.userProject || this.userProject, retryOptions: retryOptions, params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, universeDomain: this.bucket.storage.universeDomain, [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], }, callback); this.storage.retryOptions.autoRetry = this.instanceRetryValue; } /** * @typedef {object} CreateWriteStreamOptions Configuration options for File#createWriteStream(). * @property {string} [contentType] Alias for * `options.metadata.contentType`. If set to `auto`, the file name is used * to determine the contentType. * @property {string|boolean} [gzip] If true, automatically gzip the file. * If set to `auto`, the contentType is used to determine if the file * should be gzipped. This will set `options.metadata.contentEncoding` to * `gzip` if necessary. * @property {object} [metadata] See the examples below or * {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert#request_properties_JSON| Objects: insert request body} * for more details. * @property {number} [offset] The starting byte of the upload stream, for * resuming an interrupted upload. Defaults to 0. * @property {string} [predefinedAcl] Apply a predefined set of access * controls to this object. 
* * Acceptable values are: * - **`authenticatedRead`** - Object owner gets `OWNER` access, and * `allAuthenticatedUsers` get `READER` access. * * - **`bucketOwnerFullControl`** - Object owner gets `OWNER` access, and * project team owners get `OWNER` access. * * - **`bucketOwnerRead`** - Object owner gets `OWNER` access, and project * team owners get `READER` access. * * - **`private`** - Object owner gets `OWNER` access. * * - **`projectPrivate`** - Object owner gets `OWNER` access, and project * team members get access according to their roles. * * - **`publicRead`** - Object owner gets `OWNER` access, and `allUsers` * get `READER` access. * @property {boolean} [private] Make the uploaded file private. (Alias for * `options.predefinedAcl = 'private'`) * @property {boolean} [public] Make the uploaded file public. (Alias for * `options.predefinedAcl = 'publicRead'`) * @property {boolean} [resumable] Force a resumable upload. NOTE: When * working with streams, the file format and size is unknown until it's * completely consumed. Because of this, it's best for you to be explicit * for what makes sense given your input. * @property {number} [timeout=60000] Set the HTTP request timeout in * milliseconds. This option is not available for resumable uploads. * Default: `60000` * @property {string} [uri] The URI for an already-created resumable * upload. See {@link File#createResumableUpload}. * @property {string} [userProject] The ID of the project which will be * billed for the request. * @property {string|boolean} [validation] Possible values: `"md5"`, * `"crc32c"`, or `false`. By default, data integrity is validated with a * CRC32c checksum. You may use MD5 if preferred, but that hash is not * supported for composite objects. An error will be raised if MD5 is * specified but is not available. You may also choose to skip validation * completely, however this is **not recommended**. In addition to specifying * validation type, providing `metadata.crc32c` or `metadata.md5Hash` will * cause the server to perform validation in addition to client validation. * NOTE: Validation is automatically skipped for objects that were * uploaded using the `gzip` option and have already compressed content. */ /** * Create a writable stream to overwrite the contents of the file in your * bucket. * * A File object can also be used to create files for the first time. * * Resumable uploads are automatically enabled and must be shut off explicitly * by setting `options.resumable` to `false`. * * *
* There is some overhead when using a resumable upload that can cause * noticeable performance degradation while uploading a series of small * files. When uploading files smaller than 10 MB, it is recommended that the * resumable feature be disabled. *
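 * For example, when uploading many objects below that ~10 MB threshold, the
 * resumable behavior can be turned off per stream. The following is only a
 * minimal sketch; the bucket name, object name, and local path are
 * placeholders:
 *
 * ```
 * const fs = require('fs');
 * const {Storage} = require('@google-cloud/storage');
 * const storage = new Storage();
 * const smallFile = storage.bucket('my-bucket').file('small-config.json');
 *
 * fs.createReadStream('/tmp/small-config.json')
 *   .pipe(smallFile.createWriteStream({ resumable: false }))
 *   .on('error', function(err) {})
 *   .on('finish', function() {
 *     // The object was uploaded with a single, non-resumable request.
 *   });
 * ```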
* * NOTE: Writable streams will emit the `finish` event when the file is fully * uploaded. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/how-tos/upload Upload Options (Simple or Resumable)} * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/insert Objects: insert API Documentation} * * @param {CreateWriteStreamOptions} [options] Configuration options. * @returns {WritableStream} * * @example * ``` * const fs = require('fs'); * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * //- * //
Uploading a File
* // * // Now, consider a case where we want to upload a file to your bucket. You * // have the option of using {@link Bucket#upload}, but that is just * // a convenience method which will do the following. * //- * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') * .pipe(file.createWriteStream()) * .on('error', function(err) {}) * .on('finish', function() { * // The file upload is complete. * }); * * //- * //
Uploading a File with gzip compression
* //- * fs.createReadStream('/Users/stephen/site/index.html') * .pipe(file.createWriteStream({ gzip: true })) * .on('error', function(err) {}) * .on('finish', function() { * // The file upload is complete. * }); * * //- * // Downloading the file with `createReadStream` will automatically decode * // the file. * //- * * //- * //
Uploading a File with Metadata
* // * // One last case you may run into is when you want to upload a file to your * // bucket and set its metadata at the same time. Like above, you can use * // {@link Bucket#upload} to do this, which is just a wrapper around * // the following. * //- * fs.createReadStream('/Users/stephen/Photos/birthday-at-the-zoo/panda.jpg') * .pipe(file.createWriteStream({ * metadata: { * contentType: 'image/jpeg', * metadata: { * custom: 'metadata' * } * } * })) * .on('error', function(err) {}) * .on('finish', function() { * // The file upload is complete. * }); * ``` * * //- * //
Continuing a Resumable Upload
* // * // One can capture a `uri` from a resumable upload to reuse later. * // Additionally, for validation, one can also capture and pass `crc32c`. * //- * let uri: string | undefined = undefined; * let resumeCRC32C: string | undefined = undefined; * * fs.createWriteStream() * .on('uri', link => {uri = link}) * .on('crc32', crc32c => {resumeCRC32C = crc32c}); * * // later... * fs.createWriteStream({uri, resumeCRC32C}); */ // eslint-disable-next-line @typescript-eslint/no-explicit-any createWriteStream(options = {}) { var _a; (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); if (options.contentType) { options.metadata.contentType = options.contentType; } if (!options.metadata.contentType || options.metadata.contentType === 'auto') { const detectedContentType = mime_1.default.getType(this.name); if (detectedContentType) { options.metadata.contentType = detectedContentType; } } let gzip = options.gzip; if (gzip === 'auto') { gzip = COMPRESSIBLE_MIME_REGEX.test(options.metadata.contentType || ''); } if (gzip) { options.metadata.contentEncoding = 'gzip'; } let crc32c = true; let md5 = false; if (typeof options.validation === 'string') { options.validation = options.validation.toLowerCase(); crc32c = options.validation === 'crc32c'; md5 = options.validation === 'md5'; } else if (options.validation === false) { crc32c = false; md5 = false; } if (options.offset) { if (md5) { throw new RangeError(FileExceptionMessages.MD5_RESUMED_UPLOAD); } if (crc32c && !options.isPartialUpload && !options.resumeCRC32C) { throw new RangeError(FileExceptionMessages.MISSING_RESUME_CRC32C_FINAL_UPLOAD); } } /** * A callback for determining when the underlying pipeline is complete. * It's possible the pipeline callback could error before the write stream * calls `final` so by default this will destroy the write stream unless the * write stream sets this callback via its `final` handler. * @param error An optional error */ let pipelineCallback = error => { writeStream.destroy(error || undefined); }; // A stream for consumer to write to const writeStream = new stream_1.Writable({ final(cb) { // Set the pipeline callback to this callback so the pipeline's results // can be populated to the consumer pipelineCallback = cb; emitStream.end(); }, write(chunk, encoding, cb) { emitStream.write(chunk, encoding, cb); }, }); // If the write stream, which is returned to the caller, catches an error we need to make sure that // at least one of the streams in the pipeline below gets notified so that they // all get cleaned up / destroyed. writeStream.once('error', e => { emitStream.destroy(e); }); // If the write stream is closed, cleanup the pipeline below by calling destroy on one of the streams. writeStream.once('close', () => { emitStream.destroy(); }); const transformStreams = []; if (gzip) { transformStreams.push(zlib.createGzip()); } const emitStream = new util_js_2.PassThroughShim(); let hashCalculatingStream = null; if (crc32c || md5) { const crc32cInstance = options.resumeCRC32C ? 
crc32c_js_1.CRC32C.from(options.resumeCRC32C) : undefined; hashCalculatingStream = new hash_stream_validator_js_1.HashStreamValidator({ crc32c, crc32cInstance, md5, crc32cGenerator: this.crc32cGenerator, updateHashesOnly: true, }); transformStreams.push(hashCalculatingStream); } const fileWriteStream = (0, duplexify_1.default)(); let fileWriteStreamMetadataReceived = false; // Handing off emitted events to users emitStream.on('reading', () => writeStream.emit('reading')); emitStream.on('writing', () => writeStream.emit('writing')); fileWriteStream.on('uri', evt => writeStream.emit('uri', evt)); fileWriteStream.on('progress', evt => writeStream.emit('progress', evt)); fileWriteStream.on('response', resp => writeStream.emit('response', resp)); fileWriteStream.once('metadata', () => { fileWriteStreamMetadataReceived = true; }); writeStream.once('writing', () => { if (options.resumable === false) { this.startSimpleUpload_(fileWriteStream, options); } else { this.startResumableUpload_(fileWriteStream, options); } (0, stream_1.pipeline)(emitStream, ...transformStreams, fileWriteStream, async (e) => { if (e) { return pipelineCallback(e); } // We want to make sure we've received the metadata from the server in order // to properly validate the object's integrity. Depending on the type of upload, // the stream could close before the response is returned. if (!fileWriteStreamMetadataReceived) { try { await new Promise((resolve, reject) => { fileWriteStream.once('metadata', resolve); fileWriteStream.once('error', reject); }); } catch (e) { return pipelineCallback(e); } } // Emit the local CRC32C value for future validation, if validation is enabled. if (hashCalculatingStream === null || hashCalculatingStream === void 0 ? void 0 : hashCalculatingStream.crc32c) { writeStream.emit('crc32c', hashCalculatingStream.crc32c); } try { // Metadata may not be ready if the upload is a partial upload, // nothing to validate yet. const metadataNotReady = options.isPartialUpload && !this.metadata; if (hashCalculatingStream && !metadataNotReady) { await __classPrivateFieldGet(this, _File_instances, "m", _File_validateIntegrity).call(this, hashCalculatingStream, { crc32c, md5, }); } pipelineCallback(); } catch (e) { pipelineCallback(e); } }); }); return writeStream; } delete(optionsOrCallback, cb) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; this.disableAutoRetryConditionallyIdempotent_(this.methods.delete, bucket_js_1.AvailableServiceObjectMethods.delete, options); super .delete(options) .then(resp => cb(null, ...resp)) .catch(cb) .finally(() => { this.storage.retryOptions.autoRetry = this.instanceRetryValue; }); } /** * @typedef {array} DownloadResponse * @property [0] The contents of a File. */ /** * @callback DownloadCallback * @param err Request error, if any. * @param contents The contents of a File. */ /** * Convenience method to download a file into memory or to a local * destination. * * @param {object} [options] Configuration options. The arguments match those * passed to {@link File#createReadStream}. * @param {string} [options.destination] Local file path to write the file's * contents to. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {DownloadCallback} [callback] Callback function. 
* @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * //- * // Download a file into memory. The contents will be available as the * second * // argument in the demonstration below, `contents`. * //- * file.download(function(err, contents) {}); * * //- * // Download a file to a local destination. * //- * file.download({ * destination: '/Users/me/Desktop/file-backup.txt' * }, function(err) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.download().then(function(data) { * const contents = data[0]; * }); * * ``` * @example include:samples/files.js * region_tag:storage_download_file * Another example: * * @example include:samples/encryption.js * region_tag:storage_download_encrypted_file * Example of downloading an encrypted file: * * @example include:samples/requesterPays.js * region_tag:storage_download_file_requester_pays * Example of downloading a file where the requester pays: */ download(optionsOrCallback, cb) { let options; if (typeof optionsOrCallback === 'function') { cb = optionsOrCallback; options = {}; } else { options = optionsOrCallback; } let called = false; const callback = ((...args) => { if (!called) cb(...args); called = true; }); const destination = options.destination; delete options.destination; const fileStream = this.createReadStream(options); let receivedData = false; if (destination) { fileStream .on('error', callback) .once('data', data => { receivedData = true; // We know that the file exists the server - now we can truncate/write to a file const writable = fs.createWriteStream(destination); writable.write(data); fileStream .pipe(writable) .on('error', (err) => { callback(err, Buffer.from('')); }) .on('finish', () => { callback(null, data); }); }) .on('end', () => { // In the case of an empty file no data will be received before the end event fires if (!receivedData) { const data = Buffer.alloc(0); try { fs.writeFileSync(destination, data); callback(null, data); } catch (e) { callback(e, data); } } }); } else { this.getBufferFromReadable(fileStream) .then(contents => callback === null || callback === void 0 ? void 0 : callback(null, contents)) .catch(callback); } } /** * The Storage API allows you to use a custom key for server-side encryption. * * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} * * @param {string|buffer} encryptionKey An AES-256 encryption key. 
* @returns {File} * * @example * ``` * const crypto = require('crypto'); * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const encryptionKey = crypto.randomBytes(32); * * const fileWithCustomEncryption = myBucket.file('my-file'); * fileWithCustomEncryption.setEncryptionKey(encryptionKey); * * const fileWithoutCustomEncryption = myBucket.file('my-file'); * * fileWithCustomEncryption.save('data', function(err) { * // Try to download with the File object that hasn't had * // `setEncryptionKey()` called: * fileWithoutCustomEncryption.download(function(err) { * // We will receive an error: * // err.message === 'Bad Request' * * // Try again with the File object we called `setEncryptionKey()` on: * fileWithCustomEncryption.download(function(err, contents) { * // contents.toString() === 'data' * }); * }); * }); * * ``` * @example include:samples/encryption.js * region_tag:storage_upload_encrypted_file * Example of uploading an encrypted file: * * @example include:samples/encryption.js * region_tag:storage_download_encrypted_file * Example of downloading an encrypted file: */ setEncryptionKey(encryptionKey) { this.encryptionKey = encryptionKey; this.encryptionKeyBase64 = Buffer.from(encryptionKey).toString('base64'); this.encryptionKeyHash = crypto .createHash('sha256') // eslint-disable-next-line @typescript-eslint/no-explicit-any .update(this.encryptionKeyBase64, 'base64') .digest('base64'); this.encryptionKeyInterceptor = { request: reqOpts => { reqOpts.headers = reqOpts.headers || {}; reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; reqOpts.headers['x-goog-encryption-key'] = this.encryptionKeyBase64; reqOpts.headers['x-goog-encryption-key-sha256'] = this.encryptionKeyHash; return reqOpts; }, }; this.interceptors.push(this.encryptionKeyInterceptor); return this; } /** * Gets a reference to a Cloud Storage {@link File} file from the provided URL in string format. * @param {string} publicUrlOrGsUrl the URL as a string. Must be of the format gs://bucket/file * or https://storage.googleapis.com/bucket/file. * @param {Storage} storageInstance an instance of a Storage object. * @param {FileOptions} [options] Configuration options * @returns {File} */ static from(publicUrlOrGsUrl, storageInstance, options) { const gsMatches = [...publicUrlOrGsUrl.matchAll(GS_UTIL_URL_REGEX)]; const httpsMatches = [...publicUrlOrGsUrl.matchAll(HTTPS_PUBLIC_URL_REGEX)]; if (gsMatches.length > 0) { const bucket = new bucket_js_1.Bucket(storageInstance, gsMatches[0][2]); return new File(bucket, gsMatches[0][3], options); } else if (httpsMatches.length > 0) { const bucket = new bucket_js_1.Bucket(storageInstance, httpsMatches[0][3]); return new File(bucket, httpsMatches[0][4], options); } else { throw new Error('URL string must be of format gs://bucket/file or https://storage.googleapis.com/bucket/file'); } } get(optionsOrCallback, cb) { // eslint-disable-next-line @typescript-eslint/no-explicit-any const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; super .get(options) .then(resp => cb(null, ...resp)) .catch(cb); } /** * @typedef {array} GetExpirationDateResponse * @property {date} 0 A Date object representing the earliest time this file's * retention policy will expire. */ /** * @callback GetExpirationDateCallback * @param {?Error} err Request error, if any. 
* @param {date} expirationDate A Date object representing the earliest time * this file's retention policy will expire. */ /** * If this bucket has a retention policy defined, use this method to get a * Date object representing the earliest time this file will expire. * * @param {GetExpirationDateCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const storage = require('@google-cloud/storage')(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * file.getExpirationDate(function(err, expirationDate) { * // expirationDate is a Date object. * }); * ``` */ getExpirationDate(callback) { this.getMetadata((err, metadata, apiResponse) => { if (err) { callback(err, null, apiResponse); return; } if (!metadata.retentionExpirationTime) { const error = new Error(FileExceptionMessages.EXPIRATION_TIME_NA); callback(error, null, apiResponse); return; } callback(null, new Date(metadata.retentionExpirationTime), apiResponse); }); } /** * @typedef {array} GenerateSignedPostPolicyV2Response * @property {object} 0 The document policy. */ /** * @callback GenerateSignedPostPolicyV2Callback * @param {?Error} err Request error, if any. * @param {object} policy The document policy. */ /** * Get a signed policy document to allow a user to upload data with a POST * request. * * In Google Cloud Platform environments, such as Cloud Functions and App * Engine, you usually don't provide a `keyFilename` or `credentials` during * instantiation. In those environments, we call the * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} * to create a signed policy. That API requires either the * `https://www.googleapis.com/auth/iam` or * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are * enabled. * * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-v2| POST Object with the V2 signing process} * * @throws {Error} If an expiration timestamp from the past is given. * @throws {Error} If options.equals has an array with less or more than two * members. * @throws {Error} If options.startsWith has an array with less or more than two * members. * * @param {object} options Configuration options. * @param {array|array[]} [options.equals] Array of request parameters and * their expected value (e.g. [['$', '']]). Values are * translated into equality constraints in the conditions field of the * policy document (e.g. ['eq', '$', '']). If only one * equality condition is to be specified, options.equals can be a one- * dimensional array (e.g. ['$', '']). * @param {*} options.expires - A timestamp when this policy will expire. Any * value given is passed to `new Date()`. * @param {array|array[]} [options.startsWith] Array of request parameters and * their expected prefixes (e.g. [['$', '']). Values are * translated into starts-with constraints in the conditions field of the * policy document (e.g. ['starts-with', '$', '']). If only * one prefix condition is to be specified, options.startsWith can be a * one- dimensional array (e.g. ['$', '']). * @param {string} [options.acl] ACL for the object from possibly predefined * ACLs. * @param {string} [options.successRedirect] The URL to which the user client * is redirected if the upload is successful. * @param {string} [options.successStatus] - The status of the Google Storage * response if the upload is successful (must be string). 
* @param {object} [options.contentLengthRange] * @param {number} [options.contentLengthRange.min] Minimum value for the * request's content length. * @param {number} [options.contentLengthRange.max] Maximum value for the * request's content length. * @param {GenerateSignedPostPolicyV2Callback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * const options = { * equals: ['$Content-Type', 'image/jpeg'], * expires: '10-25-2022', * contentLengthRange: { * min: 0, * max: 1024 * } * }; * * file.generateSignedPostPolicyV2(options, function(err, policy) { * // policy.string: the policy document in plain text. * // policy.base64: the policy document in base64. * // policy.signature: the policy signature in base64. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.generateSignedPostPolicyV2(options).then(function(data) { * const policy = data[0]; * }); * ``` */ generateSignedPostPolicyV2(optionsOrCallback, cb) { const args = (0, util_js_2.normalize)(optionsOrCallback, cb); let options = args.options; const callback = args.callback; const expires = new Date(options.expires); if (isNaN(expires.getTime())) { throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); } if (expires.valueOf() < Date.now()) { throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); } options = Object.assign({}, options); const conditions = [ ['eq', '$key', this.name], { bucket: this.bucket.name, }, ]; if (Array.isArray(options.equals)) { if (!Array.isArray(options.equals[0])) { options.equals = [options.equals]; } options.equals.forEach(condition => { if (!Array.isArray(condition) || condition.length !== 2) { throw new Error(FileExceptionMessages.EQUALS_CONDITION_TWO_ELEMENTS); } conditions.push(['eq', condition[0], condition[1]]); }); } if (Array.isArray(options.startsWith)) { if (!Array.isArray(options.startsWith[0])) { options.startsWith = [options.startsWith]; } options.startsWith.forEach(condition => { if (!Array.isArray(condition) || condition.length !== 2) { throw new Error(FileExceptionMessages.STARTS_WITH_TWO_ELEMENTS); } conditions.push(['starts-with', condition[0], condition[1]]); }); } if (options.acl) { conditions.push({ acl: options.acl, }); } if (options.successRedirect) { conditions.push({ success_action_redirect: options.successRedirect, }); } if (options.successStatus) { conditions.push({ success_action_status: options.successStatus, }); } if (options.contentLengthRange) { const min = options.contentLengthRange.min; const max = options.contentLengthRange.max; if (typeof min !== 'number' || typeof max !== 'number') { throw new Error(FileExceptionMessages.CONTENT_LENGTH_RANGE_MIN_MAX); } conditions.push(['content-length-range', min, max]); } const policy = { expiration: expires.toISOString(), conditions, }; const policyString = JSON.stringify(policy); const policyBase64 = Buffer.from(policyString).toString('base64'); this.storage.authClient.sign(policyBase64, options.signingEndpoint).then(signature => { callback(null, { string: policyString, base64: policyBase64, signature, }); }, err => { callback(new signer_js_1.SigningError(err.message)); }); } /** * @typedef {object} SignedPostPolicyV4Output * @property {string} url The request URL. * @property {object} fields The form fields to include in the POST request. 
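 *
 * The `url` and `fields` of this output are typically fed straight into an
 * HTTP form upload. A minimal sketch (assuming Node 18+, where `FormData`,
 * `Blob`, and `fetch` are globals, and a `file` instance as in the examples
 * below):
 *
 * ```
 * file.generateSignedPostPolicyV4({
 *   expires: Date.now() + 10 * 60 * 1000 // 10 minutes from now
 * }).then(([{url, fields}]) => {
 *   const form = new FormData();
 *   // Every returned field (policy, signature, ...) must be sent verbatim.
 *   for (const [name, value] of Object.entries(fields)) {
 *     form.append(name, value);
 *   }
 *   // The object contents go last, in the `file` form field.
 *   form.append('file', new Blob(['hello world']), file.name);
 *   return fetch(url, {method: 'POST', body: form});
 * }).then(res => {
 *   // A 204 No Content status indicates the object was created.
 * });
 * ```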
*/ /** * @typedef {array} GenerateSignedPostPolicyV4Response * @property {SignedPostPolicyV4Output} 0 An object containing the request URL and form fields. */ /** * @callback GenerateSignedPostPolicyV4Callback * @param {?Error} err Request error, if any. * @param {SignedPostPolicyV4Output} output An object containing the request URL and form fields. */ /** * Get a v4 signed policy document to allow a user to upload data with a POST * request. * * In Google Cloud Platform environments, such as Cloud Functions and App * Engine, you usually don't provide a `keyFilename` or `credentials` during * instantiation. In those environments, we call the * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} * to create a signed policy. That API requires either the * `https://www.googleapis.com/auth/iam` or * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are * enabled. * * See {@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument| Policy Document Reference} * * @param {object} options Configuration options. * @param {Date|number|string} options.expires - A timestamp when this policy will expire. Any * value given is passed to `new Date()`. * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style * URLs ('https://mybucket.storage.googleapis.com/...') instead of path-style * ('https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs * should generally be preferred instead of path-style URL. * Currently defaults to `false` for path-style, although this may change in a * future major-version release. * @param {string} [config.bucketBoundHostname] The bucket-bound hostname to return in * the result, e.g. "https://cdn.example.com". * @param {object} [config.fields] [Form fields]{@link https://cloud.google.com/storage/docs/xml-api/post-object#policydocument} * to include in the signed policy. Any fields with key beginning with 'x-ignore-' * will not be included in the policy to be signed. * @param {object[]} [config.conditions] [Conditions]{@link https://cloud.google.com/storage/docs/authentication/signatures#policy-document} * to include in the signed policy. All fields given in `config.fields` are * automatically included in the conditions array, adding the same entry * in both `fields` and `conditions` will result in duplicate entries. * * @param {GenerateSignedPostPolicyV4Callback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * const options = { * expires: '10-25-2022', * conditions: [ * ['eq', '$Content-Type', 'image/jpeg'], * ['content-length-range', 0, 1024], * ], * fields: { * acl: 'public-read', * 'x-goog-meta-foo': 'bar', * 'x-ignore-mykey': 'data' * } * }; * * file.generateSignedPostPolicyV4(options, function(err, response) { * // response.url The request URL * // response.fields The form fields (including the signature) to include * // to be used to upload objects by HTML forms. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.generateSignedPostPolicyV4(options).then(function(data) { * const response = data[0]; * // response.url The request URL * // response.fields The form fields (including the signature) to include * // to be used to upload objects by HTML forms. 
* }); * ``` */ generateSignedPostPolicyV4(optionsOrCallback, cb) { const args = (0, util_js_2.normalize)(optionsOrCallback, cb); let options = args.options; const callback = args.callback; const expires = new Date(options.expires); if (isNaN(expires.getTime())) { throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); } if (expires.valueOf() < Date.now()) { throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); } if (expires.valueOf() - Date.now() > SEVEN_DAYS * 1000) { throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); } options = Object.assign({}, options); let fields = Object.assign({}, options.fields); const now = new Date(); const nowISO = (0, util_js_2.formatAsUTCISO)(now, true); const todayISO = (0, util_js_2.formatAsUTCISO)(now); const sign = async () => { const { client_email } = await this.storage.authClient.getCredentials(); const credential = `${client_email}/${todayISO}/auto/storage/goog4_request`; fields = { ...fields, bucket: this.bucket.name, key: this.name, 'x-goog-date': nowISO, 'x-goog-credential': credential, 'x-goog-algorithm': 'GOOG4-RSA-SHA256', }; const conditions = options.conditions || []; Object.entries(fields).forEach(([key, value]) => { if (!key.startsWith('x-ignore-')) { conditions.push({ [key]: value }); } }); delete fields.bucket; const expiration = (0, util_js_2.formatAsUTCISO)(expires, true, '-', ':'); const policy = { conditions, expiration, }; const policyString = (0, util_js_2.unicodeJSONStringify)(policy); const policyBase64 = Buffer.from(policyString).toString('base64'); try { const signature = await this.storage.authClient.sign(policyBase64, options.signingEndpoint); const signatureHex = Buffer.from(signature, 'base64').toString('hex'); const universe = this.parent.storage.universeDomain; fields['policy'] = policyBase64; fields['x-goog-signature'] = signatureHex; let url; if (this.storage.customEndpoint) { url = this.storage.apiEndpoint; } else if (options.virtualHostedStyle) { url = `https://${this.bucket.name}.storage.${universe}/`; } else if (options.bucketBoundHostname) { url = `${options.bucketBoundHostname}/`; } else { url = `https://storage.${universe}/${this.bucket.name}/`; } return { url, fields, }; } catch (err) { throw new signer_js_1.SigningError(err.message); } }; sign().then(res => callback(null, res), callback); } /** * @typedef {array} GetSignedUrlResponse * @property {object} 0 The signed URL. */ /** * @callback GetSignedUrlCallback * @param {?Error} err Request error, if any. * @param {object} url The signed URL. */ /** * Get a signed URL to allow limited time access to the file. * * In Google Cloud Platform environments, such as Cloud Functions and App * Engine, you usually don't provide a `keyFilename` or `credentials` during * instantiation. In those environments, we call the * {@link https://cloud.google.com/iam/docs/reference/credentials/rest/v1/projects.serviceAccounts/signBlob| signBlob API} * to create a signed URL. That API requires either the * `https://www.googleapis.com/auth/iam` or * `https://www.googleapis.com/auth/cloud-platform` scope, so be sure they are * enabled. * * See {@link https://cloud.google.com/storage/docs/access-control/signed-urls| Signed URLs Reference} * * @throws {Error} if an expiration timestamp from the past is given. * * @param {object} config Configuration object. * @param {string} config.action "read" (HTTP: GET), "write" (HTTP: PUT), or * "delete" (HTTP: DELETE), "resumable" (HTTP: POST). 
* When using "resumable", the header `X-Goog-Resumable: start` has * to be sent when making a request with the signed URL. * @param {*} config.expires A timestamp when this link will expire. Any value * given is passed to `new Date()`. * Note: 'v4' supports a maximum duration of 7 days (604800 seconds) from now. * See [reference]{@link https://cloud.google.com/storage/docs/access-control/signed-urls#example} * @param {string} [config.version='v2'] The signing version to use, either * 'v2' or 'v4'. * @param {boolean} [config.virtualHostedStyle=false] Use virtual hosted-style * URLs (e.g. 'https://mybucket.storage.googleapis.com/...') instead of path-style * (e.g. 'https://storage.googleapis.com/mybucket/...'). Virtual hosted-style URLs * should generally be preferred instead of path-style URLs. * Currently defaults to `false` for path-style, although this may change in a * future major-version release. * @param {string} [config.cname] The cname for this bucket, i.e., * "https://cdn.example.com". * @param {string} [config.contentMd5] The MD5 digest value in base64. If you * provide this, the client must send this HTTP header with the same value in * its request; if this parameter is not provided here, the client must not * send this HTTP header at all. * @param {string} [config.contentType] If you provide this, the client must * send this HTTP header with the same value in its request; if this parameter * is not provided here, the client must not send this HTTP header at all. * @param {object} [config.extensionHeaders] If these headers are used, the * server will check to make sure that the client provides matching * values. See {@link https://cloud.google.com/storage/docs/access-control/signed-urls#about-canonical-extension-headers| Canonical extension headers} * for the requirements of this feature, most notably: * - The header name must be prefixed with `x-goog-` * - The header name must be all lowercase * * Note: A multi-valued header passed as an array in the extensionHeaders * object is converted into a string delimited by `,` with * no spaces. Requests made using the signed URL will need to * delimit multi-valued headers using a single `,` as well, or * else the server will report a mismatched signature. * @param {object} [config.queryParams] Additional query parameters to include * in the signed URL. * @param {string} [config.promptSaveAs] The filename to prompt the user to * save the file as when the signed url is accessed. This is ignored if * `config.responseDisposition` is set. * @param {string} [config.responseDisposition] The * {@link http://goo.gl/yMWxQV| response-content-disposition parameter} of the * signed url. * @param {*} [config.accessibleAt=Date.now()] A timestamp when this link became usable. Any value * given is passed to `new Date()`. * Note: Use for 'v4' only. * @param {string} [config.responseType] The response-content-type parameter * of the signed url. * @param {GetSignedUrlCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * //- * // Generate a URL that allows temporary access to download your file.
* //- * const request = require('request'); * * const config = { * action: 'read', * expires: '03-17-2025', * }; * * file.getSignedUrl(config, function(err, url) { * if (err) { * console.error(err); * return; * } * * // The file is now available to read from this URL. * request(url, function(err, resp) { * // resp.statusCode = 200 * }); * }); * * //- * // Generate a URL that allows temporary access to download your file. * // Access will begin at accessibleAt and end at expires. * //- * const request = require('request'); * * const config = { * action: 'read', * expires: '03-17-2025', * accessibleAt: '03-13-2025' * }; * * file.getSignedUrl(config, function(err, url) { * if (err) { * console.error(err); * return; * } * * // The file will be available to read from this URL from 03-13-2025 to 03-17-2025. * request(url, function(err, resp) { * // resp.statusCode = 200 * }); * }); * * //- * // Generate a URL to allow write permissions. This means anyone with this * URL * // can send a POST request with new data that will overwrite the file. * //- * file.getSignedUrl({ * action: 'write', * expires: '03-17-2025' * }, function(err, url) { * if (err) { * console.error(err); * return; * } * * // The file is now available to be written to. * const writeStream = request.put(url); * writeStream.end('New data'); * * writeStream.on('complete', function(resp) { * // Confirm the new content was saved. * file.download(function(err, fileContents) { * console.log('Contents:', fileContents.toString()); * // Contents: New data * }); * }); * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.getSignedUrl(config).then(function(data) { * const url = data[0]; * }); * * ``` * @example include:samples/files.js * region_tag:storage_generate_signed_url * Another example: */ getSignedUrl(cfg, callback) { const method = ActionToHTTPMethod[cfg.action]; const extensionHeaders = (0, util_js_2.objectKeyToLowercase)(cfg.extensionHeaders || {}); if (cfg.action === 'resumable') { extensionHeaders['x-goog-resumable'] = 'start'; } const queryParams = Object.assign({}, cfg.queryParams); if (typeof cfg.responseType === 'string') { queryParams['response-content-type'] = cfg.responseType; } if (typeof cfg.promptSaveAs === 'string') { queryParams['response-content-disposition'] = 'attachment; filename="' + cfg.promptSaveAs + '"'; } if (typeof cfg.responseDisposition === 'string') { queryParams['response-content-disposition'] = cfg.responseDisposition; } if (this.generation) { queryParams['generation'] = this.generation.toString(); } const signConfig = { method, expires: cfg.expires, accessibleAt: cfg.accessibleAt, extensionHeaders, queryParams, contentMd5: cfg.contentMd5, contentType: cfg.contentType, host: cfg.host, }; if (cfg.cname) { signConfig.cname = cfg.cname; } if (cfg.version) { signConfig.version = cfg.version; } if (cfg.virtualHostedStyle) { signConfig.virtualHostedStyle = cfg.virtualHostedStyle; } if (!this.signer) { this.signer = new signer_js_1.URLSigner(this.storage.authClient, this.bucket, this, this.storage); } this.signer .getSignedUrl(signConfig) .then(signedUrl => callback(null, signedUrl), callback); } /** * @callback IsPublicCallback * @param {?Error} err Request error, if any. * @param {boolean} resp Whether file is public or not. */ /** * @typedef {array} IsPublicResponse * @property {boolean} 0 Whether file is public or not. */ /** * Check whether this file is public or not by sending * a HEAD request without credentials. 
* No errors from the server indicates that the current * file is public. * A 403-Forbidden error {@link https://cloud.google.com/storage/docs/json_api/v1/status-codes#403_Forbidden} * indicates that file is private. * Any other non 403 error is propagated to user. * * @param {IsPublicCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * //- * // Check whether the file is publicly accessible. * //- * file.isPublic(function(err, resp) { * if (err) { * console.error(err); * return; * } * console.log(`the file ${file.id} is public: ${resp}`) ; * }) * //- * // If the callback is omitted, we'll return a Promise. * //- * file.isPublic().then(function(data) { * const resp = data[0]; * }); * ``` */ isPublic(callback) { var _a; // Build any custom headers based on the defined interceptors on the parent // storage object and this object const storageInterceptors = ((_a = this.storage) === null || _a === void 0 ? void 0 : _a.interceptors) || []; const fileInterceptors = this.interceptors || []; const allInterceptors = storageInterceptors.concat(fileInterceptors); const headers = allInterceptors.reduce((acc, curInterceptor) => { const currentHeaders = curInterceptor.request({ uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, }); Object.assign(acc, currentHeaders.headers); return acc; }, {}); index_js_1.util.makeRequest({ method: 'GET', uri: `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`, headers, }, { retryOptions: this.storage.retryOptions, }, (err) => { if (err) { const apiError = err; if (apiError.code === 403) { callback(null, false); } else { callback(err); } } else { callback(null, true); } }); } /** * @typedef {object} MakeFilePrivateOptions Configuration options for File#makePrivate(). * @property {Metadata} [metadata] Define custom metadata properties to define * along with the operation. * @property {boolean} [strict] If true, set the file to be private to * only the owner user. Otherwise, it will be private to the project. * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @callback MakeFilePrivateCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** * @typedef {array} MakeFilePrivateResponse * @property {object} 0 The full API response. */ /** * Make a file private to the project and remove all other permissions. * Set `options.strict` to true to make the file private to only the owner. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/patch| Objects: patch API Documentation} * * @param {MakeFilePrivateOptions} [options] Configuration options. * @param {MakeFilePrivateCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * //- * // Set the file private so only project maintainers can see and modify it. * //- * file.makePrivate(function(err) {}); * * //- * // Set the file private so only the owner can see and modify it. * //- * file.makePrivate({ strict: true }, function(err) {}); * * //- * // If the callback is omitted, we'll return a Promise. 
* //- * file.makePrivate().then(function(data) { * const apiResponse = data[0]; * }); * ``` */ makePrivate(optionsOrCallback, callback) { var _a, _b; const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; const query = { predefinedAcl: options.strict ? 'private' : 'projectPrivate', // eslint-disable-next-line @typescript-eslint/no-explicit-any }; if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifMetagenerationMatch) !== undefined) { query.ifMetagenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifMetagenerationMatch; delete options.preconditionOpts; } if (options.userProject) { query.userProject = options.userProject; } // You aren't allowed to set both predefinedAcl & acl properties on a file, // so acl must explicitly be nullified, destroying all previous acls on the // file. const metadata = { ...options.metadata, acl: null }; this.setMetadata(metadata, query, callback); } /** * @typedef {array} MakeFilePublicResponse * @property {object} 0 The full API response. */ /** * @callback MakeFilePublicCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** * Set a file to be publicly readable and maintain all previous permissions. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objectAccessControls/insert| ObjectAccessControls: insert API Documentation} * * @param {MakeFilePublicCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * * file.makePublic(function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.makePublic().then(function(data) { * const apiResponse = data[0]; * }); * * ``` * @example include:samples/files.js * region_tag:storage_make_public * Another example: */ makePublic(callback) { callback = callback || index_js_1.util.noop; this.acl.add({ entity: 'allUsers', role: 'READER', }, (err, acl, resp) => { callback(err, resp); }); } /** * The public URL of this File * Use {@link File#makePublic} to enable anonymous access via the returned URL. * * @returns {string} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * const file = bucket.file('my-file'); * * // publicUrl will be "https://storage.googleapis.com/albums/my-file" * const publicUrl = file.publicUrl(); * ``` */ publicUrl() { return `${this.storage.apiEndpoint}/${this.bucket.name}/${encodeURIComponent(this.name)}`; } /** * @typedef {array} MoveFileAtomicResponse * @property {File} 0 The moved {@link File}. * @property {object} 1 The full API response. */ /** * @callback MoveFileAtomicCallback * @param {?Error} err Request error, if any. * @param {File} movedFile The moved {@link File}. * @param {object} apiResponse The full API response. */ /** * @typedef {object} MoveFileAtomicOptions Configuration options for File#moveFileAtomic(). See an * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. * @property {string} [userProject] The ID of the project which will be * billed for the request. * @property {object} [preconditionOpts] Precondition options. 
* @property {number} [preconditionOpts.ifGenerationMatch] Makes the operation conditional on whether the object's current generation matches the given value. */ /** * Move this file within the same HNS-enabled bucket. * The source object must exist and be a live object. * The source and destination object IDs must be different. * Overwriting the destination object is allowed by default, but can be prevented * using preconditions. * If the destination path includes non-existent parent folders, they will be created. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/move| Objects: move API Documentation} * * @throws {Error} If the destination file is not provided. * * @param {string|File} destination Destination file name or File object within the same bucket.. * @param {MoveFileAtomicOptions} [options] Configuration options. See an * @param {MoveFileAtomicCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * * //- * // Assume 'my-hns-bucket' is an HNS-enabled bucket. * //- * const bucket = storage.bucket('my-hns-bucket'); * const file = bucket.file('my-image.png'); * * //- * // If you pass in a string for the destination, the file is copied to its * // current bucket, under the new name provided. * //- * file.moveFileAtomic('moved-image.png', function(err, movedFile, apiResponse) { * // `my-hns-bucket` now contains: * // - "moved-image.png" * * // `movedFile` is an instance of a File object that refers to your new * // file. * }); * * //- * // Move the file to a subdirectory, creating parent folders if necessary. * //- * file.moveFileAtomic('new-folder/subfolder/moved-image.png', function(err, movedFile, apiResponse) { * // `my-hns-bucket` now contains: * // - "new-folder/subfolder/moved-image.png" * }); * * //- * // Prevent overwriting an existing destination object using preconditions. * //- * file.moveFileAtomic('existing-destination.png', { * preconditionOpts: { * ifGenerationMatch: 0 // Fails if the destination object exists. * } * }, function(err, movedFile, apiResponse) { * if (err) { * // Handle the error (e.g., the destination object already exists). * } else { * // Move successful. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.moveFileAtomic('moved-image.png).then(function(data) { * const newFile = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/files.js * region_tag:storage_move_file_hns * Another example: */ moveFileAtomic(destination, optionsOrCallback, callback) { var _a, _b; const noDestinationError = new Error(FileExceptionMessages.DESTINATION_NO_NAME); if (!destination) { throw noDestinationError; } let options = {}; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } else if (optionsOrCallback) { options = { ...optionsOrCallback }; } callback = callback || index_js_1.util.noop; let destName; let newFile; if (typeof destination === 'string') { const parsedDestination = GS_URL_REGEXP.exec(destination); if (parsedDestination !== null && parsedDestination.length === 3) { destName = parsedDestination[2]; } else { destName = destination; } } else if (destination instanceof File) { destName = destination.name; newFile = destination; } else { throw noDestinationError; } newFile = newFile || this.bucket.file(destName); if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? 
void 0 : options.preconditionOpts)) { this.storage.retryOptions.autoRetry = false; } const query = {}; if (options.userProject !== undefined) { query.userProject = options.userProject; delete options.userProject; } if (((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined) { query.ifGenerationMatch = (_b = options.preconditionOpts) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch; delete options.preconditionOpts; } this.request({ method: 'POST', uri: `/moveTo/o/${encodeURIComponent(newFile.name)}`, qs: query, json: options, }, (err, resp) => { this.storage.retryOptions.autoRetry = this.instanceRetryValue; if (err) { callback(err, null, resp); return; } callback(null, newFile, resp); }); } /** * @typedef {array} MoveResponse * @property {File} 0 The destination File. * @property {object} 1 The full API response. */ /** * @callback MoveCallback * @param {?Error} err Request error, if any. * @param {?File} destinationFile The destination File. * @param {object} apiResponse The full API response. */ /** * @typedef {object} MoveOptions Configuration options for File#move(). See an * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. * @param {string} [userProject] The ID of the project which will be * billed for the request. */ /** * Move this file to another location. By default, this will rename the file * and keep it in the same bucket, but you can choose to move it to another * Bucket by providing a Bucket or File object or a URL beginning with * "gs://". * * **Warning**: * There is currently no atomic `move` method in the Cloud Storage API, * so this method is a composition of {@link File#copy} (to the new * location) and {@link File#delete} (from the old location). While * unlikely, it is possible that an error returned to your callback could be * triggered from either one of these API calls failing, which could leave a * duplicate file lingering. The error message will indicate what operation * has failed. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/objects/copy| Objects: copy API Documentation} * * @throws {Error} If the destination file is not provided. * * @param {string|Bucket|File} destination Destination file. * @param {MoveCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * //- * // You can pass in a variety of types for the destination. * // * // For all of the below examples, assume we are working with the following * // Bucket and File objects. * //- * const bucket = storage.bucket('my-bucket'); * const file = bucket.file('my-image.png'); * * //- * // If you pass in a string for the destination, the file is moved to its * // current bucket, under the new name provided. * //- * file.move('my-image-new.png', function(err, destinationFile, apiResponse) { * // `my-bucket` no longer contains: * // - "my-image.png" * // but contains instead: * // - "my-image-new.png" * * // `destinationFile` is an instance of a File object that refers to your * // new file. * }); * * //- * // If you pass in a string starting with "gs://" for the destination, the * // file is copied to the other bucket and under the new name provided. 
* //- * const newLocation = 'gs://another-bucket/my-image-new.png'; * file.move(newLocation, function(err, destinationFile, apiResponse) { * // `my-bucket` no longer contains: * // - "my-image.png" * // * // `another-bucket` now contains: * // - "my-image-new.png" * * // `destinationFile` is an instance of a File object that refers to your * // new file. * }); * * //- * // If you pass in a Bucket object, the file will be moved to that bucket * // using the same name. * //- * const anotherBucket = gcs.bucket('another-bucket'); * * file.move(anotherBucket, function(err, destinationFile, apiResponse) { * // `my-bucket` no longer contains: * // - "my-image.png" * // * // `another-bucket` now contains: * // - "my-image.png" * * // `destinationFile` is an instance of a File object that refers to your * // new file. * }); * * //- * // If you pass in a File object, you have complete control over the new * // bucket and filename. * //- * const anotherFile = anotherBucket.file('my-awesome-image.png'); * * file.move(anotherFile, function(err, destinationFile, apiResponse) { * // `my-bucket` no longer contains: * // - "my-image.png" * // * // `another-bucket` now contains: * // - "my-awesome-image.png" * * // Note: * // The `destinationFile` parameter is equal to `anotherFile`. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.move('my-image-new.png').then(function(data) { * const destinationFile = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/files.js * region_tag:storage_move_file * Another example: */ move(destination, optionsOrCallback, callback) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; callback = callback || index_js_1.util.noop; this.copy(destination, options, (err, destinationFile, copyApiResponse) => { if (err) { err.message = 'file#copy failed with an error - ' + err.message; callback(err, null, copyApiResponse); return; } if (this.name !== destinationFile.name || this.bucket.name !== destinationFile.bucket.name) { this.delete(options, (err, apiResponse) => { if (err) { err.message = 'file#delete failed with an error - ' + err.message; callback(err, destinationFile, apiResponse); return; } callback(null, destinationFile, copyApiResponse); }); } else { callback(null, destinationFile, copyApiResponse); } }); } /** * @typedef {array} RenameResponse * @property {File} 0 The destination File. * @property {object} 1 The full API response. */ /** * @callback RenameCallback * @param {?Error} err Request error, if any. * @param {?File} destinationFile The destination File. * @param {object} apiResponse The full API response. */ /** * @typedef {object} RenameOptions Configuration options for File#move(). See an * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. * @param {string} [userProject] The ID of the project which will be * billed for the request. */ /** * Rename this file. * * **Warning**: * There is currently no atomic `rename` method in the Cloud Storage API, * so this method is an alias of {@link File#move}, which in turn is a * composition of {@link File#copy} (to the new location) and * {@link File#delete} (from the old location). While * unlikely, it is possible that an error returned to your callback could be * triggered from either one of these API calls failing, which could leave a * duplicate file lingering. 
The error message will indicate what operation * has failed. * * @param {string|File} destinationFile Destination file. * @param {RenameCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * * //- * // You can pass in a string or a File object. * // * // For all of the below examples, assume we are working with the following * // Bucket and File objects. * //- * * const bucket = storage.bucket('my-bucket'); * const file = bucket.file('my-image.png'); * * //- * // You can pass in a string for the destinationFile. * //- * file.rename('renamed-image.png', function(err, renamedFile, apiResponse) { * // `my-bucket` no longer contains: * // - "my-image.png" * // but contains instead: * // - "renamed-image.png" * * // `renamedFile` is an instance of a File object that refers to your * // renamed file. * }); * * //- * // You can pass in a File object. * //- * const anotherBucket = storage.bucket('another-bucket'); * const anotherFile = anotherBucket.file('my-awesome-image.png'); * * file.rename(anotherFile, function(err, renamedFile, apiResponse) { * // `my-bucket` no longer contains: * // - "my-image.png" * * // Note: * // The `renamedFile` parameter is equal to `anotherFile`. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.rename('my-renamed-image.png').then(function(data) { * const renamedFile = data[0]; * const apiResponse = data[1]; * }); * ``` */ rename(destinationFile, optionsOrCallback, callback) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; callback = callback || index_js_1.util.noop; this.move(destinationFile, options, callback); } /** * @typedef {object} RestoreOptions Options for File#restore(). See an * {@link https://cloud.google.com/storage/docs/json_api/v1/objects#resource| Object resource}. * @param {string} [userProject] The ID of the project which will be * billed for the request. * @param {number} [generation] If present, selects a specific revision of this object. * @param {string} [restoreToken] An option that must be specified when getting a soft-deleted object from an HNS-enabled * bucket that has a naming and generation conflict with another object in the same bucket. * @param {string} [projection] Specifies the set of properties to return. If used, must be 'full' or 'noAcl'. * @param {string | number} [ifGenerationMatch] Request proceeds if the generation of the target resource * matches the value used in the precondition. * If the values don't match, the request fails with a 412 Precondition Failed response. * @param {string | number} [ifGenerationNotMatch] Request proceeds if the generation of the target resource does * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. * @param {string | number} [ifMetagenerationMatch] Request proceeds if the meta-generation of the target resource * matches the value used in the precondition. * If the values don't match, the request fails with a 412 Precondition Failed response. * @param {string | number} [ifMetagenerationNotMatch] Request proceeds if the meta-generation of the target resource does * not match the value used in the precondition. If the values match, the request fails with a 304 Not Modified response. */ /** * Restores a soft-deleted file. * @param {RestoreOptions} options Restore options.
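 *
 * @example
 * A minimal usage sketch (illustrative only, not an official sample; the
 * bucket name and generation value below are assumed):
 * ```
 * const {Storage} = require('@google-cloud/storage');
 * const storage = new Storage();
 * const file = storage.bucket('my-bucket').file('my-image.png');
 *
 * // Restore a specific soft-deleted generation of this object
 * // (run inside an async function).
 * await file.restore({generation: 123456789});
 * ```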
* @returns {Promise} */ async restore(options) { const [file] = await this.request({ method: 'POST', uri: '/restore', qs: options, }); return file; } /** * Makes request and applies userProject query parameter if necessary. * * @private * * @param {object} reqOpts - The request options. * @param {function} callback - The callback function. */ request(reqOpts, callback) { return this.parent.request.call(this, reqOpts, callback); } /** * @callback RotateEncryptionKeyCallback * @extends CopyCallback */ /** * @typedef RotateEncryptionKeyResponse * @extends CopyResponse */ /** * @param {string|buffer|object} RotateEncryptionKeyOptions Configuration options * for File#rotateEncryptionKey(). * If a string or Buffer is provided, it is interpreted as an AES-256, * customer-supplied encryption key. If you'd like to use a Cloud KMS key * name, you must specify an options object with the property name: * `kmsKeyName`. * @param {string|buffer} [options.encryptionKey] An AES-256 encryption key. * @param {string} [options.kmsKeyName] A Cloud KMS key name. */ /** * This method allows you to update the encryption key associated with this * file. * * See {@link https://cloud.google.com/storage/docs/encryption#customer-supplied| Customer-supplied Encryption Keys} * * @param {RotateEncryptionKeyOptions} [options] - Configuration options. * @param {RotateEncryptionKeyCallback} [callback] * @returns {Promise} * * @example include:samples/encryption.js * region_tag:storage_rotate_encryption_key * Example of rotating the encryption key for this file: */ rotateEncryptionKey(optionsOrCallback, callback) { var _a; callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; let options = {}; if (typeof optionsOrCallback === 'string' || optionsOrCallback instanceof Buffer) { options = { encryptionKey: optionsOrCallback, }; } else if (typeof optionsOrCallback === 'object') { options = optionsOrCallback; } const newFile = this.bucket.file(this.id, options); const copyOptions = ((_a = options.preconditionOpts) === null || _a === void 0 ? void 0 : _a.ifGenerationMatch) !== undefined ? { preconditionOpts: options.preconditionOpts } : {}; this.copy(newFile, copyOptions, callback); } /** * @typedef {object} SaveOptions * @extends CreateWriteStreamOptions */ /** * @callback SaveCallback * @param {?Error} err Request error, if any. */ /** * Write strings or buffers to a file. * * *This is a convenience method which wraps {@link File#createWriteStream}.* * To upload arbitrary data to a file, please use {@link File#createWriteStream} directly. * * Resumable uploads are automatically enabled and must be shut off explicitly * by setting `options.resumable` to `false`. * * Multipart uploads with retryable error codes will be retried 3 times with exponential backoff. * *

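 * Because Readable streams and async iterables can only be consumed once,
 * only string, Buffer, and Uint8Array data, or a function that produces the
 * data, can be retried. A rough sketch (illustrative, not an official
 * sample), assuming the installed version accepts a pipeline-source
 * function for `data`:
 * ```
 * // Each retry attempt invokes the function again for a fresh source
 * // (run inside an async function).
 * await file.save(async function* () {
 *   yield 'first chunk, ';
 *   yield 'second chunk';
 * });
 * ```
 *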
* There is some overhead when using a resumable upload that can cause * noticeable performance degradation while uploading a series of small * files. When uploading files smaller than 10 MB, it is recommended that the * resumable feature be disabled. *

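 * For example (a sketch, not an official sample), a small payload can be
 * written with resumable behavior turned off as described above:
 * ```
 * const contents = 'a small amount of data';
 * file.save(contents, {resumable: false}, err => {
 *   if (!err) {
 *     // File was written using a simple (non-resumable) upload.
 *   }
 * });
 * ```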
* * @param {SaveData} data The data to write to a file. * @param {SaveOptions} [options] See {@link File#createWriteStream}'s `options` * parameter. * @param {SaveCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const file = myBucket.file('my-file'); * const contents = 'This is the contents of the file.'; * * file.save(contents, function(err) { * if (!err) { * // File written successfully. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.save(contents).then(function() {}); * ``` */ save(data, optionsOrCallback, callback) { // tslint:enable:no-any callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; let maxRetries = this.storage.retryOptions.maxRetries; if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options === null || options === void 0 ? void 0 : options.preconditionOpts)) { maxRetries = 0; } const returnValue = (0, async_retry_1.default)(async (bail) => { return new Promise((resolve, reject) => { if (maxRetries === 0) { this.storage.retryOptions.autoRetry = false; } const writable = this.createWriteStream(options); if (options.onUploadProgress) { writable.on('progress', options.onUploadProgress); } const handleError = (err) => { if (this.storage.retryOptions.autoRetry && this.storage.retryOptions.retryableErrorFn(err)) { return reject(err); } return bail(err); }; if (typeof data === 'string' || Buffer.isBuffer(data) || data instanceof Uint8Array) { writable .on('error', handleError) .on('finish', () => resolve()) .end(data); } else { (0, stream_1.pipeline)(data, writable, err => { if (err) { if (typeof data !== 'function') { // Only PipelineSourceFunction can be retried. Async-iterables // and Readable streams can only be consumed once. return bail(err); } handleError(err); } else { resolve(); } }); } }); }, { retries: maxRetries, factor: this.storage.retryOptions.retryDelayMultiplier, maxTimeout: this.storage.retryOptions.maxRetryDelay * 1000, //convert to milliseconds maxRetryTime: this.storage.retryOptions.totalTimeout * 1000, //convert to milliseconds }); if (!callback) { return returnValue; } else { return returnValue .then(() => { if (callback) { return callback(); } }) .catch(callback); } } setMetadata(metadata, optionsOrCallback, cb) { // eslint-disable-next-line @typescript-eslint/no-explicit-any const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; cb = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; this.disableAutoRetryConditionallyIdempotent_(this.methods.setMetadata, bucket_js_1.AvailableServiceObjectMethods.setMetadata, options); super .setMetadata(metadata, options) .then(resp => cb(null, ...resp)) .catch(cb) .finally(() => { this.storage.retryOptions.autoRetry = this.instanceRetryValue; }); } /** * @typedef {array} SetStorageClassResponse * @property {object} 0 The full API response. */ /** * @typedef {object} SetStorageClassOptions Configuration options for File#setStorageClass(). * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @callback SetStorageClassCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** * Set the storage class for this file. 
* * See {@link https://cloud.google.com/storage/docs/per-object-storage-class| Per-Object Storage Class} * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} * * @param {string} storageClass The new storage class. (`standard`, * `nearline`, `coldline`, or `archive`) * **Note:** The storage classes `multi_regional` and `regional` * are now legacy and will be deprecated in the future. * @param {SetStorageClassOptions} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {SetStorageClassCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * file.setStorageClass('nearline', function(err, apiResponse) { * if (err) { * // Error handling omitted. * } * * // The storage class was updated successfully. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * file.setStorageClass('nearline').then(function() {}); * ``` */ setStorageClass(storageClass, optionsOrCallback, callback) { callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : callback; const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; const req = { ...options, // In case we get input like `storageClass`, convert to `storage_class`. storageClass: storageClass .replace(/-/g, '_') .replace(/([a-z])([A-Z])/g, (_, low, up) => { return low + '_' + up; }) .toUpperCase(), }; this.copy(this, req, (err, file, apiResponse) => { if (err) { callback(err, apiResponse); return; } this.metadata = file.metadata; callback(null, apiResponse); }); } /** * Set a user project to be billed for all requests made from this File * object. * * @param {string} userProject The user project. * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('albums'); * const file = bucket.file('my-file'); * * file.setUserProject('grape-spaceship-123'); * ``` */ setUserProject(userProject) { this.bucket.setUserProject.call(this, userProject); } /** * This creates a resumable-upload upload stream. * * @param {Duplexify} stream - Duplexify stream of data to pipe to the file. * @param {object=} options - Configuration object. * * @private */ startResumableUpload_(dup, options = {}) { var _a; (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); const retryOptions = this.storage.retryOptions; if (!this.shouldRetryBasedOnPreconditionAndIdempotencyStrat(options.preconditionOpts)) { retryOptions.autoRetry = false; } const cfg = { authClient: this.storage.authClient, apiEndpoint: this.storage.apiEndpoint, bucket: this.bucket.name, customRequestOptions: this.getRequestInterceptors().reduce((reqOpts, interceptorFn) => interceptorFn(reqOpts), {}), file: this.name, generation: this.generation, isPartialUpload: options.isPartialUpload, key: this.encryptionKey, kmsKeyName: this.kmsKeyName, metadata: options.metadata, offset: options.offset, predefinedAcl: options.predefinedAcl, private: options.private, public: options.public, uri: options.uri, userProject: options.userProject || this.userProject, retryOptions: { ...retryOptions }, params: (options === null || options === void 0 ? void 0 : options.preconditionOpts) || this.instancePreconditionOpts, chunkSize: options === null || options === void 0 ? void 0 : options.chunkSize, highWaterMark: options === null || options === void 0 ? 
void 0 : options.highWaterMark, universeDomain: this.bucket.storage.universeDomain, [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], }; let uploadStream; try { uploadStream = resumableUpload.upload(cfg); } catch (error) { dup.destroy(error); this.storage.retryOptions.autoRetry = this.instanceRetryValue; return; } uploadStream .on('response', resp => { dup.emit('response', resp); }) .on('uri', uri => { dup.emit('uri', uri); }) .on('metadata', metadata => { this.metadata = metadata; dup.emit('metadata'); }) .on('finish', () => { dup.emit('complete'); }) .on('progress', evt => dup.emit('progress', evt)); dup.setWritable(uploadStream); this.storage.retryOptions.autoRetry = this.instanceRetryValue; } /** * Takes a readable stream and pipes it to a remote file. Unlike * `startResumableUpload_`, which uses the resumable upload technique, this * method uses a simple upload (all or nothing). * * @param {Duplexify} dup - Duplexify stream of data to pipe to the file. * @param {object=} options - Configuration object. * * @private */ startSimpleUpload_(dup, options = {}) { var _a; (_a = options.metadata) !== null && _a !== void 0 ? _a : (options.metadata = {}); const apiEndpoint = this.storage.apiEndpoint; const bucketName = this.bucket.name; const uri = `${apiEndpoint}/upload/storage/v1/b/${bucketName}/o`; const reqOpts = { qs: { name: this.name, }, uri: uri, [util_js_1.GCCL_GCS_CMD_KEY]: options[util_js_1.GCCL_GCS_CMD_KEY], }; if (this.generation !== undefined) { reqOpts.qs.ifGenerationMatch = this.generation; } if (this.kmsKeyName !== undefined) { reqOpts.qs.kmsKeyName = this.kmsKeyName; } if (typeof options.timeout === 'number') { reqOpts.timeout = options.timeout; } if (options.userProject || this.userProject) { reqOpts.qs.userProject = options.userProject || this.userProject; } if (options.predefinedAcl) { reqOpts.qs.predefinedAcl = options.predefinedAcl; } else if (options.private) { reqOpts.qs.predefinedAcl = 'private'; } else if (options.public) { reqOpts.qs.predefinedAcl = 'publicRead'; } Object.assign(reqOpts.qs, this.instancePreconditionOpts, options.preconditionOpts); index_js_1.util.makeWritableStream(dup, { makeAuthenticatedRequest: (reqOpts) => { this.request(reqOpts, (err, body, resp) => { if (err) { dup.destroy(err); return; } this.metadata = body; dup.emit('metadata', body); dup.emit('response', resp); dup.emit('complete'); }); }, metadata: options.metadata, request: reqOpts, }); } disableAutoRetryConditionallyIdempotent_( // eslint-disable-next-line @typescript-eslint/no-explicit-any coreOpts, methodType, localPreconditionOptions) { var _a, _b, _c, _d; if ((typeof coreOpts === 'object' && ((_b = (_a = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _a === void 0 ? void 0 : _a.qs) === null || _b === void 0 ? void 0 : _b.ifGenerationMatch) === undefined && (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifGenerationMatch) === undefined && methodType === bucket_js_1.AvailableServiceObjectMethods.delete && this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryConditional) || this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryNever) { this.storage.retryOptions.autoRetry = false; } if ((typeof coreOpts === 'object' && ((_d = (_c = coreOpts === null || coreOpts === void 0 ? void 0 : coreOpts.reqOpts) === null || _c === void 0 ? void 0 : _c.qs) === null || _d === void 0 ? 
void 0 : _d.ifMetagenerationMatch) === undefined && (localPreconditionOptions === null || localPreconditionOptions === void 0 ? void 0 : localPreconditionOptions.ifMetagenerationMatch) === undefined && methodType === bucket_js_1.AvailableServiceObjectMethods.setMetadata && this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryConditional) || this.storage.retryOptions.idempotencyStrategy === storage_js_1.IdempotencyStrategy.RetryNever) { this.storage.retryOptions.autoRetry = false; } } async getBufferFromReadable(readable) { const buf = []; for await (const chunk of readable) { buf.push(chunk); } return Buffer.concat(buf); } } exports.File = File; _File_instances = new WeakSet(), _File_validateIntegrity = /** * * @param hashCalculatingStream * @param verify * @returns {boolean} Returns `true` if valid, throws with error otherwise */ async function _File_validateIntegrity(hashCalculatingStream, verify = {}) { const metadata = this.metadata; // If we're doing validation, assume the worst let dataMismatch = !!(verify.crc32c || verify.md5); if (verify.crc32c && metadata.crc32c) { dataMismatch = !hashCalculatingStream.test('crc32c', metadata.crc32c); } if (verify.md5 && metadata.md5Hash) { dataMismatch = !hashCalculatingStream.test('md5', metadata.md5Hash); } if (dataMismatch) { const errors = []; let code = ''; let message = ''; try { await this.delete(); if (verify.md5 && !metadata.md5Hash) { code = 'MD5_NOT_AVAILABLE'; message = FileExceptionMessages.MD5_NOT_AVAILABLE; } else { code = 'FILE_NO_UPLOAD'; message = FileExceptionMessages.UPLOAD_MISMATCH; } } catch (e) { const error = e; code = 'FILE_NO_UPLOAD_DELETE'; message = `${FileExceptionMessages.UPLOAD_MISMATCH_DELETE_FAIL}${error.message}`; errors.push(error); } const error = new RequestError(message); error.code = code; error.errors = errors; throw error; } return true; }; /*! Developer Documentation * * All async methods (except for streams) will return a Promise in the event * that a callback is omitted. */ (0, promisify_1.promisifyAll)(File, { exclude: [ 'cloudStorageURI', 'publicUrl', 'request', 'save', 'setEncryptionKey', 'shouldRetryBasedOnPreconditionAndIdempotencyStrat', 'getBufferFromReadable', 'restore', ], }); /***/ }), /***/ 14873: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); return (kind === "a" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value; }; var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var _HashStreamValidator_crc32cHash, _HashStreamValidator_md5Hash, _HashStreamValidator_md5Digest; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HashStreamValidator = void 0; const crypto_1 = __nccwpck_require__(76982); const stream_1 = __nccwpck_require__(2203); const crc32c_js_1 = __nccwpck_require__(34317); const file_js_1 = __nccwpck_require__(69975); class HashStreamValidator extends stream_1.Transform { constructor(options = {}) { super(); this.updateHashesOnly = false; _HashStreamValidator_crc32cHash.set(this, undefined); _HashStreamValidator_md5Hash.set(this, undefined); _HashStreamValidator_md5Digest.set(this, ''); this.crc32cEnabled = !!options.crc32c; this.md5Enabled = !!options.md5; this.updateHashesOnly = !!options.updateHashesOnly; this.crc32cExpected = options.crc32cExpected; this.md5Expected = options.md5Expected; if (this.crc32cEnabled) { if (options.crc32cInstance) { __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, options.crc32cInstance, "f"); } else { const crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; __classPrivateFieldSet(this, _HashStreamValidator_crc32cHash, crc32cGenerator(), "f"); } } if (this.md5Enabled) { __classPrivateFieldSet(this, _HashStreamValidator_md5Hash, (0, crypto_1.createHash)('md5'), "f"); } } /** * Return the current CRC32C value, if available. */ get crc32c() { var _a; return (_a = __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) === null || _a === void 0 ? void 0 : _a.toString(); } _flush(callback) { if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { __classPrivateFieldSet(this, _HashStreamValidator_md5Digest, __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").digest('base64'), "f"); } if (this.updateHashesOnly) { callback(); return; } // If we're doing validation, assume the worst-- a data integrity // mismatch. If not, these tests won't be performed, and we can assume // the best. // We must check if the server decompressed the data on serve because hash // validation is not possible in this case. 
let failed = this.crc32cEnabled || this.md5Enabled; if (this.crc32cEnabled && this.crc32cExpected) { failed = !this.test('crc32c', this.crc32cExpected); } if (this.md5Enabled && this.md5Expected) { failed = !this.test('md5', this.md5Expected); } if (failed) { const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; callback(mismatchError); } else { callback(); } } _transform(chunk, encoding, callback) { this.push(chunk, encoding); try { if (__classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").update(chunk); if (__classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f").update(chunk); callback(); } catch (e) { callback(e); } } test(hash, sum) { const check = Buffer.isBuffer(sum) ? sum.toString('base64') : sum; if (hash === 'crc32c' && __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f")) { return __classPrivateFieldGet(this, _HashStreamValidator_crc32cHash, "f").validate(check); } if (hash === 'md5' && __classPrivateFieldGet(this, _HashStreamValidator_md5Hash, "f")) { return __classPrivateFieldGet(this, _HashStreamValidator_md5Digest, "f") === check; } return false; } } exports.HashStreamValidator = HashStreamValidator; _HashStreamValidator_crc32cHash = new WeakMap(), _HashStreamValidator_md5Hash = new WeakMap(), _HashStreamValidator_md5Digest = new WeakMap(); /***/ }), /***/ 45891: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.HmacKey = void 0; const index_js_1 = __nccwpck_require__(11325); const storage_js_1 = __nccwpck_require__(92848); const promisify_1 = __nccwpck_require__(60206); /** * The API-formatted resource description of the HMAC key. * * Note: This is not guaranteed to be up-to-date when accessed. To get the * latest record, call the `getMetadata()` method. * * @name HmacKey#metadata * @type {object} */ /** * An HmacKey object contains metadata of an HMAC key created from a * service account through the {@link Storage} client using * {@link Storage#createHmacKey}. * * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} * * @class */ class HmacKey extends index_js_1.ServiceObject { /** * @typedef {object} HmacKeyOptions * @property {string} [projectId] The project ID of the project that owns * the service account of the requested HMAC key. If not provided, * the project ID used to instantiate the Storage client will be used. */ /** * Constructs an HmacKey object. * * Note: this only creates a local reference to an HMAC key. To create * an HMAC key, use {@link Storage#createHmacKey}. * * @param {Storage} storage The Storage instance this HMAC key is * attached to.
* @param {string} accessId The unique accessId for this HMAC key. * @param {HmacKeyOptions} options Constructor configurations. * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const hmacKey = storage.hmacKey('access-id'); * ``` */ constructor(storage, accessId, options) { const methods = { /** * @typedef {object} DeleteHmacKeyOptions * @property {string} [userProject] This parameter is currently ignored. */ /** * @typedef {array} DeleteHmacKeyResponse * @property {object} 0 The full API response. */ /** * @callback DeleteHmacKeyCallback * @param {?Error} err Request error, if any. * @param {object} apiResponse The full API response. */ /** * Deletes an HMAC key. * Key state must be set to `INACTIVE` prior to deletion. * Caution: HMAC keys cannot be recovered once you delete them. * * The authenticated user must have `storage.hmacKeys.delete` permission for the project in which the key exists. * * @method HmacKey#delete * @param {DeleteHmacKeyOptions} [options] Configuration options. * @param {DeleteHmacKeyCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * * //- * // Delete HMAC key after making the key inactive. * //- * const hmacKey = storage.hmacKey('ACCESS_ID'); * hmacKey.setMetadata({state: 'INACTIVE'}, (err, hmacKeyMetadata) => { * if (err) { * // The request was an error. * console.error(err); * return; * } * hmacKey.delete((err) => { * if (err) { * console.error(err); * return; * } * // The HMAC key is deleted. * }); * }); * * //- * // If the callback is omitted, a promise is returned. * //- * const hmacKey = storage.hmacKey('ACCESS_ID'); * hmacKey * .setMetadata({state: 'INACTIVE'}) * .then(() => { * return hmacKey.delete(); * }); * ``` */ delete: true, /** * @callback GetHmacKeyCallback * @param {?Error} err Request error, if any. * @param {HmacKey} hmacKey this {@link HmacKey} instance. * @param {object} apiResponse The full API response. */ /** * @typedef {array} GetHmacKeyResponse * @property {HmacKey} 0 This {@link HmacKey} instance. * @property {object} 1 The full API response. */ /** * @typedef {object} GetHmacKeyOptions * @property {string} [userProject] This parameter is currently ignored. */ /** * Retrieves and populates an HMAC key's metadata, and returns * this {@link HmacKey} instance. * * HmacKey.get() does not give the HMAC key secret, as * it is only returned on creation. * * The authenticated user must have `storage.hmacKeys.get` permission * for the project in which the key exists. * * @method HmacKey#get * @param {GetHmacKeyOptions} [options] Configuration options. * @param {GetHmacKeyCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * * //- * // Get the HmacKey's Metadata. * //- * storage.hmacKey('ACCESS_ID') * .get((err, hmacKey) => { * if (err) { * // The request was an error. * console.error(err); * return; * } * // do something with the returned HmacKey object. * }); * * //- * // If the callback is omitted, a promise is returned. * //- * storage.hmacKey('ACCESS_ID') * .get() * .then((data) => { * const hmacKey = data[0]; * }); * ``` */ get: true, /** * @typedef {object} GetHmacKeyMetadataOptions * @property {string} [userProject] This parameter is currently ignored. */ /** * Retrieves and populates an HMAC key's metadata, and returns * the HMAC key's metadata as an object.
* * HmacKey.getMetadata() does not give the HMAC key secret, as * it is only returned on creation. * * The authenticated user must have `storage.hmacKeys.get` permission * for the project in which the key exists. * * @method HmacKey#getMetadata * @param {GetHmacKeyMetadataOptions} [options] Configuration options. * @param {HmacKeyMetadataCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * * //- * // Get the HmacKey's metadata and populate to the metadata property. * //- * storage.hmacKey('ACCESS_ID') * .getMetadata((err, hmacKeyMetadata) => { * if (err) { * // The request was an error. * console.error(err); * return; * } * console.log(hmacKeyMetadata); * }); * * //- * // If the callback is omitted, a promise is returned. * //- * storage.hmacKey('ACCESS_ID') * .getMetadata() * .then((data) => { * const hmacKeyMetadata = data[0]; * console.log(hmacKeyMetadata); * }); * ``` */ getMetadata: true, /** * @typedef {object} SetHmacKeyMetadata Subset of {@link HmacKeyMetadata} to update. * @property {string} state New state of the HmacKey. Either 'ACTIVE' or 'INACTIVE'. * @property {string} [etag] Include an etag from a previous get HMAC key request * to perform safe read-modify-write. */ /** * @typedef {object} SetHmacKeyMetadataOptions * @property {string} [userProject] This parameter is currently ignored. */ /** * @callback HmacKeyMetadataCallback * @param {?Error} err Request error, if any. * @param {HmacKeyMetadata} metadata The updated {@link HmacKeyMetadata} object. * @param {object} apiResponse The full API response. */ /** * @typedef {array} HmacKeyMetadataResponse * @property {HmacKeyMetadata} 0 The updated {@link HmacKeyMetadata} object. * @property {object} 1 The full API response. */ /** * Updates the state of an HMAC key. See {@link SetHmacKeyMetadata} for * valid states. * * @method HmacKey#setMetadata * @param {SetHmacKeyMetadata} metadata The new metadata. * @param {SetHmacKeyMetadataOptions} [options] Configuration options. * @param {HmacKeyMetadataCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * * const metadata = { * state: 'INACTIVE', * }; * * storage.hmacKey('ACCESS_ID') * .setMetadata(metadata, (err, hmacKeyMetadata) => { * if (err) { * // The request was an error. * console.error(err); * return; * } * console.log(hmacKeyMetadata); * }); * * //- * // If the callback is omitted, a promise is returned. * //- * storage.hmacKey('ACCESS_ID') * .setMetadata(metadata) * .then((data) => { * const hmacKeyMetadata = data[0]; * console.log(hmacKeyMetadata); * }); * ``` */ setMetadata: { reqOpts: { method: 'PUT', }, }, }; const projectId = (options && options.projectId) || storage.projectId; super({ parent: storage, id: accessId, baseUrl: `/projects/${projectId}/hmacKeys`, methods, }); this.storage = storage; this.instanceRetryValue = storage.retryOptions.autoRetry; } setMetadata(metadata, optionsOrCallback, cb) { // ETag preconditions are not currently supported. Retries should be disabled if the idempotency strategy is not set to RetryAlways if (this.storage.retryOptions.idempotencyStrategy !== storage_js_1.IdempotencyStrategy.RetryAlways) { this.storage.retryOptions.autoRetry = false; } const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; cb = typeof optionsOrCallback === 'function' ? 
optionsOrCallback : cb; super .setMetadata(metadata, options) .then(resp => cb(null, ...resp)) .catch(cb) .finally(() => { this.storage.retryOptions.autoRetry = this.instanceRetryValue; }); } } exports.HmacKey = HmacKey; /*! Developer Documentation * * All async methods (except for streams) will return a Promise in the event * that a callback is omitted. */ (0, promisify_1.promisifyAll)(HmacKey); /***/ }), /***/ 52880: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Iam = exports.IAMExceptionMessages = void 0; const promisify_1 = __nccwpck_require__(60206); const util_js_1 = __nccwpck_require__(74557); var IAMExceptionMessages; (function (IAMExceptionMessages) { IAMExceptionMessages["POLICY_OBJECT_REQUIRED"] = "A policy object is required."; IAMExceptionMessages["PERMISSIONS_REQUIRED"] = "Permissions are required."; })(IAMExceptionMessages || (exports.IAMExceptionMessages = IAMExceptionMessages = {})); /** * Get and set IAM policies for your Cloud Storage bucket. * * See {@link https://cloud.google.com/storage/docs/access-control/iam#short_title_iam_management| Cloud Storage IAM Management} * See {@link https://cloud.google.com/iam/docs/granting-changing-revoking-access| Granting, Changing, and Revoking Access} * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} * * @constructor Iam * * @param {Bucket} bucket The parent instance. * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * // bucket.iam * ``` */ class Iam { constructor(bucket) { this.request_ = bucket.request.bind(bucket); this.resourceId_ = 'buckets/' + bucket.getId(); } /** * @typedef {object} GetPolicyOptions Requested options for IAM#getPolicy(). * @property {number} [requestedPolicyVersion] The version of IAM policies to * request. If a policy with a condition is requested without setting * this, the server will return an error. This must be set to a value * of 3 to retrieve IAM policies containing conditions. This is to * prevent client code that isn't aware of IAM conditions from * interpreting and modifying policies incorrectly. The service might * return a policy with version lower than the one that was requested, * based on the feature syntax in the policy fetched. * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} * @property {string} [userProject] The ID of the project which will be * billed for the request. */ /** * @typedef {array} GetPolicyResponse * @property {Policy} 0 The policy. * @property {object} 1 The full API response. */ /** * @typedef {object} Policy * @property {PolicyBinding[]} policy.bindings Bindings associate members with roles. * @property {string} [policy.etag] Etags are used to perform a read-modify-write. 
* @property {number} [policy.version] The syntax schema version of the Policy. * To set an IAM policy with conditional binding, this field must be set to * 3 or greater. * See {@link https://cloud.google.com/iam/docs/policies#versions| IAM Policy versions} */ /** * @typedef {object} PolicyBinding * @property {string} role Role that is assigned to members. * @property {string[]} members Specifies the identities requesting access for the bucket. * @property {Expr} [condition] The condition that is associated with this binding. */ /** * @typedef {object} Expr * @property {string} [title] An optional title for the expression, i.e. a * short string describing its purpose. This can be used e.g. in UIs * which allow entering the expression. * @property {string} [description] An optional description of the * expression. This is a longer text which describes the expression, * e.g. when hovered over it in a UI. * @property {string} expression Textual representation of an expression in * Common Expression Language syntax. The application context of the * containing message determines which well-known feature set of CEL * is supported. * * @see [Condition] https://cloud.google.com/storage/docs/access-control/iam#conditions */ /** * Get the IAM policy. * * @param {GetPolicyOptions} [options] Request options. * @param {GetPolicyCallback} [callback] Callback function. * @returns {Promise} * * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/getIamPolicy| Buckets: getIamPolicy API Documentation} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * * bucket.iam.getPolicy( * {requestedPolicyVersion: 3}, * function(err, policy, apiResponse) { * * }, * ); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.iam.getPolicy({requestedPolicyVersion: 3}) * .then(function(data) { * const policy = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/iam.js * region_tag:storage_view_bucket_iam_members * Example of retrieving a bucket's IAM policy: */ getPolicy(optionsOrCallback, callback) { const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); const qs = {}; if (options.userProject) { qs.userProject = options.userProject; } if (options.requestedPolicyVersion !== null && options.requestedPolicyVersion !== undefined) { qs.optionsRequestedPolicyVersion = options.requestedPolicyVersion; } this.request_({ uri: '/iam', qs, }, cb); } /** * Set the IAM policy. * * @throws {Error} If no policy is provided. * * @param {Policy} policy The policy. * @param {SetPolicyOptions} [options] Configuration options. * @param {SetPolicyCallback} callback Callback function. * @returns {Promise} * * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/setIamPolicy| Buckets: setIamPolicy API Documentation} * See {@link https://cloud.google.com/iam/docs/understanding-roles| IAM Roles} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * * const myPolicy = { * bindings: [ * { * role: 'roles/storage.admin', * members: * ['serviceAccount:myotherproject@appspot.gserviceaccount.com'] * } * ] * }; * * bucket.iam.setPolicy(myPolicy, function(err, policy, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise.
* //- * bucket.iam.setPolicy(myPolicy).then(function(data) { * const policy = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/iam.js * region_tag:storage_add_bucket_iam_member * Example of adding to a bucket's IAM policy: * * @example include:samples/iam.js * region_tag:storage_remove_bucket_iam_member * Example of removing from a bucket's IAM policy: */ setPolicy(policy, optionsOrCallback, callback) { if (policy === null || typeof policy !== 'object') { throw new Error(IAMExceptionMessages.POLICY_OBJECT_REQUIRED); } const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); let maxRetries; if (policy.etag === undefined) { maxRetries = 0; } this.request_({ method: 'PUT', uri: '/iam', maxRetries, json: Object.assign({ resourceId: this.resourceId_, }, policy), qs: options, }, cb); } /** * Test a set of permissions for a resource. * * @throws {Error} If permissions are not provided. * * @param {string|string[]} permissions The permission(s) to test for. * @param {TestIamPermissionsOptions} [options] Configuration object. * @param {TestIamPermissionsCallback} [callback] Callback function. * @returns {Promise} * * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/testIamPermissions| Buckets: testIamPermissions API Documentation} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * * //- * // Test a single permission. * //- * const test = 'storage.buckets.delete'; * * bucket.iam.testPermissions(test, function(err, permissions, apiResponse) { * console.log(permissions); * // { * // "storage.buckets.delete": true * // } * }); * * //- * // Test several permissions at once. * //- * const tests = [ * 'storage.buckets.delete', * 'storage.buckets.get' * ]; * * bucket.iam.testPermissions(tests, function(err, permissions) { * console.log(permissions); * // { * // "storage.buckets.delete": false, * // "storage.buckets.get": true * // } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * bucket.iam.testPermissions(test).then(function(data) { * const permissions = data[0]; * const apiResponse = data[1]; * }); * ``` */ testPermissions(permissions, optionsOrCallback, callback) { if (!Array.isArray(permissions) && typeof permissions !== 'string') { throw new Error(IAMExceptionMessages.PERMISSIONS_REQUIRED); } const { options, callback: cb } = (0, util_js_1.normalize)(optionsOrCallback, callback); const permissionsArray = Array.isArray(permissions) ? permissions : [permissions]; const req = Object.assign({ permissions: permissionsArray, }, options); this.request_({ uri: '/iam/testPermissions', qs: req, useQuerystring: true, }, (err, resp) => { if (err) { cb(err, null, resp); return; } const availablePermissions = Array.isArray(resp.permissions) ? resp.permissions : []; const permissionsHash = permissionsArray.reduce((acc, permission) => { acc[permission] = availablePermissions.indexOf(permission) > -1; return acc; }, {}); cb(null, permissionsHash, resp); }); } } exports.Iam = Iam; /*! Developer Documentation * * All async methods (except for streams) will return a Promise in the event * that a callback is omitted. 
*/ (0, promisify_1.promisifyAll)(Iam); /***/ }), /***/ 28525: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __exportStar = (this && this.__exportStar) || function(m, exports) { for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Notification = exports.Iam = exports.HmacKey = exports.File = exports.Channel = exports.Bucket = exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.IdempotencyStrategy = exports.ApiError = void 0; /** * The `@google-cloud/storage` package has a single named export which is the * {@link Storage} (ES6) class, which should be instantiated with `new`. * * See {@link Storage} and {@link ClientConfig} for client methods and * configuration options. 
* * @module {Storage} @google-cloud/storage * @alias nodejs-storage * * @example * Install the client library with npm: * ``` * npm install --save @google-cloud/storage * ``` * * @example * Import the client library * ``` * const {Storage} = require('@google-cloud/storage'); * ``` * * @example * Create a client that uses Application * Default Credentials (ADC): * ``` * const storage = new Storage(); * ``` * * @example * Create a client with explicit * credentials: * ``` * const storage = new Storage({ projectId: * 'your-project-id', keyFilename: '/path/to/keyfile.json' * }); * ``` * * @example include:samples/quickstart.js * region_tag:storage_quickstart * Full quickstart example: */ var index_js_1 = __nccwpck_require__(11325); Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return index_js_1.ApiError; } })); var storage_js_1 = __nccwpck_require__(92848); Object.defineProperty(exports, "IdempotencyStrategy", ({ enumerable: true, get: function () { return storage_js_1.IdempotencyStrategy; } })); Object.defineProperty(exports, "RETRYABLE_ERR_FN_DEFAULT", ({ enumerable: true, get: function () { return storage_js_1.RETRYABLE_ERR_FN_DEFAULT; } })); Object.defineProperty(exports, "Storage", ({ enumerable: true, get: function () { return storage_js_1.Storage; } })); var bucket_js_1 = __nccwpck_require__(82887); Object.defineProperty(exports, "Bucket", ({ enumerable: true, get: function () { return bucket_js_1.Bucket; } })); __exportStar(__nccwpck_require__(34317), exports); var channel_js_1 = __nccwpck_require__(12658); Object.defineProperty(exports, "Channel", ({ enumerable: true, get: function () { return channel_js_1.Channel; } })); var file_js_1 = __nccwpck_require__(69975); Object.defineProperty(exports, "File", ({ enumerable: true, get: function () { return file_js_1.File; } })); __exportStar(__nccwpck_require__(14873), exports); var hmacKey_js_1 = __nccwpck_require__(45891); Object.defineProperty(exports, "HmacKey", ({ enumerable: true, get: function () { return hmacKey_js_1.HmacKey; } })); var iam_js_1 = __nccwpck_require__(52880); Object.defineProperty(exports, "Iam", ({ enumerable: true, get: function () { return iam_js_1.Iam; } })); var notification_js_1 = __nccwpck_require__(18598); Object.defineProperty(exports, "Notification", ({ enumerable: true, get: function () { return notification_js_1.Notification; } })); __exportStar(__nccwpck_require__(30004), exports); /***/ }), /***/ 11325: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.util = exports.ApiError = exports.ServiceObject = exports.Service = void 0; var service_js_1 = __nccwpck_require__(8542); Object.defineProperty(exports, "Service", ({ enumerable: true, get: function () { return service_js_1.Service; } })); var service_object_js_1 = __nccwpck_require__(72908); Object.defineProperty(exports, "ServiceObject", ({ enumerable: true, get: function () { return service_object_js_1.ServiceObject; } })); var util_js_1 = __nccwpck_require__(37325); Object.defineProperty(exports, "ApiError", ({ enumerable: true, get: function () { return util_js_1.ApiError; } })); Object.defineProperty(exports, "util", ({ enumerable: true, get: function () { return util_js_1.util; } })); /***/ }), /***/ 72908: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.ServiceObject = void 0; /*! 
* Copyright 2022 Google LLC. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ const promisify_1 = __nccwpck_require__(60206); const events_1 = __nccwpck_require__(24434); const util_js_1 = __nccwpck_require__(37325); /** * ServiceObject is a base class, meant to be inherited from by a "service * object," like a BigQuery dataset or Storage bucket. * * Most of the time, these objects share common functionality; they can be * created or deleted, and you can get or set their metadata. * * By inheriting from this class, a service object will be extended with these * shared behaviors. Note that any method can be overridden when the service * object requires specific behavior. */ // eslint-disable-next-line @typescript-eslint/no-explicit-any class ServiceObject extends events_1.EventEmitter { /* * @constructor * @alias module:common/service-object * * @private * * @param {object} config - Configuration object. * @param {string} config.baseUrl - The base URL to make API requests to. * @param {string} config.createMethod - The method which creates this object. * @param {string=} config.id - The identifier of the object. For example, the * name of a Storage bucket or Pub/Sub topic. * @param {object=} config.methods - A map of each method name that should be inherited. * @param {object} config.methods[].reqOpts - Default request options for this * particular method. A common use case is when `setMetadata` requires a * `PUT` method to override the default `PATCH`. * @param {object} config.parent - The parent service instance. For example, an * instance of Storage if the object is Bucket. */ constructor(config) { super(); this.metadata = {}; this.baseUrl = config.baseUrl; this.parent = config.parent; // Parent class. this.id = config.id; // Name or ID (e.g. dataset ID, bucket name, etc). this.createMethod = config.createMethod; this.methods = config.methods || {}; this.interceptors = []; this.projectId = config.projectId; if (config.methods) { // This filters the ServiceObject instance (e.g. a "File") to only have // the configured methods. We make a couple of exceptions for core- // functionality ("request()" and "getRequestInterceptors()") Object.getOwnPropertyNames(ServiceObject.prototype) .filter(methodName => { return ( // All ServiceObjects need `request` and `getRequestInterceptors`. // clang-format off !/^request/.test(methodName) && !/^getRequestInterceptors/.test(methodName) && // clang-format on // The ServiceObject didn't redefine the method. // eslint-disable-next-line @typescript-eslint/no-explicit-any this[methodName] === // eslint-disable-next-line @typescript-eslint/no-explicit-any ServiceObject.prototype[methodName] && // This method isn't wanted. 
!config.methods[methodName]); }) .forEach(methodName => { // eslint-disable-next-line @typescript-eslint/no-explicit-any this[methodName] = undefined; }); } } create(optionsOrCallback, callback) { // eslint-disable-next-line @typescript-eslint/no-this-alias const self = this; const args = [this.id]; if (typeof optionsOrCallback === 'function') { callback = optionsOrCallback; } if (typeof optionsOrCallback === 'object') { args.push(optionsOrCallback); } // Wrap the callback to return *this* instance of the object, not the // newly-created one. // tslint: disable-next-line no-any function onCreate(...args) { const [err, instance] = args; if (!err) { self.metadata = instance.metadata; if (self.id && instance.metadata) { self.id = instance.metadata.id; } args[1] = self; // replace the created `instance` with this one. } callback(...args); } args.push(onCreate); // eslint-disable-next-line prefer-spread this.createMethod.apply(null, args); } delete(optionsOrCallback, cb) { var _a; const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); const ignoreNotFound = options.ignoreNotFound; delete options.ignoreNotFound; const methodConfig = (typeof this.methods.delete === 'object' && this.methods.delete) || {}; const reqOpts = { method: 'DELETE', uri: '', ...methodConfig.reqOpts, qs: { ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, ...options, }, }; // The `request` method may have been overridden to hold any special // behavior. Ensure we call the original `request` method. ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { if (err) { if (err.code === 404 && ignoreNotFound) { err = null; } } callback(err, res); }); } exists(optionsOrCallback, cb) { const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); this.get(options, err => { if (err) { if (err.code === 404) { callback(null, false); } else { callback(err); } return; } callback(null, true); }); } get(optionsOrCallback, cb) { // eslint-disable-next-line @typescript-eslint/no-this-alias const self = this; const [opts, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); const options = Object.assign({}, opts); const autoCreate = options.autoCreate && typeof this.create === 'function'; delete options.autoCreate; function onCreate(err, instance, apiResponse) { if (err) { if (err.code === 409) { self.get(options, callback); return; } callback(err, null, apiResponse); return; } callback(null, instance, apiResponse); } this.getMetadata(options, (err, metadata) => { if (err) { if (err.code === 404 && autoCreate) { const args = []; if (Object.keys(options).length > 0) { args.push(options); } args.push(onCreate); self.create(...args); return; } callback(err, null, metadata); return; } callback(null, self, metadata); }); } getMetadata(optionsOrCallback, cb) { var _a; const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); const methodConfig = (typeof this.methods.getMetadata === 'object' && this.methods.getMetadata) || {}; const reqOpts = { uri: '', ...methodConfig.reqOpts, qs: { ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.qs, ...options, }, }; // The `request` method may have been overridden to hold any special // behavior. Ensure we call the original `request` method. ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { this.metadata = body; callback(err, this.metadata, res); }); } /** * Return the user's custom request interceptors. 
*/ getRequestInterceptors() { // Interceptors should be returned in the order they were assigned. const localInterceptors = this.interceptors .filter(interceptor => typeof interceptor.request === 'function') .map(interceptor => interceptor.request); return this.parent.getRequestInterceptors().concat(localInterceptors); } setMetadata(metadata, optionsOrCallback, cb) { var _a, _b; const [options, callback] = util_js_1.util.maybeOptionsOrCallback(optionsOrCallback, cb); const methodConfig = (typeof this.methods.setMetadata === 'object' && this.methods.setMetadata) || {}; const reqOpts = { method: 'PATCH', uri: '', ...methodConfig.reqOpts, json: { ...(_a = methodConfig.reqOpts) === null || _a === void 0 ? void 0 : _a.json, ...metadata, }, qs: { ...(_b = methodConfig.reqOpts) === null || _b === void 0 ? void 0 : _b.qs, ...options, }, }; // The `request` method may have been overridden to hold any special // behavior. Ensure we call the original `request` method. ServiceObject.prototype.request.call(this, reqOpts, (err, body, res) => { this.metadata = body; callback(err, this.metadata, res); }); } request_(reqOpts, callback) { reqOpts = { ...reqOpts }; if (this.projectId) { reqOpts.projectId = this.projectId; } const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; const uriComponents = [this.baseUrl, this.id || '', reqOpts.uri]; if (isAbsoluteUrl) { uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); } reqOpts.uri = uriComponents .filter(x => x.trim()) // Limit to non-empty strings. .map(uriComponent => { const trimSlashesRegex = /^\/*|\/*$/g; return uriComponent.replace(trimSlashesRegex, ''); }) .join('/'); const childInterceptors = Array.isArray(reqOpts.interceptors_) ? reqOpts.interceptors_ : []; const localInterceptors = [].slice.call(this.interceptors); reqOpts.interceptors_ = childInterceptors.concat(localInterceptors); if (reqOpts.shouldReturnStream) { return this.parent.requestStream(reqOpts); } this.parent.request(reqOpts, callback); } request(reqOpts, callback) { this.request_(reqOpts, callback); } /** * Make an authenticated API request. * * @param {object} reqOpts - Request options that are passed to `request`. * @param {string} reqOpts.uri - A URI relative to the baseUrl. */ requestStream(reqOpts) { const opts = { ...reqOpts, shouldReturnStream: true }; return this.request_(opts); } } exports.ServiceObject = ServiceObject; (0, promisify_1.promisifyAll)(ServiceObject, { exclude: ['getRequestInterceptors'] }); /***/ }), /***/ 8542: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Service = exports.DEFAULT_PROJECT_ID_TOKEN = void 0; /*! * Copyright 2022 Google LLC. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ const google_auth_library_1 = __nccwpck_require__(492); const uuid = __importStar(__nccwpck_require__(20607)); const util_js_1 = __nccwpck_require__(37325); const util_js_2 = __nccwpck_require__(74557); exports.DEFAULT_PROJECT_ID_TOKEN = '{{projectId}}'; class Service { /** * Service is a base class, meant to be inherited from by a "service," like * BigQuery or Storage. * * This handles making authenticated requests by exposing a `makeReq_` * function. * * @constructor * @alias module:common/service * * @param {object} config - Configuration object. * @param {string} config.baseUrl - The base URL to make API requests to. * @param {string[]} config.scopes - The scopes required for the request. * @param {object=} options - [Configuration object](#/docs). */ constructor(config, options = {}) { this.baseUrl = config.baseUrl; this.apiEndpoint = config.apiEndpoint; this.timeout = options.timeout; this.globalInterceptors = Array.isArray(options.interceptors_) ? options.interceptors_ : []; this.interceptors = []; this.packageJson = config.packageJson; this.projectId = options.projectId || exports.DEFAULT_PROJECT_ID_TOKEN; this.projectIdRequired = config.projectIdRequired !== false; this.providedUserAgent = options.userAgent; this.universeDomain = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; this.customEndpoint = config.customEndpoint || false; this.makeAuthenticatedRequest = util_js_1.util.makeAuthenticatedRequestFactory({ ...config, projectIdRequired: this.projectIdRequired, projectId: this.projectId, authClient: options.authClient || config.authClient, credentials: options.credentials, keyFile: options.keyFilename, email: options.email, clientOptions: { universeDomain: options.universeDomain, ...options.clientOptions, }, }); this.authClient = this.makeAuthenticatedRequest.authClient; const isCloudFunctionEnv = !!process.env.FUNCTION_NAME; if (isCloudFunctionEnv) { this.interceptors.push({ request(reqOpts) { reqOpts.forever = false; return reqOpts; }, }); } } /** * Return the user's custom request interceptors. 
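 *
 * @example
 * // A minimal sketch, assuming `service` is an instance of a Service subclass
 * // (for example a Storage client). Interceptors passed at construction time
 * // via `options.interceptors_` are returned before ones pushed afterwards.
 * service.interceptors.push({
 *   request: reqOpts => {
 *     reqOpts.headers = {...reqOpts.headers, 'x-trace': 'on'};
 *     return reqOpts;
 *   },
 * });
 * const interceptorFns = service.getRequestInterceptors();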
*/ getRequestInterceptors() { // Interceptors should be returned in the order they were assigned. return [].slice .call(this.globalInterceptors) .concat(this.interceptors) .filter(interceptor => typeof interceptor.request === 'function') .map(interceptor => interceptor.request); } getProjectId(callback) { if (!callback) { return this.getProjectIdAsync(); } this.getProjectIdAsync().then(p => callback(null, p), callback); } async getProjectIdAsync() { const projectId = await this.authClient.getProjectId(); if (this.projectId === exports.DEFAULT_PROJECT_ID_TOKEN && projectId) { this.projectId = projectId; } return this.projectId; } request_(reqOpts, callback) { reqOpts = { ...reqOpts, timeout: this.timeout }; const isAbsoluteUrl = reqOpts.uri.indexOf('http') === 0; const uriComponents = [this.baseUrl]; if (this.projectIdRequired) { if (reqOpts.projectId) { uriComponents.push('projects'); uriComponents.push(reqOpts.projectId); } else { uriComponents.push('projects'); uriComponents.push(this.projectId); } } uriComponents.push(reqOpts.uri); if (isAbsoluteUrl) { uriComponents.splice(0, uriComponents.indexOf(reqOpts.uri)); } reqOpts.uri = uriComponents .map(uriComponent => { const trimSlashesRegex = /^\/*|\/*$/g; return uriComponent.replace(trimSlashesRegex, ''); }) .join('/') // Some URIs have colon separators. // Bad: https://.../projects/:list // Good: https://.../projects:list .replace(/\/:/g, ':'); const requestInterceptors = this.getRequestInterceptors(); const interceptorArray = Array.isArray(reqOpts.interceptors_) ? reqOpts.interceptors_ : []; interceptorArray.forEach(interceptor => { if (typeof interceptor.request === 'function') { requestInterceptors.push(interceptor.request); } }); requestInterceptors.forEach(requestInterceptor => { reqOpts = requestInterceptor(reqOpts); }); delete reqOpts.interceptors_; const pkg = this.packageJson; let userAgent = (0, util_js_2.getUserAgentString)(); if (this.providedUserAgent) { userAgent = `${this.providedUserAgent} ${userAgent}`; } reqOpts.headers = { ...reqOpts.headers, 'User-Agent': userAgent, 'x-goog-api-client': `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${pkg.version}-${(0, util_js_2.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, }; if (reqOpts[util_js_1.GCCL_GCS_CMD_KEY]) { reqOpts.headers['x-goog-api-client'] += ` gccl-gcs-cmd/${reqOpts[util_js_1.GCCL_GCS_CMD_KEY]}`; } if (reqOpts.shouldReturnStream) { return this.makeAuthenticatedRequest(reqOpts); } else { this.makeAuthenticatedRequest(reqOpts, callback); } } /** * Make an authenticated API request. * * @param {object} reqOpts - Request options that are passed to `request`. * @param {string} reqOpts.uri - A URI relative to the baseUrl. * @param {function} callback - The callback function passed to `request`. */ request(reqOpts, callback) { Service.prototype.request_.call(this, reqOpts, callback); } /** * Make an authenticated API request. * * @param {object} reqOpts - Request options that are passed to `request`. * @param {string} reqOpts.uri - A URI relative to the baseUrl. */ requestStream(reqOpts) { const opts = { ...reqOpts, shouldReturnStream: true }; return Service.prototype.request_.call(this, opts); } } exports.Service = Service; /***/ }), /***/ 37325: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; /*! * Copyright 2022 Google LLC. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. 
* You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.util = exports.Util = exports.PartialFailureError = exports.ApiError = exports.GCCL_GCS_CMD_KEY = void 0; /*! * @module common/util */ const projectify_1 = __nccwpck_require__(87635); const htmlEntities = __importStar(__nccwpck_require__(13660)); const google_auth_library_1 = __nccwpck_require__(492); const retry_request_1 = __importDefault(__nccwpck_require__(67842)); const stream_1 = __nccwpck_require__(2203); const teeny_request_1 = __nccwpck_require__(80321); const uuid = __importStar(__nccwpck_require__(20607)); const service_js_1 = __nccwpck_require__(8542); const util_js_1 = __nccwpck_require__(74557); const duplexify_1 = __importDefault(__nccwpck_require__(29112)); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore const package_json_helper_cjs_1 = __nccwpck_require__(41969); const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); /** * A unique symbol for providing a `gccl-gcs-cmd` value * for the `X-Goog-API-Client` header. * * E.g. the `V` in `X-Goog-API-Client: gccl-gcs-cmd/V` **/ exports.GCCL_GCS_CMD_KEY = Symbol.for('GCCL_GCS_CMD'); const requestDefaults = { timeout: 60000, gzip: true, forever: true, pool: { maxSockets: Infinity, }, }; /** * Default behavior: Automatically retry retriable server errors. * * @const {boolean} * @private */ const AUTO_RETRY_DEFAULT = true; /** * Default behavior: Only attempt to retry retriable errors 3 times. * * @const {number} * @private */ const MAX_RETRY_DEFAULT = 3; /** * Custom error type for API errors. * * @param {object} errorBody - Error object. 
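 *
 * @example
 * // A hedged sketch of inspecting an ApiError surfaced by a request callback;
 * // `service` stands for any Service or ServiceObject instance.
 * service.request({uri: '/does-not-exist'}, err => {
 *   if (err) {
 *     console.log(err.code);    // HTTP status code, e.g. 404
 *     console.log(err.errors);  // inner errors from the API response body, if any
 *     console.log(err.message); // combined, de-duplicated error message
 *   }
 * });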
*/ class ApiError extends Error { constructor(errorBodyOrMessage) { super(); if (typeof errorBodyOrMessage !== 'object') { this.message = errorBodyOrMessage || ''; return; } const errorBody = errorBodyOrMessage; this.code = errorBody.code; this.errors = errorBody.errors; this.response = errorBody.response; try { this.errors = JSON.parse(this.response.body).error.errors; } catch (e) { this.errors = errorBody.errors; } this.message = ApiError.createMultiErrorMessage(errorBody, this.errors); Error.captureStackTrace(this); } /** * Pieces together an error message by combining all unique error messages * returned from a single GoogleError * * @private * * @param {GoogleErrorBody} err The original error. * @param {GoogleInnerError[]} [errors] Inner errors, if any. * @returns {string} */ static createMultiErrorMessage(err, errors) { const messages = new Set(); if (err.message) { messages.add(err.message); } if (errors && errors.length) { errors.forEach(({ message }) => messages.add(message)); } else if (err.response && err.response.body) { messages.add(htmlEntities.decode(err.response.body.toString())); } else if (!err.message) { messages.add('A failure occurred during this request.'); } let messageArr = Array.from(messages); if (messageArr.length > 1) { messageArr = messageArr.map((message, i) => ` ${i + 1}. ${message}`); messageArr.unshift('Multiple errors occurred during the request. Please see the `errors` array for complete details.\n'); messageArr.push('\n'); } return messageArr.join('\n'); } } exports.ApiError = ApiError; /** * Custom error type for partial errors returned from the API. * * @param {object} b - Error object. */ class PartialFailureError extends Error { constructor(b) { super(); const errorObject = b; this.errors = errorObject.errors; this.name = 'PartialFailureError'; this.response = errorObject.response; this.message = ApiError.createMultiErrorMessage(errorObject, this.errors); } } exports.PartialFailureError = PartialFailureError; class Util { constructor() { this.ApiError = ApiError; this.PartialFailureError = PartialFailureError; } /** * No op. * * @example * function doSomething(callback) { * callback = callback || noop; * } */ noop() { } /** * Uniformly process an API response. * * @param {*} err - Error value. * @param {*} resp - Response value. * @param {*} body - Body value. * @param {function} callback - The callback function. */ handleResp(err, resp, body, callback) { callback = callback || util.noop; const parsedResp = { err: err || null, ...(resp && util.parseHttpRespMessage(resp)), ...(body && util.parseHttpRespBody(body)), }; // Assign the parsed body to resp.body, even if { json: false } was passed // as a request option. // We assume that nobody uses the previously unparsed value of resp.body. if (!parsedResp.err && resp && typeof parsedResp.body === 'object') { parsedResp.resp.body = parsedResp.body; } if (parsedResp.err && resp) { parsedResp.err.response = resp; } callback(parsedResp.err, parsedResp.body, parsedResp.resp); } /** * Sniff an incoming HTTP response message for errors. * * @param {object} httpRespMessage - An incoming HTTP response message from `request`. * @return {object} parsedHttpRespMessage - The parsed response. * @param {?error} parsedHttpRespMessage.err - An error detected. * @param {object} parsedHttpRespMessage.resp - The original response object. 
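 *
 * @example
 * // A hedged sketch: any object shaped like an HTTP response message works here.
 * const parsed = util.parseHttpRespMessage({
 *   statusCode: 503,
 *   statusMessage: 'Service Unavailable',
 * });
 * // parsed.resp is the original message; parsed.err is an ApiError with
 * // code 503. For a 2xx status code, parsed.err is left undefined.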
*/ parseHttpRespMessage(httpRespMessage) { const parsedHttpRespMessage = { resp: httpRespMessage, }; if (httpRespMessage.statusCode < 200 || httpRespMessage.statusCode > 299) { // Unknown error. Format according to ApiError standard. parsedHttpRespMessage.err = new ApiError({ errors: new Array(), code: httpRespMessage.statusCode, message: httpRespMessage.statusMessage, response: httpRespMessage, }); } return parsedHttpRespMessage; } /** * Parse the response body from an HTTP request. * * @param {object} body - The response body. * @return {object} parsedHttpRespMessage - The parsed response. * @param {?error} parsedHttpRespMessage.err - An error detected. * @param {object} parsedHttpRespMessage.body - The original body value provided * will try to be JSON.parse'd. If it's successful, the parsed value will * be returned here, otherwise the original value and an error will be returned. */ parseHttpRespBody(body) { const parsedHttpRespBody = { body, }; if (typeof body === 'string') { try { parsedHttpRespBody.body = JSON.parse(body); } catch (err) { parsedHttpRespBody.body = body; } } if (parsedHttpRespBody.body && parsedHttpRespBody.body.error) { // Error from JSON API. parsedHttpRespBody.err = new ApiError(parsedHttpRespBody.body.error); } return parsedHttpRespBody; } /** * Take a Duplexify stream, fetch an authenticated connection header, and * create an outgoing writable stream. * * @param {Duplexify} dup - Duplexify stream. * @param {object} options - Configuration object. * @param {module:common/connection} options.connection - A connection instance used to get a token with and send the request through. * @param {object} options.metadata - Metadata to send at the head of the request. * @param {object} options.request - Request object, in the format of a standard Node.js http.request() object. * @param {string=} options.request.method - Default: "POST". * @param {string=} options.request.qs.uploadType - Default: "multipart". * @param {string=} options.streamContentType - Default: "application/octet-stream". * @param {function} onComplete - Callback, executed after the writable Request stream has completed. */ makeWritableStream(dup, options, onComplete) { var _a; onComplete = onComplete || util.noop; const writeStream = new ProgressStream(); writeStream.on('progress', evt => dup.emit('progress', evt)); dup.setWritable(writeStream); const defaultReqOpts = { method: 'POST', qs: { uploadType: 'multipart', }, timeout: 0, maxRetries: 0, }; const metadata = options.metadata || {}; const reqOpts = { ...defaultReqOpts, ...options.request, qs: { ...defaultReqOpts.qs, ...(_a = options.request) === null || _a === void 0 ? void 0 : _a.qs, }, multipart: [ { 'Content-Type': 'application/json', body: JSON.stringify(metadata), }, { 'Content-Type': metadata.contentType || 'application/octet-stream', body: writeStream, }, ], }; options.makeAuthenticatedRequest(reqOpts, { onAuthenticated(err, authenticatedReqOpts) { if (err) { dup.destroy(err); return; } requestDefaults.headers = util._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); const request = teeny_request_1.teenyRequest.defaults(requestDefaults); request(authenticatedReqOpts, (err, resp, body) => { util.handleResp(err, resp, body, (err, data) => { if (err) { dup.destroy(err); return; } dup.emit('response', resp); onComplete(data); }); }); }, }); } /** * Returns true if the API request should be retried, given the error that was * given the first time the request was attempted. 
This is used for rate limit * related errors as well as intermittent server errors. * * @param {error} err - The API error to check if it is appropriate to retry. * @return {boolean} True if the API request should be retried, false otherwise. */ shouldRetryRequest(err) { if (err) { if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { return true; } if (err.errors) { for (const e of err.errors) { const reason = e.reason; if (reason === 'rateLimitExceeded') { return true; } if (reason === 'userRateLimitExceeded') { return true; } if (reason && reason.includes('EAI_AGAIN')) { return true; } } } } return false; } /** * Get a function for making authenticated requests. * * @param {object} config - Configuration object. * @param {boolean=} config.autoRetry - Automatically retry requests if the * response is related to rate limits or certain intermittent server * errors. We will exponentially backoff subsequent requests by default. * (default: true) * @param {object=} config.credentials - Credentials object. * @param {boolean=} config.customEndpoint - If true, just return the provided request options. Default: false. * @param {boolean=} config.useAuthWithCustomEndpoint - If true, will authenticate when using a custom endpoint. Default: false. * @param {string=} config.email - Account email address, required for PEM/P12 usage. * @param {number=} config.maxRetries - Maximum number of automatic retries attempted before returning the error. (default: 3) * @param {string=} config.keyFile - Path to a .json, .pem, or .p12 keyfile. * @param {array} config.scopes - Array of scopes required for the API. */ makeAuthenticatedRequestFactory(config) { const googleAutoAuthConfig = { ...config }; if (googleAutoAuthConfig.projectId === service_js_1.DEFAULT_PROJECT_ID_TOKEN) { delete googleAutoAuthConfig.projectId; } let authClient; if (googleAutoAuthConfig.authClient instanceof google_auth_library_1.GoogleAuth) { // Use an existing `GoogleAuth` authClient = googleAutoAuthConfig.authClient; } else { // Pass an `AuthClient` & `clientOptions` to `GoogleAuth`, if available authClient = new google_auth_library_1.GoogleAuth({ ...googleAutoAuthConfig, authClient: googleAutoAuthConfig.authClient, clientOptions: googleAutoAuthConfig.clientOptions, }); } function makeAuthenticatedRequest(reqOpts, optionsOrCallback) { let stream; let projectId; const reqConfig = { ...config }; let activeRequest_; if (!optionsOrCallback) { stream = (0, duplexify_1.default)(); reqConfig.stream = stream; } const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : undefined; const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : undefined; async function setProjectId() { projectId = await authClient.getProjectId(); } const onAuthenticated = async (err, authenticatedReqOpts) => { const authLibraryError = err; const autoAuthFailed = err && typeof err.message === 'string' && err.message.indexOf('Could not load the default credentials') > -1; if (autoAuthFailed) { // Even though authentication failed, the API might not actually // care. authenticatedReqOpts = reqOpts; } if (!err || autoAuthFailed) { try { // Try with existing `projectId` value authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); err = null; } catch (e) { if (e instanceof projectify_1.MissingProjectIdError) { // A `projectId` was required, but we don't have one. 
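// `decorateRequest` relies on `replaceProjectIdToken`, which throws
// MissingProjectIdError when the request still contains the '{{projectId}}'
// placeholder (DEFAULT_PROJECT_ID_TOKEN) and no concrete project ID is known,
// so fall back to resolving one through the auth client before retrying.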
try { // Attempt to get the `projectId` await setProjectId(); authenticatedReqOpts = util.decorateRequest(authenticatedReqOpts, projectId); err = null; } catch (e) { // Re-use the "Could not load the default credentials error" if // auto auth failed. err = err || e; } } else { // Some other error unrelated to missing `projectId` err = err || e; } } } if (err) { if (stream) { stream.destroy(err); } else { const fn = options && options.onAuthenticated ? options.onAuthenticated : callback; fn(err); } return; } if (options && options.onAuthenticated) { options.onAuthenticated(null, authenticatedReqOpts); } else { activeRequest_ = util.makeRequest(authenticatedReqOpts, reqConfig, (apiResponseError, ...params) => { if (apiResponseError && apiResponseError.code === 401 && authLibraryError) { // Re-use the "Could not load the default credentials error" if // the API request failed due to missing credentials. apiResponseError = authLibraryError; } callback(apiResponseError, ...params); }); } }; const prepareRequest = async () => { try { const getProjectId = async () => { if (config.projectId && config.projectId !== service_js_1.DEFAULT_PROJECT_ID_TOKEN) { // The user provided a project ID. We don't need to check with the // auth client, it could be incorrect. return config.projectId; } if (config.projectIdRequired === false) { // A projectId is not required. Return the default. return service_js_1.DEFAULT_PROJECT_ID_TOKEN; } return setProjectId(); }; const authorizeRequest = async () => { if (reqConfig.customEndpoint && !reqConfig.useAuthWithCustomEndpoint) { // Using a custom API override. Do not use `google-auth-library` for // authentication. (ex: connecting to a local Datastore server) return reqOpts; } else { return authClient.authorizeRequest(reqOpts); } }; const [_projectId, authorizedReqOpts] = await Promise.all([ getProjectId(), authorizeRequest(), ]); if (_projectId) { projectId = _projectId; } return onAuthenticated(null, authorizedReqOpts); } catch (e) { return onAuthenticated(e); } }; prepareRequest(); if (stream) { return stream; } return { abort() { setImmediate(() => { if (activeRequest_) { activeRequest_.abort(); activeRequest_ = null; } }); }, }; } const mar = makeAuthenticatedRequest; mar.getCredentials = authClient.getCredentials.bind(authClient); mar.authClient = authClient; return mar; } /** * Make a request through the `retryRequest` module with built-in error * handling and exponential back off. * * @param {object} reqOpts - Request options in the format `request` expects. * @param {object=} config - Configuration object. * @param {boolean=} config.autoRetry - Automatically retry requests if the * response is related to rate limits or certain intermittent server * errors. We will exponentially backoff subsequent requests by default. * (default: true) * @param {number=} config.maxRetries - Maximum number of automatic retries * attempted before returning the error. (default: 3) * @param {object=} config.request - HTTP module for request calls. * @param {function} callback - The callback function. */ makeRequest(reqOpts, config, callback) { var _a, _b, _c, _d, _e; let autoRetryValue = AUTO_RETRY_DEFAULT; if (config.autoRetry !== undefined) { autoRetryValue = config.autoRetry; } else if (((_a = config.retryOptions) === null || _a === void 0 ? 
void 0 : _a.autoRetry) !== undefined) { autoRetryValue = config.retryOptions.autoRetry; } let maxRetryValue = MAX_RETRY_DEFAULT; if (config.maxRetries !== undefined) { maxRetryValue = config.maxRetries; } else if (((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.maxRetries) !== undefined) { maxRetryValue = config.retryOptions.maxRetries; } requestDefaults.headers = this._getDefaultHeaders(reqOpts[exports.GCCL_GCS_CMD_KEY]); const options = { request: teeny_request_1.teenyRequest.defaults(requestDefaults), retries: autoRetryValue !== false ? maxRetryValue : 0, noResponseRetries: autoRetryValue !== false ? maxRetryValue : 0, shouldRetryFn(httpRespMessage) { var _a, _b; const err = util.parseHttpRespMessage(httpRespMessage).err; if ((_a = config.retryOptions) === null || _a === void 0 ? void 0 : _a.retryableErrorFn) { return err && ((_b = config.retryOptions) === null || _b === void 0 ? void 0 : _b.retryableErrorFn(err)); } return err && util.shouldRetryRequest(err); }, maxRetryDelay: (_c = config.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetryDelay, retryDelayMultiplier: (_d = config.retryOptions) === null || _d === void 0 ? void 0 : _d.retryDelayMultiplier, totalTimeout: (_e = config.retryOptions) === null || _e === void 0 ? void 0 : _e.totalTimeout, }; if (typeof reqOpts.maxRetries === 'number') { options.retries = reqOpts.maxRetries; options.noResponseRetries = reqOpts.maxRetries; } if (!config.stream) { return (0, retry_request_1.default)(reqOpts, options, // eslint-disable-next-line @typescript-eslint/no-explicit-any (err, response, body) => { util.handleResp(err, response, body, callback); }); } const dup = config.stream; // eslint-disable-next-line @typescript-eslint/no-explicit-any let requestStream; const isGetRequest = (reqOpts.method || 'GET').toUpperCase() === 'GET'; if (isGetRequest) { requestStream = (0, retry_request_1.default)(reqOpts, options); dup.setReadable(requestStream); } else { // Streaming writable HTTP requests cannot be retried. requestStream = options.request(reqOpts); dup.setWritable(requestStream); } // Replay the Request events back to the stream. requestStream .on('error', dup.destroy.bind(dup)) .on('response', dup.emit.bind(dup, 'response')) .on('complete', dup.emit.bind(dup, 'complete')); dup.abort = requestStream.abort; return dup; } /** * Decorate the options about to be made in a request. * * @param {object} reqOpts - The options to be passed to `request`. * @param {string} projectId - The project ID. * @return {object} reqOpts - The decorated reqOpts. 
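 *
 * @example
 * // A hedged sketch: pagination flags are stripped and '{{projectId}}'
 * // placeholders are replaced throughout `qs`, `json`, `multipart` and `uri`.
 * const decorated = util.decorateRequest({
 *   uri: 'https://storage.googleapis.com/storage/v1/b/{{projectId}}-bucket',
 *   autoPaginate: true,
 *   qs: {project: '{{projectId}}'},
 * }, 'my-project');
 * // decorated.uri === 'https://storage.googleapis.com/storage/v1/b/my-project-bucket'
 * // decorated.qs.project === 'my-project'; decorated.autoPaginate is removed.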
*/ decorateRequest(reqOpts, projectId) { delete reqOpts.autoPaginate; delete reqOpts.autoPaginateVal; delete reqOpts.objectMode; if (reqOpts.qs !== null && typeof reqOpts.qs === 'object') { delete reqOpts.qs.autoPaginate; delete reqOpts.qs.autoPaginateVal; reqOpts.qs = (0, projectify_1.replaceProjectIdToken)(reqOpts.qs, projectId); } if (Array.isArray(reqOpts.multipart)) { reqOpts.multipart = reqOpts.multipart.map(part => { return (0, projectify_1.replaceProjectIdToken)(part, projectId); }); } if (reqOpts.json !== null && typeof reqOpts.json === 'object') { delete reqOpts.json.autoPaginate; delete reqOpts.json.autoPaginateVal; reqOpts.json = (0, projectify_1.replaceProjectIdToken)(reqOpts.json, projectId); } reqOpts.uri = (0, projectify_1.replaceProjectIdToken)(reqOpts.uri, projectId); return reqOpts; } // eslint-disable-next-line @typescript-eslint/no-explicit-any isCustomType(unknown, module) { function getConstructorName(obj) { return obj.constructor && obj.constructor.name.toLowerCase(); } const moduleNameParts = module.split('/'); const parentModuleName = moduleNameParts[0] && moduleNameParts[0].toLowerCase(); const subModuleName = moduleNameParts[1] && moduleNameParts[1].toLowerCase(); if (subModuleName && getConstructorName(unknown) !== subModuleName) { return false; } let walkingModule = unknown; // eslint-disable-next-line no-constant-condition while (true) { if (getConstructorName(walkingModule) === parentModuleName) { return true; } walkingModule = walkingModule.parent; if (!walkingModule) { return false; } } } /** * Given two parameters, figure out if this is either: * - Just a callback function * - An options object, and then a callback function * @param optionsOrCallback An options object or callback. * @param cb A potentially undefined callback. */ maybeOptionsOrCallback(optionsOrCallback, cb) { return typeof optionsOrCallback === 'function' ? [{}, optionsOrCallback] : [optionsOrCallback, cb]; } _getDefaultHeaders(gcclGcsCmd) { const headers = { 'User-Agent': (0, util_js_1.getUserAgentString)(), 'x-goog-api-client': `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${uuid.v4()}`, }; if (gcclGcsCmd) { headers['x-goog-api-client'] += ` gccl-gcs-cmd/${gcclGcsCmd}`; } return headers; } } exports.Util = Util; /** * Basic Passthrough Stream that records the number of bytes read * every time the cursor is moved. */ class ProgressStream extends stream_1.Transform { constructor() { super(...arguments); this.bytesRead = 0; } // eslint-disable-next-line @typescript-eslint/no-explicit-any _transform(chunk, encoding, callback) { this.bytesRead += chunk.length; this.emit('progress', { bytesWritten: this.bytesRead, contentLength: '*' }); this.push(chunk); callback(); } } const util = new Util(); exports.util = util; /***/ }), /***/ 18598: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. 
Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Notification = void 0; const index_js_1 = __nccwpck_require__(11325); const promisify_1 = __nccwpck_require__(60206); /** * The API-formatted resource description of the notification. * * Note: This is not guaranteed to be up-to-date when accessed. To get the * latest record, call the `getMetadata()` method. * * @name Notification#metadata * @type {object} */ /** * A Notification object is created from your {@link Bucket} object using * {@link Bucket#notification}. Use it to interact with Cloud Pub/Sub * notifications. * * See {@link https://cloud.google.com/storage/docs/pubsub-notifications| Cloud Pub/Sub Notifications for Google Cloud Storage} * * @class * @hideconstructor * * @param {Bucket} bucket The bucket instance this notification is attached to. * @param {string} id The ID of the notification. * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * * const notification = myBucket.notification('1'); * ``` */ class Notification extends index_js_1.ServiceObject { constructor(bucket, id) { const requestQueryObject = {}; const methods = { /** * Creates a notification subscription for the bucket. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/insert| Notifications: insert} * @method Notification#create * * @param {Topic|string} topic The Cloud PubSub topic to which this * subscription publishes. If the project ID is omitted, the current * project ID will be used. * * Acceptable formats are: * - `projects/grape-spaceship-123/topics/my-topic` * * - `my-topic` * @param {CreateNotificationRequest} [options] Metadata to set for * the notification. * @param {CreateNotificationCallback} [callback] Callback function. * @returns {Promise} * @throws {Error} If a valid topic is not provided. * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * const notification = myBucket.notification('1'); * * notification.create(function(err, notification, apiResponse) { * if (!err) { * // The notification was created successfully. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * notification.create().then(function(data) { * const notification = data[0]; * const apiResponse = data[1]; * }); * ``` */ create: true, /** * @typedef {array} DeleteNotificationResponse * @property {object} 0 The full API response. */ /** * Permanently deletes a notification subscription. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/delete| Notifications: delete API Documentation} * * @param {object} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {DeleteNotificationCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * const notification = myBucket.notification('1'); * * notification.delete(function(err, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. 
* //- * notification.delete().then(function(data) { * const apiResponse = data[0]; * }); * * ``` * @example include:samples/deleteNotification.js * region_tag:storage_delete_bucket_notification * Another example: */ delete: { reqOpts: { qs: requestQueryObject, }, }, /** * Get a notification and its metadata if it exists. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} * * @param {object} [options] Configuration options. * See {@link Bucket#createNotification} for create options. * @param {boolean} [options.autoCreate] Automatically create the object if * it does not exist. Default: `false`. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {GetNotificationCallback} [callback] Callback function. * @return {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * const notification = myBucket.notification('1'); * * notification.get(function(err, notification, apiResponse) { * // `notification.metadata` has been populated. * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * notification.get().then(function(data) { * const notification = data[0]; * const apiResponse = data[1]; * }); * ``` */ get: { reqOpts: { qs: requestQueryObject, }, }, /** * Get the notification's metadata. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/notifications/get| Notifications: get API Documentation} * * @param {object} [options] Configuration options. * @param {string} [options.userProject] The ID of the project which will be * billed for the request. * @param {GetNotificationMetadataCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * const notification = myBucket.notification('1'); * * notification.getMetadata(function(err, metadata, apiResponse) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * notification.getMetadata().then(function(data) { * const metadata = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/getMetadataNotifications.js * region_tag:storage_print_pubsub_bucket_notification * Another example: */ getMetadata: { reqOpts: { qs: requestQueryObject, }, }, /** * @typedef {array} NotificationExistsResponse * @property {boolean} 0 Whether the notification exists or not. */ /** * @callback NotificationExistsCallback * @param {?Error} err Request error, if any. * @param {boolean} exists Whether the notification exists or not. */ /** * Check if the notification exists. * * @method Notification#exists * @param {NotificationExistsCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const myBucket = storage.bucket('my-bucket'); * const notification = myBucket.notification('1'); * * notification.exists(function(err, exists) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * notification.exists().then(function(data) { * const exists = data[0]; * }); * ``` */ exists: true, }; super({ parent: bucket, baseUrl: '/notificationConfigs', id: id.toString(), createMethod: bucket.createNotification.bind(bucket), methods, }); } } exports.Notification = Notification; /*! 
Developer Documentation * * All async methods (except for streams) will return a Promise in the event * that a callback is omitted. */ (0, promisify_1.promisifyAll)(Notification); /***/ }), /***/ 88043: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2022 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); var __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) { if (kind === "m") throw new TypeError("Private method is not writable"); if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a setter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot write private member to an object whose class did not declare it"); return (kind === "a" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value; }; var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; var _Upload_instances, _Upload_gcclGcsCmd, _Upload_resetLocalBuffersCache, _Upload_addLocalBufferCache; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Upload = exports.PROTOCOL_REGEX = void 0; exports.upload = upload; exports.createURI = createURI; exports.checkUploadStatus = checkUploadStatus; const abort_controller_1 = __importDefault(__nccwpck_require__(17413)); const crypto_1 = __nccwpck_require__(76982); const gaxios = __importStar(__nccwpck_require__(97003)); const google_auth_library_1 = __nccwpck_require__(492); const stream_1 = __nccwpck_require__(2203); const async_retry_1 = __importDefault(__nccwpck_require__(45195)); const uuid = __importStar(__nccwpck_require__(20607)); const util_js_1 = __nccwpck_require__(74557); const util_js_2 = __nccwpck_require__(37325); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore const package_json_helper_cjs_1 = __nccwpck_require__(41969); const NOT_FOUND_STATUS_CODE = 404; const RESUMABLE_INCOMPLETE_STATUS_CODE = 308; const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); exports.PROTOCOL_REGEX = /^(\w*):\/\//; class Upload extends stream_1.Writable { constructor(cfg) { var _a; super(cfg); _Upload_instances.add(this); this.numBytesWritten = 0; this.numRetries = 0; this.currentInvocationId = { checkUploadStatus: uuid.v4(), chunk: uuid.v4(), uri: uuid.v4(), }; /** * A cache of buffers written to this instance, ready for consuming */ this.writeBuffers = []; this.numChunksReadInRequest = 0; /** * An array of buffers used for caching the most recent upload chunk. * We should not assume that the server received all bytes sent in the request. * - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload */ this.localWriteCache = []; this.localWriteCacheByteLength = 0; this.upstreamEnded = false; _Upload_gcclGcsCmd.set(this, void 0); cfg = cfg || {}; if (!cfg.bucket || !cfg.file) { throw new Error('A bucket and file name are required'); } if (cfg.offset && !cfg.uri) { throw new RangeError('Cannot provide an `offset` without providing a `uri`'); } if (cfg.isPartialUpload && !cfg.chunkSize) { throw new RangeError('Cannot set `isPartialUpload` without providing a `chunkSize`'); } cfg.authConfig = cfg.authConfig || {}; cfg.authConfig.scopes = [ 'https://www.googleapis.com/auth/devstorage.full_control', ]; this.authClient = cfg.authClient || new google_auth_library_1.GoogleAuth(cfg.authConfig); const universe = cfg.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; this.apiEndpoint = `https://storage.${universe}`; if (cfg.apiEndpoint && cfg.apiEndpoint !== this.apiEndpoint) { this.apiEndpoint = this.sanitizeEndpoint(cfg.apiEndpoint); const hostname = new URL(this.apiEndpoint).hostname; // check if it is a domain of a known universe const isDomain = hostname === universe; const isDefaultUniverseDomain = hostname === google_auth_library_1.DEFAULT_UNIVERSE; // check if it is a subdomain of a known universe // by checking a last (universe's length + 1) of a hostname const isSubDomainOfUniverse = hostname.slice(-(universe.length + 1)) === `.${universe}`; const isSubDomainOfDefaultUniverse = hostname.slice(-(google_auth_library_1.DEFAULT_UNIVERSE.length + 1)) === `.${google_auth_library_1.DEFAULT_UNIVERSE}`; if (!isDomain && !isDefaultUniverseDomain && !isSubDomainOfUniverse && !isSubDomainOfDefaultUniverse) { // a custom, non-universe domain, // use gaxios this.authClient = gaxios; } } this.baseURI = 
`${this.apiEndpoint}/upload/storage/v1/b`; this.bucket = cfg.bucket; const cacheKeyElements = [cfg.bucket, cfg.file]; if (typeof cfg.generation === 'number') { cacheKeyElements.push(`${cfg.generation}`); } this.cacheKey = cacheKeyElements.join('/'); this.customRequestOptions = cfg.customRequestOptions || {}; this.file = cfg.file; this.generation = cfg.generation; this.kmsKeyName = cfg.kmsKeyName; this.metadata = cfg.metadata || {}; this.offset = cfg.offset; this.origin = cfg.origin; this.params = cfg.params || {}; this.userProject = cfg.userProject; this.chunkSize = cfg.chunkSize; this.retryOptions = cfg.retryOptions; this.isPartialUpload = (_a = cfg.isPartialUpload) !== null && _a !== void 0 ? _a : false; if (cfg.key) { if (typeof cfg.key === 'string') { const base64Key = Buffer.from(cfg.key).toString('base64'); this.encryption = { key: base64Key, hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), }; } else { const base64Key = cfg.key.toString('base64'); this.encryption = { key: base64Key, hash: (0, crypto_1.createHash)('sha256').update(cfg.key).digest('base64'), }; } } this.predefinedAcl = cfg.predefinedAcl; if (cfg.private) this.predefinedAcl = 'private'; if (cfg.public) this.predefinedAcl = 'publicRead'; const autoRetry = cfg.retryOptions.autoRetry; this.uriProvidedManually = !!cfg.uri; this.uri = cfg.uri; if (this.offset) { // we're resuming an incomplete upload this.numBytesWritten = this.offset; } this.numRetries = 0; // counter for number of retries currently executed if (!autoRetry) { cfg.retryOptions.maxRetries = 0; } this.timeOfFirstRequest = Date.now(); const contentLength = cfg.metadata ? Number(cfg.metadata.contentLength) : NaN; this.contentLength = isNaN(contentLength) ? '*' : contentLength; __classPrivateFieldSet(this, _Upload_gcclGcsCmd, cfg[util_js_2.GCCL_GCS_CMD_KEY], "f"); this.once('writing', () => { if (this.uri) { this.continueUploading(); } else { this.createURI(err => { if (err) { return this.destroy(err); } this.startUploading(); return; }); } }); } /** * Prevent 'finish' event until the upload has succeeded. * * @param fireFinishEvent The finish callback */ _final(fireFinishEvent = () => { }) { this.upstreamEnded = true; this.once('uploadFinished', fireFinishEvent); process.nextTick(() => { this.emit('upstreamFinished'); // it's possible `_write` may not be called - namely for empty object uploads this.emit('writing'); }); } /** * Handles incoming data from upstream * * @param chunk The chunk to append to the buffer * @param encoding The encoding of the chunk * @param readCallback A callback for when the buffer has been read downstream */ _write(chunk, encoding, readCallback = () => { }) { // Backwards-compatible event this.emit('writing'); this.writeBuffers.push(typeof chunk === 'string' ? Buffer.from(chunk, encoding) : chunk); this.once('readFromChunkBuffer', readCallback); process.nextTick(() => this.emit('wroteToChunkBuffer')); } /** * Prepends the local buffer to write buffer and resets it. * * @param keepLastBytes number of bytes to keep from the end of the local buffer. 
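 *
 * For illustration (hedged numbers): if the local cache holds two 256 KiB
 * buffers and `keepLastBytes` is 100, only the trailing 100 bytes of the last
 * buffer are kept and re-queued ahead of whatever remains in `writeBuffers`;
 * with no `keepLastBytes`, the entire local cache is re-queued up front.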
*/ prependLocalBufferToUpstream(keepLastBytes) { // Typically, the upstream write buffers should be smaller than the local // cache, so we can save time by setting the local cache as the new // upstream write buffer array and appending the old array to it let initialBuffers = []; if (keepLastBytes) { // we only want the last X bytes let bytesKept = 0; while (keepLastBytes > bytesKept) { // load backwards because we want the last X bytes // note: `localWriteCacheByteLength` is reset below let buf = this.localWriteCache.pop(); if (!buf) break; bytesKept += buf.byteLength; if (bytesKept > keepLastBytes) { // we have gone over the amount desired, let's keep the last X bytes // of this buffer const diff = bytesKept - keepLastBytes; buf = buf.subarray(diff); bytesKept -= diff; } initialBuffers.unshift(buf); } } else { // we're keeping all of the local cache, simply use it as the initial buffer initialBuffers = this.localWriteCache; } // Append the old upstream to the new const append = this.writeBuffers; this.writeBuffers = initialBuffers; for (const buf of append) { this.writeBuffers.push(buf); } // reset last buffers sent __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); } /** * Retrieves data from upstream's buffer. * * @param limit The maximum amount to return from the buffer. */ *pullFromChunkBuffer(limit) { while (limit) { const buf = this.writeBuffers.shift(); if (!buf) break; let bufToYield = buf; if (buf.byteLength > limit) { bufToYield = buf.subarray(0, limit); this.writeBuffers.unshift(buf.subarray(limit)); limit = 0; } else { limit -= buf.byteLength; } yield bufToYield; // Notify upstream we've read from the buffer and we're able to consume // more. It can also potentially send more data down as we're currently // iterating. this.emit('readFromChunkBuffer'); } } /** * A handler for determining if data is ready to be read from upstream. * * @returns If there will be more chunks to read in the future */ async waitForNextChunk() { const willBeMoreChunks = await new Promise(resolve => { // There's data available - it should be digested if (this.writeBuffers.length) { return resolve(true); } // The upstream writable ended, we shouldn't expect any more data. if (this.upstreamEnded) { return resolve(false); } // Nothing immediate seems to be determined. We need to prepare some // listeners to determine next steps... const wroteToChunkBufferCallback = () => { removeListeners(); return resolve(true); }; const upstreamFinishedCallback = () => { removeListeners(); // this should be the last chunk, if there's anything there if (this.writeBuffers.length) return resolve(true); return resolve(false); }; // Remove listeners when we're ready to callback. const removeListeners = () => { this.removeListener('wroteToChunkBuffer', wroteToChunkBufferCallback); this.removeListener('upstreamFinished', upstreamFinishedCallback); }; // If there's data recently written it should be digested this.once('wroteToChunkBuffer', wroteToChunkBufferCallback); // If the upstream finishes let's see if there's anything to grab this.once('upstreamFinished', upstreamFinishedCallback); }); return willBeMoreChunks; } /** * Reads data from upstream up to the provided `limit`. * Ends when the limit has reached or no data is expected to be pushed from upstream. * * @param limit The most amount of data this iterator should return. `Infinity` by default. 
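 *
 * @example
 * // A hedged internal sketch (runs inside the Upload instance, e.g. from
 * // `startUploading`): drain at most 1 MiB from the buffered upstream data.
 * for await (const chunk of this.upstreamIterator(1024 * 1024)) {
 *   // each `chunk` is a Buffer, and the combined length never exceeds the limit
 * }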
*/ async *upstreamIterator(limit = Infinity) { // read from upstream chunk buffer while (limit && (await this.waitForNextChunk())) { // read until end or limit has been reached for (const chunk of this.pullFromChunkBuffer(limit)) { limit -= chunk.byteLength; yield chunk; } } } createURI(callback) { if (!callback) { return this.createURIAsync(); } this.createURIAsync().then(r => callback(null, r), callback); } async createURIAsync() { const metadata = { ...this.metadata }; const headers = {}; // Delete content length and content type from metadata if they exist. // These are headers and should not be sent as part of the metadata. if (metadata.contentLength) { headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); delete metadata.contentLength; } if (metadata.contentType) { headers['X-Upload-Content-Type'] = metadata.contentType; delete metadata.contentType; } let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.uri}`; if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; } // Check if headers already exist before creating new ones const reqOpts = { method: 'POST', url: [this.baseURI, this.bucket, 'o'].join('/'), params: Object.assign({ name: this.file, uploadType: 'resumable', }, this.params), data: metadata, headers: { 'User-Agent': (0, util_js_1.getUserAgentString)(), 'x-goog-api-client': googAPIClient, ...headers, }, }; if (metadata.contentLength) { reqOpts.headers['X-Upload-Content-Length'] = metadata.contentLength.toString(); } if (metadata.contentType) { reqOpts.headers['X-Upload-Content-Type'] = metadata.contentType; } if (typeof this.generation !== 'undefined') { reqOpts.params.ifGenerationMatch = this.generation; } if (this.kmsKeyName) { reqOpts.params.kmsKeyName = this.kmsKeyName; } if (this.predefinedAcl) { reqOpts.params.predefinedAcl = this.predefinedAcl; } if (this.origin) { reqOpts.headers.Origin = this.origin; } const uri = await (0, async_retry_1.default)(async (bail) => { var _a, _b, _c; try { const res = await this.makeRequest(reqOpts); // We have successfully got a URI we can now create a new invocation id this.currentInvocationId.uri = uuid.v4(); return res.headers.location; } catch (err) { const e = err; const apiError = { code: (_a = e.response) === null || _a === void 0 ? void 0 : _a.status, name: (_b = e.response) === null || _b === void 0 ? void 0 : _b.statusText, message: (_c = e.response) === null || _c === void 0 ? void 0 : _c.statusText, errors: [ { reason: e.code, }, ], }; if (this.retryOptions.maxRetries > 0 && this.retryOptions.retryableErrorFn(apiError)) { throw e; } else { return bail(e); } } }, { retries: this.retryOptions.maxRetries, factor: this.retryOptions.retryDelayMultiplier, maxTimeout: this.retryOptions.maxRetryDelay * 1000, //convert to milliseconds maxRetryTime: this.retryOptions.totalTimeout * 1000, //convert to milliseconds }); this.uri = uri; this.offset = 0; // emit the newly generated URI for future reuse, if necessary. this.emit('uri', uri); return uri; } async continueUploading() { var _a; (_a = this.offset) !== null && _a !== void 0 ? 
_a : (await this.getAndSetOffset()); return this.startUploading(); } async startUploading() { const multiChunkMode = !!this.chunkSize; let responseReceived = false; this.numChunksReadInRequest = 0; if (!this.offset) { this.offset = 0; } // Check if the offset (server) is too far behind the current stream if (this.offset < this.numBytesWritten) { const delta = this.numBytesWritten - this.offset; const message = `The offset is lower than the number of bytes written. The server has ${this.offset} bytes and while ${this.numBytesWritten} bytes has been uploaded - thus ${delta} bytes are missing. Stopping as this could result in data loss. Initiate a new upload to continue.`; this.emit('error', new RangeError(message)); return; } // Check if we should 'fast-forward' to the relevant data to upload if (this.numBytesWritten < this.offset) { // 'fast-forward' to the byte where we need to upload. // only push data from the byte after the one we left off on const fastForwardBytes = this.offset - this.numBytesWritten; for await (const _chunk of this.upstreamIterator(fastForwardBytes)) { _chunk; // discard the data up until the point we want } this.numBytesWritten = this.offset; } let expectedUploadSize = undefined; // Set `expectedUploadSize` to `contentLength - this.numBytesWritten`, if available if (typeof this.contentLength === 'number') { expectedUploadSize = this.contentLength - this.numBytesWritten; } // `expectedUploadSize` should be no more than the `chunkSize`. // It's possible this is the last chunk request for a multiple // chunk upload, thus smaller than the chunk size. if (this.chunkSize) { expectedUploadSize = expectedUploadSize ? Math.min(this.chunkSize, expectedUploadSize) : this.chunkSize; } // A queue for the upstream data const upstreamQueue = this.upstreamIterator(expectedUploadSize); // The primary read stream for this request. This stream retrieves no more // than the exact requested amount from upstream. const requestStream = new stream_1.Readable({ read: async () => { // Don't attempt to retrieve data upstream if we already have a response if (responseReceived) requestStream.push(null); const result = await upstreamQueue.next(); if (result.value) { this.numChunksReadInRequest++; if (multiChunkMode) { // save ever buffer used in the request in multi-chunk mode __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); } else { __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, result.value); } this.numBytesWritten += result.value.byteLength; this.emit('progress', { bytesWritten: this.numBytesWritten, contentLength: this.contentLength, }); requestStream.push(result.value); } if (result.done) { requestStream.push(null); } }, }); let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.chunk}`; if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; } const headers = { 'User-Agent': (0, util_js_1.getUserAgentString)(), 'x-goog-api-client': googAPIClient, }; // If using multiple chunk upload, set appropriate header if (multiChunkMode) { // We need to know how much data is available upstream to set the `Content-Range` header. 
// https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload for await (const chunk of this.upstreamIterator(expectedUploadSize)) { // This will conveniently track and keep the size of the buffers. // We will reach either the expected upload size or the remainder of the stream. __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_addLocalBufferCache).call(this, chunk); } // This is the sum from the `#addLocalBufferCache` calls const bytesToUpload = this.localWriteCacheByteLength; // Important: we want to know if the upstream has ended and the queue is empty before // unshifting data back into the queue. This way we will know if this is the last request or not. const isLastChunkOfUpload = !(await this.waitForNextChunk()); // Important: put the data back in the queue for the actual upload this.prependLocalBufferToUpstream(); let totalObjectSize = this.contentLength; if (typeof this.contentLength !== 'number' && isLastChunkOfUpload && !this.isPartialUpload) { // Let's let the server know this is the last chunk of the object since we didn't set it before. totalObjectSize = bytesToUpload + this.numBytesWritten; } // `- 1` as the ending byte is inclusive in the request. const endingByte = bytesToUpload + this.numBytesWritten - 1; // `Content-Length` for multiple chunk uploads is the size of the chunk, // not the overall object headers['Content-Length'] = bytesToUpload; headers['Content-Range'] = `bytes ${this.offset}-${endingByte}/${totalObjectSize}`; } else { headers['Content-Range'] = `bytes ${this.offset}-*/${this.contentLength}`; } const reqOpts = { method: 'PUT', url: this.uri, headers, body: requestStream, }; try { const resp = await this.makeRequestStream(reqOpts); if (resp) { responseReceived = true; await this.responseHandler(resp); } } catch (e) { const err = e; if (this.retryOptions.retryableErrorFn(err)) { this.attemptDelayedRetry({ status: NaN, data: err, }); return; } this.destroy(err); } } // Process the API response to look for errors that came in // the response body. async responseHandler(resp) { if (resp.data.error) { this.destroy(resp.data.error); return; } // At this point we can safely create a new id for the chunk this.currentInvocationId.chunk = uuid.v4(); const moreDataToUpload = await this.waitForNextChunk(); const shouldContinueWithNextMultiChunkRequest = this.chunkSize && resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && resp.headers.range && moreDataToUpload; /** * This is true when we're expecting to upload more data in a future request, * yet the upstream for the upload session has been exhausted. */ const shouldContinueUploadInAnotherRequest = this.isPartialUpload && resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE && !moreDataToUpload; if (shouldContinueWithNextMultiChunkRequest) { // Use the upper value in this header to determine where to start the next chunk. // We should not assume that the server received all bytes sent in the request. // https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload const range = resp.headers.range; this.offset = Number(range.split('-')[1]) + 1; // We should not assume that the server received all bytes sent in the request. // - https://cloud.google.com/storage/docs/performing-resumable-uploads#chunked-upload const missingBytes = this.numBytesWritten - this.offset; if (missingBytes) { // As multi-chunk uploads send one chunk per request and pulls one // chunk into the pipeline, prepending the missing bytes back should // be fine for the next request. 
this.prependLocalBufferToUpstream(missingBytes); this.numBytesWritten -= missingBytes; } else { // No bytes missing - no need to keep the local cache __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); } // continue uploading next chunk this.continueUploading(); } else if (!this.isSuccessfulResponse(resp.status) && !shouldContinueUploadInAnotherRequest) { const err = new Error('Upload failed'); err.code = resp.status; err.name = 'Upload failed'; if (resp === null || resp === void 0 ? void 0 : resp.data) { err.errors = [resp === null || resp === void 0 ? void 0 : resp.data]; } this.destroy(err); } else { // no need to keep the cache __classPrivateFieldGet(this, _Upload_instances, "m", _Upload_resetLocalBuffersCache).call(this); if (resp && resp.data) { resp.data.size = Number(resp.data.size); } this.emit('metadata', resp.data); // Allow the object (Upload) to continue naturally so the user's // "finish" event fires. this.emit('uploadFinished'); } } /** * Check the status of an existing resumable upload. * * @param cfg A configuration to use. `uri` is required. * @returns the current upload status */ async checkUploadStatus(config = {}) { let googAPIClient = `${(0, util_js_1.getRuntimeTrackingString)()} gccl/${packageJson.version}-${(0, util_js_1.getModuleFormat)()} gccl-invocation-id/${this.currentInvocationId.checkUploadStatus}`; if (__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")) { googAPIClient += ` gccl-gcs-cmd/${__classPrivateFieldGet(this, _Upload_gcclGcsCmd, "f")}`; } const opts = { method: 'PUT', url: this.uri, headers: { 'Content-Length': 0, 'Content-Range': 'bytes */*', 'User-Agent': (0, util_js_1.getUserAgentString)(), 'x-goog-api-client': googAPIClient, }, }; try { const resp = await this.makeRequest(opts); // Successfully got the offset we can now create a new offset invocation id this.currentInvocationId.checkUploadStatus = uuid.v4(); return resp; } catch (e) { if (config.retry === false || !(e instanceof Error) || !this.retryOptions.retryableErrorFn(e)) { throw e; } const retryDelay = this.getRetryDelay(); if (retryDelay <= 0) { throw e; } await new Promise(res => setTimeout(res, retryDelay)); return this.checkUploadStatus(config); } } async getAndSetOffset() { try { // we want to handle retries in this method. const resp = await this.checkUploadStatus({ retry: false }); if (resp.status === RESUMABLE_INCOMPLETE_STATUS_CODE) { if (typeof resp.headers.range === 'string') { this.offset = Number(resp.headers.range.split('-')[1]) + 1; return; } } this.offset = 0; } catch (e) { const err = e; if (this.retryOptions.retryableErrorFn(err)) { this.attemptDelayedRetry({ status: NaN, data: err, }); return; } this.destroy(err); } } async makeRequest(reqOpts) { if (this.encryption) { reqOpts.headers = reqOpts.headers || {}; reqOpts.headers['x-goog-encryption-algorithm'] = 'AES256'; reqOpts.headers['x-goog-encryption-key'] = this.encryption.key.toString(); reqOpts.headers['x-goog-encryption-key-sha256'] = this.encryption.hash.toString(); } if (this.userProject) { reqOpts.params = reqOpts.params || {}; reqOpts.params.userProject = this.userProject; } // Let gaxios know we will handle a 308 error code ourselves. 
reqOpts.validateStatus = (status) => { return (this.isSuccessfulResponse(status) || status === RESUMABLE_INCOMPLETE_STATUS_CODE); }; const combinedReqOpts = { ...this.customRequestOptions, ...reqOpts, headers: { ...this.customRequestOptions.headers, ...reqOpts.headers, }, }; const res = await this.authClient.request(combinedReqOpts); if (res.data && res.data.error) { throw res.data.error; } return res; } async makeRequestStream(reqOpts) { const controller = new abort_controller_1.default(); const errorCallback = () => controller.abort(); this.once('error', errorCallback); if (this.userProject) { reqOpts.params = reqOpts.params || {}; reqOpts.params.userProject = this.userProject; } reqOpts.signal = controller.signal; reqOpts.validateStatus = () => true; const combinedReqOpts = { ...this.customRequestOptions, ...reqOpts, headers: { ...this.customRequestOptions.headers, ...reqOpts.headers, }, }; const res = await this.authClient.request(combinedReqOpts); const successfulRequest = this.onResponse(res); this.removeListener('error', errorCallback); return successfulRequest ? res : null; } /** * @return {bool} is the request good? */ onResponse(resp) { if (resp.status !== 200 && this.retryOptions.retryableErrorFn({ code: resp.status, message: resp.statusText, name: resp.statusText, })) { this.attemptDelayedRetry(resp); return false; } this.emit('response', resp); return true; } /** * @param resp GaxiosResponse object from previous attempt */ attemptDelayedRetry(resp) { if (this.numRetries < this.retryOptions.maxRetries) { if (resp.status === NOT_FOUND_STATUS_CODE && this.numChunksReadInRequest === 0) { this.startUploading(); } else { const retryDelay = this.getRetryDelay(); if (retryDelay <= 0) { this.destroy(new Error(`Retry total time limit exceeded - ${JSON.stringify(resp.data)}`)); return; } // Unshift the local cache back in case it's needed for the next request. this.numBytesWritten -= this.localWriteCacheByteLength; this.prependLocalBufferToUpstream(); // We don't know how much data has been received by the server. // `continueUploading` will recheck the offset via `getAndSetOffset`. // If `offset` < `numberBytesReceived` then we will raise a RangeError // as we've streamed too much data that has been missed - this should // not be the case for multi-chunk uploads as `lastChunkSent` is the // body of the entire request. this.offset = undefined; setTimeout(this.continueUploading.bind(this), retryDelay); } this.numRetries++; } else { this.destroy(new Error(`Retry limit exceeded - ${JSON.stringify(resp.data)}`)); } } /** * The amount of time to wait before retrying the request, in milliseconds. * If negative, do not retry. * * @returns the amount of time to wait, in milliseconds. */ getRetryDelay() { const randomMs = Math.round(Math.random() * 1000); const waitTime = Math.pow(this.retryOptions.retryDelayMultiplier, this.numRetries) * 1000 + randomMs; const maxAllowableDelayMs = this.retryOptions.totalTimeout * 1000 - (Date.now() - this.timeOfFirstRequest); const maxRetryDelayMs = this.retryOptions.maxRetryDelay * 1000; return Math.min(waitTime, maxRetryDelayMs, maxAllowableDelayMs); } /* * Prepare user-defined API endpoint for compatibility with our API. 
*/ sanitizeEndpoint(url) { if (!exports.PROTOCOL_REGEX.test(url)) { url = `https://${url}`; } return url.replace(/\/+$/, ''); // Remove trailing slashes } /** * Check if a given status code is 2xx * * @param status The status code to check * @returns if the status is 2xx */ isSuccessfulResponse(status) { return status >= 200 && status < 300; } } exports.Upload = Upload; _Upload_gcclGcsCmd = new WeakMap(), _Upload_instances = new WeakSet(), _Upload_resetLocalBuffersCache = function _Upload_resetLocalBuffersCache() { this.localWriteCache = []; this.localWriteCacheByteLength = 0; }, _Upload_addLocalBufferCache = function _Upload_addLocalBufferCache(buf) { this.localWriteCache.push(buf); this.localWriteCacheByteLength += buf.byteLength; }; function upload(cfg) { return new Upload(cfg); } function createURI(cfg, callback) { const up = new Upload(cfg); if (!callback) { return up.createURI(); } up.createURI().then(r => callback(null, r), callback); } /** * Check the status of an existing resumable upload. * * @param cfg A configuration to use. `uri` is required. * @returns the current upload status */ function checkUploadStatus(cfg) { const up = new Upload(cfg); return up.checkUploadStatus(); } /***/ }), /***/ 7319: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2020 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); Object.defineProperty(exports, "__esModule", ({ value: true })); exports.SigningError = exports.URLSigner = exports.PATH_STYLED_HOST = exports.SignerExceptionMessages = void 0; const crypto = __importStar(__nccwpck_require__(76982)); const url = __importStar(__nccwpck_require__(87016)); const storage_js_1 = __nccwpck_require__(92848); const util_js_1 = __nccwpck_require__(74557); var SignerExceptionMessages; (function (SignerExceptionMessages) { SignerExceptionMessages["ACCESSIBLE_DATE_INVALID"] = "The accessible at date provided was invalid."; SignerExceptionMessages["EXPIRATION_BEFORE_ACCESSIBLE_DATE"] = "An expiration date cannot be before accessible date."; SignerExceptionMessages["X_GOOG_CONTENT_SHA256"] = "The header X-Goog-Content-SHA256 must be a hexadecimal string."; })(SignerExceptionMessages || (exports.SignerExceptionMessages = SignerExceptionMessages = {})); /* * Default signing version for getSignedUrl is 'v2'. */ const DEFAULT_SIGNING_VERSION = 'v2'; const SEVEN_DAYS = 7 * 24 * 60 * 60; /** * @const {string} * @deprecated - unused */ exports.PATH_STYLED_HOST = 'https://storage.googleapis.com'; class URLSigner { constructor(auth, bucket, file, /** * A {@link Storage} object. * * @privateRemarks * * Technically this is a required field, however it would be a breaking change to * move it before optional properties. In the next major we should refactor the * constructor of this class to only accept a config object. */ storage = new storage_js_1.Storage()) { this.auth = auth; this.bucket = bucket; this.file = file; this.storage = storage; } getSignedUrl(cfg) { const expiresInSeconds = this.parseExpires(cfg.expires); const method = cfg.method; const accessibleAtInSeconds = this.parseAccessibleAt(cfg.accessibleAt); if (expiresInSeconds < accessibleAtInSeconds) { throw new Error(SignerExceptionMessages.EXPIRATION_BEFORE_ACCESSIBLE_DATE); } let customHost; // Default style is `path`. const isVirtualHostedStyle = cfg.virtualHostedStyle || false; if (cfg.cname) { customHost = cfg.cname; } else if (isVirtualHostedStyle) { customHost = `https://${this.bucket.name}.storage.${this.storage.universeDomain}`; } const secondsToMilliseconds = 1000; const config = Object.assign({}, cfg, { method, expiration: expiresInSeconds, accessibleAt: new Date(secondsToMilliseconds * accessibleAtInSeconds), bucket: this.bucket.name, file: this.file ? (0, util_js_1.encodeURI)(this.file.name, false) : undefined, }); if (customHost) { config.cname = customHost; } const version = cfg.version || DEFAULT_SIGNING_VERSION; let promise; if (version === 'v2') { promise = this.getSignedUrlV2(config); } else if (version === 'v4') { promise = this.getSignedUrlV4(config); } else { throw new Error(`Invalid signed URL version: ${version}. 
Supported versions are 'v2' and 'v4'.`); } return promise.then(query => { var _a; query = Object.assign(query, cfg.queryParams); const signedUrl = new url.URL(((_a = cfg.host) === null || _a === void 0 ? void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); signedUrl.pathname = this.getResourcePath(!!config.cname, this.bucket.name, config.file); // eslint-disable-next-line @typescript-eslint/no-explicit-any signedUrl.search = (0, util_js_1.qsStringify)(query); return signedUrl.href; }); } getSignedUrlV2(config) { const canonicalHeadersString = this.getCanonicalHeaders(config.extensionHeaders || {}); const resourcePath = this.getResourcePath(false, config.bucket, config.file); const blobToSign = [ config.method, config.contentMd5 || '', config.contentType || '', config.expiration, canonicalHeadersString + resourcePath, ].join('\n'); const sign = async () => { var _a; const auth = this.auth; try { const signature = await auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); const credentials = await auth.getCredentials(); return { GoogleAccessId: credentials.client_email, Expires: config.expiration, Signature: signature, }; } catch (err) { const error = err; const signingErr = new SigningError(error.message); signingErr.stack = error.stack; throw signingErr; } }; return sign(); } getSignedUrlV4(config) { var _a; config.accessibleAt = config.accessibleAt ? config.accessibleAt : new Date(); const millisecondsToSeconds = 1.0 / 1000.0; const expiresPeriodInSeconds = config.expiration - config.accessibleAt.valueOf() * millisecondsToSeconds; // v4 limit expiration to be 7 days maximum if (expiresPeriodInSeconds > SEVEN_DAYS) { throw new Error(`Max allowed expiration is seven days (${SEVEN_DAYS} seconds).`); } const extensionHeaders = Object.assign({}, config.extensionHeaders); const fqdn = new url.URL(((_a = config.host) === null || _a === void 0 ? void 0 : _a.toString()) || config.cname || this.storage.apiEndpoint); extensionHeaders.host = fqdn.hostname; if (config.contentMd5) { extensionHeaders['content-md5'] = config.contentMd5; } if (config.contentType) { extensionHeaders['content-type'] = config.contentType; } let contentSha256; const sha256Header = extensionHeaders['x-goog-content-sha256']; if (sha256Header) { if (typeof sha256Header !== 'string' || !/[A-Fa-f0-9]{40}/.test(sha256Header)) { throw new Error(SignerExceptionMessages.X_GOOG_CONTENT_SHA256); } contentSha256 = sha256Header; } const signedHeaders = Object.keys(extensionHeaders) .map(header => header.toLowerCase()) .sort() .join(';'); const extensionHeadersString = this.getCanonicalHeaders(extensionHeaders); const datestamp = (0, util_js_1.formatAsUTCISO)(config.accessibleAt); const credentialScope = `${datestamp}/auto/storage/goog4_request`; const sign = async () => { var _a; const credentials = await this.auth.getCredentials(); const credential = `${credentials.client_email}/${credentialScope}`; const dateISO = (0, util_js_1.formatAsUTCISO)(config.accessibleAt ? 
config.accessibleAt : new Date(), true); const queryParams = { 'X-Goog-Algorithm': 'GOOG4-RSA-SHA256', 'X-Goog-Credential': credential, 'X-Goog-Date': dateISO, 'X-Goog-Expires': expiresPeriodInSeconds.toString(10), 'X-Goog-SignedHeaders': signedHeaders, ...(config.queryParams || {}), }; // eslint-disable-next-line @typescript-eslint/no-explicit-any const canonicalQueryParams = this.getCanonicalQueryParams(queryParams); const canonicalRequest = this.getCanonicalRequest(config.method, this.getResourcePath(!!config.cname, config.bucket, config.file), canonicalQueryParams, extensionHeadersString, signedHeaders, contentSha256); const hash = crypto .createHash('sha256') .update(canonicalRequest) .digest('hex'); const blobToSign = [ 'GOOG4-RSA-SHA256', dateISO, credentialScope, hash, ].join('\n'); try { const signature = await this.auth.sign(blobToSign, (_a = config.signingEndpoint) === null || _a === void 0 ? void 0 : _a.toString()); const signatureHex = Buffer.from(signature, 'base64').toString('hex'); const signedQuery = Object.assign({}, queryParams, { 'X-Goog-Signature': signatureHex, }); return signedQuery; } catch (err) { const error = err; const signingErr = new SigningError(error.message); signingErr.stack = error.stack; throw signingErr; } }; return sign(); } /** * Create canonical headers for signing v4 url. * * The canonical headers for v4-signing a request demands header names are * first lowercased, followed by sorting the header names. * Then, construct the canonical headers part of the request: * + ":" + Trim() + "\n" * .. * + ":" + Trim() + "\n" * * @param headers * @private */ getCanonicalHeaders(headers) { // Sort headers by their lowercased names const sortedHeaders = (0, util_js_1.objectEntries)(headers) // Convert header names to lowercase .map(([headerName, value]) => [ headerName.toLowerCase(), value, ]) .sort((a, b) => a[0].localeCompare(b[0])); return sortedHeaders .filter(([, value]) => value !== undefined) .map(([headerName, value]) => { // - Convert Array (multi-valued header) into string, delimited by // ',' (no space). // - Trim leading and trailing spaces. // - Convert sequential (2+) spaces into a single space const canonicalValue = `${value}`.trim().replace(/\s{2,}/g, ' '); return `${headerName}:${canonicalValue}\n`; }) .join(''); } getCanonicalRequest(method, path, query, headers, signedHeaders, contentSha256) { return [ method, path, query, headers, signedHeaders, contentSha256 || 'UNSIGNED-PAYLOAD', ].join('\n'); } getCanonicalQueryParams(query) { return (0, util_js_1.objectEntries)(query) .map(([key, value]) => [(0, util_js_1.encodeURI)(key, true), (0, util_js_1.encodeURI)(value, true)]) .sort((a, b) => (a[0] < b[0] ? -1 : 1)) .map(([key, value]) => `${key}=${value}`) .join('&'); } getResourcePath(cname, bucket, file) { if (cname) { return '/' + (file || ''); } else if (file) { return `/${bucket}/${file}`; } else { return `/${bucket}`; } } parseExpires(expires, current = new Date()) { const expiresInMSeconds = new Date(expires).valueOf(); if (isNaN(expiresInMSeconds)) { throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_INVALID); } if (expiresInMSeconds < current.valueOf()) { throw new Error(storage_js_1.ExceptionMessages.EXPIRATION_DATE_PAST); } return Math.floor(expiresInMSeconds / 1000); // The API expects seconds. 
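// Illustrative conversion (values assumed for this sketch): provided `current`
// is earlier than the expiration, parseExpires('2030-01-01T00:00:00Z') and
// parseExpires(1893456000000) both resolve to 1893456000, because
// `new Date(...)` normalizes either form to the same millisecond timestamp
// before it is floored to whole seconds. Unparseable input throws
// EXPIRATION_DATE_INVALID, and timestamps before `current` throw
// EXPIRATION_DATE_PAST, as handled above.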
} parseAccessibleAt(accessibleAt) { const accessibleAtInMSeconds = new Date(accessibleAt || new Date()).valueOf(); if (isNaN(accessibleAtInMSeconds)) { throw new Error(SignerExceptionMessages.ACCESSIBLE_DATE_INVALID); } return Math.floor(accessibleAtInMSeconds / 1000); // The API expects seconds. } } exports.URLSigner = URLSigner; /** * Custom error type for errors related to getting signed errors and policies. * * @private */ class SigningError extends Error { constructor() { super(...arguments); this.name = 'SigningError'; } } exports.SigningError = SigningError; /***/ }), /***/ 92848: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. Object.defineProperty(exports, "__esModule", ({ value: true })); exports.Storage = exports.RETRYABLE_ERR_FN_DEFAULT = exports.MAX_RETRY_DELAY_DEFAULT = exports.TOTAL_TIMEOUT_DEFAULT = exports.RETRY_DELAY_MULTIPLIER_DEFAULT = exports.MAX_RETRY_DEFAULT = exports.AUTO_RETRY_DEFAULT = exports.PROTOCOL_REGEX = exports.StorageExceptionMessages = exports.ExceptionMessages = exports.IdempotencyStrategy = void 0; const index_js_1 = __nccwpck_require__(11325); const paginator_1 = __nccwpck_require__(99469); const promisify_1 = __nccwpck_require__(60206); const stream_1 = __nccwpck_require__(2203); const bucket_js_1 = __nccwpck_require__(82887); const channel_js_1 = __nccwpck_require__(12658); const file_js_1 = __nccwpck_require__(69975); const util_js_1 = __nccwpck_require__(74557); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore const package_json_helper_cjs_1 = __nccwpck_require__(41969); const hmacKey_js_1 = __nccwpck_require__(45891); const crc32c_js_1 = __nccwpck_require__(34317); const google_auth_library_1 = __nccwpck_require__(492); var IdempotencyStrategy; (function (IdempotencyStrategy) { IdempotencyStrategy[IdempotencyStrategy["RetryAlways"] = 0] = "RetryAlways"; IdempotencyStrategy[IdempotencyStrategy["RetryConditional"] = 1] = "RetryConditional"; IdempotencyStrategy[IdempotencyStrategy["RetryNever"] = 2] = "RetryNever"; })(IdempotencyStrategy || (exports.IdempotencyStrategy = IdempotencyStrategy = {})); var ExceptionMessages; (function (ExceptionMessages) { ExceptionMessages["EXPIRATION_DATE_INVALID"] = "The expiration date provided was invalid."; ExceptionMessages["EXPIRATION_DATE_PAST"] = "An expiration date cannot be in the past."; })(ExceptionMessages || (exports.ExceptionMessages = ExceptionMessages = {})); var StorageExceptionMessages; (function (StorageExceptionMessages) { StorageExceptionMessages["BUCKET_NAME_REQUIRED"] = "A bucket name is needed to use Cloud Storage."; StorageExceptionMessages["BUCKET_NAME_REQUIRED_CREATE"] = "A name is required to create a bucket."; StorageExceptionMessages["HMAC_SERVICE_ACCOUNT"] = "The first argument must be a service account email to create an HMAC key."; StorageExceptionMessages["HMAC_ACCESS_ID"] = "An access ID is needed to create an HmacKey object."; 
})(StorageExceptionMessages || (exports.StorageExceptionMessages = StorageExceptionMessages = {})); exports.PROTOCOL_REGEX = /^(\w*):\/\//; /** * Default behavior: Automatically retry retriable server errors. * * @const {boolean} */ exports.AUTO_RETRY_DEFAULT = true; /** * Default behavior: Only attempt to retry retriable errors 3 times. * * @const {number} */ exports.MAX_RETRY_DEFAULT = 3; /** * Default behavior: Wait twice as long as previous retry before retrying. * * @const {number} */ exports.RETRY_DELAY_MULTIPLIER_DEFAULT = 2; /** * Default behavior: If the operation doesn't succeed after 600 seconds, * stop retrying. * * @const {number} */ exports.TOTAL_TIMEOUT_DEFAULT = 600; /** * Default behavior: Wait no more than 64 seconds between retries. * * @const {number} */ exports.MAX_RETRY_DELAY_DEFAULT = 64; /** * Default behavior: Retry conditionally idempotent operations if correct preconditions are set. * * @const {enum} * @private */ const IDEMPOTENCY_STRATEGY_DEFAULT = IdempotencyStrategy.RetryConditional; /** * Returns true if the API request should be retried, given the error that was * given the first time the request was attempted. * @const * @param {error} err - The API error to check if it is appropriate to retry. * @return {boolean} True if the API request should be retried, false otherwise. */ const RETRYABLE_ERR_FN_DEFAULT = function (err) { var _a; const isConnectionProblem = (reason) => { return (reason.includes('eai_again') || // DNS lookup error reason === 'econnreset' || reason === 'unexpected connection closure' || reason === 'epipe' || reason === 'socket connection timeout'); }; if (err) { if ([408, 429, 500, 502, 503, 504].indexOf(err.code) !== -1) { return true; } if (typeof err.code === 'string') { if (['408', '429', '500', '502', '503', '504'].indexOf(err.code) !== -1) { return true; } const reason = err.code.toLowerCase(); if (isConnectionProblem(reason)) { return true; } } if (err.errors) { for (const e of err.errors) { const reason = (_a = e === null || e === void 0 ? void 0 : e.reason) === null || _a === void 0 ? void 0 : _a.toString().toLowerCase(); if (reason && isConnectionProblem(reason)) { return true; } } } } return false; }; exports.RETRYABLE_ERR_FN_DEFAULT = RETRYABLE_ERR_FN_DEFAULT; /*! Developer Documentation * * Invoke this method to create a new Storage object bound with pre-determined * configuration options. For each object that can be created (e.g., a bucket), * there is an equivalent static and instance method. While they are classes, * they can be instantiated without use of the `new` keyword. */ /** * Cloud Storage uses access control lists (ACLs) to manage object and * bucket access. ACLs are the mechanism you use to share objects with other * users and allow other users to access your buckets and objects. * * This object provides constants to refer to the three permission levels that * can be granted to an entity: * * - `gcs.acl.OWNER_ROLE` - ("OWNER") * - `gcs.acl.READER_ROLE` - ("READER") * - `gcs.acl.WRITER_ROLE` - ("WRITER") * * See {@link https://cloud.google.com/storage/docs/access-control/lists| About Access Control Lists} * * @name Storage#acl * @type {object} * @property {string} OWNER_ROLE * @property {string} READER_ROLE * @property {string} WRITER_ROLE * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const albums = storage.bucket('albums'); * * //- * // Make all of the files currently in a bucket publicly readable. 
* //- * const options = { * entity: 'allUsers', * role: storage.acl.READER_ROLE * }; * * albums.acl.add(options, function(err, aclObject) {}); * * //- * // Make any new objects added to a bucket publicly readable. * //- * albums.acl.default.add(options, function(err, aclObject) {}); * * //- * // Grant a user ownership permissions to a bucket. * //- * albums.acl.add({ * entity: 'user-useremail@example.com', * role: storage.acl.OWNER_ROLE * }, function(err, aclObject) {}); * * //- * // If the callback is omitted, we'll return a Promise. * //- * albums.acl.add(options).then(function(data) { * const aclObject = data[0]; * const apiResponse = data[1]; * }); * ``` */ /** * Get {@link Bucket} objects for all of the buckets in your project as * a readable object stream. * * @method Storage#getBucketsStream * @param {GetBucketsRequest} [query] Query object for listing buckets. * @returns {ReadableStream} A readable stream that emits {@link Bucket} * instances. * * @example * ``` * storage.getBucketsStream() * .on('error', console.error) * .on('data', function(bucket) { * // bucket is a Bucket object. * }) * .on('end', function() { * // All buckets retrieved. * }); * * //- * // If you anticipate many results, you can end a stream early to prevent * // unnecessary processing and API requests. * //- * storage.getBucketsStream() * .on('data', function(bucket) { * this.end(); * }); * ``` */ /** * Get {@link HmacKey} objects for all of the HMAC keys in the project in a * readable object stream. * * @method Storage#getHmacKeysStream * @param {GetHmacKeysOptions} [options] Configuration options. * @returns {ReadableStream} A readable stream that emits {@link HmacKey} * instances. * * @example * ``` * storage.getHmacKeysStream() * .on('error', console.error) * .on('data', function(hmacKey) { * // hmacKey is an HmacKey object. * }) * .on('end', function() { * // All HmacKey retrieved. * }); * * //- * // If you anticipate many results, you can end a stream early to prevent * // unnecessary processing and API requests. * //- * storage.getHmacKeysStream() * .on('data', function(bucket) { * this.end(); * }); * ``` */ /** *

<h4>ACLs</h4>
* Cloud Storage uses access control lists (ACLs) to manage object and * bucket access. ACLs are the mechanism you use to share files with other users * and allow other users to access your buckets and files. * * To learn more about ACLs, read this overview on * {@link https://cloud.google.com/storage/docs/access-control| Access Control}. * * See {@link https://cloud.google.com/storage/docs/overview| Cloud Storage overview} * See {@link https://cloud.google.com/storage/docs/access-control| Access Control} * * @class */ class Storage extends index_js_1.Service { getBucketsStream() { // placeholder body, overwritten in constructor return new stream_1.Readable(); } getHmacKeysStream() { // placeholder body, overwritten in constructor return new stream_1.Readable(); } /** * @callback Crc32cGeneratorToStringCallback * A method returning the CRC32C as a base64-encoded string. * * @returns {string} * * @example * Hashing the string 'data' should return 'rth90Q==' * * ```js * const buffer = Buffer.from('data'); * crc32c.update(buffer); * crc32c.toString(); // 'rth90Q==' * ``` **/ /** * @callback Crc32cGeneratorValidateCallback * A method validating a base64-encoded CRC32C string. * * @param {string} [value] base64-encoded CRC32C string to validate * @returns {boolean} * * @example * Should return `true` if the value matches, `false` otherwise * * ```js * const buffer = Buffer.from('data'); * crc32c.update(buffer); * crc32c.validate('DkjKuA=='); // false * crc32c.validate('rth90Q=='); // true * ``` **/ /** * @callback Crc32cGeneratorUpdateCallback * A method for passing `Buffer`s for CRC32C generation. * * @param {Buffer} [data] data to update CRC32C value with * @returns {undefined} * * @example * Hashing buffers from 'some ' and 'text\n' * * ```js * const buffer1 = Buffer.from('some '); * crc32c.update(buffer1); * * const buffer2 = Buffer.from('text\n'); * crc32c.update(buffer2); * * crc32c.toString(); // 'DkjKuA==' * ``` **/ /** * @typedef {object} CRC32CValidator * @property {Crc32cGeneratorToStringCallback} * @property {Crc32cGeneratorValidateCallback} * @property {Crc32cGeneratorUpdateCallback} */ /** * @callback Crc32cGeneratorCallback * @returns {CRC32CValidator} */ /** * @typedef {object} StorageOptions * @property {string} [projectId] The project ID from the Google Developer's * Console, e.g. 'grape-spaceship-123'. We will also check the environment * variable `GCLOUD_PROJECT` for your project ID. If your app is running * in an environment which supports {@link * https://cloud.google.com/docs/authentication/production#providing_credentials_to_your_application * Application Default Credentials}, your project ID will be detected * automatically. * @property {string} [keyFilename] Full path to the a .json, .pem, or .p12 key * downloaded from the Google Developers Console. If you provide a path to * a JSON file, the `projectId` option above is not necessary. NOTE: .pem and * .p12 require you to specify the `email` option as well. * @property {string} [email] Account email address. Required when using a .pem * or .p12 keyFilename. * @property {object} [credentials] Credentials object. * @property {string} [credentials.client_email] * @property {string} [credentials.private_key] * @property {object} [retryOptions] Options for customizing retries. Retriable server errors * will be retried with exponential delay between them dictated by the formula * max(maxRetryDelay, retryDelayMultiplier*retryNumber) until maxRetries or totalTimeout * has been reached. 
Retries will only happen if autoRetry is set to true. * @property {boolean} [retryOptions.autoRetry=true] Automatically retry requests if the * response is related to rate limits or certain intermittent server * errors. We will exponentially backoff subsequent requests by default. * @property {number} [retryOptions.retryDelayMultiplier = 2] the multiplier by which to * increase the delay time between the completion of failed requests, and the * initiation of the subsequent retrying request. * @property {number} [retryOptions.totalTimeout = 600] The total time, starting from * when the initial request is sent, after which an error will * be returned, regardless of the retrying attempts made meanwhile. * @property {number} [retryOptions.maxRetryDelay = 64] The maximum delay time between requests. * When this value is reached, ``retryDelayMultiplier`` will no longer be used to * increase delay time. * @property {number} [retryOptions.maxRetries=3] Maximum number of automatic retries * attempted before returning the error. * @property {function} [retryOptions.retryableErrorFn] Function that returns true if a given * error should be retried and false otherwise. * @property {enum} [retryOptions.idempotencyStrategy=IdempotencyStrategy.RetryConditional] Enumeration * controls how conditionally idempotent operations are retried. Possible values are: RetryAlways - * will respect other retry settings and attempt to retry conditionally idempotent operations. RetryConditional - * will retry conditionally idempotent operations if the correct preconditions are set. RetryNever - never * retry a conditionally idempotent operation. * @property {string} [userAgent] The value to be prepended to the User-Agent * header in API requests. * @property {object} [authClient] `AuthClient` or `GoogleAuth` client to reuse instead of creating a new one. * @property {number} [timeout] The amount of time in milliseconds to wait per http request before timing out. * @property {object[]} [interceptors_] Array of custom request interceptors to be returned in the order they were assigned. * @property {string} [apiEndpoint = storage.google.com] The API endpoint of the service used to make requests. * @property {boolean} [useAuthWithCustomEndpoint = false] Controls whether or not to use authentication when using a custom endpoint. * @property {Crc32cGeneratorCallback} [callback] A function that generates a CRC32C Validator. Defaults to {@link CRC32C} */ /** * Constructs the Storage client. 
* * @example * Create a client that uses Application Default Credentials * (ADC) * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * ``` * * @example * Create a client with explicit credentials * ``` * const storage = new Storage({ * projectId: 'your-project-id', * keyFilename: '/path/to/keyfile.json' * }); * ``` * * @example * Create a client with credentials passed * by value as a JavaScript object * ``` * const storage = new Storage({ * projectId: 'your-project-id', * credentials: { * type: 'service_account', * project_id: 'xxxxxxx', * private_key_id: 'xxxx', * private_key:'-----BEGIN PRIVATE KEY-----xxxxxxx\n-----END PRIVATE KEY-----\n', * client_email: 'xxxx', * client_id: 'xxx', * auth_uri: 'https://accounts.google.com/o/oauth2/auth', * token_uri: 'https://oauth2.googleapis.com/token', * auth_provider_x509_cert_url: 'https://www.googleapis.com/oauth2/v1/certs', * client_x509_cert_url: 'xxx', * } * }); * ``` * * @example * Create a client with credentials passed * by loading a JSON file directly from disk * ``` * const storage = new Storage({ * projectId: 'your-project-id', * credentials: require('/path/to-keyfile.json') * }); * ``` * * @example * Create a client with an `AuthClient` (e.g. `DownscopedClient`) * ``` * const {DownscopedClient} = require('google-auth-library'); * const authClient = new DownscopedClient({...}); * * const storage = new Storage({authClient}); * ``` * * Additional samples: * - https://github.com/googleapis/google-auth-library-nodejs#sample-usage-1 * - https://github.com/googleapis/google-auth-library-nodejs/blob/main/samples/downscopedclient.js * * @param {StorageOptions} [options] Configuration options. */ constructor(options = {}) { var _a, _b, _c, _d, _e, _f, _g, _h, _j, _k, _l, _m, _o, _p; const universe = options.universeDomain || google_auth_library_1.DEFAULT_UNIVERSE; let apiEndpoint = `https://storage.${universe}`; let customEndpoint = false; // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. const EMULATOR_HOST = process.env.STORAGE_EMULATOR_HOST; if (typeof EMULATOR_HOST === 'string') { apiEndpoint = Storage.sanitizeEndpoint(EMULATOR_HOST); customEndpoint = true; } if (options.apiEndpoint && options.apiEndpoint !== apiEndpoint) { apiEndpoint = Storage.sanitizeEndpoint(options.apiEndpoint); customEndpoint = true; } options = Object.assign({}, options, { apiEndpoint }); // Note: EMULATOR_HOST is an experimental configuration variable. Use apiEndpoint instead. const baseUrl = EMULATOR_HOST || `${options.apiEndpoint}/storage/v1`; const config = { apiEndpoint: options.apiEndpoint, retryOptions: { autoRetry: ((_a = options.retryOptions) === null || _a === void 0 ? void 0 : _a.autoRetry) !== undefined ? (_b = options.retryOptions) === null || _b === void 0 ? void 0 : _b.autoRetry : exports.AUTO_RETRY_DEFAULT, maxRetries: ((_c = options.retryOptions) === null || _c === void 0 ? void 0 : _c.maxRetries) ? (_d = options.retryOptions) === null || _d === void 0 ? void 0 : _d.maxRetries : exports.MAX_RETRY_DEFAULT, retryDelayMultiplier: ((_e = options.retryOptions) === null || _e === void 0 ? void 0 : _e.retryDelayMultiplier) ? (_f = options.retryOptions) === null || _f === void 0 ? void 0 : _f.retryDelayMultiplier : exports.RETRY_DELAY_MULTIPLIER_DEFAULT, totalTimeout: ((_g = options.retryOptions) === null || _g === void 0 ? void 0 : _g.totalTimeout) ? (_h = options.retryOptions) === null || _h === void 0 ? 
void 0 : _h.totalTimeout : exports.TOTAL_TIMEOUT_DEFAULT, maxRetryDelay: ((_j = options.retryOptions) === null || _j === void 0 ? void 0 : _j.maxRetryDelay) ? (_k = options.retryOptions) === null || _k === void 0 ? void 0 : _k.maxRetryDelay : exports.MAX_RETRY_DELAY_DEFAULT, retryableErrorFn: ((_l = options.retryOptions) === null || _l === void 0 ? void 0 : _l.retryableErrorFn) ? (_m = options.retryOptions) === null || _m === void 0 ? void 0 : _m.retryableErrorFn : exports.RETRYABLE_ERR_FN_DEFAULT, idempotencyStrategy: ((_o = options.retryOptions) === null || _o === void 0 ? void 0 : _o.idempotencyStrategy) !== undefined ? (_p = options.retryOptions) === null || _p === void 0 ? void 0 : _p.idempotencyStrategy : IDEMPOTENCY_STRATEGY_DEFAULT, }, baseUrl, customEndpoint, useAuthWithCustomEndpoint: options === null || options === void 0 ? void 0 : options.useAuthWithCustomEndpoint, projectIdRequired: false, scopes: [ 'https://www.googleapis.com/auth/iam', 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/devstorage.full_control', ], packageJson: (0, package_json_helper_cjs_1.getPackageJSON)(), }; super(config, options); /** * Reference to {@link Storage.acl}. * * @name Storage#acl * @see Storage.acl */ this.acl = Storage.acl; this.crc32cGenerator = options.crc32cGenerator || crc32c_js_1.CRC32C_DEFAULT_VALIDATOR_GENERATOR; this.retryOptions = config.retryOptions; this.getBucketsStream = paginator_1.paginator.streamify('getBuckets'); this.getHmacKeysStream = paginator_1.paginator.streamify('getHmacKeys'); } static sanitizeEndpoint(url) { if (!exports.PROTOCOL_REGEX.test(url)) { url = `https://${url}`; } return url.replace(/\/+$/, ''); // Remove trailing slashes } /** * Get a reference to a Cloud Storage bucket. * * @param {string} name Name of the bucket. * @param {object} [options] Configuration object. * @param {string} [options.kmsKeyName] A Cloud KMS key that will be used to * encrypt objects inserted into this bucket, if no encryption method is * specified. * @param {string} [options.userProject] User project to be billed for all * requests made from this Bucket object. * @returns {Bucket} * @see Bucket * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const albums = storage.bucket('albums'); * const photos = storage.bucket('photos'); * ``` */ bucket(name, options) { if (!name) { throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED); } return new bucket_js_1.Bucket(this, name, options); } /** * Reference a channel to receive notifications about changes to your bucket. * * @param {string} id The ID of the channel. * @param {string} resourceId The resource ID of the channel. * @returns {Channel} * @see Channel * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const channel = storage.channel('id', 'resource-id'); * ``` */ channel(id, resourceId) { return new channel_js_1.Channel(this, id, resourceId); } /** * @typedef {array} CreateBucketResponse * @property {Bucket} 0 The new {@link Bucket}. * @property {object} 1 The full API response. */ /** * @callback CreateBucketCallback * @param {?Error} err Request error, if any. * @param {Bucket} bucket The new {@link Bucket}. * @param {object} apiResponse The full API response. */ /** * Metadata to set for the bucket. * * @typedef {object} CreateBucketRequest * @property {boolean} [archive=false] Specify the storage class as Archive. 
* @property {object} [autoclass.enabled=false] Specify whether Autoclass is * enabled for the bucket. * @property {object} [autoclass.terminalStorageClass='NEARLINE'] The storage class that objects in an Autoclass bucket eventually transition to if * they are not read for a certain length of time. Valid values are NEARLINE and ARCHIVE. * @property {boolean} [coldline=false] Specify the storage class as Coldline. * @property {Cors[]} [cors=[]] Specify the CORS configuration to use. * @property {CustomPlacementConfig} [customPlacementConfig={}] Specify the bucket's regions for dual-region buckets. * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. * @property {boolean} [dra=false] Specify the storage class as Durable Reduced * Availability. * @property {boolean} [enableObjectRetention=false] Specifiy whether or not object retention should be enabled on this bucket. * @property {object} [hierarchicalNamespace.enabled=false] Specify whether or not to enable hierarchical namespace on this bucket. * @property {string} [location] Specify the bucket's location. If specifying * a dual-region, the `customPlacementConfig` property should be set in conjunction. * For more information, see {@link https://cloud.google.com/storage/docs/locations| Bucket Locations}. * @property {boolean} [multiRegional=false] Specify the storage class as * Multi-Regional. * @property {boolean} [nearline=false] Specify the storage class as Nearline. * @property {boolean} [regional=false] Specify the storage class as Regional. * @property {boolean} [requesterPays=false] Force the use of the User Project metadata field to assign operational * costs when an operation is made on a Bucket and its objects. * @property {string} [rpo] For dual-region buckets, controls whether turbo * replication is enabled (`ASYNC_TURBO`) or disabled (`DEFAULT`). * @property {boolean} [standard=true] Specify the storage class as Standard. * @property {string} [storageClass] The new storage class. (`standard`, * `nearline`, `coldline`, or `archive`). * **Note:** The storage classes `multi_regional`, `regional`, and * `durable_reduced_availability` are now legacy and will be deprecated in * the future. * @property {Versioning} [versioning=undefined] Specify the versioning status. * @property {string} [userProject] The ID of the project which will be billed * for the request. */ /** * Create a bucket. * * Cloud Storage uses a flat namespace, so you can't create a bucket with * a name that is already in use. For more information, see * {@link https://cloud.google.com/storage/docs/bucketnaming.html#requirements| Bucket Naming Guidelines}. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/insert| Buckets: insert API Documentation} * See {@link https://cloud.google.com/storage/docs/storage-classes| Storage Classes} * * @param {string} name Name of the bucket to create. * @param {CreateBucketRequest} [metadata] Metadata to set for the bucket. * @param {CreateBucketCallback} [callback] Callback function. * @returns {Promise} * @throws {Error} If a name is not provided. * @see Bucket#create * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const callback = function(err, bucket, apiResponse) { * // `bucket` is a Bucket object. * }; * * storage.createBucket('new-bucket', callback); * * //- * // Create a bucket in a specific location and region. See the * // Official JSON API docs for complete details on the `location` * option. 
* // * //- * const metadata = { * location: 'US-CENTRAL1', * regional: true * }; * * storage.createBucket('new-bucket', metadata, callback); * * //- * // Create a bucket with a retention policy of 6 months. * //- * const metadata = { * retentionPolicy: { * retentionPeriod: 15780000 // 6 months in seconds. * } * }; * * storage.createBucket('new-bucket', metadata, callback); * * //- * // Enable versioning on a new bucket. * //- * const metadata = { * versioning: { * enabled: true * } * }; * * storage.createBucket('new-bucket', metadata, callback); * * //- * // If the callback is omitted, we'll return a Promise. * //- * storage.createBucket('new-bucket').then(function(data) { * const bucket = data[0]; * const apiResponse = data[1]; * }); * * ``` * @example include:samples/buckets.js * region_tag:storage_create_bucket * Another example: */ createBucket(name, metadataOrCallback, callback) { if (!name) { throw new Error(StorageExceptionMessages.BUCKET_NAME_REQUIRED_CREATE); } let metadata; if (!callback) { callback = metadataOrCallback; metadata = {}; } else { metadata = metadataOrCallback; } const body = { ...metadata, name, }; const storageClasses = { archive: 'ARCHIVE', coldline: 'COLDLINE', dra: 'DURABLE_REDUCED_AVAILABILITY', multiRegional: 'MULTI_REGIONAL', nearline: 'NEARLINE', regional: 'REGIONAL', standard: 'STANDARD', }; const storageClassKeys = Object.keys(storageClasses); for (const storageClass of storageClassKeys) { if (body[storageClass]) { if (metadata.storageClass && metadata.storageClass !== storageClass) { throw new Error(`Both \`${storageClass}\` and \`storageClass\` were provided.`); } body.storageClass = storageClasses[storageClass]; delete body[storageClass]; } } if (body.requesterPays) { body.billing = { requesterPays: body.requesterPays, }; delete body.requesterPays; } const query = { project: this.projectId, }; if (body.userProject) { query.userProject = body.userProject; delete body.userProject; } if (body.enableObjectRetention) { query.enableObjectRetention = body.enableObjectRetention; delete body.enableObjectRetention; } if (body.predefinedAcl) { query.predefinedAcl = body.predefinedAcl; delete body.predefinedAcl; } if (body.predefinedDefaultObjectAcl) { query.predefinedDefaultObjectAcl = body.predefinedDefaultObjectAcl; delete body.predefinedDefaultObjectAcl; } if (body.projection) { query.projection = body.projection; delete body.projection; } this.request({ method: 'POST', uri: '/b', qs: query, json: body, }, (err, resp) => { if (err) { callback(err, null, resp); return; } const bucket = this.bucket(name); bucket.metadata = resp; callback(null, bucket, resp); }); } /** * @typedef {object} CreateHmacKeyOptions * @property {string} [projectId] The project ID of the project that owns * the service account of the requested HMAC key. If not provided, * the project ID used to instantiate the Storage client will be used. * @property {string} [userProject] This parameter is currently ignored. */ /** * @typedef {object} HmacKeyMetadata * @property {string} accessId The access id identifies which HMAC key was * used to sign a request when authenticating with HMAC. * @property {string} etag Used to perform a read-modify-write of the key. * @property {string} id The resource name of the HMAC key. * @property {string} projectId The project ID. * @property {string} serviceAccountEmail The service account's email this * HMAC key is created for. * @property {string} state The state of this HMAC key. One of "ACTIVE", * "INACTIVE" or "DELETED". 
* @property {string} timeCreated The creation time of the HMAC key in * RFC 3339 format. * @property {string} [updated] The time this HMAC key was last updated in * RFC 3339 format. */ /** * @typedef {array} CreateHmacKeyResponse * @property {HmacKey} 0 The HmacKey instance created from API response. * @property {string} 1 The HMAC key's secret used to access the XML API. * @property {object} 3 The raw API response. */ /** * @callback CreateHmacKeyCallback Callback function. * @param {?Error} err Request error, if any. * @param {HmacKey} hmacKey The HmacKey instance created from API response. * @param {string} secret The HMAC key's secret used to access the XML API. * @param {object} apiResponse The raw API response. */ /** * Create an HMAC key associated with an service account to authenticate * requests to the Cloud Storage XML API. * * See {@link https://cloud.google.com/storage/docs/authentication/hmackeys| HMAC keys documentation} * * @param {string} serviceAccountEmail The service account's email address * with which the HMAC key is created for. * @param {CreateHmacKeyCallback} [callback] Callback function. * @return {Promise} * * @example * ``` * const {Storage} = require('google-cloud/storage'); * const storage = new Storage(); * * // Replace with your service account's email address * const serviceAccountEmail = * 'my-service-account@appspot.gserviceaccount.com'; * * storage.createHmacKey(serviceAccountEmail, function(err, hmacKey, secret) { * if (!err) { * // Securely store the secret for use with the XML API. * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * storage.createHmacKey(serviceAccountEmail) * .then((response) => { * const hmacKey = response[0]; * const secret = response[1]; * // Securely store the secret for use with the XML API. * }); * ``` */ createHmacKey(serviceAccountEmail, optionsOrCb, cb) { if (typeof serviceAccountEmail !== 'string') { throw new Error(StorageExceptionMessages.HMAC_SERVICE_ACCOUNT); } const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); const query = Object.assign({}, options, { serviceAccountEmail }); const projectId = query.projectId || this.projectId; delete query.projectId; this.request({ method: 'POST', uri: `/projects/${projectId}/hmacKeys`, qs: query, maxRetries: 0, //explicitly set this value since this is a non-idempotent function }, (err, resp) => { if (err) { callback(err, null, null, resp); return; } const metadata = resp.metadata; const hmacKey = this.hmacKey(metadata.accessId, { projectId: metadata.projectId, }); hmacKey.metadata = resp.metadata; callback(null, hmacKey, resp.secret, resp); }); } /** * Query object for listing buckets. * * @typedef {object} GetBucketsRequest * @property {boolean} [autoPaginate=true] Have pagination handled * automatically. * @property {number} [maxApiCalls] Maximum number of API calls to make. * @property {number} [maxResults] Maximum number of items plus prefixes to * return per call. * Note: By default will handle pagination automatically * if more than 1 page worth of results are requested per call. * When `autoPaginate` is set to `false` the smaller of `maxResults` * or 1 page of results will be returned per call. * @property {string} [pageToken] A previously-returned page token * representing part of the larger set of results to view. * @property {string} [userProject] The ID of the project which will be billed * for the request. * @param {boolean} [softDeleted] If true, returns the soft-deleted object. 
* Object `generation` is required if `softDeleted` is set to True. */ /** * @typedef {array} GetBucketsResponse * @property {Bucket[]} 0 Array of {@link Bucket} instances. * @property {object} 1 nextQuery A query object to receive more results. * @property {object} 2 The full API response. */ /** * @callback GetBucketsCallback * @param {?Error} err Request error, if any. * @param {Bucket[]} buckets Array of {@link Bucket} instances. * @param {object} nextQuery A query object to receive more results. * @param {object} apiResponse The full API response. */ /** * Get Bucket objects for all of the buckets in your project. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/buckets/list| Buckets: list API Documentation} * * @param {GetBucketsRequest} [query] Query object for listing buckets. * @param {GetBucketsCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * storage.getBuckets(function(err, buckets) { * if (!err) { * // buckets is an array of Bucket objects. * } * }); * * //- * // To control how many API requests are made and page through the results * // manually, set `autoPaginate` to `false`. * //- * const callback = function(err, buckets, nextQuery, apiResponse) { * if (nextQuery) { * // More results exist. * storage.getBuckets(nextQuery, callback); * } * * // The `metadata` property is populated for you with the metadata at the * // time of fetching. * buckets[0].metadata; * * // However, in cases where you are concerned the metadata could have * // changed, use the `getMetadata` method. * buckets[0].getMetadata(function(err, metadata, apiResponse) {}); * }; * * storage.getBuckets({ * autoPaginate: false * }, callback); * * //- * // If the callback is omitted, we'll return a Promise. * //- * storage.getBuckets().then(function(data) { * const buckets = data[0]; * }); * * ``` * @example include:samples/buckets.js * region_tag:storage_list_buckets * Another example: */ getBuckets(optionsOrCallback, cb) { const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); options.project = options.project || this.projectId; this.request({ uri: '/b', qs: options, }, (err, resp) => { if (err) { callback(err, null, null, resp); return; } const itemsArray = resp.items ? resp.items : []; const buckets = itemsArray.map((bucket) => { const bucketInstance = this.bucket(bucket.id); bucketInstance.metadata = bucket; return bucketInstance; }); const nextQuery = resp.nextPageToken ? Object.assign({}, options, { pageToken: resp.nextPageToken }) : null; callback(null, buckets, nextQuery, resp); }); } getHmacKeys(optionsOrCb, cb) { const { options, callback } = (0, util_js_1.normalize)(optionsOrCb, cb); const query = Object.assign({}, options); const projectId = query.projectId || this.projectId; delete query.projectId; this.request({ uri: `/projects/${projectId}/hmacKeys`, qs: query, }, (err, resp) => { if (err) { callback(err, null, null, resp); return; } const itemsArray = resp.items ? resp.items : []; const hmacKeys = itemsArray.map((hmacKey) => { const hmacKeyInstance = this.hmacKey(hmacKey.accessId, { projectId: hmacKey.projectId, }); hmacKeyInstance.metadata = hmacKey; return hmacKeyInstance; }); const nextQuery = resp.nextPageToken ? Object.assign({}, options, { pageToken: resp.nextPageToken }) : null; callback(null, hmacKeys, nextQuery, resp); }); } /** * @typedef {array} GetServiceAccountResponse * @property {object} 0 The service account resource. 
* @property {object} 1 The full * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. */ /** * @callback GetServiceAccountCallback * @param {?Error} err Request error, if any. * @param {object} serviceAccount The serviceAccount resource. * @param {string} serviceAccount.emailAddress The service account email * address. * @param {object} apiResponse The full * {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| API response}. */ /** * Get the email address of this project's Google Cloud Storage service * account. * * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount/get| Projects.serviceAccount: get API Documentation} * See {@link https://cloud.google.com/storage/docs/json_api/v1/projects/serviceAccount#resource| Projects.serviceAccount Resource} * * @param {object} [options] Configuration object. * @param {string} [options.userProject] User project to be billed for this * request. * @param {GetServiceAccountCallback} [callback] Callback function. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * * storage.getServiceAccount(function(err, serviceAccount, apiResponse) { * if (!err) { * const serviceAccountEmail = serviceAccount.emailAddress; * } * }); * * //- * // If the callback is omitted, we'll return a Promise. * //- * storage.getServiceAccount().then(function(data) { * const serviceAccountEmail = data[0].emailAddress; * const apiResponse = data[1]; * }); * ``` */ getServiceAccount(optionsOrCallback, cb) { const { options, callback } = (0, util_js_1.normalize)(optionsOrCallback, cb); this.request({ uri: `/projects/${this.projectId}/serviceAccount`, qs: options, }, (err, resp) => { if (err) { callback(err, null, resp); return; } const camelCaseResponse = {}; for (const prop in resp) { // eslint-disable-next-line no-prototype-builtins if (resp.hasOwnProperty(prop)) { const camelCaseProp = prop.replace(/_(\w)/g, (_, match) => match.toUpperCase()); camelCaseResponse[camelCaseProp] = resp[prop]; } } callback(null, camelCaseResponse, resp); }); } /** * Get a reference to an HmacKey object. * Note: this does not fetch the HMAC key's metadata. Use HmacKey#get() to * retrieve and populate the metadata. * * To get a reference to an HMAC key that's not created for a service * account in the same project used to instantiate the Storage client, * supply the project's ID as `projectId` in the `options` argument. * * @param {string} accessId The HMAC key's access ID. * @param {HmacKeyOptions} options HmacKey constructor options. * @returns {HmacKey} * @see HmacKey * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const hmacKey = storage.hmacKey('ACCESS_ID'); * ``` */ hmacKey(accessId, options) { if (!accessId) { throw new Error(StorageExceptionMessages.HMAC_ACCESS_ID); } return new hmacKey_js_1.HmacKey(this, accessId, options); } } exports.Storage = Storage; /** * {@link Bucket} class. * * @name Storage.Bucket * @see Bucket * @type {Constructor} */ Storage.Bucket = bucket_js_1.Bucket; /** * {@link Channel} class. * * @name Storage.Channel * @see Channel * @type {Constructor} */ Storage.Channel = channel_js_1.Channel; /** * {@link File} class. * * @name Storage.File * @see File * @type {Constructor} */ Storage.File = file_js_1.File; /** * {@link HmacKey} class. 
* * @name Storage.HmacKey * @see HmacKey * @type {Constructor} */ Storage.HmacKey = hmacKey_js_1.HmacKey; Storage.acl = { OWNER_ROLE: 'OWNER', READER_ROLE: 'READER', WRITER_ROLE: 'WRITER', }; /*! Developer Documentation * * These methods can be auto-paginated. */ paginator_1.paginator.extend(Storage, ['getBuckets', 'getHmacKeys']); /*! Developer Documentation * * All async methods (except for streams) will return a Promise in the event * that a callback is omitted. */ (0, promisify_1.promisifyAll)(Storage, { exclude: ['bucket', 'channel', 'hmacKey'], }); /***/ }), /***/ 30004: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; /*! * Copyright 2022 Google LLC. All Rights Reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function() { return m[k]; } }; } Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; })); var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); }) : function(o, v) { o["default"] = v; }); var __importStar = (this && this.__importStar) || (function () { var ownKeys = function(o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; })(); var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) { if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter"); if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it"); return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; var _XMLMultiPartUploadHelper_instances, _XMLMultiPartUploadHelper_setGoogApiClientHeaders, _XMLMultiPartUploadHelper_handleErrorResponse; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.TransferManager = exports.MultiPartUploadError = void 0; const file_js_1 = __nccwpck_require__(69975); const p_limit_1 = __importDefault(__nccwpck_require__(58890)); const path = __importStar(__nccwpck_require__(16928)); const fs_1 = __nccwpck_require__(79896); const crc32c_js_1 = __nccwpck_require__(34317); const google_auth_library_1 = __nccwpck_require__(492); const fast_xml_parser_1 = __nccwpck_require__(39741); const async_retry_1 = __importDefault(__nccwpck_require__(45195)); const crypto_1 = __nccwpck_require__(76982); const util_js_1 = __nccwpck_require__(37325); const util_js_2 = __nccwpck_require__(74557); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore const package_json_helper_cjs_1 = __nccwpck_require__(41969); const packageJson = (0, package_json_helper_cjs_1.getPackageJSON)(); /** * Default number of concurrently executing promises to use when calling uploadManyFiles. * */ const DEFAULT_PARALLEL_UPLOAD_LIMIT = 5; /** * Default number of concurrently executing promises to use when calling downloadManyFiles. * */ const DEFAULT_PARALLEL_DOWNLOAD_LIMIT = 5; /** * Default number of concurrently executing promises to use when calling downloadFileInChunks. * */ const DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT = 5; /** * The minimum size threshold in bytes at which to apply a chunked download strategy when calling downloadFileInChunks. * */ const DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD = 32 * 1024 * 1024; /** * The chunk size in bytes to use when calling downloadFileInChunks. * */ const DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; /** * The chunk size in bytes to use when calling uploadFileInChunks. * */ const UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE = 32 * 1024 * 1024; /** * Default number of concurrently executing promises to use when calling uploadFileInChunks. * */ const DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT = 5; const EMPTY_REGEX = '(?:)'; /** * The `gccl-gcs-cmd` value for the `X-Goog-API-Client` header. * Example: `gccl-gcs-cmd/tm.upload_many` * * @see {@link GCCL_GCS_CMD}. * @see {@link GCCL_GCS_CMD_KEY}. */ const GCCL_GCS_CMD_FEATURE = { UPLOAD_MANY: 'tm.upload_many', DOWNLOAD_MANY: 'tm.download_many', UPLOAD_SHARDED: 'tm.upload_sharded', DOWNLOAD_SHARDED: 'tm.download_sharded', }; const defaultMultiPartGenerator = (bucket, fileName, uploadId, partsMap) => { return new XMLMultiPartUploadHelper(bucket, fileName, uploadId, partsMap); }; class MultiPartUploadError extends Error { constructor(message, uploadId, partsMap) { super(message); this.uploadId = uploadId; this.partsMap = partsMap; } } exports.MultiPartUploadError = MultiPartUploadError; /** * Class representing an implementation of MPU in the XML API. This class is not meant for public usage. 
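 * It drives the XML API multipart flow implemented below: initiate the upload (POST ?uploads), upload each part (PUT ?partNumber=N&uploadId=...), then complete (POST ?uploadId=...) or abort (DELETE ?uploadId=...).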
* * @private * */ class XMLMultiPartUploadHelper { constructor(bucket, fileName, uploadId, partsMap) { _XMLMultiPartUploadHelper_instances.add(this); this.authClient = bucket.storage.authClient || new google_auth_library_1.GoogleAuth(); this.uploadId = uploadId || ''; this.bucket = bucket; this.fileName = fileName; this.baseUrl = `https://${bucket.name}.${new URL(this.bucket.storage.apiEndpoint).hostname}/${fileName}`; this.xmlBuilder = new fast_xml_parser_1.XMLBuilder({ arrayNodeName: 'Part' }); this.xmlParser = new fast_xml_parser_1.XMLParser(); this.partsMap = partsMap || new Map(); this.retryOptions = { retries: this.bucket.storage.retryOptions.maxRetries, factor: this.bucket.storage.retryOptions.retryDelayMultiplier, maxTimeout: this.bucket.storage.retryOptions.maxRetryDelay * 1000, maxRetryTime: this.bucket.storage.retryOptions.totalTimeout * 1000, }; } /** * Initiates a multipart upload (MPU) to the XML API and stores the resultant upload id. * * @returns {Promise} */ async initiateUpload(headers = {}) { const url = `${this.baseUrl}?uploads`; return (0, async_retry_1.default)(async (bail) => { try { const res = await this.authClient.request({ headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this, headers), method: 'POST', url, }); if (res.data && res.data.error) { throw res.data.error; } const parsedXML = this.xmlParser.parse(res.data); this.uploadId = parsedXML.InitiateMultipartUploadResult.UploadId; } catch (e) { __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); } }, this.retryOptions); } /** * Uploads the provided chunk of data to the XML API using the previously created upload id. * * @param {number} partNumber the sequence number of this chunk. * @param {Buffer} chunk the chunk of data to be uploaded. * @param {string | false} validation whether or not to include the md5 hash in the headers to cause the server * to validate the chunk was not corrupted. * @returns {Promise} */ async uploadPart(partNumber, chunk, validation) { const url = `${this.baseUrl}?partNumber=${partNumber}&uploadId=${this.uploadId}`; let headers = __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this); if (validation === 'md5') { const hash = (0, crypto_1.createHash)('md5').update(chunk).digest('base64'); headers = { 'Content-MD5': hash, }; } return (0, async_retry_1.default)(async (bail) => { try { const res = await this.authClient.request({ url, method: 'PUT', body: chunk, headers, }); if (res.data && res.data.error) { throw res.data.error; } this.partsMap.set(partNumber, res.headers['etag']); } catch (e) { __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); } }, this.retryOptions); } /** * Sends the final request of the MPU to tell GCS the upload is now complete. 
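 * The accumulated partsMap is sorted by part number and serialized as repeated <Part> elements (PartNumber and ETag) in the request body.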
* * @returns {Promise} */ async completeUpload() { const url = `${this.baseUrl}?uploadId=${this.uploadId}`; const sortedMap = new Map([...this.partsMap.entries()].sort((a, b) => a[0] - b[0])); const parts = []; for (const entry of sortedMap.entries()) { parts.push({ PartNumber: entry[0], ETag: entry[1] }); } const body = `${this.xmlBuilder.build(parts)}`; return (0, async_retry_1.default)(async (bail) => { try { const res = await this.authClient.request({ headers: __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_setGoogApiClientHeaders).call(this), url, method: 'POST', body, }); if (res.data && res.data.error) { throw res.data.error; } return res; } catch (e) { __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); return; } }, this.retryOptions); } /** * Aborts a multipart upload that is in progress. Once aborted, any parts in the process of being uploaded fail, * and future requests using the upload ID fail. * * @returns {Promise} */ async abortUpload() { const url = `${this.baseUrl}?uploadId=${this.uploadId}`; return (0, async_retry_1.default)(async (bail) => { try { const res = await this.authClient.request({ url, method: 'DELETE', }); if (res.data && res.data.error) { throw res.data.error; } } catch (e) { __classPrivateFieldGet(this, _XMLMultiPartUploadHelper_instances, "m", _XMLMultiPartUploadHelper_handleErrorResponse).call(this, e, bail); return; } }, this.retryOptions); } } _XMLMultiPartUploadHelper_instances = new WeakSet(), _XMLMultiPartUploadHelper_setGoogApiClientHeaders = function _XMLMultiPartUploadHelper_setGoogApiClientHeaders(headers = {}) { let headerFound = false; let userAgentFound = false; for (const [key, value] of Object.entries(headers)) { if (key.toLocaleLowerCase().trim() === 'x-goog-api-client') { headerFound = true; // Append the command feature to the value, if not already there if (!value.includes(GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED)) { headers[key] = `${value} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; } } else if (key.toLocaleLowerCase().trim() === 'user-agent') { userAgentFound = true; } } // If the header isn't present, add it if (!headerFound) { headers['x-goog-api-client'] = `${(0, util_js_2.getRuntimeTrackingString)()} gccl/${packageJson.version} gccl-gcs-cmd/${GCCL_GCS_CMD_FEATURE.UPLOAD_SHARDED}`; } // If the User-Agent isn't present, add it if (!userAgentFound) { headers['User-Agent'] = (0, util_js_2.getUserAgentString)(); } return headers; }, _XMLMultiPartUploadHelper_handleErrorResponse = function _XMLMultiPartUploadHelper_handleErrorResponse(err, bail) { if (this.bucket.storage.retryOptions.autoRetry && this.bucket.storage.retryOptions.retryableErrorFn(err)) { throw err; } else { bail(err); } }; /** * Create a TransferManager object to perform parallel transfer operations on a Cloud Storage bucket. * * @class * @hideconstructor * * @param {Bucket} bucket A {@link Bucket} instance * */ class TransferManager { constructor(bucket) { this.bucket = bucket; } /** * @typedef {object} UploadManyFilesOptions * @property {number} [concurrencyLimit] The number of concurrently executing promises * to use when uploading the files. * @property {Function} [customDestinationBuilder] A function that will take the current path of a local file * and return a string representing a custom path to be used to upload the file to GCS. * @property {boolean} [skipIfExists] Do not upload the file if it already exists in * the bucket.
This will set the precondition ifGenerationMatch = 0. * @property {string} [prefix] A prefix to prepend to all of the uploaded files. * @property {object} [passthroughOptions] {@link UploadOptions} Options to be passed through * to each individual upload operation. * */ /** * Upload multiple files in parallel to the bucket. This is a convenience method * that utilizes {@link Bucket#upload} to perform the upload. * * @param {array | string} [filePathsOrDirectory] An array of fully qualified paths to the files to be uploaded to the bucket, or a directory name. * If a directory name is provided, the directory will be recursively walked and all files will be added to the upload list. * @param {UploadManyFilesOptions} [options] Configuration options. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * const transferManager = new TransferManager(bucket); * * //- * // Upload multiple files in parallel. * //- * const response = await transferManager.uploadManyFiles(['/local/path/file1.txt', '/local/path/file2.txt']); * // Your bucket now contains: * // - "local/path/file1.txt" (with the contents of '/local/path/file1.txt') * // - "local/path/file2.txt" (with the contents of '/local/path/file2.txt') * const response = await transferManager.uploadManyFiles('/local/directory'); * // Your bucket will now contain all files contained in '/local/directory' maintaining the subdirectory structure. * ``` * */ async uploadManyFiles(filePathsOrDirectory, options = {}) { var _a; if (options.skipIfExists && ((_a = options.passthroughOptions) === null || _a === void 0 ? void 0 : _a.preconditionOpts)) { options.passthroughOptions.preconditionOpts.ifGenerationMatch = 0; } else if (options.skipIfExists && options.passthroughOptions === undefined) { options.passthroughOptions = { preconditionOpts: { ifGenerationMatch: 0, }, }; } const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_UPLOAD_LIMIT); const promises = []; let allPaths = []; if (!Array.isArray(filePathsOrDirectory)) { for await (const curPath of this.getPathsFromDirectory(filePathsOrDirectory)) { allPaths.push(curPath); } } else { allPaths = filePathsOrDirectory; } for (const filePath of allPaths) { const stat = await fs_1.promises.lstat(filePath); if (stat.isDirectory()) { continue; } const passThroughOptionsCopy = { ...options.passthroughOptions, [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.UPLOAD_MANY, }; passThroughOptionsCopy.destination = options.customDestinationBuilder ? options.customDestinationBuilder(filePath, options) : filePath.split(path.sep).join(path.posix.sep); if (options.prefix) { passThroughOptionsCopy.destination = path.posix.join(...options.prefix.split(path.sep), passThroughOptionsCopy.destination); } promises.push(limit(() => this.bucket.upload(filePath, passThroughOptionsCopy))); } return Promise.all(promises); } /** * @typedef {object} DownloadManyFilesOptions * @property {number} [concurrencyLimit] The number of concurrently executing promises * to use when downloading the files. * @property {string} [prefix] A prefix to prepend to all of the downloaded files. * @property {string} [stripPrefix] A prefix to remove from all of the downloaded files. * @property {object} [passthroughOptions] {@link DownloadOptions} Options to be passed through * to each individual download operation.
* @property {boolean} [skipIfExists] Do not download the file if it already exists in * the destination. * */ /** * Download multiple files in parallel to the local filesystem. This is a convenience method * that utilizes {@link File#download} to perform the download. * * @param {array | string} [filesOrFolder] An array of file name strings or file objects to be downloaded. If * a string is provided this will be treated as a GCS prefix and all files with that prefix will be downloaded. * @param {DownloadManyFilesOptions} [options] Configuration options. Setting options.prefix or options.stripPrefix * or options.passthroughOptions.destination will cause the downloaded files to be written to the file system * instead of being returned as a buffer. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * const transferManager = new TransferManager(bucket); * * //- * // Download multiple files in parallel. * //- * const response = await transferManager.downloadManyFiles(['file1.txt', 'file2.txt']); * // The following files have been downloaded: * // - "file1.txt" (with the contents from my-bucket.file1.txt) * // - "file2.txt" (with the contents from my-bucket.file2.txt) * const response = await transferManager.downloadManyFiles([bucket.file('file1.txt'), bucket.file('file2.txt')]); * // The following files have been downloaded: * // - "file1.txt" (with the contents from my-bucket.file1.txt) * // - "file2.txt" (with the contents from my-bucket.file2.txt) * const response = await transferManager.downloadManyFiles('test-folder'); * // All files with GCS prefix of 'test-folder' have been downloaded. * ``` * */ async downloadManyFiles(filesOrFolder, options = {}) { const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_DOWNLOAD_LIMIT); const promises = []; let files = []; if (!Array.isArray(filesOrFolder)) { const directoryFiles = await this.bucket.getFiles({ prefix: filesOrFolder, }); files = directoryFiles[0]; } else { files = filesOrFolder.map(curFile => { if (typeof curFile === 'string') { return this.bucket.file(curFile); } return curFile; }); } const stripRegexString = options.stripPrefix ? `^${options.stripPrefix}` : EMPTY_REGEX; const regex = new RegExp(stripRegexString, 'g'); for (const file of files) { const passThroughOptionsCopy = { ...options.passthroughOptions, [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_MANY, }; if (options.prefix || passThroughOptionsCopy.destination) { passThroughOptionsCopy.destination = path.join(options.prefix || '', passThroughOptionsCopy.destination || '', file.name); } if (options.stripPrefix) { passThroughOptionsCopy.destination = file.name.replace(regex, ''); } if (options.skipIfExists && (0, fs_1.existsSync)(passThroughOptionsCopy.destination || '')) { continue; } promises.push(limit(async () => { const destination = passThroughOptionsCopy.destination; if (destination && destination.endsWith(path.sep)) { await fs_1.promises.mkdir(destination, { recursive: true }); return Promise.resolve([ Buffer.alloc(0), ]); } return file.download(passThroughOptionsCopy); })); } return Promise.all(promises); } /** * @typedef {object} DownloadFileInChunksOptions * @property {number} [concurrencyLimit] The number of concurrently executing promises * to use when downloading the file. * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be downloaded.
* @property {string | boolean} [validation] Whether or not to perform a CRC32C validation check when download is complete. * @property {boolean} [noReturnData] Whether or not to return the downloaded data. A `true` value here would be useful for files with a size that will not fit into memory. * */ /** * Download a large file in chunks utilizing parallel download operations. This is a convenience method * that utilizes {@link File#download} to perform the download. * * @param {File | string} fileOrName {@link File} to download. * @param {DownloadFileInChunksOptions} [options] Configuration options. * @returns {Promise} * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * const transferManager = new TransferManager(bucket); * * //- * // Download a large file in chunks utilizing parallel operations. * //- * const response = await transferManager.downloadFileInChunks(bucket.file('large-file.txt')); * // Your local directory now contains: * // - "large-file.txt" (with the contents from my-bucket.large-file.txt) * ``` * */ async downloadFileInChunks(fileOrName, options = {}) { let chunkSize = options.chunkSizeBytes || DOWNLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; let limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_DOWNLOAD_LIMIT); const noReturnData = Boolean(options.noReturnData); const promises = []; const file = typeof fileOrName === 'string' ? this.bucket.file(fileOrName) : fileOrName; const fileInfo = await file.get(); const size = parseInt(fileInfo[0].metadata.size.toString()); // If the file size does not meet the threshold download it as a single chunk. if (size < DOWNLOAD_IN_CHUNKS_FILE_SIZE_THRESHOLD) { limit = (0, p_limit_1.default)(1); chunkSize = size; } let start = 0; const filePath = options.destination || path.basename(file.name); const fileToWrite = await fs_1.promises.open(filePath, 'w'); while (start < size) { const chunkStart = start; let chunkEnd = start + chunkSize - 1; chunkEnd = chunkEnd > size ? size : chunkEnd; promises.push(limit(async () => { const resp = await file.download({ start: chunkStart, end: chunkEnd, [util_js_1.GCCL_GCS_CMD_KEY]: GCCL_GCS_CMD_FEATURE.DOWNLOAD_SHARDED, }); const result = await fileToWrite.write(resp[0], 0, resp[0].length, chunkStart); if (noReturnData) return; return result.buffer; })); start += chunkSize; } let chunks; try { chunks = await Promise.all(promises); } finally { await fileToWrite.close(); } if (options.validation === 'crc32c' && fileInfo[0].metadata.crc32c) { const downloadedCrc32C = await crc32c_js_1.CRC32C.fromFile(filePath); if (!downloadedCrc32C.validate(fileInfo[0].metadata.crc32c)) { const mismatchError = new file_js_1.RequestError(file_js_1.FileExceptionMessages.DOWNLOAD_MISMATCH); mismatchError.code = 'CONTENT_DOWNLOAD_MISMATCH'; throw mismatchError; } } if (noReturnData) return; return [Buffer.concat(chunks, size)]; } /** * @typedef {object} UploadFileInChunksOptions * @property {number} [concurrencyLimit] The number of concurrently executing promises * to use when uploading the file. * @property {number} [chunkSizeBytes] The size in bytes of each chunk to be uploaded. * @property {string} [uploadName] Name of the file when saving to GCS. If omitted the name is taken from the file path. * @property {number} [maxQueueSize] The number of upload chunks to hold in memory concurrently. If not specified * defaults to the specified concurrency limit.
* @property {string} [uploadId] If specified attempts to resume a previous upload. * @property {Map} [partsMap] If specified alongside uploadId, attempts to resume a previous upload from the last chunk * specified in partsMap. * @property {object} [headers] headers to be sent when initiating the multipart upload. * See {@link https://cloud.google.com/storage/docs/xml-api/post-object-multipart#request_headers| Request Headers: Initiate a Multipart Upload} * @property {boolean} [autoAbortFailure] Whether an in-progress upload session will be automatically aborted upon failure. If not set, * failed uploads are aborted automatically. * */ /** * Upload a large file in chunks utilizing parallel upload operations. If the upload fails, an uploadId and * map containing all the successfully uploaded parts will be returned to the caller. These arguments can be used to * resume the upload. * * @param {string} [filePath] The path of the file to be uploaded. * @param {UploadFileInChunksOptions} [options] Configuration options. * @param {MultiPartHelperGenerator} [generator] A function that will return a type that implements the MPU interface. Most users will not need to use this. * @returns {Promise} If successful a promise resolving to void, otherwise an error containing the message, uploadId, and parts map. * * @example * ``` * const {Storage} = require('@google-cloud/storage'); * const storage = new Storage(); * const bucket = storage.bucket('my-bucket'); * const transferManager = new TransferManager(bucket); * * //- * // Upload a large file in chunks utilizing parallel operations. * //- * const response = await transferManager.uploadFileInChunks('large-file.txt'); * // Your bucket now contains: * // - "large-file.txt" * ``` * * */ async uploadFileInChunks(filePath, options = {}, generator = defaultMultiPartGenerator) { const chunkSize = options.chunkSizeBytes || UPLOAD_IN_CHUNKS_DEFAULT_CHUNK_SIZE; const limit = (0, p_limit_1.default)(options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT); const maxQueueSize = options.maxQueueSize || options.concurrencyLimit || DEFAULT_PARALLEL_CHUNKED_UPLOAD_LIMIT; const fileName = options.uploadName || path.basename(filePath); const mpuHelper = generator(this.bucket, fileName, options.uploadId, options.partsMap); let partNumber = 1; let promises = []; try { if (options.uploadId === undefined) { await mpuHelper.initiateUpload(options.headers); } const startOrResumptionByte = mpuHelper.partsMap.size * chunkSize; const readStream = (0, fs_1.createReadStream)(filePath, { highWaterMark: chunkSize, start: startOrResumptionByte, }); // p-limit only limits the number of running promises. We do not want to hold an entire // large file in memory at once so promises acts as a queue that will hold only maxQueueSize in memory.
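// For example, with the default 32 MiB chunk size and a maxQueueSize of 5, roughly 160 MiB of file data // is buffered at a time before the queued uploads are awaited and the queue is drained.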
for await (const curChunk of readStream) { if (promises.length >= maxQueueSize) { await Promise.all(promises); promises = []; } promises.push(limit(() => mpuHelper.uploadPart(partNumber++, curChunk, options.validation))); } await Promise.all(promises); return await mpuHelper.completeUpload(); } catch (e) { if ((options.autoAbortFailure === undefined || options.autoAbortFailure) && mpuHelper.uploadId) { try { await mpuHelper.abortUpload(); return; } catch (e) { throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); } } throw new MultiPartUploadError(e.message, mpuHelper.uploadId, mpuHelper.partsMap); } } async *getPathsFromDirectory(directory) { const filesAndSubdirectories = await fs_1.promises.readdir(directory, { withFileTypes: true, }); for (const curFileOrDirectory of filesAndSubdirectories) { const fullPath = path.join(directory, curFileOrDirectory.name); curFileOrDirectory.isDirectory() ? yield* this.getPathsFromDirectory(fullPath) : yield fullPath; } } } exports.TransferManager = TransferManager; /***/ }), /***/ 74557: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; // Copyright 2019 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. var __createBinding = this && this.__createBinding || (Object.create ? function (o, m, k, k2) { if (k2 === undefined) k2 = k; var desc = Object.getOwnPropertyDescriptor(m, k); if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { desc = { enumerable: true, get: function () { return m[k]; } }; } Object.defineProperty(o, k2, desc); } : function (o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; }); var __setModuleDefault = this && this.__setModuleDefault || (Object.create ? 
function (o, v) { Object.defineProperty(o, "default", { enumerable: true, value: v }); } : function (o, v) { o["default"] = v; }); var __importStar = this && this.__importStar || function () { var ownKeys = function (o) { ownKeys = Object.getOwnPropertyNames || function (o) { var ar = []; for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; return ar; }; return ownKeys(o); }; return function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); __setModuleDefault(result, mod); return result; }; }(); Object.defineProperty(exports, "__esModule", ({ value: true })); exports.PassThroughShim = void 0; exports.normalize = normalize; exports.objectEntries = objectEntries; exports.fixedEncodeURIComponent = fixedEncodeURIComponent; exports.encodeURI = encodeURI; exports.qsStringify = qsStringify; exports.objectKeyToLowercase = objectKeyToLowercase; exports.unicodeJSONStringify = unicodeJSONStringify; exports.convertObjKeysToSnakeCase = convertObjKeysToSnakeCase; exports.formatAsUTCISO = formatAsUTCISO; exports.getRuntimeTrackingString = getRuntimeTrackingString; exports.getUserAgentString = getUserAgentString; exports.getDirName = getDirName; exports.getModuleFormat = getModuleFormat; const path = __importStar(__nccwpck_require__(16928)); const querystring = __importStar(__nccwpck_require__(83480)); const stream_1 = __nccwpck_require__(2203); const url = __importStar(__nccwpck_require__(87016)); // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore const package_json_helper_cjs_1 = __nccwpck_require__(41969); // Done to avoid a problem with mangling of identifiers when using esModuleInterop const fileURLToPath = url.fileURLToPath; const isEsm = false; function normalize(optionsOrCallback, cb) { const options = typeof optionsOrCallback === 'object' ? optionsOrCallback : {}; const callback = typeof optionsOrCallback === 'function' ? optionsOrCallback : cb; return { options, callback }; } /** * Flatten an object into an Array of arrays, [[key, value], ..]. * Implements Object.entries() for Node.js <8 * @internal */ function objectEntries(obj) { return Object.keys(obj).map(key => [key, obj[key]]); } /** * Encode `str` with encodeURIComponent, plus these * reserved characters: `! * ' ( )`. * * See {@link https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/encodeURIComponent| MDN: fixedEncodeURIComponent} * * @param {string} str The URI component to encode. * @return {string} The encoded string. */ function fixedEncodeURIComponent(str) { return encodeURIComponent(str).replace(/[!'()*]/g, c => '%' + c.charCodeAt(0).toString(16).toUpperCase()); } /** * URI encode `uri` for generating signed URLs, using fixedEncodeURIComponent. * * Encode every byte except `A-Z a-z 0-9 ~ - . _`. * * @param {string} uri The URI to encode. * @param {boolean} [encodeSlash=false] If `true`, the "/" character is also encoded (as `%2F`). * @return {string} The encoded string. */ function encodeURI(uri, encodeSlash) { // Split the string by `/`, and conditionally rejoin them with either // %2F if encodeSlash is `true`, or '/' if `false`. return uri.split('/').map(fixedEncodeURIComponent).join(encodeSlash ? '%2F' : '/'); } /** * Serialize an object to a URL query string using util.encodeURI(uri, true). * @param {object} qs The object to serialize. * @return {string} Serialized string.
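 * @example * ``` * // Illustrative usage sketch (not from the original source): keys and values are * // run through encodeURI(component, true), so '/' is encoded as %2F. * qsStringify({ prefix: 'images/2024', marker: 'a/b' }); * // => 'prefix=images%2F2024&marker=a%2Fb' * ```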
*/ function qsStringify(qs) { return querystring.stringify(qs, '&', '=', { encodeURIComponent: component => encodeURI(component, true) }); } function objectKeyToLowercase(object) { const newObj = {}; for (let key of Object.keys(object)) { const value = object[key]; key = key.toLowerCase(); newObj[key] = value; } return newObj; } /** * JSON encode obj, with unicode \u+ representation. * @param {object} obj The object to encode. * @return {string} Serialized string. */ function unicodeJSONStringify(obj) { return JSON.stringify(obj).replace(/[\u0080-\uFFFF]/g, char => '\\u' + ('0000' + char.charCodeAt(0).toString(16)).slice(-4)); } /** * Converts the given object's keys to snake_case * @param {object} obj object to convert keys to snake case. * @returns {object} object with keys converted to snake case. */ function convertObjKeysToSnakeCase(obj) { if (obj instanceof Date || obj instanceof RegExp) { return obj; } if (Array.isArray(obj)) { return obj.map(convertObjKeysToSnakeCase); } if (obj instanceof Object) { return Object.keys(obj).reduce((acc, cur) => { const s = cur[0].toLocaleLowerCase() + cur.slice(1).replace(/([A-Z]+)/g, (match, p1) => { return `_${p1.toLowerCase()}`; }); acc[s] = convertObjKeysToSnakeCase(obj[cur]); return acc; }, Object()); } return obj; } /** * Formats the provided date object as a UTC ISO string. * @param {Date} dateTimeToFormat date object to be formatted. * @param {boolean} includeTime flag to include hours, minutes, seconds in output. * @param {string} dateDelimiter delimiter between date components. * @param {string} timeDelimiter delimiter between time components. * @returns {string} UTC ISO format of provided date object. */ function formatAsUTCISO(dateTimeToFormat, includeTime = false, dateDelimiter = '', timeDelimiter = '') { const year = dateTimeToFormat.getUTCFullYear(); const month = dateTimeToFormat.getUTCMonth() + 1; const day = dateTimeToFormat.getUTCDate(); const hour = dateTimeToFormat.getUTCHours(); const minute = dateTimeToFormat.getUTCMinutes(); const second = dateTimeToFormat.getUTCSeconds(); let resultString = `${year.toString().padStart(4, '0')}${dateDelimiter}${month.toString().padStart(2, '0')}${dateDelimiter}${day.toString().padStart(2, '0')}`; if (includeTime) { resultString = `${resultString}T${hour.toString().padStart(2, '0')}${timeDelimiter}${minute.toString().padStart(2, '0')}${timeDelimiter}${second.toString().padStart(2, '0')}Z`; } return resultString; } /** * Examines the runtime environment and returns the appropriate tracking string. * @returns {string} metrics tracking string based on the current runtime environment. */ function getRuntimeTrackingString() { if ( // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore globalThis.Deno && // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore globalThis.Deno.version && // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore globalThis.Deno.version.deno) { // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore return `gl-deno/${globalThis.Deno.version.deno}`; } else { return `gl-node/${process.versions.node}`; } } /** * Looks at package.json and creates the user-agent string to be applied to request headers. * @returns {string} user agent string. */ function getUserAgentString() { const pkg = (0, package_json_helper_cjs_1.getPackageJSON)(); const hyphenatedPackageName = pkg.name.replace('@google-cloud', 'gcloud-node') // For legacy purposes. .replace('/', '-'); // For UA spec-compliance purposes.
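// e.g. '@google-cloud/storage' becomes 'gcloud-node-storage', so the returned User-Agent string looks like 'gcloud-node-storage/<version>'.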
return hyphenatedPackageName + '/' + pkg.version; } function getDirName() { let dirToUse = ''; try { dirToUse = __dirname; } catch (e) { // eslint-disable-next-line @typescript-eslint/ban-ts-comment // @ts-ignore dirToUse = __dirname; } return dirToUse; } function getModuleFormat() { return isEsm ? 'ESM' : 'CJS'; } class PassThroughShim extends stream_1.PassThrough { constructor() { super(...arguments); this.shouldEmitReading = true; this.shouldEmitWriting = true; } _read(size) { if (this.shouldEmitReading) { this.emit('reading'); this.shouldEmitReading = false; } super._read(size); } _write(chunk, encoding, callback) { if (this.shouldEmitWriting) { this.emit('writing'); this.shouldEmitWriting = false; } // Per the nodejs documentation, callback must be invoked on the next tick process.nextTick(() => { super._write(chunk, encoding, callback); }); } _final(callback) { // If the stream is empty (i.e. empty file) final will be invoked before _read / _write // and we should still emit the proper events. if (this.shouldEmitReading) { this.emit('reading'); this.shouldEmitReading = false; } if (this.shouldEmitWriting) { this.emit('writing'); this.shouldEmitWriting = false; } callback(null); } } exports.PassThroughShim = PassThroughShim; /***/ }), /***/ 13660: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; var __assign = (this && this.__assign) || function () { __assign = Object.assign || function(t) { for (var s, i = 1, n = arguments.length; i < n; i++) { s = arguments[i]; for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; } return t; }; return __assign.apply(this, arguments); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.encode = encode; exports.decodeEntity = decodeEntity; exports.decode = decode; var named_references_js_1 = __nccwpck_require__(56000); var numeric_unicode_map_js_1 = __nccwpck_require__(53438); var surrogate_pairs_js_1 = __nccwpck_require__(69718); var allNamedReferences = __assign(__assign({}, named_references_js_1.namedReferences), { all: named_references_js_1.namedReferences.html5 }); var encodeRegExps = { specialChars: /[<>'"&]/g, nonAscii: /[<>'"&\u0080-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, nonAsciiPrintable: /[<>'"&\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, nonAsciiPrintableOnly: /[\x01-\x08\x11-\x15\x17-\x1F\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g, extensive: /[\x01-\x0c\x0e-\x1f\x21-\x2c\x2e-\x2f\x3a-\x40\x5b-\x60\x7b-\x7d\x7f-\uD7FF\uE000-\uFFFF\uDC00-\uDFFF]|[\uD800-\uDBFF][\uDC00-\uDFFF]?/g }; var defaultEncodeOptions = { mode: 'specialChars', level: 'all', numeric: 'decimal' }; /** Encodes all the necessary (specified by `level`) characters in the text */ function encode(text, _a) { var _b = _a === void 0 ? defaultEncodeOptions : _a, _c = _b.mode, mode = _c === void 0 ? 'specialChars' : _c, _d = _b.numeric, numeric = _d === void 0 ? 'decimal' : _d, _e = _b.level, level = _e === void 0 ? 'all' : _e; if (!text) { return ''; } var encodeRegExp = encodeRegExps[mode]; var references = allNamedReferences[level].characters; var isHex = numeric === 'hexadecimal'; return String.prototype.replace.call(text, encodeRegExp, function (input) { var result = references[input]; if (!result) { var code = input.length > 1 ? (0, surrogate_pairs_js_1.getCodePoint)(input, 0) : input.charCodeAt(0); result = (isHex ?
'&#x' + code.toString(16) : '&#' + code) + ';'; } return result; }); } var defaultDecodeOptions = { scope: 'body', level: 'all' }; var strict = /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);/g; var attribute = /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+)[;=]?/g; var baseDecodeRegExps = { xml: { strict: strict, attribute: attribute, body: named_references_js_1.bodyRegExps.xml }, html4: { strict: strict, attribute: attribute, body: named_references_js_1.bodyRegExps.html4 }, html5: { strict: strict, attribute: attribute, body: named_references_js_1.bodyRegExps.html5 } }; var decodeRegExps = __assign(__assign({}, baseDecodeRegExps), { all: baseDecodeRegExps.html5 }); var fromCharCode = String.fromCharCode; var outOfBoundsChar = fromCharCode(65533); var defaultDecodeEntityOptions = { level: 'all' }; function getDecodedEntity(entity, references, isAttribute, isStrict) { var decodeResult = entity; var decodeEntityLastChar = entity[entity.length - 1]; if (isAttribute && decodeEntityLastChar === '=') { decodeResult = entity; } else if (isStrict && decodeEntityLastChar !== ';') { decodeResult = entity; } else { var decodeResultByReference = references[entity]; if (decodeResultByReference) { decodeResult = decodeResultByReference; } else if (entity[0] === '&' && entity[1] === '#') { var decodeSecondChar = entity[2]; var decodeCode = decodeSecondChar == 'x' || decodeSecondChar == 'X' ? parseInt(entity.substr(3), 16) : parseInt(entity.substr(2)); decodeResult = decodeCode >= 0x10ffff ? outOfBoundsChar : decodeCode > 65535 ? (0, surrogate_pairs_js_1.fromCodePoint)(decodeCode) : fromCharCode(numeric_unicode_map_js_1.numericUnicodeMap[decodeCode] || decodeCode); } } return decodeResult; } /** Decodes a single entity */ function decodeEntity(entity, _a) { var _b = _a === void 0 ? defaultDecodeEntityOptions : _a, _c = _b.level, level = _c === void 0 ? 'all' : _c; if (!entity) { return ''; } return getDecodedEntity(entity, allNamedReferences[level].entities, false, false); } /** Decodes all entities in the text */ function decode(text, _a) { var _b = _a === void 0 ? defaultDecodeOptions : _a, _c = _b.level, level = _c === void 0 ? 'all' : _c, _d = _b.scope, scope = _d === void 0 ? level === 'xml' ? 
'strict' : 'body' : _d; if (!text) { return ''; } var decodeRegExp = decodeRegExps[level][scope]; var references = allNamedReferences[level].entities; var isAttribute = scope === 'attribute'; var isStrict = scope === 'strict'; return text.replace(decodeRegExp, function (entity) { return getDecodedEntity(entity, references, isAttribute, isStrict); }); } //# sourceMappingURL=index.js.map /***/ }), /***/ 56000: /***/ (function(__unused_webpack_module, exports) { "use strict"; var __assign = (this && this.__assign) || function () { __assign = Object.assign || function(t) { for (var s, i = 1, n = arguments.length; i < n; i++) { s = arguments[i]; for (var p in s) if (Object.prototype.hasOwnProperty.call(s, p)) t[p] = s[p]; } return t; }; return __assign.apply(this, arguments); }; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.namedReferences = exports.bodyRegExps = void 0; // This file is autogenerated by tools/process-named-references.ts var pairDivider = "~"; var blockDivider = "~~"; function generateNamedReferences(input, prev) { var entities = {}; var characters = {}; var blocks = input.split(blockDivider); var isOptionalBlock = false; for (var i = 0; blocks.length > i; i++) { var entries = blocks[i].split(pairDivider); for (var j = 0; j < entries.length; j += 2) { var entity = entries[j]; var character = entries[j + 1]; var fullEntity = '&' + entity + ';'; entities[fullEntity] = character; if (isOptionalBlock) { entities['&' + entity] = character; } characters[character] = fullEntity; } isOptionalBlock = true; } return prev ? { entities: __assign(__assign({}, entities), prev.entities), characters: __assign(__assign({}, characters), prev.characters) } : { entities: entities, characters: characters }; } exports.bodyRegExps = { xml: /&(?:#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g, html4: /∉|&(?:nbsp|iexcl|cent|pound|curren|yen|brvbar|sect|uml|copy|ordf|laquo|not|shy|reg|macr|deg|plusmn|sup2|sup3|acute|micro|para|middot|cedil|sup1|ordm|raquo|frac14|frac12|frac34|iquest|Agrave|Aacute|Acirc|Atilde|Auml|Aring|AElig|Ccedil|Egrave|Eacute|Ecirc|Euml|Igrave|Iacute|Icirc|Iuml|ETH|Ntilde|Ograve|Oacute|Ocirc|Otilde|Ouml|times|Oslash|Ugrave|Uacute|Ucirc|Uuml|Yacute|THORN|szlig|agrave|aacute|acirc|atilde|auml|aring|aelig|ccedil|egrave|eacute|ecirc|euml|igrave|iacute|icirc|iuml|eth|ntilde|ograve|oacute|ocirc|otilde|ouml|divide|oslash|ugrave|uacute|ucirc|uuml|yacute|thorn|yuml|quot|amp|lt|gt|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g, html5: /·|℗|⋇|⪧|⩺|⋗|⦕|⩼|⪆|⥸|⋗|⋛|⪌|≷|≳|⪦|⩹|⋖|⋋|⋉|⥶|⩻|⦖|◃|⊴|◂|∉|⋹̸|⋵̸|∉|⋷|⋶|∌|∌|⋾|⋽|∥|⊠|⨱|⨰|&(?:AElig|AMP|Aacute|Acirc|Agrave|Aring|Atilde|Auml|COPY|Ccedil|ETH|Eacute|Ecirc|Egrave|Euml|GT|Iacute|Icirc|Igrave|Iuml|LT|Ntilde|Oacute|Ocirc|Ograve|Oslash|Otilde|Ouml|QUOT|REG|THORN|Uacute|Ucirc|Ugrave|Uuml|Yacute|aacute|acirc|acute|aelig|agrave|amp|aring|atilde|auml|brvbar|ccedil|cedil|cent|copy|curren|deg|divide|eacute|ecirc|egrave|eth|euml|frac12|frac14|frac34|gt|iacute|icirc|iexcl|igrave|iquest|iuml|laquo|lt|macr|micro|middot|nbsp|not|ntilde|oacute|ocirc|ograve|ordf|ordm|oslash|otilde|ouml|para|plusmn|pound|quot|raquo|reg|sect|shy|sup1|sup2|sup3|szlig|thorn|times|uacute|ucirc|ugrave|uml|uuml|yacute|yen|yuml|#\d+|#[xX][\da-fA-F]+|[0-9a-zA-Z]+);?/g }; exports.namedReferences = {}; exports.namedReferences.xml = generateNamedReferences("lt~<~gt~>~quot~\"~apos~'~amp~&"); exports.namedReferences.html4 = generateNamedReferences("apos~'~OElig~Œ~oelig~œ~Scaron~Š~scaron~š~Yuml~Ÿ~circ~ˆ~tilde~˜~ensp~ ~emsp~ ~thinsp~ 
~zwnj~‌~zwj~‍~lrm~‎~rlm~‏~ndash~–~mdash~—~lsquo~‘~rsquo~’~sbquo~‚~ldquo~“~rdquo~”~bdquo~„~dagger~†~Dagger~‡~permil~‰~lsaquo~‹~rsaquo~›~euro~€~fnof~ƒ~Alpha~Α~Beta~Β~Gamma~Γ~Delta~Δ~Epsilon~Ε~Zeta~Ζ~Eta~Η~Theta~Θ~Iota~Ι~Kappa~Κ~Lambda~Λ~Mu~Μ~Nu~Ν~Xi~Ξ~Omicron~Ο~Pi~Π~Rho~Ρ~Sigma~Σ~Tau~Τ~Upsilon~Υ~Phi~Φ~Chi~Χ~Psi~Ψ~Omega~Ω~alpha~α~beta~β~gamma~γ~delta~δ~epsilon~ε~zeta~ζ~eta~η~theta~θ~iota~ι~kappa~κ~lambda~λ~mu~μ~nu~ν~xi~ξ~omicron~ο~pi~π~rho~ρ~sigmaf~ς~sigma~σ~tau~τ~upsilon~υ~phi~φ~chi~χ~psi~ψ~omega~ω~thetasym~ϑ~upsih~ϒ~piv~ϖ~bull~•~hellip~…~prime~′~Prime~″~oline~‾~frasl~⁄~weierp~℘~image~ℑ~real~ℜ~trade~™~alefsym~ℵ~larr~←~uarr~↑~rarr~→~darr~↓~harr~↔~crarr~↵~lArr~⇐~uArr~⇑~rArr~⇒~dArr~⇓~hArr~⇔~forall~∀~part~∂~exist~∃~empty~∅~nabla~∇~isin~∈~notin~∉~ni~∋~prod~∏~sum~∑~minus~−~lowast~∗~radic~√~prop~∝~infin~∞~ang~∠~and~∧~or~∨~cap~∩~cup~∪~int~∫~there4~∴~sim~∼~cong~≅~asymp~≈~ne~≠~equiv~≡~le~≤~ge~≥~sub~⊂~sup~⊃~nsub~⊄~sube~⊆~supe~⊇~oplus~⊕~otimes~⊗~perp~⊥~sdot~⋅~lceil~⌈~rceil~⌉~lfloor~⌊~rfloor~⌋~lang~〈~rang~〉~loz~◊~spades~♠~clubs~♣~hearts~♥~diams~♦~~nbsp~ ~iexcl~¡~cent~¢~pound~£~curren~¤~yen~¥~brvbar~¦~sect~§~uml~¨~copy~©~ordf~ª~laquo~«~not~¬~shy~­~reg~®~macr~¯~deg~°~plusmn~±~sup2~²~sup3~³~acute~´~micro~µ~para~¶~middot~·~cedil~¸~sup1~¹~ordm~º~raquo~»~frac14~¼~frac12~½~frac34~¾~iquest~¿~Agrave~À~Aacute~Á~Acirc~Â~Atilde~Ã~Auml~Ä~Aring~Å~AElig~Æ~Ccedil~Ç~Egrave~È~Eacute~É~Ecirc~Ê~Euml~Ë~Igrave~Ì~Iacute~Í~Icirc~Î~Iuml~Ï~ETH~Ð~Ntilde~Ñ~Ograve~Ò~Oacute~Ó~Ocirc~Ô~Otilde~Õ~Ouml~Ö~times~×~Oslash~Ø~Ugrave~Ù~Uacute~Ú~Ucirc~Û~Uuml~Ü~Yacute~Ý~THORN~Þ~szlig~ß~agrave~à~aacute~á~acirc~â~atilde~ã~auml~ä~aring~å~aelig~æ~ccedil~ç~egrave~è~eacute~é~ecirc~ê~euml~ë~igrave~ì~iacute~í~icirc~î~iuml~ï~eth~ð~ntilde~ñ~ograve~ò~oacute~ó~ocirc~ô~otilde~õ~ouml~ö~divide~÷~oslash~ø~ugrave~ù~uacute~ú~ucirc~û~uuml~ü~yacute~ý~thorn~þ~yuml~ÿ~quot~\"~amp~&~lt~<~gt~>"); exports.namedReferences.html5 = 
generateNamedReferences("Abreve~Ă~Acy~А~Afr~𝔄~Amacr~Ā~And~⩓~Aogon~Ą~Aopf~𝔸~ApplyFunction~⁡~Ascr~𝒜~Assign~≔~Backslash~∖~Barv~⫧~Barwed~⌆~Bcy~Б~Because~∵~Bernoullis~ℬ~Bfr~𝔅~Bopf~𝔹~Breve~˘~Bscr~ℬ~Bumpeq~≎~CHcy~Ч~Cacute~Ć~Cap~⋒~CapitalDifferentialD~ⅅ~Cayleys~ℭ~Ccaron~Č~Ccirc~Ĉ~Cconint~∰~Cdot~Ċ~Cedilla~¸~CenterDot~·~Cfr~ℭ~CircleDot~⊙~CircleMinus~⊖~CirclePlus~⊕~CircleTimes~⊗~ClockwiseContourIntegral~∲~CloseCurlyDoubleQuote~”~CloseCurlyQuote~’~Colon~∷~Colone~⩴~Congruent~≡~Conint~∯~ContourIntegral~∮~Copf~ℂ~Coproduct~∐~CounterClockwiseContourIntegral~∳~Cross~⨯~Cscr~𝒞~Cup~⋓~CupCap~≍~DD~ⅅ~DDotrahd~⤑~DJcy~Ђ~DScy~Ѕ~DZcy~Џ~Darr~↡~Dashv~⫤~Dcaron~Ď~Dcy~Д~Del~∇~Dfr~𝔇~DiacriticalAcute~´~DiacriticalDot~˙~DiacriticalDoubleAcute~˝~DiacriticalGrave~`~DiacriticalTilde~˜~Diamond~⋄~DifferentialD~ⅆ~Dopf~𝔻~Dot~¨~DotDot~⃜~DotEqual~≐~DoubleContourIntegral~∯~DoubleDot~¨~DoubleDownArrow~⇓~DoubleLeftArrow~⇐~DoubleLeftRightArrow~⇔~DoubleLeftTee~⫤~DoubleLongLeftArrow~⟸~DoubleLongLeftRightArrow~⟺~DoubleLongRightArrow~⟹~DoubleRightArrow~⇒~DoubleRightTee~⊨~DoubleUpArrow~⇑~DoubleUpDownArrow~⇕~DoubleVerticalBar~∥~DownArrow~↓~DownArrowBar~⤓~DownArrowUpArrow~⇵~DownBreve~̑~DownLeftRightVector~⥐~DownLeftTeeVector~⥞~DownLeftVector~↽~DownLeftVectorBar~⥖~DownRightTeeVector~⥟~DownRightVector~⇁~DownRightVectorBar~⥗~DownTee~⊤~DownTeeArrow~↧~Downarrow~⇓~Dscr~𝒟~Dstrok~Đ~ENG~Ŋ~Ecaron~Ě~Ecy~Э~Edot~Ė~Efr~𝔈~Element~∈~Emacr~Ē~EmptySmallSquare~◻~EmptyVerySmallSquare~▫~Eogon~Ę~Eopf~𝔼~Equal~⩵~EqualTilde~≂~Equilibrium~⇌~Escr~ℰ~Esim~⩳~Exists~∃~ExponentialE~ⅇ~Fcy~Ф~Ffr~𝔉~FilledSmallSquare~◼~FilledVerySmallSquare~▪~Fopf~𝔽~ForAll~∀~Fouriertrf~ℱ~Fscr~ℱ~GJcy~Ѓ~Gammad~Ϝ~Gbreve~Ğ~Gcedil~Ģ~Gcirc~Ĝ~Gcy~Г~Gdot~Ġ~Gfr~𝔊~Gg~⋙~Gopf~𝔾~GreaterEqual~≥~GreaterEqualLess~⋛~GreaterFullEqual~≧~GreaterGreater~⪢~GreaterLess~≷~GreaterSlantEqual~⩾~GreaterTilde~≳~Gscr~𝒢~Gt~≫~HARDcy~Ъ~Hacek~ˇ~Hat~^~Hcirc~Ĥ~Hfr~ℌ~HilbertSpace~ℋ~Hopf~ℍ~HorizontalLine~─~Hscr~ℋ~Hstrok~Ħ~HumpDownHump~≎~HumpEqual~≏~IEcy~Е~IJlig~IJ~IOcy~Ё~Icy~И~Idot~İ~Ifr~ℑ~Im~ℑ~Imacr~Ī~ImaginaryI~ⅈ~Implies~⇒~Int~∬~Integral~∫~Intersection~⋂~InvisibleComma~⁣~InvisibleTimes~⁢~Iogon~Į~Iopf~𝕀~Iscr~ℐ~Itilde~Ĩ~Iukcy~І~Jcirc~Ĵ~Jcy~Й~Jfr~𝔍~Jopf~𝕁~Jscr~𝒥~Jsercy~Ј~Jukcy~Є~KHcy~Х~KJcy~Ќ~Kcedil~Ķ~Kcy~К~Kfr~𝔎~Kopf~𝕂~Kscr~𝒦~LJcy~Љ~Lacute~Ĺ~Lang~⟪~Laplacetrf~ℒ~Larr~↞~Lcaron~Ľ~Lcedil~Ļ~Lcy~Л~LeftAngleBracket~⟨~LeftArrow~←~LeftArrowBar~⇤~LeftArrowRightArrow~⇆~LeftCeiling~⌈~LeftDoubleBracket~⟦~LeftDownTeeVector~⥡~LeftDownVector~⇃~LeftDownVectorBar~⥙~LeftFloor~⌊~LeftRightArrow~↔~LeftRightVector~⥎~LeftTee~⊣~LeftTeeArrow~↤~LeftTeeVector~⥚~LeftTriangle~⊲~LeftTriangleBar~⧏~LeftTriangleEqual~⊴~LeftUpDownVector~⥑~LeftUpTeeVector~⥠~LeftUpVector~↿~LeftUpVectorBar~⥘~LeftVector~↼~LeftVectorBar~⥒~Leftarrow~⇐~Leftrightarrow~⇔~LessEqualGreater~⋚~LessFullEqual~≦~LessGreater~≶~LessLess~⪡~LessSlantEqual~⩽~LessTilde~≲~Lfr~𝔏~Ll~⋘~Lleftarrow~⇚~Lmidot~Ŀ~LongLeftArrow~⟵~LongLeftRightArrow~⟷~LongRightArrow~⟶~Longleftarrow~⟸~Longleftrightarrow~⟺~Longrightarrow~⟹~Lopf~𝕃~LowerLeftArrow~↙~LowerRightArrow~↘~Lscr~ℒ~Lsh~↰~Lstrok~Ł~Lt~≪~Map~⤅~Mcy~М~MediumSpace~ ~Mellintrf~ℳ~Mfr~𝔐~MinusPlus~∓~Mopf~𝕄~Mscr~ℳ~NJcy~Њ~Nacute~Ń~Ncaron~Ň~Ncedil~Ņ~Ncy~Н~NegativeMediumSpace~​~NegativeThickSpace~​~NegativeThinSpace~​~NegativeVeryThinSpace~​~NestedGreaterGreater~≫~NestedLessLess~≪~NewLine~\n~Nfr~𝔑~NoBreak~⁠~NonBreakingSpace~ 
~Nopf~ℕ~Not~⫬~NotCongruent~≢~NotCupCap~≭~NotDoubleVerticalBar~∦~NotElement~∉~NotEqual~≠~NotEqualTilde~≂̸~NotExists~∄~NotGreater~≯~NotGreaterEqual~≱~NotGreaterFullEqual~≧̸~NotGreaterGreater~≫̸~NotGreaterLess~≹~NotGreaterSlantEqual~⩾̸~NotGreaterTilde~≵~NotHumpDownHump~≎̸~NotHumpEqual~≏̸~NotLeftTriangle~⋪~NotLeftTriangleBar~⧏̸~NotLeftTriangleEqual~⋬~NotLess~≮~NotLessEqual~≰~NotLessGreater~≸~NotLessLess~≪̸~NotLessSlantEqual~⩽̸~NotLessTilde~≴~NotNestedGreaterGreater~⪢̸~NotNestedLessLess~⪡̸~NotPrecedes~⊀~NotPrecedesEqual~⪯̸~NotPrecedesSlantEqual~⋠~NotReverseElement~∌~NotRightTriangle~⋫~NotRightTriangleBar~⧐̸~NotRightTriangleEqual~⋭~NotSquareSubset~⊏̸~NotSquareSubsetEqual~⋢~NotSquareSuperset~⊐̸~NotSquareSupersetEqual~⋣~NotSubset~⊂⃒~NotSubsetEqual~⊈~NotSucceeds~⊁~NotSucceedsEqual~⪰̸~NotSucceedsSlantEqual~⋡~NotSucceedsTilde~≿̸~NotSuperset~⊃⃒~NotSupersetEqual~⊉~NotTilde~≁~NotTildeEqual~≄~NotTildeFullEqual~≇~NotTildeTilde~≉~NotVerticalBar~∤~Nscr~𝒩~Ocy~О~Odblac~Ő~Ofr~𝔒~Omacr~Ō~Oopf~𝕆~OpenCurlyDoubleQuote~“~OpenCurlyQuote~‘~Or~⩔~Oscr~𝒪~Otimes~⨷~OverBar~‾~OverBrace~⏞~OverBracket~⎴~OverParenthesis~⏜~PartialD~∂~Pcy~П~Pfr~𝔓~PlusMinus~±~Poincareplane~ℌ~Popf~ℙ~Pr~⪻~Precedes~≺~PrecedesEqual~⪯~PrecedesSlantEqual~≼~PrecedesTilde~≾~Product~∏~Proportion~∷~Proportional~∝~Pscr~𝒫~Qfr~𝔔~Qopf~ℚ~Qscr~𝒬~RBarr~⤐~Racute~Ŕ~Rang~⟫~Rarr~↠~Rarrtl~⤖~Rcaron~Ř~Rcedil~Ŗ~Rcy~Р~Re~ℜ~ReverseElement~∋~ReverseEquilibrium~⇋~ReverseUpEquilibrium~⥯~Rfr~ℜ~RightAngleBracket~⟩~RightArrow~→~RightArrowBar~⇥~RightArrowLeftArrow~⇄~RightCeiling~⌉~RightDoubleBracket~⟧~RightDownTeeVector~⥝~RightDownVector~⇂~RightDownVectorBar~⥕~RightFloor~⌋~RightTee~⊢~RightTeeArrow~↦~RightTeeVector~⥛~RightTriangle~⊳~RightTriangleBar~⧐~RightTriangleEqual~⊵~RightUpDownVector~⥏~RightUpTeeVector~⥜~RightUpVector~↾~RightUpVectorBar~⥔~RightVector~⇀~RightVectorBar~⥓~Rightarrow~⇒~Ropf~ℝ~RoundImplies~⥰~Rrightarrow~⇛~Rscr~ℛ~Rsh~↱~RuleDelayed~⧴~SHCHcy~Щ~SHcy~Ш~SOFTcy~Ь~Sacute~Ś~Sc~⪼~Scedil~Ş~Scirc~Ŝ~Scy~С~Sfr~𝔖~ShortDownArrow~↓~ShortLeftArrow~←~ShortRightArrow~→~ShortUpArrow~↑~SmallCircle~∘~Sopf~𝕊~Sqrt~√~Square~□~SquareIntersection~⊓~SquareSubset~⊏~SquareSubsetEqual~⊑~SquareSuperset~⊐~SquareSupersetEqual~⊒~SquareUnion~⊔~Sscr~𝒮~Star~⋆~Sub~⋐~Subset~⋐~SubsetEqual~⊆~Succeeds~≻~SucceedsEqual~⪰~SucceedsSlantEqual~≽~SucceedsTilde~≿~SuchThat~∋~Sum~∑~Sup~⋑~Superset~⊃~SupersetEqual~⊇~Supset~⋑~TRADE~™~TSHcy~Ћ~TScy~Ц~Tab~\t~Tcaron~Ť~Tcedil~Ţ~Tcy~Т~Tfr~𝔗~Therefore~∴~ThickSpace~  ~ThinSpace~ ~Tilde~∼~TildeEqual~≃~TildeFullEqual~≅~TildeTilde~≈~Topf~𝕋~TripleDot~⃛~Tscr~𝒯~Tstrok~Ŧ~Uarr~↟~Uarrocir~⥉~Ubrcy~Ў~Ubreve~Ŭ~Ucy~У~Udblac~Ű~Ufr~𝔘~Umacr~Ū~UnderBar~_~UnderBrace~⏟~UnderBracket~⎵~UnderParenthesis~⏝~Union~⋃~UnionPlus~⊎~Uogon~Ų~Uopf~𝕌~UpArrow~↑~UpArrowBar~⤒~UpArrowDownArrow~⇅~UpDownArrow~↕~UpEquilibrium~⥮~UpTee~⊥~UpTeeArrow~↥~Uparrow~⇑~Updownarrow~⇕~UpperLeftArrow~↖~UpperRightArrow~↗~Upsi~ϒ~Uring~Ů~Uscr~𝒰~Utilde~Ũ~VDash~⊫~Vbar~⫫~Vcy~В~Vdash~⊩~Vdashl~⫦~Vee~⋁~Verbar~‖~Vert~‖~VerticalBar~∣~VerticalLine~|~VerticalSeparator~❘~VerticalTilde~≀~VeryThinSpace~ 
~Vfr~𝔙~Vopf~𝕍~Vscr~𝒱~Vvdash~⊪~Wcirc~Ŵ~Wedge~⋀~Wfr~𝔚~Wopf~𝕎~Wscr~𝒲~Xfr~𝔛~Xopf~𝕏~Xscr~𝒳~YAcy~Я~YIcy~Ї~YUcy~Ю~Ycirc~Ŷ~Ycy~Ы~Yfr~𝔜~Yopf~𝕐~Yscr~𝒴~ZHcy~Ж~Zacute~Ź~Zcaron~Ž~Zcy~З~Zdot~Ż~ZeroWidthSpace~​~Zfr~ℨ~Zopf~ℤ~Zscr~𝒵~abreve~ă~ac~∾~acE~∾̳~acd~∿~acy~а~af~⁡~afr~𝔞~aleph~ℵ~amacr~ā~amalg~⨿~andand~⩕~andd~⩜~andslope~⩘~andv~⩚~ange~⦤~angle~∠~angmsd~∡~angmsdaa~⦨~angmsdab~⦩~angmsdac~⦪~angmsdad~⦫~angmsdae~⦬~angmsdaf~⦭~angmsdag~⦮~angmsdah~⦯~angrt~∟~angrtvb~⊾~angrtvbd~⦝~angsph~∢~angst~Å~angzarr~⍼~aogon~ą~aopf~𝕒~ap~≈~apE~⩰~apacir~⩯~ape~≊~apid~≋~approx~≈~approxeq~≊~ascr~𝒶~ast~*~asympeq~≍~awconint~∳~awint~⨑~bNot~⫭~backcong~≌~backepsilon~϶~backprime~‵~backsim~∽~backsimeq~⋍~barvee~⊽~barwed~⌅~barwedge~⌅~bbrk~⎵~bbrktbrk~⎶~bcong~≌~bcy~б~becaus~∵~because~∵~bemptyv~⦰~bepsi~϶~bernou~ℬ~beth~ℶ~between~≬~bfr~𝔟~bigcap~⋂~bigcirc~◯~bigcup~⋃~bigodot~⨀~bigoplus~⨁~bigotimes~⨂~bigsqcup~⨆~bigstar~★~bigtriangledown~▽~bigtriangleup~△~biguplus~⨄~bigvee~⋁~bigwedge~⋀~bkarow~⤍~blacklozenge~⧫~blacksquare~▪~blacktriangle~▴~blacktriangledown~▾~blacktriangleleft~◂~blacktriangleright~▸~blank~␣~blk12~▒~blk14~░~blk34~▓~block~█~bne~=⃥~bnequiv~≡⃥~bnot~⌐~bopf~𝕓~bot~⊥~bottom~⊥~bowtie~⋈~boxDL~╗~boxDR~╔~boxDl~╖~boxDr~╓~boxH~═~boxHD~╦~boxHU~╩~boxHd~╤~boxHu~╧~boxUL~╝~boxUR~╚~boxUl~╜~boxUr~╙~boxV~║~boxVH~╬~boxVL~╣~boxVR~╠~boxVh~╫~boxVl~╢~boxVr~╟~boxbox~⧉~boxdL~╕~boxdR~╒~boxdl~┐~boxdr~┌~boxh~─~boxhD~╥~boxhU~╨~boxhd~┬~boxhu~┴~boxminus~⊟~boxplus~⊞~boxtimes~⊠~boxuL~╛~boxuR~╘~boxul~┘~boxur~└~boxv~│~boxvH~╪~boxvL~╡~boxvR~╞~boxvh~┼~boxvl~┤~boxvr~├~bprime~‵~breve~˘~bscr~𝒷~bsemi~⁏~bsim~∽~bsime~⋍~bsol~\\~bsolb~⧅~bsolhsub~⟈~bullet~•~bump~≎~bumpE~⪮~bumpe~≏~bumpeq~≏~cacute~ć~capand~⩄~capbrcup~⩉~capcap~⩋~capcup~⩇~capdot~⩀~caps~∩︀~caret~⁁~caron~ˇ~ccaps~⩍~ccaron~č~ccirc~ĉ~ccups~⩌~ccupssm~⩐~cdot~ċ~cemptyv~⦲~centerdot~·~cfr~𝔠~chcy~ч~check~✓~checkmark~✓~cir~○~cirE~⧃~circeq~≗~circlearrowleft~↺~circlearrowright~↻~circledR~®~circledS~Ⓢ~circledast~⊛~circledcirc~⊚~circleddash~⊝~cire~≗~cirfnint~⨐~cirmid~⫯~cirscir~⧂~clubsuit~♣~colon~:~colone~≔~coloneq~≔~comma~,~commat~@~comp~∁~compfn~∘~complement~∁~complexes~ℂ~congdot~⩭~conint~∮~copf~𝕔~coprod~∐~copysr~℗~cross~✗~cscr~𝒸~csub~⫏~csube~⫑~csup~⫐~csupe~⫒~ctdot~⋯~cudarrl~⤸~cudarrr~⤵~cuepr~⋞~cuesc~⋟~cularr~↶~cularrp~⤽~cupbrcap~⩈~cupcap~⩆~cupcup~⩊~cupdot~⊍~cupor~⩅~cups~∪︀~curarr~↷~curarrm~⤼~curlyeqprec~⋞~curlyeqsucc~⋟~curlyvee~⋎~curlywedge~⋏~curvearrowleft~↶~curvearrowright~↷~cuvee~⋎~cuwed~⋏~cwconint~∲~cwint~∱~cylcty~⌭~dHar~⥥~daleth~ℸ~dash~‐~dashv~⊣~dbkarow~⤏~dblac~˝~dcaron~ď~dcy~д~dd~ⅆ~ddagger~‡~ddarr~⇊~ddotseq~⩷~demptyv~⦱~dfisht~⥿~dfr~𝔡~dharl~⇃~dharr~⇂~diam~⋄~diamond~⋄~diamondsuit~♦~die~¨~digamma~ϝ~disin~⋲~div~÷~divideontimes~⋇~divonx~⋇~djcy~ђ~dlcorn~⌞~dlcrop~⌍~dollar~$~dopf~𝕕~dot~˙~doteq~≐~doteqdot~≑~dotminus~∸~dotplus~∔~dotsquare~⊡~doublebarwedge~⌆~downarrow~↓~downdownarrows~⇊~downharpoonleft~⇃~downharpoonright~⇂~drbkarow~⤐~drcorn~⌟~drcrop~⌌~dscr~𝒹~dscy~ѕ~dsol~⧶~dstrok~đ~dtdot~⋱~dtri~▿~dtrif~▾~duarr~⇵~duhar~⥯~dwangle~⦦~dzcy~џ~dzigrarr~⟿~eDDot~⩷~eDot~≑~easter~⩮~ecaron~ě~ecir~≖~ecolon~≕~ecy~э~edot~ė~ee~ⅇ~efDot~≒~efr~𝔢~eg~⪚~egs~⪖~egsdot~⪘~el~⪙~elinters~⏧~ell~ℓ~els~⪕~elsdot~⪗~emacr~ē~emptyset~∅~emptyv~∅~emsp13~ ~emsp14~ 
~eng~ŋ~eogon~ę~eopf~𝕖~epar~⋕~eparsl~⧣~eplus~⩱~epsi~ε~epsiv~ϵ~eqcirc~≖~eqcolon~≕~eqsim~≂~eqslantgtr~⪖~eqslantless~⪕~equals~=~equest~≟~equivDD~⩸~eqvparsl~⧥~erDot~≓~erarr~⥱~escr~ℯ~esdot~≐~esim~≂~excl~!~expectation~ℰ~exponentiale~ⅇ~fallingdotseq~≒~fcy~ф~female~♀~ffilig~ffi~fflig~ff~ffllig~ffl~ffr~𝔣~filig~fi~fjlig~fj~flat~♭~fllig~fl~fltns~▱~fopf~𝕗~fork~⋔~forkv~⫙~fpartint~⨍~frac13~⅓~frac15~⅕~frac16~⅙~frac18~⅛~frac23~⅔~frac25~⅖~frac35~⅗~frac38~⅜~frac45~⅘~frac56~⅚~frac58~⅝~frac78~⅞~frown~⌢~fscr~𝒻~gE~≧~gEl~⪌~gacute~ǵ~gammad~ϝ~gap~⪆~gbreve~ğ~gcirc~ĝ~gcy~г~gdot~ġ~gel~⋛~geq~≥~geqq~≧~geqslant~⩾~ges~⩾~gescc~⪩~gesdot~⪀~gesdoto~⪂~gesdotol~⪄~gesl~⋛︀~gesles~⪔~gfr~𝔤~gg~≫~ggg~⋙~gimel~ℷ~gjcy~ѓ~gl~≷~glE~⪒~gla~⪥~glj~⪤~gnE~≩~gnap~⪊~gnapprox~⪊~gne~⪈~gneq~⪈~gneqq~≩~gnsim~⋧~gopf~𝕘~grave~`~gscr~ℊ~gsim~≳~gsime~⪎~gsiml~⪐~gtcc~⪧~gtcir~⩺~gtdot~⋗~gtlPar~⦕~gtquest~⩼~gtrapprox~⪆~gtrarr~⥸~gtrdot~⋗~gtreqless~⋛~gtreqqless~⪌~gtrless~≷~gtrsim~≳~gvertneqq~≩︀~gvnE~≩︀~hairsp~ ~half~½~hamilt~ℋ~hardcy~ъ~harrcir~⥈~harrw~↭~hbar~ℏ~hcirc~ĥ~heartsuit~♥~hercon~⊹~hfr~𝔥~hksearow~⤥~hkswarow~⤦~hoarr~⇿~homtht~∻~hookleftarrow~↩~hookrightarrow~↪~hopf~𝕙~horbar~―~hscr~𝒽~hslash~ℏ~hstrok~ħ~hybull~⁃~hyphen~‐~ic~⁣~icy~и~iecy~е~iff~⇔~ifr~𝔦~ii~ⅈ~iiiint~⨌~iiint~∭~iinfin~⧜~iiota~℩~ijlig~ij~imacr~ī~imagline~ℐ~imagpart~ℑ~imath~ı~imof~⊷~imped~Ƶ~in~∈~incare~℅~infintie~⧝~inodot~ı~intcal~⊺~integers~ℤ~intercal~⊺~intlarhk~⨗~intprod~⨼~iocy~ё~iogon~į~iopf~𝕚~iprod~⨼~iscr~𝒾~isinE~⋹~isindot~⋵~isins~⋴~isinsv~⋳~isinv~∈~it~⁢~itilde~ĩ~iukcy~і~jcirc~ĵ~jcy~й~jfr~𝔧~jmath~ȷ~jopf~𝕛~jscr~𝒿~jsercy~ј~jukcy~є~kappav~ϰ~kcedil~ķ~kcy~к~kfr~𝔨~kgreen~ĸ~khcy~х~kjcy~ќ~kopf~𝕜~kscr~𝓀~lAarr~⇚~lAtail~⤛~lBarr~⤎~lE~≦~lEg~⪋~lHar~⥢~lacute~ĺ~laemptyv~⦴~lagran~ℒ~langd~⦑~langle~⟨~lap~⪅~larrb~⇤~larrbfs~⤟~larrfs~⤝~larrhk~↩~larrlp~↫~larrpl~⤹~larrsim~⥳~larrtl~↢~lat~⪫~latail~⤙~late~⪭~lates~⪭︀~lbarr~⤌~lbbrk~❲~lbrace~{~lbrack~[~lbrke~⦋~lbrksld~⦏~lbrkslu~⦍~lcaron~ľ~lcedil~ļ~lcub~{~lcy~л~ldca~⤶~ldquor~„~ldrdhar~⥧~ldrushar~⥋~ldsh~↲~leftarrow~←~leftarrowtail~↢~leftharpoondown~↽~leftharpoonup~↼~leftleftarrows~⇇~leftrightarrow~↔~leftrightarrows~⇆~leftrightharpoons~⇋~leftrightsquigarrow~↭~leftthreetimes~⋋~leg~⋚~leq~≤~leqq~≦~leqslant~⩽~les~⩽~lescc~⪨~lesdot~⩿~lesdoto~⪁~lesdotor~⪃~lesg~⋚︀~lesges~⪓~lessapprox~⪅~lessdot~⋖~lesseqgtr~⋚~lesseqqgtr~⪋~lessgtr~≶~lesssim~≲~lfisht~⥼~lfr~𝔩~lg~≶~lgE~⪑~lhard~↽~lharu~↼~lharul~⥪~lhblk~▄~ljcy~љ~ll~≪~llarr~⇇~llcorner~⌞~llhard~⥫~lltri~◺~lmidot~ŀ~lmoust~⎰~lmoustache~⎰~lnE~≨~lnap~⪉~lnapprox~⪉~lne~⪇~lneq~⪇~lneqq~≨~lnsim~⋦~loang~⟬~loarr~⇽~lobrk~⟦~longleftarrow~⟵~longleftrightarrow~⟷~longmapsto~⟼~longrightarrow~⟶~looparrowleft~↫~looparrowright~↬~lopar~⦅~lopf~𝕝~loplus~⨭~lotimes~⨴~lowbar~_~lozenge~◊~lozf~⧫~lpar~(~lparlt~⦓~lrarr~⇆~lrcorner~⌟~lrhar~⇋~lrhard~⥭~lrtri~⊿~lscr~𝓁~lsh~↰~lsim~≲~lsime~⪍~lsimg~⪏~lsqb~[~lsquor~‚~lstrok~ł~ltcc~⪦~ltcir~⩹~ltdot~⋖~lthree~⋋~ltimes~⋉~ltlarr~⥶~ltquest~⩻~ltrPar~⦖~ltri~◃~ltrie~⊴~ltrif~◂~lurdshar~⥊~luruhar~⥦~lvertneqq~≨︀~lvnE~≨︀~mDDot~∺~male~♂~malt~✠~maltese~✠~map~↦~mapsto~↦~mapstodown~↧~mapstoleft~↤~mapstoup~↥~marker~▮~mcomma~⨩~mcy~м~measuredangle~∡~mfr~𝔪~mho~℧~mid~∣~midast~*~midcir~⫰~minusb~⊟~minusd~∸~minusdu~⨪~mlcp~⫛~mldr~…~mnplus~∓~models~⊧~mopf~𝕞~mp~∓~mscr~𝓂~mstpos~∾~multimap~⊸~mumap~⊸~nGg~⋙̸~nGt~≫⃒~nGtv~≫̸~nLeftarrow~⇍~nLeftrightarrow~⇎~nLl~⋘̸~nLt~≪⃒~nLtv~≪̸~nRightarrow~⇏~nVDash~⊯~nVdash~⊮~nacute~ń~nang~∠⃒~nap~≉~napE~⩰̸~napid~≋̸~napos~ʼn~napprox~≉~natur~♮~natural~♮~naturals~ℕ~nbump~≎̸~nbumpe~≏̸~ncap~⩃~ncaron~ň~ncedil~ņ~ncong~≇~ncongdot~⩭̸~ncup~⩂~ncy~н~neArr~⇗~nearhk~⤤~nearr~↗~nearrow~↗~nedot~≐̸~nequiv~≢~nesear~⤨~nesim~≂̸~nexist~∄~nexists~∄~nfr~𝔫~ngE~≧̸~nge~≱
~ngeq~≱~ngeqq~≧̸~ngeqslant~⩾̸~nges~⩾̸~ngsim~≵~ngt~≯~ngtr~≯~nhArr~⇎~nharr~↮~nhpar~⫲~nis~⋼~nisd~⋺~niv~∋~njcy~њ~nlArr~⇍~nlE~≦̸~nlarr~↚~nldr~‥~nle~≰~nleftarrow~↚~nleftrightarrow~↮~nleq~≰~nleqq~≦̸~nleqslant~⩽̸~nles~⩽̸~nless~≮~nlsim~≴~nlt~≮~nltri~⋪~nltrie~⋬~nmid~∤~nopf~𝕟~notinE~⋹̸~notindot~⋵̸~notinva~∉~notinvb~⋷~notinvc~⋶~notni~∌~notniva~∌~notnivb~⋾~notnivc~⋽~npar~∦~nparallel~∦~nparsl~⫽⃥~npart~∂̸~npolint~⨔~npr~⊀~nprcue~⋠~npre~⪯̸~nprec~⊀~npreceq~⪯̸~nrArr~⇏~nrarr~↛~nrarrc~⤳̸~nrarrw~↝̸~nrightarrow~↛~nrtri~⋫~nrtrie~⋭~nsc~⊁~nsccue~⋡~nsce~⪰̸~nscr~𝓃~nshortmid~∤~nshortparallel~∦~nsim~≁~nsime~≄~nsimeq~≄~nsmid~∤~nspar~∦~nsqsube~⋢~nsqsupe~⋣~nsubE~⫅̸~nsube~⊈~nsubset~⊂⃒~nsubseteq~⊈~nsubseteqq~⫅̸~nsucc~⊁~nsucceq~⪰̸~nsup~⊅~nsupE~⫆̸~nsupe~⊉~nsupset~⊃⃒~nsupseteq~⊉~nsupseteqq~⫆̸~ntgl~≹~ntlg~≸~ntriangleleft~⋪~ntrianglelefteq~⋬~ntriangleright~⋫~ntrianglerighteq~⋭~num~#~numero~№~numsp~ ~nvDash~⊭~nvHarr~⤄~nvap~≍⃒~nvdash~⊬~nvge~≥⃒~nvgt~>⃒~nvinfin~⧞~nvlArr~⤂~nvle~≤⃒~nvlt~<⃒~nvltrie~⊴⃒~nvrArr~⤃~nvrtrie~⊵⃒~nvsim~∼⃒~nwArr~⇖~nwarhk~⤣~nwarr~↖~nwarrow~↖~nwnear~⤧~oS~Ⓢ~oast~⊛~ocir~⊚~ocy~о~odash~⊝~odblac~ő~odiv~⨸~odot~⊙~odsold~⦼~ofcir~⦿~ofr~𝔬~ogon~˛~ogt~⧁~ohbar~⦵~ohm~Ω~oint~∮~olarr~↺~olcir~⦾~olcross~⦻~olt~⧀~omacr~ō~omid~⦶~ominus~⊖~oopf~𝕠~opar~⦷~operp~⦹~orarr~↻~ord~⩝~order~ℴ~orderof~ℴ~origof~⊶~oror~⩖~orslope~⩗~orv~⩛~oscr~ℴ~osol~⊘~otimesas~⨶~ovbar~⌽~par~∥~parallel~∥~parsim~⫳~parsl~⫽~pcy~п~percnt~%~period~.~pertenk~‱~pfr~𝔭~phiv~ϕ~phmmat~ℳ~phone~☎~pitchfork~⋔~planck~ℏ~planckh~ℎ~plankv~ℏ~plus~+~plusacir~⨣~plusb~⊞~pluscir~⨢~plusdo~∔~plusdu~⨥~pluse~⩲~plussim~⨦~plustwo~⨧~pm~±~pointint~⨕~popf~𝕡~pr~≺~prE~⪳~prap~⪷~prcue~≼~pre~⪯~prec~≺~precapprox~⪷~preccurlyeq~≼~preceq~⪯~precnapprox~⪹~precneqq~⪵~precnsim~⋨~precsim~≾~primes~ℙ~prnE~⪵~prnap~⪹~prnsim~⋨~profalar~⌮~profline~⌒~profsurf~⌓~propto~∝~prsim~≾~prurel~⊰~pscr~𝓅~puncsp~ 
~qfr~𝔮~qint~⨌~qopf~𝕢~qprime~⁗~qscr~𝓆~quaternions~ℍ~quatint~⨖~quest~?~questeq~≟~rAarr~⇛~rAtail~⤜~rBarr~⤏~rHar~⥤~race~∽̱~racute~ŕ~raemptyv~⦳~rangd~⦒~range~⦥~rangle~⟩~rarrap~⥵~rarrb~⇥~rarrbfs~⤠~rarrc~⤳~rarrfs~⤞~rarrhk~↪~rarrlp~↬~rarrpl~⥅~rarrsim~⥴~rarrtl~↣~rarrw~↝~ratail~⤚~ratio~∶~rationals~ℚ~rbarr~⤍~rbbrk~❳~rbrace~}~rbrack~]~rbrke~⦌~rbrksld~⦎~rbrkslu~⦐~rcaron~ř~rcedil~ŗ~rcub~}~rcy~р~rdca~⤷~rdldhar~⥩~rdquor~”~rdsh~↳~realine~ℛ~realpart~ℜ~reals~ℝ~rect~▭~rfisht~⥽~rfr~𝔯~rhard~⇁~rharu~⇀~rharul~⥬~rhov~ϱ~rightarrow~→~rightarrowtail~↣~rightharpoondown~⇁~rightharpoonup~⇀~rightleftarrows~⇄~rightleftharpoons~⇌~rightrightarrows~⇉~rightsquigarrow~↝~rightthreetimes~⋌~ring~˚~risingdotseq~≓~rlarr~⇄~rlhar~⇌~rmoust~⎱~rmoustache~⎱~rnmid~⫮~roang~⟭~roarr~⇾~robrk~⟧~ropar~⦆~ropf~𝕣~roplus~⨮~rotimes~⨵~rpar~)~rpargt~⦔~rppolint~⨒~rrarr~⇉~rscr~𝓇~rsh~↱~rsqb~]~rsquor~’~rthree~⋌~rtimes~⋊~rtri~▹~rtrie~⊵~rtrif~▸~rtriltri~⧎~ruluhar~⥨~rx~℞~sacute~ś~sc~≻~scE~⪴~scap~⪸~sccue~≽~sce~⪰~scedil~ş~scirc~ŝ~scnE~⪶~scnap~⪺~scnsim~⋩~scpolint~⨓~scsim~≿~scy~с~sdotb~⊡~sdote~⩦~seArr~⇘~searhk~⤥~searr~↘~searrow~↘~semi~;~seswar~⤩~setminus~∖~setmn~∖~sext~✶~sfr~𝔰~sfrown~⌢~sharp~♯~shchcy~щ~shcy~ш~shortmid~∣~shortparallel~∥~sigmav~ς~simdot~⩪~sime~≃~simeq~≃~simg~⪞~simgE~⪠~siml~⪝~simlE~⪟~simne~≆~simplus~⨤~simrarr~⥲~slarr~←~smallsetminus~∖~smashp~⨳~smeparsl~⧤~smid~∣~smile~⌣~smt~⪪~smte~⪬~smtes~⪬︀~softcy~ь~sol~/~solb~⧄~solbar~⌿~sopf~𝕤~spadesuit~♠~spar~∥~sqcap~⊓~sqcaps~⊓︀~sqcup~⊔~sqcups~⊔︀~sqsub~⊏~sqsube~⊑~sqsubset~⊏~sqsubseteq~⊑~sqsup~⊐~sqsupe~⊒~sqsupset~⊐~sqsupseteq~⊒~squ~□~square~□~squarf~▪~squf~▪~srarr~→~sscr~𝓈~ssetmn~∖~ssmile~⌣~sstarf~⋆~star~☆~starf~★~straightepsilon~ϵ~straightphi~ϕ~strns~¯~subE~⫅~subdot~⪽~subedot~⫃~submult~⫁~subnE~⫋~subne~⊊~subplus~⪿~subrarr~⥹~subset~⊂~subseteq~⊆~subseteqq~⫅~subsetneq~⊊~subsetneqq~⫋~subsim~⫇~subsub~⫕~subsup~⫓~succ~≻~succapprox~⪸~succcurlyeq~≽~succeq~⪰~succnapprox~⪺~succneqq~⪶~succnsim~⋩~succsim~≿~sung~♪~supE~⫆~supdot~⪾~supdsub~⫘~supedot~⫄~suphsol~⟉~suphsub~⫗~suplarr~⥻~supmult~⫂~supnE~⫌~supne~⊋~supplus~⫀~supset~⊃~supseteq~⊇~supseteqq~⫆~supsetneq~⊋~supsetneqq~⫌~supsim~⫈~supsub~⫔~supsup~⫖~swArr~⇙~swarhk~⤦~swarr~↙~swarrow~↙~swnwar~⤪~target~⌖~tbrk~⎴~tcaron~ť~tcedil~ţ~tcy~т~tdot~⃛~telrec~⌕~tfr~𝔱~therefore~∴~thetav~ϑ~thickapprox~≈~thicksim~∼~thkap~≈~thksim~∼~timesb~⊠~timesbar~⨱~timesd~⨰~tint~∭~toea~⤨~top~⊤~topbot~⌶~topcir~⫱~topf~𝕥~topfork~⫚~tosa~⤩~tprime~‴~triangle~▵~triangledown~▿~triangleleft~◃~trianglelefteq~⊴~triangleq~≜~triangleright~▹~trianglerighteq~⊵~tridot~◬~trie~≜~triminus~⨺~triplus~⨹~trisb~⧍~tritime~⨻~trpezium~⏢~tscr~𝓉~tscy~ц~tshcy~ћ~tstrok~ŧ~twixt~≬~twoheadleftarrow~↞~twoheadrightarrow~↠~uHar~⥣~ubrcy~ў~ubreve~ŭ~ucy~у~udarr~⇅~udblac~ű~udhar~⥮~ufisht~⥾~ufr~𝔲~uharl~↿~uharr~↾~uhblk~▀~ulcorn~⌜~ulcorner~⌜~ulcrop~⌏~ultri~◸~umacr~ū~uogon~ų~uopf~𝕦~uparrow~↑~updownarrow~↕~upharpoonleft~↿~upharpoonright~↾~uplus~⊎~upsi~υ~upuparrows~⇈~urcorn~⌝~urcorner~⌝~urcrop~⌎~uring~ů~urtri~◹~uscr~𝓊~utdot~⋰~utilde~ũ~utri~▵~utrif~▴~uuarr~⇈~uwangle~⦧~vArr~⇕~vBar~⫨~vBarv~⫩~vDash~⊨~vangrt~⦜~varepsilon~ϵ~varkappa~ϰ~varnothing~∅~varphi~ϕ~varpi~ϖ~varpropto~∝~varr~↕~varrho~ϱ~varsigma~ς~varsubsetneq~⊊︀~varsubsetneqq~⫋︀~varsupsetneq~⊋︀~varsupsetneqq~⫌︀~vartheta~ϑ~vartriangleleft~⊲~vartriangleright~⊳~vcy~в~vdash~⊢~vee~∨~veebar~⊻~veeeq~≚~vellip~⋮~verbar~|~vert~|~vfr~𝔳~vltri~⊲~vnsub~⊂⃒~vnsup~⊃⃒~vopf~𝕧~vprop~∝~vrtri~⊳~vscr~𝓋~vsubnE~⫋︀~vsubne~⊊︀~vsupnE~⫌︀~vsupne~⊋︀~vzigzag~⦚~wcirc~ŵ~wedbar~⩟~wedge~∧~wedgeq~≙~wfr~𝔴~wopf~𝕨~wp~℘~wr~≀~wreath~≀~wscr~𝓌~xcap~⋂~xcirc~◯~xcup~⋃~xdtri~▽~xfr~𝔵~xhArr~⟺~xharr~⟷~xlArr~⟸~xlarr~⟵~xmap~⟼~xnis~⋻~xodot~⨀~xopf~𝕩~xoplus~⨁~xot
ime~⨂~xrArr~⟹~xrarr~⟶~xscr~𝓍~xsqcup~⨆~xuplus~⨄~xutri~△~xvee~⋁~xwedge~⋀~yacy~я~ycirc~ŷ~ycy~ы~yfr~𝔶~yicy~ї~yopf~𝕪~yscr~𝓎~yucy~ю~zacute~ź~zcaron~ž~zcy~з~zdot~ż~zeetrf~ℨ~zfr~𝔷~zhcy~ж~zigrarr~⇝~zopf~𝕫~zscr~𝓏~~AMP~&~COPY~©~GT~>~LT~<~QUOT~\"~REG~®", exports.namedReferences['html4']); //# sourceMappingURL=named-references.js.map /***/ }), /***/ 53438: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.numericUnicodeMap = void 0; exports.numericUnicodeMap = { 0: 65533, 128: 8364, 130: 8218, 131: 402, 132: 8222, 133: 8230, 134: 8224, 135: 8225, 136: 710, 137: 8240, 138: 352, 139: 8249, 140: 338, 142: 381, 145: 8216, 146: 8217, 147: 8220, 148: 8221, 149: 8226, 150: 8211, 151: 8212, 152: 732, 153: 8482, 154: 353, 155: 8250, 156: 339, 158: 382, 159: 376 }; //# sourceMappingURL=numeric-unicode-map.js.map /***/ }), /***/ 69718: /***/ ((__unused_webpack_module, exports) => { "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); exports.highSurrogateTo = exports.highSurrogateFrom = exports.getCodePoint = exports.fromCodePoint = void 0; exports.fromCodePoint = String.fromCodePoint || function (astralCodePoint) { return String.fromCharCode(Math.floor((astralCodePoint - 0x10000) / 0x400) + 0xd800, ((astralCodePoint - 0x10000) % 0x400) + 0xdc00); }; // @ts-expect-error - String.prototype.codePointAt might not exist in older node versions exports.getCodePoint = String.prototype.codePointAt ? function (input, position) { return input.codePointAt(position); } : function (input, position) { return (input.charCodeAt(position) - 0xd800) * 0x400 + input.charCodeAt(position + 1) - 0xdc00 + 0x10000; }; exports.highSurrogateFrom = 0xd800; exports.highSurrogateTo = 0xdbff; //# sourceMappingURL=surrogate-pairs.js.map /***/ }), /***/ 41969: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { // Copyright 2023 Google LLC // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. /* eslint-disable node/no-missing-require */ function getPackageJSON() { return __nccwpck_require__(72721); } exports.getPackageJSON = getPackageJSON; /***/ }), /***/ 87269: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; /*! Axios v1.10.0 Copyright (c) 2025 Matt Zabriskie and contributors */ const FormData$1 = __nccwpck_require__(45416); const crypto = __nccwpck_require__(76982); const url = __nccwpck_require__(87016); const proxyFromEnv = __nccwpck_require__(77777); const http = __nccwpck_require__(58611); const https = __nccwpck_require__(65692); const util = __nccwpck_require__(39023); const followRedirects = __nccwpck_require__(1573); const zlib = __nccwpck_require__(43106); const stream = __nccwpck_require__(2203); const events = __nccwpck_require__(24434); function _interopDefaultLegacy (e) { return e && typeof e === 'object' && 'default' in e ? 
e : { 'default': e }; } const FormData__default = /*#__PURE__*/_interopDefaultLegacy(FormData$1); const crypto__default = /*#__PURE__*/_interopDefaultLegacy(crypto); const url__default = /*#__PURE__*/_interopDefaultLegacy(url); const proxyFromEnv__default = /*#__PURE__*/_interopDefaultLegacy(proxyFromEnv); const http__default = /*#__PURE__*/_interopDefaultLegacy(http); const https__default = /*#__PURE__*/_interopDefaultLegacy(https); const util__default = /*#__PURE__*/_interopDefaultLegacy(util); const followRedirects__default = /*#__PURE__*/_interopDefaultLegacy(followRedirects); const zlib__default = /*#__PURE__*/_interopDefaultLegacy(zlib); const stream__default = /*#__PURE__*/_interopDefaultLegacy(stream); function bind(fn, thisArg) { return function wrap() { return fn.apply(thisArg, arguments); }; } // utils is a library of generic helper functions non-specific to axios const {toString} = Object.prototype; const {getPrototypeOf} = Object; const {iterator, toStringTag} = Symbol; const kindOf = (cache => thing => { const str = toString.call(thing); return cache[str] || (cache[str] = str.slice(8, -1).toLowerCase()); })(Object.create(null)); const kindOfTest = (type) => { type = type.toLowerCase(); return (thing) => kindOf(thing) === type }; const typeOfTest = type => thing => typeof thing === type; /** * Determine if a value is an Array * * @param {Object} val The value to test * * @returns {boolean} True if value is an Array, otherwise false */ const {isArray} = Array; /** * Determine if a value is undefined * * @param {*} val The value to test * * @returns {boolean} True if the value is undefined, otherwise false */ const isUndefined = typeOfTest('undefined'); /** * Determine if a value is a Buffer * * @param {*} val The value to test * * @returns {boolean} True if value is a Buffer, otherwise false */ function isBuffer(val) { return val !== null && !isUndefined(val) && val.constructor !== null && !isUndefined(val.constructor) && isFunction(val.constructor.isBuffer) && val.constructor.isBuffer(val); } /** * Determine if a value is an ArrayBuffer * * @param {*} val The value to test * * @returns {boolean} True if value is an ArrayBuffer, otherwise false */ const isArrayBuffer = kindOfTest('ArrayBuffer'); /** * Determine if a value is a view on an ArrayBuffer * * @param {*} val The value to test * * @returns {boolean} True if value is a view on an ArrayBuffer, otherwise false */ function isArrayBufferView(val) { let result; if ((typeof ArrayBuffer !== 'undefined') && (ArrayBuffer.isView)) { result = ArrayBuffer.isView(val); } else { result = (val) && (val.buffer) && (isArrayBuffer(val.buffer)); } return result; } /** * Determine if a value is a String * * @param {*} val The value to test * * @returns {boolean} True if value is a String, otherwise false */ const isString = typeOfTest('string'); /** * Determine if a value is a Function * * @param {*} val The value to test * @returns {boolean} True if value is a Function, otherwise false */ const isFunction = typeOfTest('function'); /** * Determine if a value is a Number * * @param {*} val The value to test * * @returns {boolean} True if value is a Number, otherwise false */ const isNumber = typeOfTest('number'); /** * Determine if a value is an Object * * @param {*} thing The value to test * * @returns {boolean} True if value is an Object, otherwise false */ const isObject = (thing) => thing !== null && typeof thing === 'object'; /** * Determine if a value is a Boolean * * @param {*} thing The value to test * @returns {boolean} True if 
value is a Boolean, otherwise false */ const isBoolean = thing => thing === true || thing === false; /** * Determine if a value is a plain Object * * @param {*} val The value to test * * @returns {boolean} True if value is a plain Object, otherwise false */ const isPlainObject = (val) => { if (kindOf(val) !== 'object') { return false; } const prototype = getPrototypeOf(val); return (prototype === null || prototype === Object.prototype || Object.getPrototypeOf(prototype) === null) && !(toStringTag in val) && !(iterator in val); }; /** * Determine if a value is a Date * * @param {*} val The value to test * * @returns {boolean} True if value is a Date, otherwise false */ const isDate = kindOfTest('Date'); /** * Determine if a value is a File * * @param {*} val The value to test * * @returns {boolean} True if value is a File, otherwise false */ const isFile = kindOfTest('File'); /** * Determine if a value is a Blob * * @param {*} val The value to test * * @returns {boolean} True if value is a Blob, otherwise false */ const isBlob = kindOfTest('Blob'); /** * Determine if a value is a FileList * * @param {*} val The value to test * * @returns {boolean} True if value is a File, otherwise false */ const isFileList = kindOfTest('FileList'); /** * Determine if a value is a Stream * * @param {*} val The value to test * * @returns {boolean} True if value is a Stream, otherwise false */ const isStream = (val) => isObject(val) && isFunction(val.pipe); /** * Determine if a value is a FormData * * @param {*} thing The value to test * * @returns {boolean} True if value is an FormData, otherwise false */ const isFormData = (thing) => { let kind; return thing && ( (typeof FormData === 'function' && thing instanceof FormData) || ( isFunction(thing.append) && ( (kind = kindOf(thing)) === 'formdata' || // detect form-data instance (kind === 'object' && isFunction(thing.toString) && thing.toString() === '[object FormData]') ) ) ) }; /** * Determine if a value is a URLSearchParams object * * @param {*} val The value to test * * @returns {boolean} True if value is a URLSearchParams object, otherwise false */ const isURLSearchParams = kindOfTest('URLSearchParams'); const [isReadableStream, isRequest, isResponse, isHeaders] = ['ReadableStream', 'Request', 'Response', 'Headers'].map(kindOfTest); /** * Trim excess whitespace off the beginning and end of a string * * @param {String} str The String to trim * * @returns {String} The String freed of excess whitespace */ const trim = (str) => str.trim ? str.trim() : str.replace(/^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g, ''); /** * Iterate over an Array or an Object invoking a function for each item. * * If `obj` is an Array callback will be called passing * the value, index, and complete array for each item. * * If 'obj' is an Object callback will be called passing * the value, key, and complete object for each property. * * @param {Object|Array} obj The object to iterate * @param {Function} fn The callback to invoke for each item * * @param {Boolean} [allOwnKeys = false] * @returns {any} */ function forEach(obj, fn, {allOwnKeys = false} = {}) { // Don't bother if no value provided if (obj === null || typeof obj === 'undefined') { return; } let i; let l; // Force an array if not already something iterable if (typeof obj !== 'object') { /*eslint no-param-reassign:0*/ obj = [obj]; } if (isArray(obj)) { // Iterate over array values for (i = 0, l = obj.length; i < l; i++) { fn.call(null, obj[i], i, obj); } } else { // Iterate over object keys const keys = allOwnKeys ? 
Object.getOwnPropertyNames(obj) : Object.keys(obj); const len = keys.length; let key; for (i = 0; i < len; i++) { key = keys[i]; fn.call(null, obj[key], key, obj); } } } function findKey(obj, key) { key = key.toLowerCase(); const keys = Object.keys(obj); let i = keys.length; let _key; while (i-- > 0) { _key = keys[i]; if (key === _key.toLowerCase()) { return _key; } } return null; } const _global = (() => { /*eslint no-undef:0*/ if (typeof globalThis !== "undefined") return globalThis; return typeof self !== "undefined" ? self : (typeof window !== 'undefined' ? window : global) })(); const isContextDefined = (context) => !isUndefined(context) && context !== _global; /** * Accepts varargs expecting each argument to be an object, then * immutably merges the properties of each object and returns result. * * When multiple objects contain the same key the later object in * the arguments list will take precedence. * * Example: * * ```js * var result = merge({foo: 123}, {foo: 456}); * console.log(result.foo); // outputs 456 * ``` * * @param {Object} obj1 Object to merge * * @returns {Object} Result of all merge properties */ function merge(/* obj1, obj2, obj3, ... */) { const {caseless} = isContextDefined(this) && this || {}; const result = {}; const assignValue = (val, key) => { const targetKey = caseless && findKey(result, key) || key; if (isPlainObject(result[targetKey]) && isPlainObject(val)) { result[targetKey] = merge(result[targetKey], val); } else if (isPlainObject(val)) { result[targetKey] = merge({}, val); } else if (isArray(val)) { result[targetKey] = val.slice(); } else { result[targetKey] = val; } }; for (let i = 0, l = arguments.length; i < l; i++) { arguments[i] && forEach(arguments[i], assignValue); } return result; } /** * Extends object a by mutably adding to it the properties of object b. * * @param {Object} a The object to be extended * @param {Object} b The object to copy properties from * @param {Object} thisArg The object to bind function to * * @param {Boolean} [allOwnKeys] * @returns {Object} The resulting value of object a */ const extend = (a, b, thisArg, {allOwnKeys}= {}) => { forEach(b, (val, key) => { if (thisArg && isFunction(val)) { a[key] = bind(val, thisArg); } else { a[key] = val; } }, {allOwnKeys}); return a; }; /** * Remove byte order marker. 
This catches EF BB BF (the UTF-8 BOM) * * @param {string} content with BOM * * @returns {string} content value without BOM */ const stripBOM = (content) => { if (content.charCodeAt(0) === 0xFEFF) { content = content.slice(1); } return content; }; /** * Inherit the prototype methods from one constructor into another * @param {function} constructor * @param {function} superConstructor * @param {object} [props] * @param {object} [descriptors] * * @returns {void} */ const inherits = (constructor, superConstructor, props, descriptors) => { constructor.prototype = Object.create(superConstructor.prototype, descriptors); constructor.prototype.constructor = constructor; Object.defineProperty(constructor, 'super', { value: superConstructor.prototype }); props && Object.assign(constructor.prototype, props); }; /** * Resolve object with deep prototype chain to a flat object * @param {Object} sourceObj source object * @param {Object} [destObj] * @param {Function|Boolean} [filter] * @param {Function} [propFilter] * * @returns {Object} */ const toFlatObject = (sourceObj, destObj, filter, propFilter) => { let props; let i; let prop; const merged = {}; destObj = destObj || {}; // eslint-disable-next-line no-eq-null,eqeqeq if (sourceObj == null) return destObj; do { props = Object.getOwnPropertyNames(sourceObj); i = props.length; while (i-- > 0) { prop = props[i]; if ((!propFilter || propFilter(prop, sourceObj, destObj)) && !merged[prop]) { destObj[prop] = sourceObj[prop]; merged[prop] = true; } } sourceObj = filter !== false && getPrototypeOf(sourceObj); } while (sourceObj && (!filter || filter(sourceObj, destObj)) && sourceObj !== Object.prototype); return destObj; }; /** * Determines whether a string ends with the characters of a specified string * * @param {String} str * @param {String} searchString * @param {Number} [position= 0] * * @returns {boolean} */ const endsWith = (str, searchString, position) => { str = String(str); if (position === undefined || position > str.length) { position = str.length; } position -= searchString.length; const lastIndex = str.indexOf(searchString, position); return lastIndex !== -1 && lastIndex === position; }; /** * Returns new array from array like object or null if failed * * @param {*} [thing] * * @returns {?Array} */ const toArray = (thing) => { if (!thing) return null; if (isArray(thing)) return thing; let i = thing.length; if (!isNumber(i)) return null; const arr = new Array(i); while (i-- > 0) { arr[i] = thing[i]; } return arr; }; /** * Checking if the Uint8Array exists and if it does, it returns a function that checks if the * thing passed in is an instance of Uint8Array * * @param {TypedArray} * * @returns {Array} */ // eslint-disable-next-line func-names const isTypedArray = (TypedArray => { // eslint-disable-next-line func-names return thing => { return TypedArray && thing instanceof TypedArray; }; })(typeof Uint8Array !== 'undefined' && getPrototypeOf(Uint8Array)); /** * For each entry in the object, call the function with the key and value. * * @param {Object} obj - The object to iterate over. * @param {Function} fn - The function to call for each entry. 
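 * Illustrative usage (editor's note, not part of the original axios source): any object exposing
 * Symbol.iterator that yields [key, value] pairs works here, e.g.
 *   forEachEntry(new Map([['a', 1], ['b', 2]]), (key, value) => console.log(key, value));
 * logs 'a 1' and then 'b 2'.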
* * @returns {void} */ const forEachEntry = (obj, fn) => { const generator = obj && obj[iterator]; const _iterator = generator.call(obj); let result; while ((result = _iterator.next()) && !result.done) { const pair = result.value; fn.call(obj, pair[0], pair[1]); } }; /** * It takes a regular expression and a string, and returns an array of all the matches * * @param {string} regExp - The regular expression to match against. * @param {string} str - The string to search. * * @returns {Array} */ const matchAll = (regExp, str) => { let matches; const arr = []; while ((matches = regExp.exec(str)) !== null) { arr.push(matches); } return arr; }; /* Checking if the kindOfTest function returns true when passed an HTMLFormElement. */ const isHTMLForm = kindOfTest('HTMLFormElement'); const toCamelCase = str => { return str.toLowerCase().replace(/[-_\s]([a-z\d])(\w*)/g, function replacer(m, p1, p2) { return p1.toUpperCase() + p2; } ); }; /* Creating a function that will check if an object has a property. */ const hasOwnProperty = (({hasOwnProperty}) => (obj, prop) => hasOwnProperty.call(obj, prop))(Object.prototype); /** * Determine if a value is a RegExp object * * @param {*} val The value to test * * @returns {boolean} True if value is a RegExp object, otherwise false */ const isRegExp = kindOfTest('RegExp'); const reduceDescriptors = (obj, reducer) => { const descriptors = Object.getOwnPropertyDescriptors(obj); const reducedDescriptors = {}; forEach(descriptors, (descriptor, name) => { let ret; if ((ret = reducer(descriptor, name, obj)) !== false) { reducedDescriptors[name] = ret || descriptor; } }); Object.defineProperties(obj, reducedDescriptors); }; /** * Makes all methods read-only * @param {Object} obj */ const freezeMethods = (obj) => { reduceDescriptors(obj, (descriptor, name) => { // skip restricted props in strict mode if (isFunction(obj) && ['arguments', 'caller', 'callee'].indexOf(name) !== -1) { return false; } const value = obj[name]; if (!isFunction(value)) return; descriptor.enumerable = false; if ('writable' in descriptor) { descriptor.writable = false; return; } if (!descriptor.set) { descriptor.set = () => { throw Error('Can not rewrite read-only method \'' + name + '\''); }; } }); }; const toObjectSet = (arrayOrString, delimiter) => { const obj = {}; const define = (arr) => { arr.forEach(value => { obj[value] = true; }); }; isArray(arrayOrString) ? define(arrayOrString) : define(String(arrayOrString).split(delimiter)); return obj; }; const noop = () => {}; const toFiniteNumber = (value, defaultValue) => { return value != null && Number.isFinite(value = +value) ? value : defaultValue; }; /** * If the thing is a FormData object, return true, otherwise return false. * * @param {unknown} thing - The thing to check. * * @returns {boolean} */ function isSpecCompliantForm(thing) { return !!(thing && isFunction(thing.append) && thing[toStringTag] === 'FormData' && thing[iterator]); } const toJSONObject = (obj) => { const stack = new Array(10); const visit = (source, i) => { if (isObject(source)) { if (stack.indexOf(source) >= 0) { return; } if(!('toJSON' in source)) { stack[i] = source; const target = isArray(source) ? 
[] : {}; forEach(source, (value, key) => { const reducedValue = visit(value, i + 1); !isUndefined(reducedValue) && (target[key] = reducedValue); }); stack[i] = undefined; return target; } } return source; }; return visit(obj, 0); }; const isAsyncFn = kindOfTest('AsyncFunction'); const isThenable = (thing) => thing && (isObject(thing) || isFunction(thing)) && isFunction(thing.then) && isFunction(thing.catch); // original code // https://github.com/DigitalBrainJS/AxiosPromise/blob/16deab13710ec09779922131f3fa5954320f83ab/lib/utils.js#L11-L34 const _setImmediate = ((setImmediateSupported, postMessageSupported) => { if (setImmediateSupported) { return setImmediate; } return postMessageSupported ? ((token, callbacks) => { _global.addEventListener("message", ({source, data}) => { if (source === _global && data === token) { callbacks.length && callbacks.shift()(); } }, false); return (cb) => { callbacks.push(cb); _global.postMessage(token, "*"); } })(`axios@${Math.random()}`, []) : (cb) => setTimeout(cb); })( typeof setImmediate === 'function', isFunction(_global.postMessage) ); const asap = typeof queueMicrotask !== 'undefined' ? queueMicrotask.bind(_global) : ( typeof process !== 'undefined' && process.nextTick || _setImmediate); // ********************* const isIterable = (thing) => thing != null && isFunction(thing[iterator]); const utils$1 = { isArray, isArrayBuffer, isBuffer, isFormData, isArrayBufferView, isString, isNumber, isBoolean, isObject, isPlainObject, isReadableStream, isRequest, isResponse, isHeaders, isUndefined, isDate, isFile, isBlob, isRegExp, isFunction, isStream, isURLSearchParams, isTypedArray, isFileList, forEach, merge, extend, trim, stripBOM, inherits, toFlatObject, kindOf, kindOfTest, endsWith, toArray, forEachEntry, matchAll, isHTMLForm, hasOwnProperty, hasOwnProp: hasOwnProperty, // an alias to avoid ESLint no-prototype-builtins detection reduceDescriptors, freezeMethods, toObjectSet, toCamelCase, noop, toFiniteNumber, findKey, global: _global, isContextDefined, isSpecCompliantForm, toJSONObject, isAsyncFn, isThenable, setImmediate: _setImmediate, asap, isIterable }; /** * Create an Error with the specified message, config, error code, request and response. * * @param {string} message The error message. * @param {string} [code] The error code (for example, 'ECONNABORTED'). * @param {Object} [config] The config. * @param {Object} [request] The request. * @param {Object} [response] The response. * * @returns {Error} The created error. */ function AxiosError(message, code, config, request, response) { Error.call(this); if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor); } else { this.stack = (new Error()).stack; } this.message = message; this.name = 'AxiosError'; code && (this.code = code); config && (this.config = config); request && (this.request = request); if (response) { this.response = response; this.status = response.status ? 
response.status : null; } } utils$1.inherits(AxiosError, Error, { toJSON: function toJSON() { return { // Standard message: this.message, name: this.name, // Microsoft description: this.description, number: this.number, // Mozilla fileName: this.fileName, lineNumber: this.lineNumber, columnNumber: this.columnNumber, stack: this.stack, // Axios config: utils$1.toJSONObject(this.config), code: this.code, status: this.status }; } }); const prototype$1 = AxiosError.prototype; const descriptors = {}; [ 'ERR_BAD_OPTION_VALUE', 'ERR_BAD_OPTION', 'ECONNABORTED', 'ETIMEDOUT', 'ERR_NETWORK', 'ERR_FR_TOO_MANY_REDIRECTS', 'ERR_DEPRECATED', 'ERR_BAD_RESPONSE', 'ERR_BAD_REQUEST', 'ERR_CANCELED', 'ERR_NOT_SUPPORT', 'ERR_INVALID_URL' // eslint-disable-next-line func-names ].forEach(code => { descriptors[code] = {value: code}; }); Object.defineProperties(AxiosError, descriptors); Object.defineProperty(prototype$1, 'isAxiosError', {value: true}); // eslint-disable-next-line func-names AxiosError.from = (error, code, config, request, response, customProps) => { const axiosError = Object.create(prototype$1); utils$1.toFlatObject(error, axiosError, function filter(obj) { return obj !== Error.prototype; }, prop => { return prop !== 'isAxiosError'; }); AxiosError.call(axiosError, error.message, code, config, request, response); axiosError.cause = error; axiosError.name = error.name; customProps && Object.assign(axiosError, customProps); return axiosError; }; /** * Determines if the given thing is a array or js object. * * @param {string} thing - The object or array to be visited. * * @returns {boolean} */ function isVisitable(thing) { return utils$1.isPlainObject(thing) || utils$1.isArray(thing); } /** * It removes the brackets from the end of a string * * @param {string} key - The key of the parameter. * * @returns {string} the key without the brackets. */ function removeBrackets(key) { return utils$1.endsWith(key, '[]') ? key.slice(0, -2) : key; } /** * It takes a path, a key, and a boolean, and returns a string * * @param {string} path - The path to the current key. * @param {string} key - The key of the current object being iterated over. * @param {string} dots - If true, the key will be rendered with dots instead of brackets. * * @returns {string} The path to the current key. */ function renderKey(path, key, dots) { if (!path) return key; return path.concat(key).map(function each(token, i) { // eslint-disable-next-line no-param-reassign token = removeBrackets(token); return !dots && i ? '[' + token + ']' : token; }).join(dots ? '.' : ''); } /** * If the array is an array and none of its elements are visitable, then it's a flat array. * * @param {Array} arr - The array to check * * @returns {boolean} */ function isFlatArray(arr) { return utils$1.isArray(arr) && !arr.some(isVisitable); } const predicates = utils$1.toFlatObject(utils$1, {}, null, function filter(prop) { return /^is[A-Z]/.test(prop); }); /** * Convert a data object to FormData * * @param {Object} obj * @param {?Object} [formData] * @param {?Object} [options] * @param {Function} [options.visitor] * @param {Boolean} [options.metaTokens = true] * @param {Boolean} [options.dots = false] * @param {?Boolean} [options.indexes = false] * * @returns {Object} **/ /** * It converts an object into a FormData object * * @param {Object} obj - The object to convert to form data. * @param {string} formData - The FormData object to append to. 
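 * Illustrative example (editor's note, not part of the original axios source), using the default
 * visitor with `dots: false` and `indexes: false`:
 *   toFormData({ name: 'x', tags: ['a', 'b'] })
 * appends the entries name=x, tags[]=a and tags[]=b to a fresh FormData instance.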
* @param {Object} options * * @returns */ function toFormData(obj, formData, options) { if (!utils$1.isObject(obj)) { throw new TypeError('target must be an object'); } // eslint-disable-next-line no-param-reassign formData = formData || new (FormData__default["default"] || FormData)(); // eslint-disable-next-line no-param-reassign options = utils$1.toFlatObject(options, { metaTokens: true, dots: false, indexes: false }, false, function defined(option, source) { // eslint-disable-next-line no-eq-null,eqeqeq return !utils$1.isUndefined(source[option]); }); const metaTokens = options.metaTokens; // eslint-disable-next-line no-use-before-define const visitor = options.visitor || defaultVisitor; const dots = options.dots; const indexes = options.indexes; const _Blob = options.Blob || typeof Blob !== 'undefined' && Blob; const useBlob = _Blob && utils$1.isSpecCompliantForm(formData); if (!utils$1.isFunction(visitor)) { throw new TypeError('visitor must be a function'); } function convertValue(value) { if (value === null) return ''; if (utils$1.isDate(value)) { return value.toISOString(); } if (utils$1.isBoolean(value)) { return value.toString(); } if (!useBlob && utils$1.isBlob(value)) { throw new AxiosError('Blob is not supported. Use a Buffer instead.'); } if (utils$1.isArrayBuffer(value) || utils$1.isTypedArray(value)) { return useBlob && typeof Blob === 'function' ? new Blob([value]) : Buffer.from(value); } return value; } /** * Default visitor. * * @param {*} value * @param {String|Number} key * @param {Array} path * @this {FormData} * * @returns {boolean} return true to visit the each prop of the value recursively */ function defaultVisitor(value, key, path) { let arr = value; if (value && !path && typeof value === 'object') { if (utils$1.endsWith(key, '{}')) { // eslint-disable-next-line no-param-reassign key = metaTokens ? key : key.slice(0, -2); // eslint-disable-next-line no-param-reassign value = JSON.stringify(value); } else if ( (utils$1.isArray(value) && isFlatArray(value)) || ((utils$1.isFileList(value) || utils$1.endsWith(key, '[]')) && (arr = utils$1.toArray(value)) )) { // eslint-disable-next-line no-param-reassign key = removeBrackets(key); arr.forEach(function each(el, index) { !(utils$1.isUndefined(el) || el === null) && formData.append( // eslint-disable-next-line no-nested-ternary indexes === true ? renderKey([key], index, dots) : (indexes === null ? key : key + '[]'), convertValue(el) ); }); return false; } } if (isVisitable(value)) { return true; } formData.append(renderKey(path, key, dots), convertValue(value)); return false; } const stack = []; const exposedHelpers = Object.assign(predicates, { defaultVisitor, convertValue, isVisitable }); function build(value, path) { if (utils$1.isUndefined(value)) return; if (stack.indexOf(value) !== -1) { throw Error('Circular reference detected in ' + path.join('.')); } stack.push(value); utils$1.forEach(value, function each(el, key) { const result = !(utils$1.isUndefined(el) || el === null) && visitor.call( formData, el, utils$1.isString(key) ? key.trim() : key, path, exposedHelpers ); if (result === true) { build(el, path ? path.concat(key) : [key]); } }); stack.pop(); } if (!utils$1.isObject(obj)) { throw new TypeError('data must be an object'); } build(obj); return formData; } /** * It encodes a string by replacing all characters that are not in the unreserved set with * their percent-encoded equivalents * * @param {string} str - The string to encode. * * @returns {string} The encoded string. 
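 * Illustrative example (editor's note, not part of the original axios source):
 *   encode$1('a b!') returns 'a+b%21', because encodeURIComponent yields 'a%20b!' and the
 *   charMap below rewrites '%20' to '+' and '!' to '%21'.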
*/ function encode$1(str) { const charMap = { '!': '%21', "'": '%27', '(': '%28', ')': '%29', '~': '%7E', '%20': '+', '%00': '\x00' }; return encodeURIComponent(str).replace(/[!'()~]|%20|%00/g, function replacer(match) { return charMap[match]; }); } /** * It takes a params object and converts it to a FormData object * * @param {Object} params - The parameters to be converted to a FormData object. * @param {Object} options - The options object passed to the Axios constructor. * * @returns {void} */ function AxiosURLSearchParams(params, options) { this._pairs = []; params && toFormData(params, this, options); } const prototype = AxiosURLSearchParams.prototype; prototype.append = function append(name, value) { this._pairs.push([name, value]); }; prototype.toString = function toString(encoder) { const _encode = encoder ? function(value) { return encoder.call(this, value, encode$1); } : encode$1; return this._pairs.map(function each(pair) { return _encode(pair[0]) + '=' + _encode(pair[1]); }, '').join('&'); }; /** * It replaces all instances of the characters `:`, `$`, `,`, `+`, `[`, and `]` with their * URI encoded counterparts * * @param {string} val The value to be encoded. * * @returns {string} The encoded value. */ function encode(val) { return encodeURIComponent(val). replace(/%3A/gi, ':'). replace(/%24/g, '$'). replace(/%2C/gi, ','). replace(/%20/g, '+'). replace(/%5B/gi, '['). replace(/%5D/gi, ']'); } /** * Build a URL by appending params to the end * * @param {string} url The base of the url (e.g., http://www.google.com) * @param {object} [params] The params to be appended * @param {?(object|Function)} options * * @returns {string} The formatted url */ function buildURL(url, params, options) { /*eslint no-param-reassign:0*/ if (!params) { return url; } const _encode = options && options.encode || encode; if (utils$1.isFunction(options)) { options = { serialize: options }; } const serializeFn = options && options.serialize; let serializedParams; if (serializeFn) { serializedParams = serializeFn(params, options); } else { serializedParams = utils$1.isURLSearchParams(params) ? params.toString() : new AxiosURLSearchParams(params, options).toString(_encode); } if (serializedParams) { const hashmarkIndex = url.indexOf("#"); if (hashmarkIndex !== -1) { url = url.slice(0, hashmarkIndex); } url += (url.indexOf('?') === -1 ? '?' : '&') + serializedParams; } return url; } class InterceptorManager { constructor() { this.handlers = []; } /** * Add a new interceptor to the stack * * @param {Function} fulfilled The function to handle `then` for a `Promise` * @param {Function} rejected The function to handle `reject` for a `Promise` * * @return {Number} An ID used to remove interceptor later */ use(fulfilled, rejected, options) { this.handlers.push({ fulfilled, rejected, synchronous: options ? options.synchronous : false, runWhen: options ? options.runWhen : null }); return this.handlers.length - 1; } /** * Remove an interceptor from the stack * * @param {Number} id The ID that was returned by `use` * * @returns {Boolean} `true` if the interceptor was removed, `false` otherwise */ eject(id) { if (this.handlers[id]) { this.handlers[id] = null; } } /** * Clear all interceptors from the stack * * @returns {void} */ clear() { if (this.handlers) { this.handlers = []; } } /** * Iterate over all the registered interceptors * * This method is particularly useful for skipping over any * interceptors that may have become `null` calling `eject`. 
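 * Editor's note (illustrative, not part of the original axios source): `use` above returns the
 * numeric index of the registered handler, so a typical round trip is
 *   const id = instance.interceptors.request.use(cfg => cfg);
 *   instance.interceptors.request.eject(id); // slot becomes null and is skipped by forEach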
* * @param {Function} fn The function to call for each interceptor * * @returns {void} */ forEach(fn) { utils$1.forEach(this.handlers, function forEachHandler(h) { if (h !== null) { fn(h); } }); } } const InterceptorManager$1 = InterceptorManager; const transitionalDefaults = { silentJSONParsing: true, forcedJSONParsing: true, clarifyTimeoutError: false }; const URLSearchParams = url__default["default"].URLSearchParams; const ALPHA = 'abcdefghijklmnopqrstuvwxyz'; const DIGIT = '0123456789'; const ALPHABET = { DIGIT, ALPHA, ALPHA_DIGIT: ALPHA + ALPHA.toUpperCase() + DIGIT }; const generateString = (size = 16, alphabet = ALPHABET.ALPHA_DIGIT) => { let str = ''; const {length} = alphabet; const randomValues = new Uint32Array(size); crypto__default["default"].randomFillSync(randomValues); for (let i = 0; i < size; i++) { str += alphabet[randomValues[i] % length]; } return str; }; const platform$1 = { isNode: true, classes: { URLSearchParams, FormData: FormData__default["default"], Blob: typeof Blob !== 'undefined' && Blob || null }, ALPHABET, generateString, protocols: [ 'http', 'https', 'file', 'data' ] }; const hasBrowserEnv = typeof window !== 'undefined' && typeof document !== 'undefined'; const _navigator = typeof navigator === 'object' && navigator || undefined; /** * Determine if we're running in a standard browser environment * * This allows axios to run in a web worker, and react-native. * Both environments support XMLHttpRequest, but not fully standard globals. * * web workers: * typeof window -> undefined * typeof document -> undefined * * react-native: * navigator.product -> 'ReactNative' * nativescript * navigator.product -> 'NativeScript' or 'NS' * * @returns {boolean} */ const hasStandardBrowserEnv = hasBrowserEnv && (!_navigator || ['ReactNative', 'NativeScript', 'NS'].indexOf(_navigator.product) < 0); /** * Determine if we're running in a standard browser webWorker environment * * Although the `isStandardBrowserEnv` method indicates that * `allows axios to run in a web worker`, the WebWorker will still be * filtered out due to its judgment standard * `typeof window !== 'undefined' && typeof document !== 'undefined'`. * This leads to a problem when axios post `FormData` in webWorker */ const hasStandardBrowserWebWorkerEnv = (() => { return ( typeof WorkerGlobalScope !== 'undefined' && // eslint-disable-next-line no-undef self instanceof WorkerGlobalScope && typeof self.importScripts === 'function' ); })(); const origin = hasBrowserEnv && window.location.href || 'http://localhost'; const utils = /*#__PURE__*/Object.freeze({ __proto__: null, hasBrowserEnv: hasBrowserEnv, hasStandardBrowserWebWorkerEnv: hasStandardBrowserWebWorkerEnv, hasStandardBrowserEnv: hasStandardBrowserEnv, navigator: _navigator, origin: origin }); const platform = { ...utils, ...platform$1 }; function toURLEncodedForm(data, options) { return toFormData(data, new platform.classes.URLSearchParams(), Object.assign({ visitor: function(value, key, path, helpers) { if (platform.isNode && utils$1.isBuffer(value)) { this.append(key, value.toString('base64')); return false; } return helpers.defaultVisitor.apply(this, arguments); } }, options)); } /** * It takes a string like `foo[x][y][z]` and returns an array like `['foo', 'x', 'y', 'z'] * * @param {string} name - The name of the property to get. * * @returns An array of strings. */ function parsePropPath(name) { // foo[x][y][z] // foo.x.y.z // foo-x-y-z // foo x y z return utils$1.matchAll(/\w+|\[(\w*)]/g, name).map(match => { return match[0] === '[]' ? 
'' : match[1] || match[0]; }); } /** * Convert an array to an object. * * @param {Array} arr - The array to convert to an object. * * @returns An object with the same keys and values as the array. */ function arrayToObject(arr) { const obj = {}; const keys = Object.keys(arr); let i; const len = keys.length; let key; for (i = 0; i < len; i++) { key = keys[i]; obj[key] = arr[key]; } return obj; } /** * It takes a FormData object and returns a JavaScript object * * @param {string} formData The FormData object to convert to JSON. * * @returns {Object | null} The converted object. */ function formDataToJSON(formData) { function buildPath(path, value, target, index) { let name = path[index++]; if (name === '__proto__') return true; const isNumericKey = Number.isFinite(+name); const isLast = index >= path.length; name = !name && utils$1.isArray(target) ? target.length : name; if (isLast) { if (utils$1.hasOwnProp(target, name)) { target[name] = [target[name], value]; } else { target[name] = value; } return !isNumericKey; } if (!target[name] || !utils$1.isObject(target[name])) { target[name] = []; } const result = buildPath(path, value, target[name], index); if (result && utils$1.isArray(target[name])) { target[name] = arrayToObject(target[name]); } return !isNumericKey; } if (utils$1.isFormData(formData) && utils$1.isFunction(formData.entries)) { const obj = {}; utils$1.forEachEntry(formData, (name, value) => { buildPath(parsePropPath(name), value, obj, 0); }); return obj; } return null; } /** * It takes a string, tries to parse it, and if it fails, it returns the stringified version * of the input * * @param {any} rawValue - The value to be stringified. * @param {Function} parser - A function that parses a string into a JavaScript object. * @param {Function} encoder - A function that takes a value and returns a string. * * @returns {string} A stringified version of the rawValue. */ function stringifySafely(rawValue, parser, encoder) { if (utils$1.isString(rawValue)) { try { (parser || JSON.parse)(rawValue); return utils$1.trim(rawValue); } catch (e) { if (e.name !== 'SyntaxError') { throw e; } } } return (encoder || JSON.stringify)(rawValue); } const defaults = { transitional: transitionalDefaults, adapter: ['xhr', 'http', 'fetch'], transformRequest: [function transformRequest(data, headers) { const contentType = headers.getContentType() || ''; const hasJSONContentType = contentType.indexOf('application/json') > -1; const isObjectPayload = utils$1.isObject(data); if (isObjectPayload && utils$1.isHTMLForm(data)) { data = new FormData(data); } const isFormData = utils$1.isFormData(data); if (isFormData) { return hasJSONContentType ? JSON.stringify(formDataToJSON(data)) : data; } if (utils$1.isArrayBuffer(data) || utils$1.isBuffer(data) || utils$1.isStream(data) || utils$1.isFile(data) || utils$1.isBlob(data) || utils$1.isReadableStream(data) ) { return data; } if (utils$1.isArrayBufferView(data)) { return data.buffer; } if (utils$1.isURLSearchParams(data)) { headers.setContentType('application/x-www-form-urlencoded;charset=utf-8', false); return data.toString(); } let isFileList; if (isObjectPayload) { if (contentType.indexOf('application/x-www-form-urlencoded') > -1) { return toURLEncodedForm(data, this.formSerializer).toString(); } if ((isFileList = utils$1.isFileList(data)) || contentType.indexOf('multipart/form-data') > -1) { const _FormData = this.env && this.env.FormData; return toFormData( isFileList ? 
{'files[]': data} : data, _FormData && new _FormData(), this.formSerializer ); } } if (isObjectPayload || hasJSONContentType ) { headers.setContentType('application/json', false); return stringifySafely(data); } return data; }], transformResponse: [function transformResponse(data) { const transitional = this.transitional || defaults.transitional; const forcedJSONParsing = transitional && transitional.forcedJSONParsing; const JSONRequested = this.responseType === 'json'; if (utils$1.isResponse(data) || utils$1.isReadableStream(data)) { return data; } if (data && utils$1.isString(data) && ((forcedJSONParsing && !this.responseType) || JSONRequested)) { const silentJSONParsing = transitional && transitional.silentJSONParsing; const strictJSONParsing = !silentJSONParsing && JSONRequested; try { return JSON.parse(data); } catch (e) { if (strictJSONParsing) { if (e.name === 'SyntaxError') { throw AxiosError.from(e, AxiosError.ERR_BAD_RESPONSE, this, null, this.response); } throw e; } } } return data; }], /** * A timeout in milliseconds to abort a request. If set to 0 (default) a * timeout is not created. */ timeout: 0, xsrfCookieName: 'XSRF-TOKEN', xsrfHeaderName: 'X-XSRF-TOKEN', maxContentLength: -1, maxBodyLength: -1, env: { FormData: platform.classes.FormData, Blob: platform.classes.Blob }, validateStatus: function validateStatus(status) { return status >= 200 && status < 300; }, headers: { common: { 'Accept': 'application/json, text/plain, */*', 'Content-Type': undefined } } }; utils$1.forEach(['delete', 'get', 'head', 'post', 'put', 'patch'], (method) => { defaults.headers[method] = {}; }); const defaults$1 = defaults; // RawAxiosHeaders whose duplicates are ignored by node // c.f. https://nodejs.org/api/http.html#http_message_headers const ignoreDuplicateOf = utils$1.toObjectSet([ 'age', 'authorization', 'content-length', 'content-type', 'etag', 'expires', 'from', 'host', 'if-modified-since', 'if-unmodified-since', 'last-modified', 'location', 'max-forwards', 'proxy-authorization', 'referer', 'retry-after', 'user-agent' ]); /** * Parse headers into an object * * ``` * Date: Wed, 27 Aug 2014 08:58:49 GMT * Content-Type: application/json * Connection: keep-alive * Transfer-Encoding: chunked * ``` * * @param {String} rawHeaders Headers needing to be parsed * * @returns {Object} Headers parsed into an object */ const parseHeaders = rawHeaders => { const parsed = {}; let key; let val; let i; rawHeaders && rawHeaders.split('\n').forEach(function parser(line) { i = line.indexOf(':'); key = line.substring(0, i).trim().toLowerCase(); val = line.substring(i + 1).trim(); if (!key || (parsed[key] && ignoreDuplicateOf[key])) { return; } if (key === 'set-cookie') { if (parsed[key]) { parsed[key].push(val); } else { parsed[key] = [val]; } } else { parsed[key] = parsed[key] ? parsed[key] + ', ' + val : val; } }); return parsed; }; const $internals = Symbol('internals'); function normalizeHeader(header) { return header && String(header).trim().toLowerCase(); } function normalizeValue(value) { if (value === false || value == null) { return value; } return utils$1.isArray(value) ? 
value.map(normalizeValue) : String(value); } function parseTokens(str) { const tokens = Object.create(null); const tokensRE = /([^\s,;=]+)\s*(?:=\s*([^,;]+))?/g; let match; while ((match = tokensRE.exec(str))) { tokens[match[1]] = match[2]; } return tokens; } const isValidHeaderName = (str) => /^[-_a-zA-Z0-9^`|~,!#$%&'*+.]+$/.test(str.trim()); function matchHeaderValue(context, value, header, filter, isHeaderNameFilter) { if (utils$1.isFunction(filter)) { return filter.call(this, value, header); } if (isHeaderNameFilter) { value = header; } if (!utils$1.isString(value)) return; if (utils$1.isString(filter)) { return value.indexOf(filter) !== -1; } if (utils$1.isRegExp(filter)) { return filter.test(value); } } function formatHeader(header) { return header.trim() .toLowerCase().replace(/([a-z\d])(\w*)/g, (w, char, str) => { return char.toUpperCase() + str; }); } function buildAccessors(obj, header) { const accessorName = utils$1.toCamelCase(' ' + header); ['get', 'set', 'has'].forEach(methodName => { Object.defineProperty(obj, methodName + accessorName, { value: function(arg1, arg2, arg3) { return this[methodName].call(this, header, arg1, arg2, arg3); }, configurable: true }); }); } class AxiosHeaders { constructor(headers) { headers && this.set(headers); } set(header, valueOrRewrite, rewrite) { const self = this; function setHeader(_value, _header, _rewrite) { const lHeader = normalizeHeader(_header); if (!lHeader) { throw new Error('header name must be a non-empty string'); } const key = utils$1.findKey(self, lHeader); if(!key || self[key] === undefined || _rewrite === true || (_rewrite === undefined && self[key] !== false)) { self[key || _header] = normalizeValue(_value); } } const setHeaders = (headers, _rewrite) => utils$1.forEach(headers, (_value, _header) => setHeader(_value, _header, _rewrite)); if (utils$1.isPlainObject(header) || header instanceof this.constructor) { setHeaders(header, valueOrRewrite); } else if(utils$1.isString(header) && (header = header.trim()) && !isValidHeaderName(header)) { setHeaders(parseHeaders(header), valueOrRewrite); } else if (utils$1.isObject(header) && utils$1.isIterable(header)) { let obj = {}, dest, key; for (const entry of header) { if (!utils$1.isArray(entry)) { throw TypeError('Object iterator must return a key-value pair'); } obj[key = entry[0]] = (dest = obj[key]) ? (utils$1.isArray(dest) ? 
[...dest, entry[1]] : [dest, entry[1]]) : entry[1]; } setHeaders(obj, valueOrRewrite); } else { header != null && setHeader(valueOrRewrite, header, rewrite); } return this; } get(header, parser) { header = normalizeHeader(header); if (header) { const key = utils$1.findKey(this, header); if (key) { const value = this[key]; if (!parser) { return value; } if (parser === true) { return parseTokens(value); } if (utils$1.isFunction(parser)) { return parser.call(this, value, key); } if (utils$1.isRegExp(parser)) { return parser.exec(value); } throw new TypeError('parser must be boolean|regexp|function'); } } } has(header, matcher) { header = normalizeHeader(header); if (header) { const key = utils$1.findKey(this, header); return !!(key && this[key] !== undefined && (!matcher || matchHeaderValue(this, this[key], key, matcher))); } return false; } delete(header, matcher) { const self = this; let deleted = false; function deleteHeader(_header) { _header = normalizeHeader(_header); if (_header) { const key = utils$1.findKey(self, _header); if (key && (!matcher || matchHeaderValue(self, self[key], key, matcher))) { delete self[key]; deleted = true; } } } if (utils$1.isArray(header)) { header.forEach(deleteHeader); } else { deleteHeader(header); } return deleted; } clear(matcher) { const keys = Object.keys(this); let i = keys.length; let deleted = false; while (i--) { const key = keys[i]; if(!matcher || matchHeaderValue(this, this[key], key, matcher, true)) { delete this[key]; deleted = true; } } return deleted; } normalize(format) { const self = this; const headers = {}; utils$1.forEach(this, (value, header) => { const key = utils$1.findKey(headers, header); if (key) { self[key] = normalizeValue(value); delete self[header]; return; } const normalized = format ? formatHeader(header) : String(header).trim(); if (normalized !== header) { delete self[header]; } self[normalized] = normalizeValue(value); headers[normalized] = true; }); return this; } concat(...targets) { return this.constructor.concat(this, ...targets); } toJSON(asStrings) { const obj = Object.create(null); utils$1.forEach(this, (value, header) => { value != null && value !== false && (obj[header] = asStrings && utils$1.isArray(value) ? value.join(', ') : value); }); return obj; } [Symbol.iterator]() { return Object.entries(this.toJSON())[Symbol.iterator](); } toString() { return Object.entries(this.toJSON()).map(([header, value]) => header + ': ' + value).join('\n'); } getSetCookie() { return this.get("set-cookie") || []; } get [Symbol.toStringTag]() { return 'AxiosHeaders'; } static from(thing) { return thing instanceof this ? thing : new this(thing); } static concat(first, ...targets) { const computed = new this(first); targets.forEach((target) => computed.set(target)); return computed; } static accessor(header) { const internals = this[$internals] = (this[$internals] = { accessors: {} }); const accessors = internals.accessors; const prototype = this.prototype; function defineAccessor(_header) { const lHeader = normalizeHeader(_header); if (!accessors[lHeader]) { buildAccessors(prototype, _header); accessors[lHeader] = true; } } utils$1.isArray(header) ? 
header.forEach(defineAccessor) : defineAccessor(header); return this; } } AxiosHeaders.accessor(['Content-Type', 'Content-Length', 'Accept', 'Accept-Encoding', 'User-Agent', 'Authorization']); // reserved names hotfix utils$1.reduceDescriptors(AxiosHeaders.prototype, ({value}, key) => { let mapped = key[0].toUpperCase() + key.slice(1); // map `set` => `Set` return { get: () => value, set(headerValue) { this[mapped] = headerValue; } } }); utils$1.freezeMethods(AxiosHeaders); const AxiosHeaders$1 = AxiosHeaders; /** * Transform the data for a request or a response * * @param {Array|Function} fns A single function or Array of functions * @param {?Object} response The response object * * @returns {*} The resulting transformed data */ function transformData(fns, response) { const config = this || defaults$1; const context = response || config; const headers = AxiosHeaders$1.from(context.headers); let data = context.data; utils$1.forEach(fns, function transform(fn) { data = fn.call(config, data, headers.normalize(), response ? response.status : undefined); }); headers.normalize(); return data; } function isCancel(value) { return !!(value && value.__CANCEL__); } /** * A `CanceledError` is an object that is thrown when an operation is canceled. * * @param {string=} message The message. * @param {Object=} config The config. * @param {Object=} request The request. * * @returns {CanceledError} The created error. */ function CanceledError(message, config, request) { // eslint-disable-next-line no-eq-null,eqeqeq AxiosError.call(this, message == null ? 'canceled' : message, AxiosError.ERR_CANCELED, config, request); this.name = 'CanceledError'; } utils$1.inherits(CanceledError, AxiosError, { __CANCEL__: true }); /** * Resolve or reject a Promise based on response status. * * @param {Function} resolve A function that resolves the promise. * @param {Function} reject A function that rejects the promise. * @param {object} response The response. * * @returns {object} The response. */ function settle(resolve, reject, response) { const validateStatus = response.config.validateStatus; if (!response.status || !validateStatus || validateStatus(response.status)) { resolve(response); } else { reject(new AxiosError( 'Request failed with status code ' + response.status, [AxiosError.ERR_BAD_REQUEST, AxiosError.ERR_BAD_RESPONSE][Math.floor(response.status / 100) - 4], response.config, response.request, response )); } } /** * Determines whether the specified URL is absolute * * @param {string} url The URL to test * * @returns {boolean} True if the specified URL is absolute, otherwise false */ function isAbsoluteURL(url) { // A URL is considered absolute if it begins with "://" or "//" (protocol-relative URL). // RFC 3986 defines scheme name as a sequence of characters beginning with a letter and followed // by any combination of letters, digits, plus, period, or hyphen. return /^([a-z][a-z\d+\-.]*:)?\/\//i.test(url); } /** * Creates a new URL by combining the specified URLs * * @param {string} baseURL The base URL * @param {string} relativeURL The relative URL * * @returns {string} The combined URL */ function combineURLs(baseURL, relativeURL) { return relativeURL ? baseURL.replace(/\/?\/$/, '') + '/' + relativeURL.replace(/^\/+/, '') : baseURL; } /** * Creates a new URL by combining the baseURL with the requestedURL, * only when the requestedURL is not already an absolute URL. * If the requestURL is absolute, this function returns the requestedURL untouched. 
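 *
 * Example (illustrative values, based on the helpers above):
 *   isAbsoluteURL('https://api.example.com/users')      // true
 *   isAbsoluteURL('//cdn.example.com/lib.js')            // true (protocol-relative)
 *   combineURLs('https://api.example.com/', '/users')    // 'https://api.example.com/users'
 *   buildFullPath('https://api.example.com', '/users')   // 'https://api.example.com/users'
 *   buildFullPath('https://api.example.com', 'https://other.test/x')
 *   // -> 'https://other.test/x' (kept as-is unless allowAbsoluteUrls is explicitly false)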
* * @param {string} baseURL The base URL * @param {string} requestedURL Absolute or relative URL to combine * * @returns {string} The combined full path */ function buildFullPath(baseURL, requestedURL, allowAbsoluteUrls) { let isRelativeUrl = !isAbsoluteURL(requestedURL); if (baseURL && (isRelativeUrl || allowAbsoluteUrls == false)) { return combineURLs(baseURL, requestedURL); } return requestedURL; } const VERSION = "1.10.0"; function parseProtocol(url) { const match = /^([-+\w]{1,25})(:?\/\/|:)/.exec(url); return match && match[1] || ''; } const DATA_URL_PATTERN = /^(?:([^;]+);)?(?:[^;]+;)?(base64|),([\s\S]*)$/; /** * Parse data uri to a Buffer or Blob * * @param {String} uri * @param {?Boolean} asBlob * @param {?Object} options * @param {?Function} options.Blob * * @returns {Buffer|Blob} */ function fromDataURI(uri, asBlob, options) { const _Blob = options && options.Blob || platform.classes.Blob; const protocol = parseProtocol(uri); if (asBlob === undefined && _Blob) { asBlob = true; } if (protocol === 'data') { uri = protocol.length ? uri.slice(protocol.length + 1) : uri; const match = DATA_URL_PATTERN.exec(uri); if (!match) { throw new AxiosError('Invalid URL', AxiosError.ERR_INVALID_URL); } const mime = match[1]; const isBase64 = match[2]; const body = match[3]; const buffer = Buffer.from(decodeURIComponent(body), isBase64 ? 'base64' : 'utf8'); if (asBlob) { if (!_Blob) { throw new AxiosError('Blob is not supported', AxiosError.ERR_NOT_SUPPORT); } return new _Blob([buffer], {type: mime}); } return buffer; } throw new AxiosError('Unsupported protocol ' + protocol, AxiosError.ERR_NOT_SUPPORT); } const kInternals = Symbol('internals'); class AxiosTransformStream extends stream__default["default"].Transform{ constructor(options) { options = utils$1.toFlatObject(options, { maxRate: 0, chunkSize: 64 * 1024, minChunkSize: 100, timeWindow: 500, ticksRate: 2, samplesCount: 15 }, null, (prop, source) => { return !utils$1.isUndefined(source[prop]); }); super({ readableHighWaterMark: options.chunkSize }); const internals = this[kInternals] = { timeWindow: options.timeWindow, chunkSize: options.chunkSize, maxRate: options.maxRate, minChunkSize: options.minChunkSize, bytesSeen: 0, isCaptured: false, notifiedBytesLoaded: 0, ts: Date.now(), bytes: 0, onReadCallback: null }; this.on('newListener', event => { if (event === 'progress') { if (!internals.isCaptured) { internals.isCaptured = true; } } }); } _read(size) { const internals = this[kInternals]; if (internals.onReadCallback) { internals.onReadCallback(); } return super._read(size); } _transform(chunk, encoding, callback) { const internals = this[kInternals]; const maxRate = internals.maxRate; const readableHighWaterMark = this.readableHighWaterMark; const timeWindow = internals.timeWindow; const divider = 1000 / timeWindow; const bytesThreshold = (maxRate / divider); const minChunkSize = internals.minChunkSize !== false ? 
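// Throttling sketch (illustrative numbers, not taken from the bundle): with the default
// timeWindow of 500ms, divider = 1000 / 500 = 2, so at most bytesThreshold = maxRate / 2
// bytes may be pushed per window. For an assumed maxRate of 1 MiB/s:
//
//   bytesThreshold = 1048576 / 2 = 524288 bytes per 500ms window
//   minChunkSize   = Math.max(100, 524288 * 0.01) ~= 5243 bytes (unless disabled)
//
// A chunk larger than the remaining window budget is split, and the remainder is
// re-queued for the next window via setTimeout(timeWindow - passed).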
Math.max(internals.minChunkSize, bytesThreshold * 0.01) : 0; const pushChunk = (_chunk, _callback) => { const bytes = Buffer.byteLength(_chunk); internals.bytesSeen += bytes; internals.bytes += bytes; internals.isCaptured && this.emit('progress', internals.bytesSeen); if (this.push(_chunk)) { process.nextTick(_callback); } else { internals.onReadCallback = () => { internals.onReadCallback = null; process.nextTick(_callback); }; } }; const transformChunk = (_chunk, _callback) => { const chunkSize = Buffer.byteLength(_chunk); let chunkRemainder = null; let maxChunkSize = readableHighWaterMark; let bytesLeft; let passed = 0; if (maxRate) { const now = Date.now(); if (!internals.ts || (passed = (now - internals.ts)) >= timeWindow) { internals.ts = now; bytesLeft = bytesThreshold - internals.bytes; internals.bytes = bytesLeft < 0 ? -bytesLeft : 0; passed = 0; } bytesLeft = bytesThreshold - internals.bytes; } if (maxRate) { if (bytesLeft <= 0) { // next time window return setTimeout(() => { _callback(null, _chunk); }, timeWindow - passed); } if (bytesLeft < maxChunkSize) { maxChunkSize = bytesLeft; } } if (maxChunkSize && chunkSize > maxChunkSize && (chunkSize - maxChunkSize) > minChunkSize) { chunkRemainder = _chunk.subarray(maxChunkSize); _chunk = _chunk.subarray(0, maxChunkSize); } pushChunk(_chunk, chunkRemainder ? () => { process.nextTick(_callback, null, chunkRemainder); } : _callback); }; transformChunk(chunk, function transformNextChunk(err, _chunk) { if (err) { return callback(err); } if (_chunk) { transformChunk(_chunk, transformNextChunk); } else { callback(null); } }); } } const AxiosTransformStream$1 = AxiosTransformStream; const {asyncIterator} = Symbol; const readBlob = async function* (blob) { if (blob.stream) { yield* blob.stream(); } else if (blob.arrayBuffer) { yield await blob.arrayBuffer(); } else if (blob[asyncIterator]) { yield* blob[asyncIterator](); } else { yield blob; } }; const readBlob$1 = readBlob; const BOUNDARY_ALPHABET = platform.ALPHABET.ALPHA_DIGIT + '-_'; const textEncoder = typeof TextEncoder === 'function' ? new TextEncoder() : new util__default["default"].TextEncoder(); const CRLF = '\r\n'; const CRLF_BYTES = textEncoder.encode(CRLF); const CRLF_BYTES_COUNT = 2; class FormDataPart { constructor(name, value) { const {escapeName} = this.constructor; const isStringValue = utils$1.isString(value); let headers = `Content-Disposition: form-data; name="${escapeName(name)}"${ !isStringValue && value.name ? `; filename="${escapeName(value.name)}"` : '' }${CRLF}`; if (isStringValue) { value = textEncoder.encode(String(value).replace(/\r?\n|\r\n?/g, CRLF)); } else { headers += `Content-Type: ${value.type || "application/octet-stream"}${CRLF}`; } this.headers = textEncoder.encode(headers + CRLF); this.contentLength = isStringValue ? 
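// Wire-format sketch (illustrative; the boundary shown is made up). Each FormDataPart
// serializes as its header block, a blank line, the body, then a trailing CRLF, and
// formDataToStream (below) interleaves the parts with "--boundary" lines plus a final
// "--boundary--" footer:
//
//   --axios-1.10.0-boundary-XXXXXXXX
//   Content-Disposition: form-data; name="file"; filename="a.txt"
//   Content-Type: text/plain
//
//   <file bytes>
//   --axios-1.10.0-boundary-XXXXXXXX--
//
// contentLength sums every part's size plus one boundary line per part and the footer,
// which is how the Content-Length request header can be precomputed.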
value.byteLength : value.size; this.size = this.headers.byteLength + this.contentLength + CRLF_BYTES_COUNT; this.name = name; this.value = value; } async *encode(){ yield this.headers; const {value} = this; if(utils$1.isTypedArray(value)) { yield value; } else { yield* readBlob$1(value); } yield CRLF_BYTES; } static escapeName(name) { return String(name).replace(/[\r\n"]/g, (match) => ({ '\r' : '%0D', '\n' : '%0A', '"' : '%22', }[match])); } } const formDataToStream = (form, headersHandler, options) => { const { tag = 'form-data-boundary', size = 25, boundary = tag + '-' + platform.generateString(size, BOUNDARY_ALPHABET) } = options || {}; if(!utils$1.isFormData(form)) { throw TypeError('FormData instance required'); } if (boundary.length < 1 || boundary.length > 70) { throw Error('boundary must be 10-70 characters long') } const boundaryBytes = textEncoder.encode('--' + boundary + CRLF); const footerBytes = textEncoder.encode('--' + boundary + '--' + CRLF); let contentLength = footerBytes.byteLength; const parts = Array.from(form.entries()).map(([name, value]) => { const part = new FormDataPart(name, value); contentLength += part.size; return part; }); contentLength += boundaryBytes.byteLength * parts.length; contentLength = utils$1.toFiniteNumber(contentLength); const computedHeaders = { 'Content-Type': `multipart/form-data; boundary=${boundary}` }; if (Number.isFinite(contentLength)) { computedHeaders['Content-Length'] = contentLength; } headersHandler && headersHandler(computedHeaders); return stream.Readable.from((async function *() { for(const part of parts) { yield boundaryBytes; yield* part.encode(); } yield footerBytes; })()); }; const formDataToStream$1 = formDataToStream; class ZlibHeaderTransformStream extends stream__default["default"].Transform { __transform(chunk, encoding, callback) { this.push(chunk); callback(); } _transform(chunk, encoding, callback) { if (chunk.length !== 0) { this._transform = this.__transform; // Add Default Compression headers if no zlib headers are present if (chunk[0] !== 120) { // Hex: 78 const header = Buffer.alloc(2); header[0] = 120; // Hex: 78 header[1] = 156; // Hex: 9C this.push(header, encoding); } } this.__transform(chunk, encoding, callback); } } const ZlibHeaderTransformStream$1 = ZlibHeaderTransformStream; const callbackify = (fn, reducer) => { return utils$1.isAsyncFn(fn) ? function (...args) { const cb = args.pop(); fn.apply(this, args).then((value) => { try { reducer ? cb(null, ...reducer(value)) : cb(null, value); } catch (err) { cb(err); } }, cb); } : fn; }; const callbackify$1 = callbackify; /** * Calculate data maxRate * @param {Number} [samplesCount= 10] * @param {Number} [min= 1000] * @returns {Function} */ function speedometer(samplesCount, min) { samplesCount = samplesCount || 10; const bytes = new Array(samplesCount); const timestamps = new Array(samplesCount); let head = 0; let tail = 0; let firstSampleTS; min = min !== undefined ? min : 1000; return function push(chunkLength) { const now = Date.now(); const startedAt = timestamps[tail]; if (!firstSampleTS) { firstSampleTS = now; } bytes[head] = chunkLength; timestamps[head] = now; let i = tail; let bytesCount = 0; while (i !== head) { bytesCount += bytes[i++]; i = i % samplesCount; } head = (head + 1) % samplesCount; if (head === tail) { tail = (tail + 1) % samplesCount; } if (now - firstSampleTS < min) { return; } const passed = startedAt && now - startedAt; return passed ? 
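// speedometer sketch (illustrative values): it keeps ring buffers of the last
// `samplesCount` chunk sizes and timestamps and reports bytes per second over that
// window, returning undefined until at least `min` milliseconds have been observed.
//
//   const track = speedometer(10, 1000);
//   track(65536);      // undefined (still warming up)
//   // ...more chunks over time...
//   track(65536);      // e.g. 524288 -> roughly 512 KiB/s across the sampled window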
Math.round(bytesCount * 1000 / passed) : undefined; }; } /** * Throttle decorator * @param {Function} fn * @param {Number} freq * @return {Function} */ function throttle(fn, freq) { let timestamp = 0; let threshold = 1000 / freq; let lastArgs; let timer; const invoke = (args, now = Date.now()) => { timestamp = now; lastArgs = null; if (timer) { clearTimeout(timer); timer = null; } fn.apply(null, args); }; const throttled = (...args) => { const now = Date.now(); const passed = now - timestamp; if ( passed >= threshold) { invoke(args, now); } else { lastArgs = args; if (!timer) { timer = setTimeout(() => { timer = null; invoke(lastArgs); }, threshold - passed); } } }; const flush = () => lastArgs && invoke(lastArgs); return [throttled, flush]; } const progressEventReducer = (listener, isDownloadStream, freq = 3) => { let bytesNotified = 0; const _speedometer = speedometer(50, 250); return throttle(e => { const loaded = e.loaded; const total = e.lengthComputable ? e.total : undefined; const progressBytes = loaded - bytesNotified; const rate = _speedometer(progressBytes); const inRange = loaded <= total; bytesNotified = loaded; const data = { loaded, total, progress: total ? (loaded / total) : undefined, bytes: progressBytes, rate: rate ? rate : undefined, estimated: rate && total && inRange ? (total - loaded) / rate : undefined, event: e, lengthComputable: total != null, [isDownloadStream ? 'download' : 'upload']: true }; listener(data); }, freq); }; const progressEventDecorator = (total, throttled) => { const lengthComputable = total != null; return [(loaded) => throttled[0]({ lengthComputable, total, loaded }), throttled[1]]; }; const asyncDecorator = (fn) => (...args) => utils$1.asap(() => fn(...args)); const zlibOptions = { flush: zlib__default["default"].constants.Z_SYNC_FLUSH, finishFlush: zlib__default["default"].constants.Z_SYNC_FLUSH }; const brotliOptions = { flush: zlib__default["default"].constants.BROTLI_OPERATION_FLUSH, finishFlush: zlib__default["default"].constants.BROTLI_OPERATION_FLUSH }; const isBrotliSupported = utils$1.isFunction(zlib__default["default"].createBrotliDecompress); const {http: httpFollow, https: httpsFollow} = followRedirects__default["default"]; const isHttps = /https:?/; const supportedProtocols = platform.protocols.map(protocol => { return protocol + ':'; }); const flushOnFinish = (stream, [throttled, flush]) => { stream .on('end', flush) .on('error', flush); return throttled; }; /** * If the proxy or config beforeRedirects functions are defined, call them with the options * object. * * @param {Object} options - The options object that was passed to the request. 
* * @returns {Object} */ function dispatchBeforeRedirect(options, responseDetails) { if (options.beforeRedirects.proxy) { options.beforeRedirects.proxy(options); } if (options.beforeRedirects.config) { options.beforeRedirects.config(options, responseDetails); } } /** * If the proxy or config afterRedirects functions are defined, call them with the options * * @param {http.ClientRequestArgs} options * @param {AxiosProxyConfig} configProxy configuration from Axios options object * @param {string} location * * @returns {http.ClientRequestArgs} */ function setProxy(options, configProxy, location) { let proxy = configProxy; if (!proxy && proxy !== false) { const proxyUrl = proxyFromEnv__default["default"].getProxyForUrl(location); if (proxyUrl) { proxy = new URL(proxyUrl); } } if (proxy) { // Basic proxy authorization if (proxy.username) { proxy.auth = (proxy.username || '') + ':' + (proxy.password || ''); } if (proxy.auth) { // Support proxy auth object form if (proxy.auth.username || proxy.auth.password) { proxy.auth = (proxy.auth.username || '') + ':' + (proxy.auth.password || ''); } const base64 = Buffer .from(proxy.auth, 'utf8') .toString('base64'); options.headers['Proxy-Authorization'] = 'Basic ' + base64; } options.headers.host = options.hostname + (options.port ? ':' + options.port : ''); const proxyHost = proxy.hostname || proxy.host; options.hostname = proxyHost; // Replace 'host' since options is not a URL object options.host = proxyHost; options.port = proxy.port; options.path = location; if (proxy.protocol) { options.protocol = proxy.protocol.includes(':') ? proxy.protocol : `${proxy.protocol}:`; } } options.beforeRedirects.proxy = function beforeRedirect(redirectOptions) { // Configure proxy for redirected request, passing the original config proxy to apply // the exact same logic as if the redirected request was performed by axios directly. setProxy(redirectOptions, configProxy, redirectOptions.href); }; } const isHttpAdapterSupported = typeof process !== 'undefined' && utils$1.kindOf(process) === 'process'; // temporary hotfix const wrapAsync = (asyncExecutor) => { return new Promise((resolve, reject) => { let onDone; let isDone; const done = (value, isRejected) => { if (isDone) return; isDone = true; onDone && onDone(value, isRejected); }; const _resolve = (value) => { done(value); resolve(value); }; const _reject = (reason) => { done(reason, true); reject(reason); }; asyncExecutor(_resolve, _reject, (onDoneHandler) => (onDone = onDoneHandler)).catch(_reject); }) }; const resolveFamily = ({address, family}) => { if (!utils$1.isString(address)) { throw TypeError('address must be a string'); } return ({ address, family: family || (address.indexOf('.') < 0 ? 6 : 4) }); }; const buildAddressEntry = (address, family) => resolveFamily(utils$1.isObject(address) ? address : {address, family}); /*eslint consistent-return:0*/ const httpAdapter = isHttpAdapterSupported && function httpAdapter(config) { return wrapAsync(async function dispatchHttpRequest(resolve, reject, onDone) { let {data, lookup, family} = config; const {responseType, responseEncoding} = config; const method = config.method.toUpperCase(); let isDone; let rejected = false; let req; if (lookup) { const _lookup = callbackify$1(lookup, (value) => utils$1.isArray(value) ? value : [value]); // hotfix to support opt.all option which is required for node 20.x lookup = (hostname, opt, cb) => { _lookup(hostname, opt, (err, arg0, arg1) => { if (err) { return cb(err); } const addresses = utils$1.isArray(arg0) ? 
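// lookup sketch (illustrative; the resolver shown is an assumption, not part of the
// bundle). A user-supplied config.lookup may be callback- or promise-based and may
// return one address or an array; callbackify$1 and buildAddressEntry normalize the
// result, and resolveFamily infers the family when it is omitted:
//
//   buildAddressEntry('127.0.0.1')                   // { address: '127.0.0.1', family: 4 }
//   buildAddressEntry('::1')                         // { address: '::1', family: 6 }
//   buildAddressEntry({ address: '10.0.0.2', family: 4 })
//
//   // e.g. config.lookup = async (hostname, opts) => ['93.184.216.34', 4];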
arg0.map(addr => buildAddressEntry(addr)) : [buildAddressEntry(arg0, arg1)]; opt.all ? cb(err, addresses) : cb(err, addresses[0].address, addresses[0].family); }); }; } // temporary internal emitter until the AxiosRequest class will be implemented const emitter = new events.EventEmitter(); const onFinished = () => { if (config.cancelToken) { config.cancelToken.unsubscribe(abort); } if (config.signal) { config.signal.removeEventListener('abort', abort); } emitter.removeAllListeners(); }; onDone((value, isRejected) => { isDone = true; if (isRejected) { rejected = true; onFinished(); } }); function abort(reason) { emitter.emit('abort', !reason || reason.type ? new CanceledError(null, config, req) : reason); } emitter.once('abort', reject); if (config.cancelToken || config.signal) { config.cancelToken && config.cancelToken.subscribe(abort); if (config.signal) { config.signal.aborted ? abort() : config.signal.addEventListener('abort', abort); } } // Parse url const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls); const parsed = new URL(fullPath, platform.hasBrowserEnv ? platform.origin : undefined); const protocol = parsed.protocol || supportedProtocols[0]; if (protocol === 'data:') { let convertedData; if (method !== 'GET') { return settle(resolve, reject, { status: 405, statusText: 'method not allowed', headers: {}, config }); } try { convertedData = fromDataURI(config.url, responseType === 'blob', { Blob: config.env && config.env.Blob }); } catch (err) { throw AxiosError.from(err, AxiosError.ERR_BAD_REQUEST, config); } if (responseType === 'text') { convertedData = convertedData.toString(responseEncoding); if (!responseEncoding || responseEncoding === 'utf8') { convertedData = utils$1.stripBOM(convertedData); } } else if (responseType === 'stream') { convertedData = stream__default["default"].Readable.from(convertedData); } return settle(resolve, reject, { data: convertedData, status: 200, statusText: 'OK', headers: new AxiosHeaders$1(), config }); } if (supportedProtocols.indexOf(protocol) === -1) { return reject(new AxiosError( 'Unsupported protocol ' + protocol, AxiosError.ERR_BAD_REQUEST, config )); } const headers = AxiosHeaders$1.from(config.headers).normalize(); // Set User-Agent (required by some servers) // See https://github.com/axios/axios/issues/69 // User-Agent is specified; handle case where no UA header is desired // Only set header if it hasn't been set in config headers.set('User-Agent', 'axios/' + VERSION, false); const {onUploadProgress, onDownloadProgress} = config; const maxRate = config.maxRate; let maxUploadRate = undefined; let maxDownloadRate = undefined; // support for spec compliant FormData objects if (utils$1.isSpecCompliantForm(data)) { const userBoundary = headers.getContentType(/boundary=([-_\w\d]{10,70})/i); data = formDataToStream$1(data, (formHeaders) => { headers.set(formHeaders); }, { tag: `axios-${VERSION}-boundary`, boundary: userBoundary && userBoundary[1] || undefined }); // support for https://www.npmjs.com/package/form-data api } else if (utils$1.isFormData(data) && utils$1.isFunction(data.getHeaders)) { headers.set(data.getHeaders()); if (!headers.hasContentLength()) { try { const knownLength = await util__default["default"].promisify(data.getLength).call(data); Number.isFinite(knownLength) && knownLength >= 0 && headers.setContentLength(knownLength); /*eslint no-empty:0*/ } catch (e) { } } } else if (utils$1.isBlob(data) || utils$1.isFile(data)) { data.size && headers.setContentType(data.type || 
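// Rewrite-flag sketch (illustrative): passing false as the third argument to set()
// only writes the header when it is not already set, which is how the default
// User-Agent above avoids clobbering a user-provided one; setting false as the
// *value* marks a header as "do not send" (toJSON() skips it).
//
//   headers.set('User-Agent', 'axios/1.10.0', false);   // applied only if unset
//   headers.set('X-Request-Id', 'abc123', true);         // always overwritten (made-up header)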
'application/octet-stream'); headers.setContentLength(data.size || 0); data = stream__default["default"].Readable.from(readBlob$1(data)); } else if (data && !utils$1.isStream(data)) { if (Buffer.isBuffer(data)) ; else if (utils$1.isArrayBuffer(data)) { data = Buffer.from(new Uint8Array(data)); } else if (utils$1.isString(data)) { data = Buffer.from(data, 'utf-8'); } else { return reject(new AxiosError( 'Data after transformation must be a string, an ArrayBuffer, a Buffer, or a Stream', AxiosError.ERR_BAD_REQUEST, config )); } // Add Content-Length header if data exists headers.setContentLength(data.length, false); if (config.maxBodyLength > -1 && data.length > config.maxBodyLength) { return reject(new AxiosError( 'Request body larger than maxBodyLength limit', AxiosError.ERR_BAD_REQUEST, config )); } } const contentLength = utils$1.toFiniteNumber(headers.getContentLength()); if (utils$1.isArray(maxRate)) { maxUploadRate = maxRate[0]; maxDownloadRate = maxRate[1]; } else { maxUploadRate = maxDownloadRate = maxRate; } if (data && (onUploadProgress || maxUploadRate)) { if (!utils$1.isStream(data)) { data = stream__default["default"].Readable.from(data, {objectMode: false}); } data = stream__default["default"].pipeline([data, new AxiosTransformStream$1({ maxRate: utils$1.toFiniteNumber(maxUploadRate) })], utils$1.noop); onUploadProgress && data.on('progress', flushOnFinish( data, progressEventDecorator( contentLength, progressEventReducer(asyncDecorator(onUploadProgress), false, 3) ) )); } // HTTP basic authentication let auth = undefined; if (config.auth) { const username = config.auth.username || ''; const password = config.auth.password || ''; auth = username + ':' + password; } if (!auth && parsed.username) { const urlUsername = parsed.username; const urlPassword = parsed.password; auth = urlUsername + ':' + urlPassword; } auth && headers.delete('authorization'); let path; try { path = buildURL( parsed.pathname + parsed.search, config.params, config.paramsSerializer ).replace(/^\?/, ''); } catch (err) { const customErr = new Error(err.message); customErr.config = config; customErr.url = config.url; customErr.exists = true; return reject(customErr); } headers.set( 'Accept-Encoding', 'gzip, compress, deflate' + (isBrotliSupported ? ', br' : ''), false ); const options = { path, method: method, headers: headers.toJSON(), agents: { http: config.httpAgent, https: config.httpsAgent }, auth, protocol, family, beforeRedirect: dispatchBeforeRedirect, beforeRedirects: {} }; // cacheable-lookup integration hotfix !utils$1.isUndefined(lookup) && (options.lookup = lookup); if (config.socketPath) { options.socketPath = config.socketPath; } else { options.hostname = parsed.hostname.startsWith("[") ? parsed.hostname.slice(1, -1) : parsed.hostname; options.port = parsed.port; setProxy(options, config.proxy, protocol + '//' + parsed.hostname + (parsed.port ? ':' + parsed.port : '') + options.path); } let transport; const isHttpsRequest = isHttps.test(options.protocol); options.agent = isHttpsRequest ? config.httpsAgent : config.httpAgent; if (config.transport) { transport = config.transport; } else if (config.maxRedirects === 0) { transport = isHttpsRequest ? https__default["default"] : http__default["default"]; } else { if (config.maxRedirects) { options.maxRedirects = config.maxRedirects; } if (config.beforeRedirect) { options.beforeRedirects.config = config.beforeRedirect; } transport = isHttpsRequest ? 
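// Transport selection sketch (summarizing the branch above): an explicit
// config.transport always wins; maxRedirects === 0 goes straight to node's
// http/https modules; anything else uses the follow-redirects wrappers so
// maxRedirects and beforeRedirect can be honored.
//
//   { maxRedirects: 0 }                    // -> core https / http
//   { maxRedirects: 5, beforeRedirect }    // -> httpsFollow / httpFollow
//   { transport: myCustomTransport }       // -> used as-is (made-up name)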
httpsFollow : httpFollow; } if (config.maxBodyLength > -1) { options.maxBodyLength = config.maxBodyLength; } else { // follow-redirects does not skip comparison, so it should always succeed for axios -1 unlimited options.maxBodyLength = Infinity; } if (config.insecureHTTPParser) { options.insecureHTTPParser = config.insecureHTTPParser; } // Create the request req = transport.request(options, function handleResponse(res) { if (req.destroyed) return; const streams = [res]; const responseLength = +res.headers['content-length']; if (onDownloadProgress || maxDownloadRate) { const transformStream = new AxiosTransformStream$1({ maxRate: utils$1.toFiniteNumber(maxDownloadRate) }); onDownloadProgress && transformStream.on('progress', flushOnFinish( transformStream, progressEventDecorator( responseLength, progressEventReducer(asyncDecorator(onDownloadProgress), true, 3) ) )); streams.push(transformStream); } // decompress the response body transparently if required let responseStream = res; // return the last request in case of redirects const lastRequest = res.req || req; // if decompress disabled we should not decompress if (config.decompress !== false && res.headers['content-encoding']) { // if no content, but headers still say that it is encoded, // remove the header not confuse downstream operations if (method === 'HEAD' || res.statusCode === 204) { delete res.headers['content-encoding']; } switch ((res.headers['content-encoding'] || '').toLowerCase()) { /*eslint default-case:0*/ case 'gzip': case 'x-gzip': case 'compress': case 'x-compress': // add the unzipper to the body stream processing pipeline streams.push(zlib__default["default"].createUnzip(zlibOptions)); // remove the content-encoding in order to not confuse downstream operations delete res.headers['content-encoding']; break; case 'deflate': streams.push(new ZlibHeaderTransformStream$1()); // add the unzipper to the body stream processing pipeline streams.push(zlib__default["default"].createUnzip(zlibOptions)); // remove the content-encoding in order to not confuse downstream operations delete res.headers['content-encoding']; break; case 'br': if (isBrotliSupported) { streams.push(zlib__default["default"].createBrotliDecompress(brotliOptions)); delete res.headers['content-encoding']; } } } responseStream = streams.length > 1 ? 
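// Decompression sketch (summarizing the switch above): the raw response is piped
// through zero or more transforms before settling. For a gzip-encoded body with a
// download progress callback the pipeline is roughly:
//
//   res -> AxiosTransformStream (progress / rate limit) -> zlib.createUnzip() -> responseStream
//
// For 'deflate', ZlibHeaderTransformStream first prepends the 0x78 0x9C zlib header
// when the payload is raw deflate; 'br' is handled only when createBrotliDecompress
// exists. The content-encoding header is deleted so downstream code sees decoded bytes.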
stream__default["default"].pipeline(streams, utils$1.noop) : streams[0]; const offListeners = stream__default["default"].finished(responseStream, () => { offListeners(); onFinished(); }); const response = { status: res.statusCode, statusText: res.statusMessage, headers: new AxiosHeaders$1(res.headers), config, request: lastRequest }; if (responseType === 'stream') { response.data = responseStream; settle(resolve, reject, response); } else { const responseBuffer = []; let totalResponseBytes = 0; responseStream.on('data', function handleStreamData(chunk) { responseBuffer.push(chunk); totalResponseBytes += chunk.length; // make sure the content length is not over the maxContentLength if specified if (config.maxContentLength > -1 && totalResponseBytes > config.maxContentLength) { // stream.destroy() emit aborted event before calling reject() on Node.js v16 rejected = true; responseStream.destroy(); reject(new AxiosError('maxContentLength size of ' + config.maxContentLength + ' exceeded', AxiosError.ERR_BAD_RESPONSE, config, lastRequest)); } }); responseStream.on('aborted', function handlerStreamAborted() { if (rejected) { return; } const err = new AxiosError( 'stream has been aborted', AxiosError.ERR_BAD_RESPONSE, config, lastRequest ); responseStream.destroy(err); reject(err); }); responseStream.on('error', function handleStreamError(err) { if (req.destroyed) return; reject(AxiosError.from(err, null, config, lastRequest)); }); responseStream.on('end', function handleStreamEnd() { try { let responseData = responseBuffer.length === 1 ? responseBuffer[0] : Buffer.concat(responseBuffer); if (responseType !== 'arraybuffer') { responseData = responseData.toString(responseEncoding); if (!responseEncoding || responseEncoding === 'utf8') { responseData = utils$1.stripBOM(responseData); } } response.data = responseData; } catch (err) { return reject(AxiosError.from(err, null, config, response.request, response)); } settle(resolve, reject, response); }); } emitter.once('abort', err => { if (!responseStream.destroyed) { responseStream.emit('error', err); responseStream.destroy(); } }); }); emitter.once('abort', err => { reject(err); req.destroy(err); }); // Handle errors req.on('error', function handleRequestError(err) { // @todo remove // if (req.aborted && err.code !== AxiosError.ERR_FR_TOO_MANY_REDIRECTS) return; reject(AxiosError.from(err, null, config, req)); }); // set tcp keep alive to prevent drop connection by peer req.on('socket', function handleRequestSocket(socket) { // default interval of sending ack packet is 1 minute socket.setKeepAlive(true, 1000 * 60); }); // Handle request timeout if (config.timeout) { // This is forcing a int timeout to avoid problems if the `req` interface doesn't handle other types. const timeout = parseInt(config.timeout, 10); if (Number.isNaN(timeout)) { reject(new AxiosError( 'error trying to parse `config.timeout` to int', AxiosError.ERR_BAD_OPTION_VALUE, config, req )); return; } // Sometime, the response will be very slow, and does not respond, the connect event will be block by event loop system. // And timer callback will be fired, and abort() will be invoked before connection, then get "socket hang up" and code ECONNRESET. // At this time, if we have a large number of request, nodejs will hang up some socket on background. and the number will up and up. // And then these socket which be hang up will devouring CPU little by little. 
// ClientRequest.setTimeout will be fired on the specify milliseconds, and can make sure that abort() will be fired after connect. req.setTimeout(timeout, function handleRequestTimeout() { if (isDone) return; let timeoutErrorMessage = config.timeout ? 'timeout of ' + config.timeout + 'ms exceeded' : 'timeout exceeded'; const transitional = config.transitional || transitionalDefaults; if (config.timeoutErrorMessage) { timeoutErrorMessage = config.timeoutErrorMessage; } reject(new AxiosError( timeoutErrorMessage, transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED, config, req )); abort(); }); } // Send the request if (utils$1.isStream(data)) { let ended = false; let errored = false; data.on('end', () => { ended = true; }); data.once('error', err => { errored = true; req.destroy(err); }); data.on('close', () => { if (!ended && !errored) { abort(new CanceledError('Request stream has been aborted', config, req)); } }); data.pipe(req); } else { req.end(data); } }); }; const isURLSameOrigin = platform.hasStandardBrowserEnv ? ((origin, isMSIE) => (url) => { url = new URL(url, platform.origin); return ( origin.protocol === url.protocol && origin.host === url.host && (isMSIE || origin.port === url.port) ); })( new URL(platform.origin), platform.navigator && /(msie|trident)/i.test(platform.navigator.userAgent) ) : () => true; const cookies = platform.hasStandardBrowserEnv ? // Standard browser envs support document.cookie { write(name, value, expires, path, domain, secure) { const cookie = [name + '=' + encodeURIComponent(value)]; utils$1.isNumber(expires) && cookie.push('expires=' + new Date(expires).toGMTString()); utils$1.isString(path) && cookie.push('path=' + path); utils$1.isString(domain) && cookie.push('domain=' + domain); secure === true && cookie.push('secure'); document.cookie = cookie.join('; '); }, read(name) { const match = document.cookie.match(new RegExp('(^|;\\s*)(' + name + ')=([^;]*)')); return (match ? decodeURIComponent(match[3]) : null); }, remove(name) { this.write(name, '', Date.now() - 86400000); } } : // Non-standard browser env (web workers, react-native) lack needed support. { write() {}, read() { return null; }, remove() {} }; const headersToObject = (thing) => thing instanceof AxiosHeaders$1 ? { ...thing } : thing; /** * Config-specific merge-function which creates a new config-object * by merging two configuration objects together. 
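 *
 * Example (illustrative): request-specific options such as `url`, `method` and `data`
 * are taken from config2 only, most other options prefer config2 and fall back to
 * config1, and `headers` are merged deeply and case-insensitively:
 *
 *   mergeConfig(
 *     {baseURL: 'https://api.example.com', headers: {common: {Accept: 'application/json'}}},
 *     {url: '/users', headers: {'X-Trace-Id': 'abc123'}}
 *   )
 *   // -> { baseURL: 'https://api.example.com', url: '/users',
 *   //      headers: { common: {Accept: 'application/json'}, 'X-Trace-Id': 'abc123' } }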
* * @param {Object} config1 * @param {Object} config2 * * @returns {Object} New object resulting from merging config2 to config1 */ function mergeConfig(config1, config2) { // eslint-disable-next-line no-param-reassign config2 = config2 || {}; const config = {}; function getMergedValue(target, source, prop, caseless) { if (utils$1.isPlainObject(target) && utils$1.isPlainObject(source)) { return utils$1.merge.call({caseless}, target, source); } else if (utils$1.isPlainObject(source)) { return utils$1.merge({}, source); } else if (utils$1.isArray(source)) { return source.slice(); } return source; } // eslint-disable-next-line consistent-return function mergeDeepProperties(a, b, prop , caseless) { if (!utils$1.isUndefined(b)) { return getMergedValue(a, b, prop , caseless); } else if (!utils$1.isUndefined(a)) { return getMergedValue(undefined, a, prop , caseless); } } // eslint-disable-next-line consistent-return function valueFromConfig2(a, b) { if (!utils$1.isUndefined(b)) { return getMergedValue(undefined, b); } } // eslint-disable-next-line consistent-return function defaultToConfig2(a, b) { if (!utils$1.isUndefined(b)) { return getMergedValue(undefined, b); } else if (!utils$1.isUndefined(a)) { return getMergedValue(undefined, a); } } // eslint-disable-next-line consistent-return function mergeDirectKeys(a, b, prop) { if (prop in config2) { return getMergedValue(a, b); } else if (prop in config1) { return getMergedValue(undefined, a); } } const mergeMap = { url: valueFromConfig2, method: valueFromConfig2, data: valueFromConfig2, baseURL: defaultToConfig2, transformRequest: defaultToConfig2, transformResponse: defaultToConfig2, paramsSerializer: defaultToConfig2, timeout: defaultToConfig2, timeoutMessage: defaultToConfig2, withCredentials: defaultToConfig2, withXSRFToken: defaultToConfig2, adapter: defaultToConfig2, responseType: defaultToConfig2, xsrfCookieName: defaultToConfig2, xsrfHeaderName: defaultToConfig2, onUploadProgress: defaultToConfig2, onDownloadProgress: defaultToConfig2, decompress: defaultToConfig2, maxContentLength: defaultToConfig2, maxBodyLength: defaultToConfig2, beforeRedirect: defaultToConfig2, transport: defaultToConfig2, httpAgent: defaultToConfig2, httpsAgent: defaultToConfig2, cancelToken: defaultToConfig2, socketPath: defaultToConfig2, responseEncoding: defaultToConfig2, validateStatus: mergeDirectKeys, headers: (a, b , prop) => mergeDeepProperties(headersToObject(a), headersToObject(b),prop, true) }; utils$1.forEach(Object.keys(Object.assign({}, config1, config2)), function computeConfigValue(prop) { const merge = mergeMap[prop] || mergeDeepProperties; const configValue = merge(config1[prop], config2[prop], prop); (utils$1.isUndefined(configValue) && merge !== mergeDirectKeys) || (config[prop] = configValue); }); return config; } const resolveConfig = (config) => { const newConfig = mergeConfig({}, config); let {data, withXSRFToken, xsrfHeaderName, xsrfCookieName, headers, auth} = newConfig; newConfig.headers = headers = AxiosHeaders$1.from(headers); newConfig.url = buildURL(buildFullPath(newConfig.baseURL, newConfig.url, newConfig.allowAbsoluteUrls), config.params, config.paramsSerializer); // HTTP basic authentication if (auth) { headers.set('Authorization', 'Basic ' + btoa((auth.username || '') + ':' + (auth.password ? 
unescape(encodeURIComponent(auth.password)) : '')) ); } let contentType; if (utils$1.isFormData(data)) { if (platform.hasStandardBrowserEnv || platform.hasStandardBrowserWebWorkerEnv) { headers.setContentType(undefined); // Let the browser set it } else if ((contentType = headers.getContentType()) !== false) { // fix semicolon duplication issue for ReactNative FormData implementation const [type, ...tokens] = contentType ? contentType.split(';').map(token => token.trim()).filter(Boolean) : []; headers.setContentType([type || 'multipart/form-data', ...tokens].join('; ')); } } // Add xsrf header // This is only done if running in a standard browser environment. // Specifically not if we're in a web worker, or react-native. if (platform.hasStandardBrowserEnv) { withXSRFToken && utils$1.isFunction(withXSRFToken) && (withXSRFToken = withXSRFToken(newConfig)); if (withXSRFToken || (withXSRFToken !== false && isURLSameOrigin(newConfig.url))) { // Add xsrf header const xsrfValue = xsrfHeaderName && xsrfCookieName && cookies.read(xsrfCookieName); if (xsrfValue) { headers.set(xsrfHeaderName, xsrfValue); } } } return newConfig; }; const isXHRAdapterSupported = typeof XMLHttpRequest !== 'undefined'; const xhrAdapter = isXHRAdapterSupported && function (config) { return new Promise(function dispatchXhrRequest(resolve, reject) { const _config = resolveConfig(config); let requestData = _config.data; const requestHeaders = AxiosHeaders$1.from(_config.headers).normalize(); let {responseType, onUploadProgress, onDownloadProgress} = _config; let onCanceled; let uploadThrottled, downloadThrottled; let flushUpload, flushDownload; function done() { flushUpload && flushUpload(); // flush events flushDownload && flushDownload(); // flush events _config.cancelToken && _config.cancelToken.unsubscribe(onCanceled); _config.signal && _config.signal.removeEventListener('abort', onCanceled); } let request = new XMLHttpRequest(); request.open(_config.method.toUpperCase(), _config.url, true); // Set the request timeout in MS request.timeout = _config.timeout; function onloadend() { if (!request) { return; } // Prepare the response const responseHeaders = AxiosHeaders$1.from( 'getAllResponseHeaders' in request && request.getAllResponseHeaders() ); const responseData = !responseType || responseType === 'text' || responseType === 'json' ? 
request.responseText : request.response; const response = { data: responseData, status: request.status, statusText: request.statusText, headers: responseHeaders, config, request }; settle(function _resolve(value) { resolve(value); done(); }, function _reject(err) { reject(err); done(); }, response); // Clean up request request = null; } if ('onloadend' in request) { // Use onloadend if available request.onloadend = onloadend; } else { // Listen for ready state to emulate onloadend request.onreadystatechange = function handleLoad() { if (!request || request.readyState !== 4) { return; } // The request errored out and we didn't get a response, this will be // handled by onerror instead // With one exception: request that using file: protocol, most browsers // will return status as 0 even though it's a successful request if (request.status === 0 && !(request.responseURL && request.responseURL.indexOf('file:') === 0)) { return; } // readystate handler is calling before onerror or ontimeout handlers, // so we should call onloadend on the next 'tick' setTimeout(onloadend); }; } // Handle browser request cancellation (as opposed to a manual cancellation) request.onabort = function handleAbort() { if (!request) { return; } reject(new AxiosError('Request aborted', AxiosError.ECONNABORTED, config, request)); // Clean up request request = null; }; // Handle low level network errors request.onerror = function handleError() { // Real errors are hidden from us by the browser // onerror should only fire if it's a network error reject(new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request)); // Clean up request request = null; }; // Handle timeout request.ontimeout = function handleTimeout() { let timeoutErrorMessage = _config.timeout ? 'timeout of ' + _config.timeout + 'ms exceeded' : 'timeout exceeded'; const transitional = _config.transitional || transitionalDefaults; if (_config.timeoutErrorMessage) { timeoutErrorMessage = _config.timeoutErrorMessage; } reject(new AxiosError( timeoutErrorMessage, transitional.clarifyTimeoutError ? AxiosError.ETIMEDOUT : AxiosError.ECONNABORTED, config, request)); // Clean up request request = null; }; // Remove Content-Type if data is undefined requestData === undefined && requestHeaders.setContentType(null); // Add headers to the request if ('setRequestHeader' in request) { utils$1.forEach(requestHeaders.toJSON(), function setRequestHeader(val, key) { request.setRequestHeader(key, val); }); } // Add withCredentials to request if needed if (!utils$1.isUndefined(_config.withCredentials)) { request.withCredentials = !!_config.withCredentials; } // Add responseType to request if needed if (responseType && responseType !== 'json') { request.responseType = _config.responseType; } // Handle progress if needed if (onDownloadProgress) { ([downloadThrottled, flushDownload] = progressEventReducer(onDownloadProgress, true)); request.addEventListener('progress', downloadThrottled); } // Not all browsers support upload events if (onUploadProgress && request.upload) { ([uploadThrottled, flushUpload] = progressEventReducer(onUploadProgress)); request.upload.addEventListener('progress', uploadThrottled); request.upload.addEventListener('loadend', flushUpload); } if (_config.cancelToken || _config.signal) { // Handle cancellation // eslint-disable-next-line func-names onCanceled = cancel => { if (!request) { return; } reject(!cancel || cancel.type ? 
new CanceledError(null, config, request) : cancel); request.abort(); request = null; }; _config.cancelToken && _config.cancelToken.subscribe(onCanceled); if (_config.signal) { _config.signal.aborted ? onCanceled() : _config.signal.addEventListener('abort', onCanceled); } } const protocol = parseProtocol(_config.url); if (protocol && platform.protocols.indexOf(protocol) === -1) { reject(new AxiosError('Unsupported protocol ' + protocol + ':', AxiosError.ERR_BAD_REQUEST, config)); return; } // Send the request request.send(requestData || null); }); }; const composeSignals = (signals, timeout) => { const {length} = (signals = signals ? signals.filter(Boolean) : []); if (timeout || length) { let controller = new AbortController(); let aborted; const onabort = function (reason) { if (!aborted) { aborted = true; unsubscribe(); const err = reason instanceof Error ? reason : this.reason; controller.abort(err instanceof AxiosError ? err : new CanceledError(err instanceof Error ? err.message : err)); } }; let timer = timeout && setTimeout(() => { timer = null; onabort(new AxiosError(`timeout ${timeout} of ms exceeded`, AxiosError.ETIMEDOUT)); }, timeout); const unsubscribe = () => { if (signals) { timer && clearTimeout(timer); timer = null; signals.forEach(signal => { signal.unsubscribe ? signal.unsubscribe(onabort) : signal.removeEventListener('abort', onabort); }); signals = null; } }; signals.forEach((signal) => signal.addEventListener('abort', onabort)); const {signal} = controller; signal.unsubscribe = () => utils$1.asap(unsubscribe); return signal; } }; const composeSignals$1 = composeSignals; const streamChunk = function* (chunk, chunkSize) { let len = chunk.byteLength; if (!chunkSize || len < chunkSize) { yield chunk; return; } let pos = 0; let end; while (pos < len) { end = pos + chunkSize; yield chunk.slice(pos, end); pos = end; } }; const readBytes = async function* (iterable, chunkSize) { for await (const chunk of readStream(iterable)) { yield* streamChunk(chunk, chunkSize); } }; const readStream = async function* (stream) { if (stream[Symbol.asyncIterator]) { yield* stream; return; } const reader = stream.getReader(); try { for (;;) { const {done, value} = await reader.read(); if (done) { break; } yield value; } } finally { await reader.cancel(); } }; const trackStream = (stream, chunkSize, onProgress, onFinish) => { const iterator = readBytes(stream, chunkSize); let bytes = 0; let done; let _onFinish = (e) => { if (!done) { done = true; onFinish && onFinish(e); } }; return new ReadableStream({ async pull(controller) { try { const {done, value} = await iterator.next(); if (done) { _onFinish(); controller.close(); return; } let len = value.byteLength; if (onProgress) { let loadedBytes = bytes += len; onProgress(loadedBytes); } controller.enqueue(new Uint8Array(value)); } catch (err) { _onFinish(err); throw err; } }, cancel(reason) { _onFinish(reason); return iterator.return(); } }, { highWaterMark: 2 }) }; const isFetchSupported = typeof fetch === 'function' && typeof Request === 'function' && typeof Response === 'function'; const isReadableStreamSupported = isFetchSupported && typeof ReadableStream === 'function'; // used only inside the fetch adapter const encodeText = isFetchSupported && (typeof TextEncoder === 'function' ? 
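// composeSignals sketch (illustrative): it folds any number of abort sources plus an
// optional timeout into a single AbortSignal and exposes unsubscribe() so the fetch
// adapter can detach listeners once the response has been consumed.
//
//   const signal = composeSignals$1(
//     [config.signal, cancelToken && cancelToken.toAbortSignal()], 5000);
//   // aborts when either source aborts, or after 5000ms with an ETIMEDOUT AxiosError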
((encoder) => (str) => encoder.encode(str))(new TextEncoder()) : async (str) => new Uint8Array(await new Response(str).arrayBuffer()) ); const test = (fn, ...args) => { try { return !!fn(...args); } catch (e) { return false } }; const supportsRequestStream = isReadableStreamSupported && test(() => { let duplexAccessed = false; const hasContentType = new Request(platform.origin, { body: new ReadableStream(), method: 'POST', get duplex() { duplexAccessed = true; return 'half'; }, }).headers.has('Content-Type'); return duplexAccessed && !hasContentType; }); const DEFAULT_CHUNK_SIZE = 64 * 1024; const supportsResponseStream = isReadableStreamSupported && test(() => utils$1.isReadableStream(new Response('').body)); const resolvers = { stream: supportsResponseStream && ((res) => res.body) }; isFetchSupported && (((res) => { ['text', 'arrayBuffer', 'blob', 'formData', 'stream'].forEach(type => { !resolvers[type] && (resolvers[type] = utils$1.isFunction(res[type]) ? (res) => res[type]() : (_, config) => { throw new AxiosError(`Response type '${type}' is not supported`, AxiosError.ERR_NOT_SUPPORT, config); }); }); })(new Response)); const getBodyLength = async (body) => { if (body == null) { return 0; } if(utils$1.isBlob(body)) { return body.size; } if(utils$1.isSpecCompliantForm(body)) { const _request = new Request(platform.origin, { method: 'POST', body, }); return (await _request.arrayBuffer()).byteLength; } if(utils$1.isArrayBufferView(body) || utils$1.isArrayBuffer(body)) { return body.byteLength; } if(utils$1.isURLSearchParams(body)) { body = body + ''; } if(utils$1.isString(body)) { return (await encodeText(body)).byteLength; } }; const resolveBodyLength = async (headers, body) => { const length = utils$1.toFiniteNumber(headers.getContentLength()); return length == null ? getBodyLength(body) : length; }; const fetchAdapter = isFetchSupported && (async (config) => { let { url, method, data, signal, cancelToken, timeout, onDownloadProgress, onUploadProgress, responseType, headers, withCredentials = 'same-origin', fetchOptions } = resolveConfig(config); responseType = responseType ? (responseType + '').toLowerCase() : 'text'; let composedSignal = composeSignals$1([signal, cancelToken && cancelToken.toAbortSignal()], timeout); let request; const unsubscribe = composedSignal && composedSignal.unsubscribe && (() => { composedSignal.unsubscribe(); }); let requestContentLength; try { if ( onUploadProgress && supportsRequestStream && method !== 'get' && method !== 'head' && (requestContentLength = await resolveBodyLength(headers, data)) !== 0 ) { let _request = new Request(url, { method: 'POST', body: data, duplex: "half" }); let contentTypeHeader; if (utils$1.isFormData(data) && (contentTypeHeader = _request.headers.get('content-type'))) { headers.setContentType(contentTypeHeader); } if (_request.body) { const [onProgress, flush] = progressEventDecorator( requestContentLength, progressEventReducer(asyncDecorator(onUploadProgress)) ); data = trackStream(_request.body, DEFAULT_CHUNK_SIZE, onProgress, flush); } } if (!utils$1.isString(withCredentials)) { withCredentials = withCredentials ? 'include' : 'omit'; } // Cloudflare Workers throws when credentials are defined // see https://github.com/cloudflare/workerd/issues/902 const isCredentialsSupported = "credentials" in Request.prototype; request = new Request(url, { ...fetchOptions, signal: composedSignal, method: method.toUpperCase(), headers: headers.normalize().toJSON(), body: data, duplex: "half", credentials: isCredentialsSupported ? 
withCredentials : undefined }); let response = await fetch(request, fetchOptions); const isStreamResponse = supportsResponseStream && (responseType === 'stream' || responseType === 'response'); if (supportsResponseStream && (onDownloadProgress || (isStreamResponse && unsubscribe))) { const options = {}; ['status', 'statusText', 'headers'].forEach(prop => { options[prop] = response[prop]; }); const responseContentLength = utils$1.toFiniteNumber(response.headers.get('content-length')); const [onProgress, flush] = onDownloadProgress && progressEventDecorator( responseContentLength, progressEventReducer(asyncDecorator(onDownloadProgress), true) ) || []; response = new Response( trackStream(response.body, DEFAULT_CHUNK_SIZE, onProgress, () => { flush && flush(); unsubscribe && unsubscribe(); }), options ); } responseType = responseType || 'text'; let responseData = await resolvers[utils$1.findKey(resolvers, responseType) || 'text'](response, config); !isStreamResponse && unsubscribe && unsubscribe(); return await new Promise((resolve, reject) => { settle(resolve, reject, { data: responseData, headers: AxiosHeaders$1.from(response.headers), status: response.status, statusText: response.statusText, config, request }); }) } catch (err) { unsubscribe && unsubscribe(); if (err && err.name === 'TypeError' && /Load failed|fetch/i.test(err.message)) { throw Object.assign( new AxiosError('Network Error', AxiosError.ERR_NETWORK, config, request), { cause: err.cause || err } ) } throw AxiosError.from(err, err && err.code, config, request); } }); const knownAdapters = { http: httpAdapter, xhr: xhrAdapter, fetch: fetchAdapter }; utils$1.forEach(knownAdapters, (fn, value) => { if (fn) { try { Object.defineProperty(fn, 'name', {value}); } catch (e) { // eslint-disable-next-line no-empty } Object.defineProperty(fn, 'adapterName', {value}); } }); const renderReason = (reason) => `- ${reason}`; const isResolvedHandle = (adapter) => utils$1.isFunction(adapter) || adapter === null || adapter === false; const adapters = { getAdapter: (adapters) => { adapters = utils$1.isArray(adapters) ? adapters : [adapters]; const {length} = adapters; let nameOrAdapter; let adapter; const rejectedReasons = {}; for (let i = 0; i < length; i++) { nameOrAdapter = adapters[i]; let id; adapter = nameOrAdapter; if (!isResolvedHandle(nameOrAdapter)) { adapter = knownAdapters[(id = String(nameOrAdapter)).toLowerCase()]; if (adapter === undefined) { throw new AxiosError(`Unknown adapter '${id}'`); } } if (adapter) { break; } rejectedReasons[id || '#' + i] = adapter; } if (!adapter) { const reasons = Object.entries(rejectedReasons) .map(([id, state]) => `adapter ${id} ` + (state === false ? 'is not supported by the environment' : 'is not available in the build') ); let s = length ? (reasons.length > 1 ? 'since :\n' + reasons.map(renderReason).join('\n') : ' ' + renderReason(reasons[0])) : 'as no adapter specified'; throw new AxiosError( `There is no suitable adapter to dispatch the request ` + s, 'ERR_NOT_SUPPORT' ); } return adapter; }, adapters: knownAdapters }; /** * Throws a `CanceledError` if cancellation has been requested. * * @param {Object} config The config that is to be used for the request * * @returns {void} */ function throwIfCancellationRequested(config) { if (config.cancelToken) { config.cancelToken.throwIfRequested(); } if (config.signal && config.signal.aborted) { throw new CanceledError(null, config); } } /** * Dispatch a request to the server using the configured adapter. 
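 *
 * Adapter resolution (illustrative): `config.adapter` may be an adapter function, a
 * name, or an ordered list of names; adapters.getAdapter picks the first entry that is
 * available in the current environment (typically 'http' under node, 'xhr'/'fetch' in
 * browsers) and throws a descriptive AxiosError when none is usable.
 *
 *   adapters.getAdapter(['xhr', 'http', 'fetch'])   // first supported adapter wins
 *   adapters.getAdapter('http')                      // httpAdapter, or throws if unsupported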
* * @param {object} config The config that is to be used for the request * * @returns {Promise} The Promise to be fulfilled */ function dispatchRequest(config) { throwIfCancellationRequested(config); config.headers = AxiosHeaders$1.from(config.headers); // Transform request data config.data = transformData.call( config, config.transformRequest ); if (['post', 'put', 'patch'].indexOf(config.method) !== -1) { config.headers.setContentType('application/x-www-form-urlencoded', false); } const adapter = adapters.getAdapter(config.adapter || defaults$1.adapter); return adapter(config).then(function onAdapterResolution(response) { throwIfCancellationRequested(config); // Transform response data response.data = transformData.call( config, config.transformResponse, response ); response.headers = AxiosHeaders$1.from(response.headers); return response; }, function onAdapterRejection(reason) { if (!isCancel(reason)) { throwIfCancellationRequested(config); // Transform response data if (reason && reason.response) { reason.response.data = transformData.call( config, config.transformResponse, reason.response ); reason.response.headers = AxiosHeaders$1.from(reason.response.headers); } } return Promise.reject(reason); }); } const validators$1 = {}; // eslint-disable-next-line func-names ['object', 'boolean', 'number', 'function', 'string', 'symbol'].forEach((type, i) => { validators$1[type] = function validator(thing) { return typeof thing === type || 'a' + (i < 1 ? 'n ' : ' ') + type; }; }); const deprecatedWarnings = {}; /** * Transitional option validator * * @param {function|boolean?} validator - set to false if the transitional option has been removed * @param {string?} version - deprecated version / removed since version * @param {string?} message - some message with additional info * * @returns {function} */ validators$1.transitional = function transitional(validator, version, message) { function formatMessage(opt, desc) { return '[Axios v' + VERSION + '] Transitional option \'' + opt + '\'' + desc + (message ? '. ' + message : ''); } // eslint-disable-next-line func-names return (value, opt, opts) => { if (validator === false) { throw new AxiosError( formatMessage(opt, ' has been removed' + (version ? ' in ' + version : '')), AxiosError.ERR_DEPRECATED ); } if (version && !deprecatedWarnings[opt]) { deprecatedWarnings[opt] = true; // eslint-disable-next-line no-console console.warn( formatMessage( opt, ' has been deprecated since v' + version + ' and will be removed in the near future' ) ); } return validator ? 
validator(value, opt, opts) : true; }; }; validators$1.spelling = function spelling(correctSpelling) { return (value, opt) => { // eslint-disable-next-line no-console console.warn(`${opt} is likely a misspelling of ${correctSpelling}`); return true; } }; /** * Assert object's properties type * * @param {object} options * @param {object} schema * @param {boolean?} allowUnknown * * @returns {object} */ function assertOptions(options, schema, allowUnknown) { if (typeof options !== 'object') { throw new AxiosError('options must be an object', AxiosError.ERR_BAD_OPTION_VALUE); } const keys = Object.keys(options); let i = keys.length; while (i-- > 0) { const opt = keys[i]; const validator = schema[opt]; if (validator) { const value = options[opt]; const result = value === undefined || validator(value, opt, options); if (result !== true) { throw new AxiosError('option ' + opt + ' must be ' + result, AxiosError.ERR_BAD_OPTION_VALUE); } continue; } if (allowUnknown !== true) { throw new AxiosError('Unknown option ' + opt, AxiosError.ERR_BAD_OPTION); } } } const validator = { assertOptions, validators: validators$1 }; const validators = validator.validators; /** * Create a new instance of Axios * * @param {Object} instanceConfig The default config for the instance * * @return {Axios} A new instance of Axios */ class Axios { constructor(instanceConfig) { this.defaults = instanceConfig || {}; this.interceptors = { request: new InterceptorManager$1(), response: new InterceptorManager$1() }; } /** * Dispatch a request * * @param {String|Object} configOrUrl The config specific for this request (merged with this.defaults) * @param {?Object} config * * @returns {Promise} The Promise to be fulfilled */ async request(configOrUrl, config) { try { return await this._request(configOrUrl, config); } catch (err) { if (err instanceof Error) { let dummy = {}; Error.captureStackTrace ? Error.captureStackTrace(dummy) : (dummy = new Error()); // slice off the Error: ... line const stack = dummy.stack ? 
dummy.stack.replace(/^.+\n/, '') : ''; try { if (!err.stack) { err.stack = stack; // match without the 2 top stack lines } else if (stack && !String(err.stack).endsWith(stack.replace(/^.+\n.+\n/, ''))) { err.stack += '\n' + stack; } } catch (e) { // ignore the case where "stack" is an un-writable property } } throw err; } } _request(configOrUrl, config) { /*eslint no-param-reassign:0*/ // Allow for axios('example/url'[, config]) a la fetch API if (typeof configOrUrl === 'string') { config = config || {}; config.url = configOrUrl; } else { config = configOrUrl || {}; } config = mergeConfig(this.defaults, config); const {transitional, paramsSerializer, headers} = config; if (transitional !== undefined) { validator.assertOptions(transitional, { silentJSONParsing: validators.transitional(validators.boolean), forcedJSONParsing: validators.transitional(validators.boolean), clarifyTimeoutError: validators.transitional(validators.boolean) }, false); } if (paramsSerializer != null) { if (utils$1.isFunction(paramsSerializer)) { config.paramsSerializer = { serialize: paramsSerializer }; } else { validator.assertOptions(paramsSerializer, { encode: validators.function, serialize: validators.function }, true); } } // Set config.allowAbsoluteUrls if (config.allowAbsoluteUrls !== undefined) ; else if (this.defaults.allowAbsoluteUrls !== undefined) { config.allowAbsoluteUrls = this.defaults.allowAbsoluteUrls; } else { config.allowAbsoluteUrls = true; } validator.assertOptions(config, { baseUrl: validators.spelling('baseURL'), withXsrfToken: validators.spelling('withXSRFToken') }, true); // Set config.method config.method = (config.method || this.defaults.method || 'get').toLowerCase(); // Flatten headers let contextHeaders = headers && utils$1.merge( headers.common, headers[config.method] ); headers && utils$1.forEach( ['delete', 'get', 'head', 'post', 'put', 'patch', 'common'], (method) => { delete headers[method]; } ); config.headers = AxiosHeaders$1.concat(contextHeaders, headers); // filter out skipped interceptors const requestInterceptorChain = []; let synchronousRequestInterceptors = true; this.interceptors.request.forEach(function unshiftRequestInterceptors(interceptor) { if (typeof interceptor.runWhen === 'function' && interceptor.runWhen(config) === false) { return; } synchronousRequestInterceptors = synchronousRequestInterceptors && interceptor.synchronous; requestInterceptorChain.unshift(interceptor.fulfilled, interceptor.rejected); }); const responseInterceptorChain = []; this.interceptors.response.forEach(function pushResponseInterceptors(interceptor) { responseInterceptorChain.push(interceptor.fulfilled, interceptor.rejected); }); let promise; let i = 0; let len; if (!synchronousRequestInterceptors) { const chain = [dispatchRequest.bind(this), undefined]; chain.unshift.apply(chain, requestInterceptorChain); chain.push.apply(chain, responseInterceptorChain); len = chain.length; promise = Promise.resolve(config); while (i < len) { promise = promise.then(chain[i++], chain[i++]); } return promise; } len = requestInterceptorChain.length; let newConfig = config; i = 0; while (i < len) { const onFulfilled = requestInterceptorChain[i++]; const onRejected = requestInterceptorChain[i++]; try { newConfig = onFulfilled(newConfig); } catch (error) { onRejected.call(this, error); break; } } try { promise = dispatchRequest.call(this, newConfig); } catch (error) { return Promise.reject(error); } i = 0; len = responseInterceptorChain.length; while (i < len) { promise = 
promise.then(responseInterceptorChain[i++], responseInterceptorChain[i++]); } return promise; } getUri(config) { config = mergeConfig(this.defaults, config); const fullPath = buildFullPath(config.baseURL, config.url, config.allowAbsoluteUrls); return buildURL(fullPath, config.params, config.paramsSerializer); } } // Provide aliases for supported request methods utils$1.forEach(['delete', 'get', 'head', 'options'], function forEachMethodNoData(method) { /*eslint func-names:0*/ Axios.prototype[method] = function(url, config) { return this.request(mergeConfig(config || {}, { method, url, data: (config || {}).data })); }; }); utils$1.forEach(['post', 'put', 'patch'], function forEachMethodWithData(method) { /*eslint func-names:0*/ function generateHTTPMethod(isForm) { return function httpMethod(url, data, config) { return this.request(mergeConfig(config || {}, { method, headers: isForm ? { 'Content-Type': 'multipart/form-data' } : {}, url, data })); }; } Axios.prototype[method] = generateHTTPMethod(); Axios.prototype[method + 'Form'] = generateHTTPMethod(true); }); const Axios$1 = Axios; /** * A `CancelToken` is an object that can be used to request cancellation of an operation. * * @param {Function} executor The executor function. * * @returns {CancelToken} */ class CancelToken { constructor(executor) { if (typeof executor !== 'function') { throw new TypeError('executor must be a function.'); } let resolvePromise; this.promise = new Promise(function promiseExecutor(resolve) { resolvePromise = resolve; }); const token = this; // eslint-disable-next-line func-names this.promise.then(cancel => { if (!token._listeners) return; let i = token._listeners.length; while (i-- > 0) { token._listeners[i](cancel); } token._listeners = null; }); // eslint-disable-next-line func-names this.promise.then = onfulfilled => { let _resolve; // eslint-disable-next-line func-names const promise = new Promise(resolve => { token.subscribe(resolve); _resolve = resolve; }).then(onfulfilled); promise.cancel = function reject() { token.unsubscribe(_resolve); }; return promise; }; executor(function cancel(message, config, request) { if (token.reason) { // Cancellation has already been requested return; } token.reason = new CanceledError(message, config, request); resolvePromise(token.reason); }); } /** * Throws a `CanceledError` if cancellation has been requested. */ throwIfRequested() { if (this.reason) { throw this.reason; } } /** * Subscribe to the cancel signal */ subscribe(listener) { if (this.reason) { listener(this.reason); return; } if (this._listeners) { this._listeners.push(listener); } else { this._listeners = [listener]; } } /** * Unsubscribe from the cancel signal */ unsubscribe(listener) { if (!this._listeners) { return; } const index = this._listeners.indexOf(listener); if (index !== -1) { this._listeners.splice(index, 1); } } toAbortSignal() { const controller = new AbortController(); const abort = (err) => { controller.abort(err); }; this.subscribe(abort); controller.signal.unsubscribe = () => this.unsubscribe(abort); return controller.signal; } /** * Returns an object that contains a new `CancelToken` and a function that, when called, * cancels the `CancelToken`. */ static source() { let cancel; const token = new CancelToken(function executor(c) { cancel = c; }); return { token, cancel }; } } const CancelToken$1 = CancelToken; /** * Syntactic sugar for invoking a function and expanding an array for arguments. * * Common use case would be to use `Function.prototype.apply`. 
* * ```js * function f(x, y, z) {} * var args = [1, 2, 3]; * f.apply(null, args); * ``` * * With `spread` this example can be re-written. * * ```js * spread(function(x, y, z) {})([1, 2, 3]); * ``` * * @param {Function} callback * * @returns {Function} */ function spread(callback) { return function wrap(arr) { return callback.apply(null, arr); }; } /** * Determines whether the payload is an error thrown by Axios * * @param {*} payload The value to test * * @returns {boolean} True if the payload is an error thrown by Axios, otherwise false */ function isAxiosError(payload) { return utils$1.isObject(payload) && (payload.isAxiosError === true); } const HttpStatusCode = { Continue: 100, SwitchingProtocols: 101, Processing: 102, EarlyHints: 103, Ok: 200, Created: 201, Accepted: 202, NonAuthoritativeInformation: 203, NoContent: 204, ResetContent: 205, PartialContent: 206, MultiStatus: 207, AlreadyReported: 208, ImUsed: 226, MultipleChoices: 300, MovedPermanently: 301, Found: 302, SeeOther: 303, NotModified: 304, UseProxy: 305, Unused: 306, TemporaryRedirect: 307, PermanentRedirect: 308, BadRequest: 400, Unauthorized: 401, PaymentRequired: 402, Forbidden: 403, NotFound: 404, MethodNotAllowed: 405, NotAcceptable: 406, ProxyAuthenticationRequired: 407, RequestTimeout: 408, Conflict: 409, Gone: 410, LengthRequired: 411, PreconditionFailed: 412, PayloadTooLarge: 413, UriTooLong: 414, UnsupportedMediaType: 415, RangeNotSatisfiable: 416, ExpectationFailed: 417, ImATeapot: 418, MisdirectedRequest: 421, UnprocessableEntity: 422, Locked: 423, FailedDependency: 424, TooEarly: 425, UpgradeRequired: 426, PreconditionRequired: 428, TooManyRequests: 429, RequestHeaderFieldsTooLarge: 431, UnavailableForLegalReasons: 451, InternalServerError: 500, NotImplemented: 501, BadGateway: 502, ServiceUnavailable: 503, GatewayTimeout: 504, HttpVersionNotSupported: 505, VariantAlsoNegotiates: 506, InsufficientStorage: 507, LoopDetected: 508, NotExtended: 510, NetworkAuthenticationRequired: 511, }; Object.entries(HttpStatusCode).forEach(([key, value]) => { HttpStatusCode[value] = key; }); const HttpStatusCode$1 = HttpStatusCode; /** * Create an instance of Axios * * @param {Object} defaultConfig The default config for the instance * * @returns {Axios} A new instance of Axios */ function createInstance(defaultConfig) { const context = new Axios$1(defaultConfig); const instance = bind(Axios$1.prototype.request, context); // Copy axios.prototype to instance utils$1.extend(instance, Axios$1.prototype, context, {allOwnKeys: true}); // Copy context to instance utils$1.extend(instance, context, null, {allOwnKeys: true}); // Factory for creating new instances instance.create = function create(instanceConfig) { return createInstance(mergeConfig(defaultConfig, instanceConfig)); }; return instance; } // Create the default instance to be exported const axios = createInstance(defaults$1); // Expose Axios class to allow class inheritance axios.Axios = Axios$1; // Expose Cancel & CancelToken axios.CanceledError = CanceledError; axios.CancelToken = CancelToken$1; axios.isCancel = isCancel; axios.VERSION = VERSION; axios.toFormData = toFormData; // Expose AxiosError class axios.AxiosError = AxiosError; // alias for CanceledError for backward compatibility axios.Cancel = axios.CanceledError; // Expose all/spread axios.all = function all(promises) { return Promise.all(promises); }; axios.spread = spread; // Expose isAxiosError axios.isAxiosError = isAxiosError; // Expose mergeConfig axios.mergeConfig = mergeConfig; axios.AxiosHeaders = 
AxiosHeaders$1; axios.formToJSON = thing => formDataToJSON(utils$1.isHTMLForm(thing) ? new FormData(thing) : thing); axios.getAdapter = adapters.getAdapter; axios.HttpStatusCode = HttpStatusCode$1; axios.default = axios; module.exports = axios; //# sourceMappingURL=axios.cjs.map /***/ }), /***/ 64012: /***/ ((module) => { "use strict"; module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/cache","version":"4.0.0","preview":true,"description":"Actions cache lib","keywords":["github","actions","cache"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/cache","license":"MIT","main":"lib/cache.js","types":"lib/cache.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/cache"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"echo \\"Error: run tests from root\\" && exit 1","tsc":"tsc"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.11.1","@actions/exec":"^1.0.1","@actions/glob":"^0.1.0","@actions/http-client":"^2.1.1","@actions/io":"^1.0.1","@azure/abort-controller":"^1.1.0","@azure/ms-rest-js":"^2.6.0","@azure/storage-blob":"^12.13.0","@protobuf-ts/plugin":"^2.9.4","semver":"^6.3.1","twirp-ts":"^2.5.0"},"devDependencies":{"@types/semver":"^6.0.0","typescript":"^5.2.2"}}'); /***/ }), /***/ 72721: /***/ ((module) => { "use strict"; module.exports = /*#__PURE__*/JSON.parse('{"name":"@google-cloud/storage","description":"Cloud Storage Client Library for Node.js","version":"7.16.0","license":"Apache-2.0","author":"Google Inc.","engines":{"node":">=14"},"repository":"googleapis/nodejs-storage","main":"./build/cjs/src/index.js","types":"./build/cjs/src/index.d.ts","type":"module","exports":{".":{"import":{"types":"./build/esm/src/index.d.ts","default":"./build/esm/src/index.js"},"require":{"types":"./build/cjs/src/index.d.ts","default":"./build/cjs/src/index.js"}}},"files":["build/cjs/src","build/cjs/package.json","!build/cjs/src/**/*.map","build/esm/src","!build/esm/src/**/*.map"],"keywords":["google apis client","google api client","google apis","google api","google","google cloud platform","google cloud","cloud","google storage","storage"],"scripts":{"all-test":"npm test && npm run system-test && npm run samples-test","benchwrapper":"node bin/benchwrapper.js","check":"gts check","clean":"rm -rf build/","compile:cjs":"tsc -p ./tsconfig.cjs.json","compile:esm":"tsc -p .","compile":"npm run compile:cjs && npm run compile:esm","conformance-test":"mocha --parallel build/cjs/conformance-test/ --require build/cjs/conformance-test/globalHooks.js","docs-test":"linkinator docs","docs":"jsdoc -c .jsdoc.json","fix":"gts fix","lint":"gts check","postcompile":"cp ./src/package-json-helper.cjs ./build/cjs/src && cp ./src/package-json-helper.cjs ./build/esm/src","postcompile:cjs":"babel --plugins gapic-tools/build/src/replaceImportMetaUrl,gapic-tools/build/src/toggleESMFlagVariable build/cjs/src/util.js -o build/cjs/src/util.js && cp internal-tooling/helpers/package.cjs.json build/cjs/package.json","precompile":"rm -rf build/","preconformance-test":"npm run compile:cjs -- --sourceMap","predocs-test":"npm run docs","predocs":"npm run compile:cjs -- --sourceMap","prelint":"cd samples; npm link ../; npm install","prepare":"npm run compile","presystem-test:esm":"npm run compile:esm","presystem-test":"npm run compile -- 
--sourceMap","pretest":"npm run compile -- --sourceMap","samples-test":"npm link && cd samples/ && npm link ../ && npm test && cd ../","system-test:esm":"mocha build/esm/system-test --timeout 600000 --exit","system-test":"mocha build/cjs/system-test --timeout 600000 --exit","test":"c8 mocha build/cjs/test"},"dependencies":{"@google-cloud/paginator":"^5.0.0","@google-cloud/projectify":"^4.0.0","@google-cloud/promisify":"<4.1.0","abort-controller":"^3.0.0","async-retry":"^1.3.3","duplexify":"^4.1.3","fast-xml-parser":"^4.4.1","gaxios":"^6.0.2","google-auth-library":"^9.6.3","html-entities":"^2.5.2","mime":"^3.0.0","p-limit":"^3.0.1","retry-request":"^7.0.0","teeny-request":"^9.0.0","uuid":"^8.0.0"},"devDependencies":{"@babel/cli":"^7.22.10","@babel/core":"^7.22.11","@google-cloud/pubsub":"^4.0.0","@grpc/grpc-js":"^1.0.3","@grpc/proto-loader":"^0.7.0","@types/async-retry":"^1.4.3","@types/duplexify":"^3.6.4","@types/mime":"^3.0.0","@types/mocha":"^9.1.1","@types/mockery":"^1.4.29","@types/node":"^22.0.0","@types/node-fetch":"^2.1.3","@types/proxyquire":"^1.3.28","@types/request":"^2.48.4","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^8.0.0","@types/yargs":"^17.0.10","c8":"^9.0.0","form-data":"^4.0.0","gapic-tools":"^0.4.0","gts":"^5.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","linkinator":"^3.0.0","mocha":"^9.2.2","mockery":"^2.1.0","nock":"~13.5.0","node-fetch":"^2.6.7","pack-n-play":"^2.0.0","proxyquire":"^2.1.3","sinon":"^18.0.0","nise":"6.0.0","path-to-regexp":"6.3.0","tmp":"^0.2.0","typescript":"^5.1.6","yargs":"^17.3.1"}}'); /***/ }), /***/ 66495: /***/ ((module) => { "use strict"; module.exports = /*#__PURE__*/JSON.parse('{"name":"gaxios","version":"6.7.1","description":"A simple common HTTP client specifically for Google APIs and services.","main":"build/src/index.js","types":"build/src/index.d.ts","files":["build/src"],"scripts":{"lint":"gts check","test":"c8 mocha build/test","presystem-test":"npm run compile","system-test":"mocha build/system-test --timeout 80000","compile":"tsc -p .","fix":"gts fix","prepare":"npm run compile","pretest":"npm run compile","webpack":"webpack","prebrowser-test":"npm run compile","browser-test":"node build/browser-test/browser-test-runner.js","docs":"compodoc src/","docs-test":"linkinator docs","predocs-test":"npm run docs","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","prelint":"cd samples; npm link ../; npm install","clean":"gts clean","precompile":"gts clean"},"repository":"googleapis/gaxios","keywords":["google"],"engines":{"node":">=14"},"author":"Google, 
LLC","license":"Apache-2.0","devDependencies":{"@babel/plugin-proposal-private-methods":"^7.18.6","@compodoc/compodoc":"1.1.19","@types/cors":"^2.8.6","@types/express":"^4.16.1","@types/extend":"^3.0.1","@types/mocha":"^9.0.0","@types/multiparty":"0.0.36","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.0.0","@types/node-fetch":"^2.5.7","@types/sinon":"^17.0.0","@types/tmp":"0.2.6","@types/uuid":"^10.0.0","abort-controller":"^3.0.0","assert":"^2.0.0","browserify":"^17.0.0","c8":"^8.0.0","cheerio":"1.0.0-rc.10","cors":"^2.8.5","execa":"^5.0.0","express":"^4.16.4","form-data":"^4.0.0","gts":"^5.0.0","is-docker":"^2.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-remap-coverage":"^0.1.5","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","linkinator":"^3.0.0","mocha":"^8.0.0","multiparty":"^4.2.1","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","puppeteer":"^19.0.0","sinon":"^18.0.0","stream-browserify":"^3.0.0","tmp":"0.2.3","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.35.0","webpack-cli":"^4.0.0"},"dependencies":{"extend":"^3.0.2","https-proxy-agent":"^7.0.1","is-stream":"^2.0.0","node-fetch":"^2.6.9","uuid":"^9.0.1"}}'); /***/ }), /***/ 96066: /***/ ((module) => { "use strict"; module.exports = /*#__PURE__*/JSON.parse('{"name":"google-auth-library","version":"9.15.1","author":"Google Inc.","description":"Google APIs Authentication Client Library for Node.js","engines":{"node":">=14"},"main":"./build/src/index.js","types":"./build/src/index.d.ts","repository":"googleapis/google-auth-library-nodejs.git","keywords":["google","api","google apis","client","client library"],"dependencies":{"base64-js":"^1.3.0","ecdsa-sig-formatter":"^1.0.11","gaxios":"^6.1.1","gcp-metadata":"^6.1.0","gtoken":"^7.0.0","jws":"^4.0.0"},"devDependencies":{"@types/base64-js":"^1.2.5","@types/chai":"^4.1.7","@types/jws":"^3.1.0","@types/mocha":"^9.0.0","@types/mv":"^2.1.0","@types/ncp":"^2.0.1","@types/node":"^20.4.2","@types/sinon":"^17.0.0","assert-rejects":"^1.0.0","c8":"^8.0.0","chai":"^4.2.0","cheerio":"1.0.0-rc.12","codecov":"^3.0.2","engine.io":"6.6.2","gts":"^5.0.0","is-docker":"^2.0.0","jsdoc":"^4.0.0","jsdoc-fresh":"^3.0.0","jsdoc-region-tag":"^3.0.0","karma":"^6.0.0","karma-chrome-launcher":"^3.0.0","karma-coverage":"^2.0.0","karma-firefox-launcher":"^2.0.0","karma-mocha":"^2.0.0","karma-sourcemap-loader":"^0.4.0","karma-webpack":"5.0.0","keypair":"^1.0.4","linkinator":"^4.0.0","mocha":"^9.2.2","mv":"^2.1.1","ncp":"^2.0.0","nock":"^13.0.0","null-loader":"^4.0.0","pdfmake":"0.2.12","puppeteer":"^21.0.0","sinon":"^18.0.0","ts-loader":"^8.0.0","typescript":"^5.1.6","webpack":"^5.21.2","webpack-cli":"^4.0.0"},"files":["build/src","!build/src/**/*.map"],"scripts":{"test":"c8 mocha build/test","clean":"gts clean","prepare":"npm run compile","lint":"gts check","compile":"tsc -p .","fix":"gts fix","pretest":"npm run compile -- --sourceMap","docs":"jsdoc -c .jsdoc.json","samples-setup":"cd samples/ && npm link ../ && npm run setup && cd ../","samples-test":"cd samples/ && npm link ../ && npm test && cd ../","system-test":"mocha build/system-test --timeout 60000","presystem-test":"npm run compile -- --sourceMap","webpack":"webpack","browser-test":"karma start","docs-test":"linkinator docs","predocs-test":"npm run docs","prelint":"cd samples; npm link ../; npm install","precompile":"gts clean"},"license":"Apache-2.0"}'); /***/ }), /***/ 81813: /***/ ((module) => { "use 
strict"; module.exports = /*#__PURE__*/JSON.parse('{"application/1d-interleaved-parityfec":{"source":"iana"},"application/3gpdash-qoe-report+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/3gpp-ims+xml":{"source":"iana","compressible":true},"application/3gpphal+json":{"source":"iana","compressible":true},"application/3gpphalforms+json":{"source":"iana","compressible":true},"application/a2l":{"source":"iana"},"application/ace+cbor":{"source":"iana"},"application/activemessage":{"source":"iana"},"application/activity+json":{"source":"iana","compressible":true},"application/alto-costmap+json":{"source":"iana","compressible":true},"application/alto-costmapfilter+json":{"source":"iana","compressible":true},"application/alto-directory+json":{"source":"iana","compressible":true},"application/alto-endpointcost+json":{"source":"iana","compressible":true},"application/alto-endpointcostparams+json":{"source":"iana","compressible":true},"application/alto-endpointprop+json":{"source":"iana","compressible":true},"application/alto-endpointpropparams+json":{"source":"iana","compressible":true},"application/alto-error+json":{"source":"iana","compressible":true},"application/alto-networkmap+json":{"source":"iana","compressible":true},"application/alto-networkmapfilter+json":{"source":"iana","compressible":true},"application/alto-updatestreamcontrol+json":{"source":"iana","compressible":true},"application/alto-updatestreamparams+json":{"source":"iana","compressible":true},"application/aml":{"source":"iana"},"application/andrew-inset":{"source":"iana","extensions":["ez"]},"application/applefile":{"source":"iana"},"application/applixware":{"source":"apache","extensions":["aw"]},"application/at+jwt":{"source":"iana"},"application/atf":{"source":"iana"},"application/atfx":{"source":"iana"},"application/atom+xml":{"source":"iana","compressible":true,"extensions":["atom"]},"application/atomcat+xml":{"source":"iana","compressible":true,"extensions":["atomcat"]},"application/atomdeleted+xml":{"source":"iana","compressible":true,"extensions":["atomdeleted"]},"application/atomicmail":{"source":"iana"},"application/atomsvc+xml":{"source":"iana","compressible":true,"extensions":["atomsvc"]},"application/atsc-dwd+xml":{"source":"iana","compressible":true,"extensions":["dwd"]},"application/atsc-dynamic-event-message":{"source":"iana"},"application/atsc-held+xml":{"source":"iana","compressible":true,"extensions":["held"]},"application/atsc-rdt+json":{"source":"iana","compressible":true},"application/atsc-rsat+xml":{"source":"iana","compressible":true,"extensions":["rsat"]},"application/atxml":{"source":"iana"},"application/auth-policy+xml":{"source":"iana","compressible":true},"application/bacnet-xdd+zip":{"source":"iana","compressible":false},"application/batch-smtp":{"source":"iana"},"application/bdoc":{"compressible":false,"extensions":["bdoc"]},"application/beep+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/calendar+json":{"source":"iana","compressible":true},"application/calendar+xml":{"source":"iana","compressible":true,"extensions":["xcs"]},"application/call-completion":{"source":"iana"},"application/cals-1840":{"source":"iana"},"application/captive+json":{"source":"iana","compressible":true},"application/cbor":{"source":"iana"},"application/cbor-seq":{"source":"iana"},"application/cccex":{"source":"iana"},"application/ccmp+xml":{"source":"iana","compressible":true},"application/ccxml+xml":{"source":"iana","compressible":true,"extensions":["ccxml"]},"application/c
dfx+xml":{"source":"iana","compressible":true,"extensions":["cdfx"]},"application/cdmi-capability":{"source":"iana","extensions":["cdmia"]},"application/cdmi-container":{"source":"iana","extensions":["cdmic"]},"application/cdmi-domain":{"source":"iana","extensions":["cdmid"]},"application/cdmi-object":{"source":"iana","extensions":["cdmio"]},"application/cdmi-queue":{"source":"iana","extensions":["cdmiq"]},"application/cdni":{"source":"iana"},"application/cea":{"source":"iana"},"application/cea-2018+xml":{"source":"iana","compressible":true},"application/cellml+xml":{"source":"iana","compressible":true},"application/cfw":{"source":"iana"},"application/city+json":{"source":"iana","compressible":true},"application/clr":{"source":"iana"},"application/clue+xml":{"source":"iana","compressible":true},"application/clue_info+xml":{"source":"iana","compressible":true},"application/cms":{"source":"iana"},"application/cnrp+xml":{"source":"iana","compressible":true},"application/coap-group+json":{"source":"iana","compressible":true},"application/coap-payload":{"source":"iana"},"application/commonground":{"source":"iana"},"application/conference-info+xml":{"source":"iana","compressible":true},"application/cose":{"source":"iana"},"application/cose-key":{"source":"iana"},"application/cose-key-set":{"source":"iana"},"application/cpl+xml":{"source":"iana","compressible":true,"extensions":["cpl"]},"application/csrattrs":{"source":"iana"},"application/csta+xml":{"source":"iana","compressible":true},"application/cstadata+xml":{"source":"iana","compressible":true},"application/csvm+json":{"source":"iana","compressible":true},"application/cu-seeme":{"source":"apache","extensions":["cu"]},"application/cwt":{"source":"iana"},"application/cybercash":{"source":"iana"},"application/dart":{"compressible":true},"application/dash+xml":{"source":"iana","compressible":true,"extensions":["mpd"]},"application/dash-patch+xml":{"source":"iana","compressible":true,"extensions":["mpp"]},"application/dashdelta":{"source":"iana"},"application/davmount+xml":{"source":"iana","compressible":true,"extensions":["davmount"]},"application/dca-rft":{"source":"iana"},"application/dcd":{"source":"iana"},"application/dec-dx":{"source":"iana"},"application/dialog-info+xml":{"source":"iana","compressible":true},"application/dicom":{"source":"iana"},"application/dicom+json":{"source":"iana","compressible":true},"application/dicom+xml":{"source":"iana","compressible":true},"application/dii":{"source":"iana"},"application/dit":{"source":"iana"},"application/dns":{"source":"iana"},"application/dns+json":{"source":"iana","compressible":true},"application/dns-message":{"source":"iana"},"application/docbook+xml":{"source":"apache","compressible":true,"extensions":["dbk"]},"application/dots+cbor":{"source":"iana"},"application/dskpp+xml":{"source":"iana","compressible":true},"application/dssc+der":{"source":"iana","extensions":["dssc"]},"application/dssc+xml":{"source":"iana","compressible":true,"extensions":["xdssc"]},"application/dvcs":{"source":"iana"},"application/ecmascript":{"source":"iana","compressible":true,"extensions":["es","ecma"]},"application/edi-consent":{"source":"iana"},"application/edi-x12":{"source":"iana","compressible":false},"application/edifact":{"source":"iana","compressible":false},"application/efi":{"source":"iana"},"application/elm+json":{"source":"iana","charset":"UTF-8","compressible":true},"application/elm+xml":{"source":"iana","compressible":true},"application/emergencycalldata.cap+xml":{"source":"iana","charset":"UTF-8
","compressible":true},"application/emergencycalldata.comment+xml":{"source":"iana","compressible":true},"application/emergencycalldata.control+xml":{"source":"iana","compressible":true},"application/emergencycalldata.deviceinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.ecall.msd":{"source":"iana"},"application/emergencycalldata.providerinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.serviceinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.subscriberinfo+xml":{"source":"iana","compressible":true},"application/emergencycalldata.veds+xml":{"source":"iana","compressible":true},"application/emma+xml":{"source":"iana","compressible":true,"extensions":["emma"]},"application/emotionml+xml":{"source":"iana","compressible":true,"extensions":["emotionml"]},"application/encaprtp":{"source":"iana"},"application/epp+xml":{"source":"iana","compressible":true},"application/epub+zip":{"source":"iana","compressible":false,"extensions":["epub"]},"application/eshop":{"source":"iana"},"application/exi":{"source":"iana","extensions":["exi"]},"application/expect-ct-report+json":{"source":"iana","compressible":true},"application/express":{"source":"iana","extensions":["exp"]},"application/fastinfoset":{"source":"iana"},"application/fastsoap":{"source":"iana"},"application/fdt+xml":{"source":"iana","compressible":true,"extensions":["fdt"]},"application/fhir+json":{"source":"iana","charset":"UTF-8","compressible":true},"application/fhir+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/fido.trusted-apps+json":{"compressible":true},"application/fits":{"source":"iana"},"application/flexfec":{"source":"iana"},"application/font-sfnt":{"source":"iana"},"application/font-tdpfr":{"source":"iana","extensions":["pfr"]},"application/font-woff":{"source":"iana","compressible":false},"application/framework-attributes+xml":{"source":"iana","compressible":true},"application/geo+json":{"source":"iana","compressible":true,"extensions":["geojson"]},"application/geo+json-seq":{"source":"iana"},"application/geopackage+sqlite3":{"source":"iana"},"application/geoxacml+xml":{"source":"iana","compressible":true},"application/gltf-buffer":{"source":"iana"},"application/gml+xml":{"source":"iana","compressible":true,"extensions":["gml"]},"application/gpx+xml":{"source":"apache","compressible":true,"extensions":["gpx"]},"application/gxf":{"source":"apache","extensions":["gxf"]},"application/gzip":{"source":"iana","compressible":false,"extensions":["gz"]},"application/h224":{"source":"iana"},"application/held+xml":{"source":"iana","compressible":true},"application/hjson":{"extensions":["hjson"]},"application/http":{"source":"iana"},"application/hyperstudio":{"source":"iana","extensions":["stk"]},"application/ibe-key-request+xml":{"source":"iana","compressible":true},"application/ibe-pkg-reply+xml":{"source":"iana","compressible":true},"application/ibe-pp-data":{"source":"iana"},"application/iges":{"source":"iana"},"application/im-iscomposing+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/index":{"source":"iana"},"application/index.cmd":{"source":"iana"},"application/index.obj":{"source":"iana"},"application/index.response":{"source":"iana"},"application/index.vnd":{"source":"iana"},"application/inkml+xml":{"source":"iana","compressible":true,"extensions":["ink","inkml"]},"application/iotp":{"source":"iana"},"application/ipfix":{"source":"iana","extensions":["ipfix"]},"application/ipp":{"source":"iana"}
,"application/isup":{"source":"iana"},"application/its+xml":{"source":"iana","compressible":true,"extensions":["its"]},"application/java-archive":{"source":"apache","compressible":false,"extensions":["jar","war","ear"]},"application/java-serialized-object":{"source":"apache","compressible":false,"extensions":["ser"]},"application/java-vm":{"source":"apache","compressible":false,"extensions":["class"]},"application/javascript":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["js","mjs"]},"application/jf2feed+json":{"source":"iana","compressible":true},"application/jose":{"source":"iana"},"application/jose+json":{"source":"iana","compressible":true},"application/jrd+json":{"source":"iana","compressible":true},"application/jscalendar+json":{"source":"iana","compressible":true},"application/json":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["json","map"]},"application/json-patch+json":{"source":"iana","compressible":true},"application/json-seq":{"source":"iana"},"application/json5":{"extensions":["json5"]},"application/jsonml+json":{"source":"apache","compressible":true,"extensions":["jsonml"]},"application/jwk+json":{"source":"iana","compressible":true},"application/jwk-set+json":{"source":"iana","compressible":true},"application/jwt":{"source":"iana"},"application/kpml-request+xml":{"source":"iana","compressible":true},"application/kpml-response+xml":{"source":"iana","compressible":true},"application/ld+json":{"source":"iana","compressible":true,"extensions":["jsonld"]},"application/lgr+xml":{"source":"iana","compressible":true,"extensions":["lgr"]},"application/link-format":{"source":"iana"},"application/load-control+xml":{"source":"iana","compressible":true},"application/lost+xml":{"source":"iana","compressible":true,"extensions":["lostxml"]},"application/lostsync+xml":{"source":"iana","compressible":true},"application/lpf+zip":{"source":"iana","compressible":false},"application/lxf":{"source":"iana"},"application/mac-binhex40":{"source":"iana","extensions":["hqx"]},"application/mac-compactpro":{"source":"apache","extensions":["cpt"]},"application/macwriteii":{"source":"iana"},"application/mads+xml":{"source":"iana","compressible":true,"extensions":["mads"]},"application/manifest+json":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["webmanifest"]},"application/marc":{"source":"iana","extensions":["mrc"]},"application/marcxml+xml":{"source":"iana","compressible":true,"extensions":["mrcx"]},"application/mathematica":{"source":"iana","extensions":["ma","nb","mb"]},"application/mathml+xml":{"source":"iana","compressible":true,"extensions":["mathml"]},"application/mathml-content+xml":{"source":"iana","compressible":true},"application/mathml-presentation+xml":{"source":"iana","compressible":true},"application/mbms-associated-procedure-description+xml":{"source":"iana","compressible":true},"application/mbms-deregister+xml":{"source":"iana","compressible":true},"application/mbms-envelope+xml":{"source":"iana","compressible":true},"application/mbms-msk+xml":{"source":"iana","compressible":true},"application/mbms-msk-response+xml":{"source":"iana","compressible":true},"application/mbms-protection-description+xml":{"source":"iana","compressible":true},"application/mbms-reception-report+xml":{"source":"iana","compressible":true},"application/mbms-register+xml":{"source":"iana","compressible":true},"application/mbms-register-response+xml":{"source":"iana","compressible":true},"application/mbms-schedule+xml":{"source":"iana","compressible"
:true},"application/mbms-user-service-description+xml":{"source":"iana","compressible":true},"application/mbox":{"source":"iana","extensions":["mbox"]},"application/media-policy-dataset+xml":{"source":"iana","compressible":true,"extensions":["mpf"]},"application/media_control+xml":{"source":"iana","compressible":true},"application/mediaservercontrol+xml":{"source":"iana","compressible":true,"extensions":["mscml"]},"application/merge-patch+json":{"source":"iana","compressible":true},"application/metalink+xml":{"source":"apache","compressible":true,"extensions":["metalink"]},"application/metalink4+xml":{"source":"iana","compressible":true,"extensions":["meta4"]},"application/mets+xml":{"source":"iana","compressible":true,"extensions":["mets"]},"application/mf4":{"source":"iana"},"application/mikey":{"source":"iana"},"application/mipc":{"source":"iana"},"application/missing-blocks+cbor-seq":{"source":"iana"},"application/mmt-aei+xml":{"source":"iana","compressible":true,"extensions":["maei"]},"application/mmt-usd+xml":{"source":"iana","compressible":true,"extensions":["musd"]},"application/mods+xml":{"source":"iana","compressible":true,"extensions":["mods"]},"application/moss-keys":{"source":"iana"},"application/moss-signature":{"source":"iana"},"application/mosskey-data":{"source":"iana"},"application/mosskey-request":{"source":"iana"},"application/mp21":{"source":"iana","extensions":["m21","mp21"]},"application/mp4":{"source":"iana","extensions":["mp4s","m4p"]},"application/mpeg4-generic":{"source":"iana"},"application/mpeg4-iod":{"source":"iana"},"application/mpeg4-iod-xmt":{"source":"iana"},"application/mrb-consumer+xml":{"source":"iana","compressible":true},"application/mrb-publish+xml":{"source":"iana","compressible":true},"application/msc-ivr+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/msc-mixer+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/msword":{"source":"iana","compressible":false,"extensions":["doc","dot"]},"application/mud+json":{"source":"iana","compressible":true},"application/multipart-core":{"source":"iana"},"application/mxf":{"source":"iana","extensions":["mxf"]},"application/n-quads":{"source":"iana","extensions":["nq"]},"application/n-triples":{"source":"iana","extensions":["nt"]},"application/nasdata":{"source":"iana"},"application/news-checkgroups":{"source":"iana","charset":"US-ASCII"},"application/news-groupinfo":{"source":"iana","charset":"US-ASCII"},"application/news-transmission":{"source":"iana"},"application/nlsml+xml":{"source":"iana","compressible":true},"application/node":{"source":"iana","extensions":["cjs"]},"application/nss":{"source":"iana"},"application/oauth-authz-req+jwt":{"source":"iana"},"application/oblivious-dns-message":{"source":"iana"},"application/ocsp-request":{"source":"iana"},"application/ocsp-response":{"source":"iana"},"application/octet-stream":{"source":"iana","compressible":false,"extensions":["bin","dms","lrf","mar","so","dist","distz","pkg","bpk","dump","elc","deploy","exe","dll","deb","dmg","iso","img","msi","msp","msm","buffer"]},"application/oda":{"source":"iana","extensions":["oda"]},"application/odm+xml":{"source":"iana","compressible":true},"application/odx":{"source":"iana"},"application/oebps-package+xml":{"source":"iana","compressible":true,"extensions":["opf"]},"application/ogg":{"source":"iana","compressible":false,"extensions":["ogx"]},"application/omdoc+xml":{"source":"apache","compressible":true,"extensions":["omdoc"]},"application/onenote":{"source":"apache","exten
sions":["onetoc","onetoc2","onetmp","onepkg"]},"application/opc-nodeset+xml":{"source":"iana","compressible":true},"application/oscore":{"source":"iana"},"application/oxps":{"source":"iana","extensions":["oxps"]},"application/p21":{"source":"iana"},"application/p21+zip":{"source":"iana","compressible":false},"application/p2p-overlay+xml":{"source":"iana","compressible":true,"extensions":["relo"]},"application/parityfec":{"source":"iana"},"application/passport":{"source":"iana"},"application/patch-ops-error+xml":{"source":"iana","compressible":true,"extensions":["xer"]},"application/pdf":{"source":"iana","compressible":false,"extensions":["pdf"]},"application/pdx":{"source":"iana"},"application/pem-certificate-chain":{"source":"iana"},"application/pgp-encrypted":{"source":"iana","compressible":false,"extensions":["pgp"]},"application/pgp-keys":{"source":"iana","extensions":["asc"]},"application/pgp-signature":{"source":"iana","extensions":["asc","sig"]},"application/pics-rules":{"source":"apache","extensions":["prf"]},"application/pidf+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/pidf-diff+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/pkcs10":{"source":"iana","extensions":["p10"]},"application/pkcs12":{"source":"iana"},"application/pkcs7-mime":{"source":"iana","extensions":["p7m","p7c"]},"application/pkcs7-signature":{"source":"iana","extensions":["p7s"]},"application/pkcs8":{"source":"iana","extensions":["p8"]},"application/pkcs8-encrypted":{"source":"iana"},"application/pkix-attr-cert":{"source":"iana","extensions":["ac"]},"application/pkix-cert":{"source":"iana","extensions":["cer"]},"application/pkix-crl":{"source":"iana","extensions":["crl"]},"application/pkix-pkipath":{"source":"iana","extensions":["pkipath"]},"application/pkixcmp":{"source":"iana","extensions":["pki"]},"application/pls+xml":{"source":"iana","compressible":true,"extensions":["pls"]},"application/poc-settings+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/postscript":{"source":"iana","compressible":true,"extensions":["ai","eps","ps"]},"application/ppsp-tracker+json":{"source":"iana","compressible":true},"application/problem+json":{"source":"iana","compressible":true},"application/problem+xml":{"source":"iana","compressible":true},"application/provenance+xml":{"source":"iana","compressible":true,"extensions":["provx"]},"application/prs.alvestrand.titrax-sheet":{"source":"iana"},"application/prs.cww":{"source":"iana","extensions":["cww"]},"application/prs.cyn":{"source":"iana","charset":"7-BIT"},"application/prs.hpub+zip":{"source":"iana","compressible":false},"application/prs.nprend":{"source":"iana"},"application/prs.plucker":{"source":"iana"},"application/prs.rdf-xml-crypt":{"source":"iana"},"application/prs.xsf+xml":{"source":"iana","compressible":true},"application/pskc+xml":{"source":"iana","compressible":true,"extensions":["pskcxml"]},"application/pvd+json":{"source":"iana","compressible":true},"application/qsig":{"source":"iana"},"application/raml+yaml":{"compressible":true,"extensions":["raml"]},"application/raptorfec":{"source":"iana"},"application/rdap+json":{"source":"iana","compressible":true},"application/rdf+xml":{"source":"iana","compressible":true,"extensions":["rdf","owl"]},"application/reginfo+xml":{"source":"iana","compressible":true,"extensions":["rif"]},"application/relax-ng-compact-syntax":{"source":"iana","extensions":["rnc"]},"application/remote-printing":{"source":"iana"},"application/reputon+json":{"source":"iana
","compressible":true},"application/resource-lists+xml":{"source":"iana","compressible":true,"extensions":["rl"]},"application/resource-lists-diff+xml":{"source":"iana","compressible":true,"extensions":["rld"]},"application/rfc+xml":{"source":"iana","compressible":true},"application/riscos":{"source":"iana"},"application/rlmi+xml":{"source":"iana","compressible":true},"application/rls-services+xml":{"source":"iana","compressible":true,"extensions":["rs"]},"application/route-apd+xml":{"source":"iana","compressible":true,"extensions":["rapd"]},"application/route-s-tsid+xml":{"source":"iana","compressible":true,"extensions":["sls"]},"application/route-usd+xml":{"source":"iana","compressible":true,"extensions":["rusd"]},"application/rpki-ghostbusters":{"source":"iana","extensions":["gbr"]},"application/rpki-manifest":{"source":"iana","extensions":["mft"]},"application/rpki-publication":{"source":"iana"},"application/rpki-roa":{"source":"iana","extensions":["roa"]},"application/rpki-updown":{"source":"iana"},"application/rsd+xml":{"source":"apache","compressible":true,"extensions":["rsd"]},"application/rss+xml":{"source":"apache","compressible":true,"extensions":["rss"]},"application/rtf":{"source":"iana","compressible":true,"extensions":["rtf"]},"application/rtploopback":{"source":"iana"},"application/rtx":{"source":"iana"},"application/samlassertion+xml":{"source":"iana","compressible":true},"application/samlmetadata+xml":{"source":"iana","compressible":true},"application/sarif+json":{"source":"iana","compressible":true},"application/sarif-external-properties+json":{"source":"iana","compressible":true},"application/sbe":{"source":"iana"},"application/sbml+xml":{"source":"iana","compressible":true,"extensions":["sbml"]},"application/scaip+xml":{"source":"iana","compressible":true},"application/scim+json":{"source":"iana","compressible":true},"application/scvp-cv-request":{"source":"iana","extensions":["scq"]},"application/scvp-cv-response":{"source":"iana","extensions":["scs"]},"application/scvp-vp-request":{"source":"iana","extensions":["spq"]},"application/scvp-vp-response":{"source":"iana","extensions":["spp"]},"application/sdp":{"source":"iana","extensions":["sdp"]},"application/secevent+jwt":{"source":"iana"},"application/senml+cbor":{"source":"iana"},"application/senml+json":{"source":"iana","compressible":true},"application/senml+xml":{"source":"iana","compressible":true,"extensions":["senmlx"]},"application/senml-etch+cbor":{"source":"iana"},"application/senml-etch+json":{"source":"iana","compressible":true},"application/senml-exi":{"source":"iana"},"application/sensml+cbor":{"source":"iana"},"application/sensml+json":{"source":"iana","compressible":true},"application/sensml+xml":{"source":"iana","compressible":true,"extensions":["sensmlx"]},"application/sensml-exi":{"source":"iana"},"application/sep+xml":{"source":"iana","compressible":true},"application/sep-exi":{"source":"iana"},"application/session-info":{"source":"iana"},"application/set-payment":{"source":"iana"},"application/set-payment-initiation":{"source":"iana","extensions":["setpay"]},"application/set-registration":{"source":"iana"},"application/set-registration-initiation":{"source":"iana","extensions":["setreg"]},"application/sgml":{"source":"iana"},"application/sgml-open-catalog":{"source":"iana"},"application/shf+xml":{"source":"iana","compressible":true,"extensions":["shf"]},"application/sieve":{"source":"iana","extensions":["siv","sieve"]},"application/simple-filter+xml":{"source":"iana","compressible":true},"applicati
on/simple-message-summary":{"source":"iana"},"application/simplesymbolcontainer":{"source":"iana"},"application/sipc":{"source":"iana"},"application/slate":{"source":"iana"},"application/smil":{"source":"iana"},"application/smil+xml":{"source":"iana","compressible":true,"extensions":["smi","smil"]},"application/smpte336m":{"source":"iana"},"application/soap+fastinfoset":{"source":"iana"},"application/soap+xml":{"source":"iana","compressible":true},"application/sparql-query":{"source":"iana","extensions":["rq"]},"application/sparql-results+xml":{"source":"iana","compressible":true,"extensions":["srx"]},"application/spdx+json":{"source":"iana","compressible":true},"application/spirits-event+xml":{"source":"iana","compressible":true},"application/sql":{"source":"iana"},"application/srgs":{"source":"iana","extensions":["gram"]},"application/srgs+xml":{"source":"iana","compressible":true,"extensions":["grxml"]},"application/sru+xml":{"source":"iana","compressible":true,"extensions":["sru"]},"application/ssdl+xml":{"source":"apache","compressible":true,"extensions":["ssdl"]},"application/ssml+xml":{"source":"iana","compressible":true,"extensions":["ssml"]},"application/stix+json":{"source":"iana","compressible":true},"application/swid+xml":{"source":"iana","compressible":true,"extensions":["swidtag"]},"application/tamp-apex-update":{"source":"iana"},"application/tamp-apex-update-confirm":{"source":"iana"},"application/tamp-community-update":{"source":"iana"},"application/tamp-community-update-confirm":{"source":"iana"},"application/tamp-error":{"source":"iana"},"application/tamp-sequence-adjust":{"source":"iana"},"application/tamp-sequence-adjust-confirm":{"source":"iana"},"application/tamp-status-query":{"source":"iana"},"application/tamp-status-response":{"source":"iana"},"application/tamp-update":{"source":"iana"},"application/tamp-update-confirm":{"source":"iana"},"application/tar":{"compressible":true},"application/taxii+json":{"source":"iana","compressible":true},"application/td+json":{"source":"iana","compressible":true},"application/tei+xml":{"source":"iana","compressible":true,"extensions":["tei","teicorpus"]},"application/tetra_isi":{"source":"iana"},"application/thraud+xml":{"source":"iana","compressible":true,"extensions":["tfi"]},"application/timestamp-query":{"source":"iana"},"application/timestamp-reply":{"source":"iana"},"application/timestamped-data":{"source":"iana","extensions":["tsd"]},"application/tlsrpt+gzip":{"source":"iana"},"application/tlsrpt+json":{"source":"iana","compressible":true},"application/tnauthlist":{"source":"iana"},"application/token-introspection+jwt":{"source":"iana"},"application/toml":{"compressible":true,"extensions":["toml"]},"application/trickle-ice-sdpfrag":{"source":"iana"},"application/trig":{"source":"iana","extensions":["trig"]},"application/ttml+xml":{"source":"iana","compressible":true,"extensions":["ttml"]},"application/tve-trigger":{"source":"iana"},"application/tzif":{"source":"iana"},"application/tzif-leap":{"source":"iana"},"application/ubjson":{"compressible":false,"extensions":["ubj"]},"application/ulpfec":{"source":"iana"},"application/urc-grpsheet+xml":{"source":"iana","compressible":true},"application/urc-ressheet+xml":{"source":"iana","compressible":true,"extensions":["rsheet"]},"application/urc-targetdesc+xml":{"source":"iana","compressible":true,"extensions":["td"]},"application/urc-uisocketdesc+xml":{"source":"iana","compressible":true},"application/vcard+json":{"source":"iana","compressible":true},"application/vcard+xml":{"source
":"iana","compressible":true},"application/vemmi":{"source":"iana"},"application/vividence.scriptfile":{"source":"apache"},"application/vnd.1000minds.decision-model+xml":{"source":"iana","compressible":true,"extensions":["1km"]},"application/vnd.3gpp-prose+xml":{"source":"iana","compressible":true},"application/vnd.3gpp-prose-pc3ch+xml":{"source":"iana","compressible":true},"application/vnd.3gpp-v2x-local-service-information":{"source":"iana"},"application/vnd.3gpp.5gnas":{"source":"iana"},"application/vnd.3gpp.access-transfer-events+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.bsf+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.gmop+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.gtpc":{"source":"iana"},"application/vnd.3gpp.interworking-data":{"source":"iana"},"application/vnd.3gpp.lpp":{"source":"iana"},"application/vnd.3gpp.mc-signalling-ear":{"source":"iana"},"application/vnd.3gpp.mcdata-affiliation-command+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-payload":{"source":"iana"},"application/vnd.3gpp.mcdata-service-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-signalling":{"source":"iana"},"application/vnd.3gpp.mcdata-ue-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcdata-user-profile+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-affiliation-command+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-floor-request+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-location-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-mbms-usage-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-service-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-signed+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-ue-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-ue-init-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcptt-user-profile+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-affiliation-command+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-affiliation-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-location-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-mbms-usage-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-service-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-transmission-request+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-ue-config+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mcvideo-user-profile+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.mid-call+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.ngap":{"source":"iana"},"application/vnd.3gpp.pfcp":{"source":"iana"},"application/vnd.3gpp.pic-bw-large":{"source":"iana","extensions":["plb"]},"application/vnd.3gpp.pic-bw-small":{"source":"iana","extensions":["psb"]},"application/vnd.3gpp.pic-bw-var":{"source":"iana","extensions":["pvb"]},"application/vnd.3gpp.s1ap":{"source":"iana"},"application/vnd.3gpp.sm
s":{"source":"iana"},"application/vnd.3gpp.sms+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.srvcc-ext+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.srvcc-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.state-and-event-info+xml":{"source":"iana","compressible":true},"application/vnd.3gpp.ussd+xml":{"source":"iana","compressible":true},"application/vnd.3gpp2.bcmcsinfo+xml":{"source":"iana","compressible":true},"application/vnd.3gpp2.sms":{"source":"iana"},"application/vnd.3gpp2.tcap":{"source":"iana","extensions":["tcap"]},"application/vnd.3lightssoftware.imagescal":{"source":"iana"},"application/vnd.3m.post-it-notes":{"source":"iana","extensions":["pwn"]},"application/vnd.accpac.simply.aso":{"source":"iana","extensions":["aso"]},"application/vnd.accpac.simply.imp":{"source":"iana","extensions":["imp"]},"application/vnd.acucobol":{"source":"iana","extensions":["acu"]},"application/vnd.acucorp":{"source":"iana","extensions":["atc","acutc"]},"application/vnd.adobe.air-application-installer-package+zip":{"source":"apache","compressible":false,"extensions":["air"]},"application/vnd.adobe.flash.movie":{"source":"iana"},"application/vnd.adobe.formscentral.fcdt":{"source":"iana","extensions":["fcdt"]},"application/vnd.adobe.fxp":{"source":"iana","extensions":["fxp","fxpl"]},"application/vnd.adobe.partial-upload":{"source":"iana"},"application/vnd.adobe.xdp+xml":{"source":"iana","compressible":true,"extensions":["xdp"]},"application/vnd.adobe.xfdf":{"source":"iana","extensions":["xfdf"]},"application/vnd.aether.imp":{"source":"iana"},"application/vnd.afpc.afplinedata":{"source":"iana"},"application/vnd.afpc.afplinedata-pagedef":{"source":"iana"},"application/vnd.afpc.cmoca-cmresource":{"source":"iana"},"application/vnd.afpc.foca-charset":{"source":"iana"},"application/vnd.afpc.foca-codedfont":{"source":"iana"},"application/vnd.afpc.foca-codepage":{"source":"iana"},"application/vnd.afpc.modca":{"source":"iana"},"application/vnd.afpc.modca-cmtable":{"source":"iana"},"application/vnd.afpc.modca-formdef":{"source":"iana"},"application/vnd.afpc.modca-mediummap":{"source":"iana"},"application/vnd.afpc.modca-objectcontainer":{"source":"iana"},"application/vnd.afpc.modca-overlay":{"source":"iana"},"application/vnd.afpc.modca-pagesegment":{"source":"iana"},"application/vnd.age":{"source":"iana","extensions":["age"]},"application/vnd.ah-barcode":{"source":"iana"},"application/vnd.ahead.space":{"source":"iana","extensions":["ahead"]},"application/vnd.airzip.filesecure.azf":{"source":"iana","extensions":["azf"]},"application/vnd.airzip.filesecure.azs":{"source":"iana","extensions":["azs"]},"application/vnd.amadeus+json":{"source":"iana","compressible":true},"application/vnd.amazon.ebook":{"source":"apache","extensions":["azw"]},"application/vnd.amazon.mobi8-ebook":{"source":"iana"},"application/vnd.americandynamics.acc":{"source":"iana","extensions":["acc"]},"application/vnd.amiga.ami":{"source":"iana","extensions":["ami"]},"application/vnd.amundsen.maze+xml":{"source":"iana","compressible":true},"application/vnd.android.ota":{"source":"iana"},"application/vnd.android.package-archive":{"source":"apache","compressible":false,"extensions":["apk"]},"application/vnd.anki":{"source":"iana"},"application/vnd.anser-web-certificate-issue-initiation":{"source":"iana","extensions":["cii"]},"application/vnd.anser-web-funds-transfer-initiation":{"source":"apache","extensions":["fti"]},"application/vnd.antix.game-component":{"source":"iana","extensions":["atx"]},"a
pplication/vnd.apache.arrow.file":{"source":"iana"},"application/vnd.apache.arrow.stream":{"source":"iana"},"application/vnd.apache.thrift.binary":{"source":"iana"},"application/vnd.apache.thrift.compact":{"source":"iana"},"application/vnd.apache.thrift.json":{"source":"iana"},"application/vnd.api+json":{"source":"iana","compressible":true},"application/vnd.aplextor.warrp+json":{"source":"iana","compressible":true},"application/vnd.apothekende.reservation+json":{"source":"iana","compressible":true},"application/vnd.apple.installer+xml":{"source":"iana","compressible":true,"extensions":["mpkg"]},"application/vnd.apple.keynote":{"source":"iana","extensions":["key"]},"application/vnd.apple.mpegurl":{"source":"iana","extensions":["m3u8"]},"application/vnd.apple.numbers":{"source":"iana","extensions":["numbers"]},"application/vnd.apple.pages":{"source":"iana","extensions":["pages"]},"application/vnd.apple.pkpass":{"compressible":false,"extensions":["pkpass"]},"application/vnd.arastra.swi":{"source":"iana"},"application/vnd.aristanetworks.swi":{"source":"iana","extensions":["swi"]},"application/vnd.artisan+json":{"source":"iana","compressible":true},"application/vnd.artsquare":{"source":"iana"},"application/vnd.astraea-software.iota":{"source":"iana","extensions":["iota"]},"application/vnd.audiograph":{"source":"iana","extensions":["aep"]},"application/vnd.autopackage":{"source":"iana"},"application/vnd.avalon+json":{"source":"iana","compressible":true},"application/vnd.avistar+xml":{"source":"iana","compressible":true},"application/vnd.balsamiq.bmml+xml":{"source":"iana","compressible":true,"extensions":["bmml"]},"application/vnd.balsamiq.bmpr":{"source":"iana"},"application/vnd.banana-accounting":{"source":"iana"},"application/vnd.bbf.usp.error":{"source":"iana"},"application/vnd.bbf.usp.msg":{"source":"iana"},"application/vnd.bbf.usp.msg+json":{"source":"iana","compressible":true},"application/vnd.bekitzur-stech+json":{"source":"iana","compressible":true},"application/vnd.bint.med-content":{"source":"iana"},"application/vnd.biopax.rdf+xml":{"source":"iana","compressible":true},"application/vnd.blink-idb-value-wrapper":{"source":"iana"},"application/vnd.blueice.multipass":{"source":"iana","extensions":["mpm"]},"application/vnd.bluetooth.ep.oob":{"source":"iana"},"application/vnd.bluetooth.le.oob":{"source":"iana"},"application/vnd.bmi":{"source":"iana","extensions":["bmi"]},"application/vnd.bpf":{"source":"iana"},"application/vnd.bpf3":{"source":"iana"},"application/vnd.businessobjects":{"source":"iana","extensions":["rep"]},"application/vnd.byu.uapi+json":{"source":"iana","compressible":true},"application/vnd.cab-jscript":{"source":"iana"},"application/vnd.canon-cpdl":{"source":"iana"},"application/vnd.canon-lips":{"source":"iana"},"application/vnd.capasystems-pg+json":{"source":"iana","compressible":true},"application/vnd.cendio.thinlinc.clientconf":{"source":"iana"},"application/vnd.century-systems.tcp_stream":{"source":"iana"},"application/vnd.chemdraw+xml":{"source":"iana","compressible":true,"extensions":["cdxml"]},"application/vnd.chess-pgn":{"source":"iana"},"application/vnd.chipnuts.karaoke-mmd":{"source":"iana","extensions":["mmd"]},"application/vnd.ciedi":{"source":"iana"},"application/vnd.cinderella":{"source":"iana","extensions":["cdy"]},"application/vnd.cirpack.isdn-ext":{"source":"iana"},"application/vnd.citationstyles.style+xml":{"source":"iana","compressible":true,"extensions":["csl"]},"application/vnd.claymore":{"source":"iana","extensions":["cla"]},"application/vnd.cloanto.rp
9":{"source":"iana","extensions":["rp9"]},"application/vnd.clonk.c4group":{"source":"iana","extensions":["c4g","c4d","c4f","c4p","c4u"]},"application/vnd.cluetrust.cartomobile-config":{"source":"iana","extensions":["c11amc"]},"application/vnd.cluetrust.cartomobile-config-pkg":{"source":"iana","extensions":["c11amz"]},"application/vnd.coffeescript":{"source":"iana"},"application/vnd.collabio.xodocuments.document":{"source":"iana"},"application/vnd.collabio.xodocuments.document-template":{"source":"iana"},"application/vnd.collabio.xodocuments.presentation":{"source":"iana"},"application/vnd.collabio.xodocuments.presentation-template":{"source":"iana"},"application/vnd.collabio.xodocuments.spreadsheet":{"source":"iana"},"application/vnd.collabio.xodocuments.spreadsheet-template":{"source":"iana"},"application/vnd.collection+json":{"source":"iana","compressible":true},"application/vnd.collection.doc+json":{"source":"iana","compressible":true},"application/vnd.collection.next+json":{"source":"iana","compressible":true},"application/vnd.comicbook+zip":{"source":"iana","compressible":false},"application/vnd.comicbook-rar":{"source":"iana"},"application/vnd.commerce-battelle":{"source":"iana"},"application/vnd.commonspace":{"source":"iana","extensions":["csp"]},"application/vnd.contact.cmsg":{"source":"iana","extensions":["cdbcmsg"]},"application/vnd.coreos.ignition+json":{"source":"iana","compressible":true},"application/vnd.cosmocaller":{"source":"iana","extensions":["cmc"]},"application/vnd.crick.clicker":{"source":"iana","extensions":["clkx"]},"application/vnd.crick.clicker.keyboard":{"source":"iana","extensions":["clkk"]},"application/vnd.crick.clicker.palette":{"source":"iana","extensions":["clkp"]},"application/vnd.crick.clicker.template":{"source":"iana","extensions":["clkt"]},"application/vnd.crick.clicker.wordbank":{"source":"iana","extensions":["clkw"]},"application/vnd.criticaltools.wbs+xml":{"source":"iana","compressible":true,"extensions":["wbs"]},"application/vnd.cryptii.pipe+json":{"source":"iana","compressible":true},"application/vnd.crypto-shade-file":{"source":"iana"},"application/vnd.cryptomator.encrypted":{"source":"iana"},"application/vnd.cryptomator.vault":{"source":"iana"},"application/vnd.ctc-posml":{"source":"iana","extensions":["pml"]},"application/vnd.ctct.ws+xml":{"source":"iana","compressible":true},"application/vnd.cups-pdf":{"source":"iana"},"application/vnd.cups-postscript":{"source":"iana"},"application/vnd.cups-ppd":{"source":"iana","extensions":["ppd"]},"application/vnd.cups-raster":{"source":"iana"},"application/vnd.cups-raw":{"source":"iana"},"application/vnd.curl":{"source":"iana"},"application/vnd.curl.car":{"source":"apache","extensions":["car"]},"application/vnd.curl.pcurl":{"source":"apache","extensions":["pcurl"]},"application/vnd.cyan.dean.root+xml":{"source":"iana","compressible":true},"application/vnd.cybank":{"source":"iana"},"application/vnd.cyclonedx+json":{"source":"iana","compressible":true},"application/vnd.cyclonedx+xml":{"source":"iana","compressible":true},"application/vnd.d2l.coursepackage1p0+zip":{"source":"iana","compressible":false},"application/vnd.d3m-dataset":{"source":"iana"},"application/vnd.d3m-problem":{"source":"iana"},"application/vnd.dart":{"source":"iana","compressible":true,"extensions":["dart"]},"application/vnd.data-vision.rdz":{"source":"iana","extensions":["rdz"]},"application/vnd.datapackage+json":{"source":"iana","compressible":true},"application/vnd.dataresource+json":{"source":"iana","compressible":true},"application/vn
d.dbf":{"source":"iana","extensions":["dbf"]},"application/vnd.debian.binary-package":{"source":"iana"},"application/vnd.dece.data":{"source":"iana","extensions":["uvf","uvvf","uvd","uvvd"]},"application/vnd.dece.ttml+xml":{"source":"iana","compressible":true,"extensions":["uvt","uvvt"]},"application/vnd.dece.unspecified":{"source":"iana","extensions":["uvx","uvvx"]},"application/vnd.dece.zip":{"source":"iana","extensions":["uvz","uvvz"]},"application/vnd.denovo.fcselayout-link":{"source":"iana","extensions":["fe_launch"]},"application/vnd.desmume.movie":{"source":"iana"},"application/vnd.dir-bi.plate-dl-nosuffix":{"source":"iana"},"application/vnd.dm.delegation+xml":{"source":"iana","compressible":true},"application/vnd.dna":{"source":"iana","extensions":["dna"]},"application/vnd.document+json":{"source":"iana","compressible":true},"application/vnd.dolby.mlp":{"source":"apache","extensions":["mlp"]},"application/vnd.dolby.mobile.1":{"source":"iana"},"application/vnd.dolby.mobile.2":{"source":"iana"},"application/vnd.doremir.scorecloud-binary-document":{"source":"iana"},"application/vnd.dpgraph":{"source":"iana","extensions":["dpg"]},"application/vnd.dreamfactory":{"source":"iana","extensions":["dfac"]},"application/vnd.drive+json":{"source":"iana","compressible":true},"application/vnd.ds-keypoint":{"source":"apache","extensions":["kpxx"]},"application/vnd.dtg.local":{"source":"iana"},"application/vnd.dtg.local.flash":{"source":"iana"},"application/vnd.dtg.local.html":{"source":"iana"},"application/vnd.dvb.ait":{"source":"iana","extensions":["ait"]},"application/vnd.dvb.dvbisl+xml":{"source":"iana","compressible":true},"application/vnd.dvb.dvbj":{"source":"iana"},"application/vnd.dvb.esgcontainer":{"source":"iana"},"application/vnd.dvb.ipdcdftnotifaccess":{"source":"iana"},"application/vnd.dvb.ipdcesgaccess":{"source":"iana"},"application/vnd.dvb.ipdcesgaccess2":{"source":"iana"},"application/vnd.dvb.ipdcesgpdd":{"source":"iana"},"application/vnd.dvb.ipdcroaming":{"source":"iana"},"application/vnd.dvb.iptv.alfec-base":{"source":"iana"},"application/vnd.dvb.iptv.alfec-enhancement":{"source":"iana"},"application/vnd.dvb.notif-aggregate-root+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-container+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-generic+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-ia-msglist+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-ia-registration-request+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-ia-registration-response+xml":{"source":"iana","compressible":true},"application/vnd.dvb.notif-init+xml":{"source":"iana","compressible":true},"application/vnd.dvb.pfr":{"source":"iana"},"application/vnd.dvb.service":{"source":"iana","extensions":["svc"]},"application/vnd.dxr":{"source":"iana"},"application/vnd.dynageo":{"source":"iana","extensions":["geo"]},"application/vnd.dzr":{"source":"iana"},"application/vnd.easykaraoke.cdgdownload":{"source":"iana"},"application/vnd.ecdis-update":{"source":"iana"},"application/vnd.ecip.rlp":{"source":"iana"},"application/vnd.eclipse.ditto+json":{"source":"iana","compressible":true},"application/vnd.ecowin.chart":{"source":"iana","extensions":["mag"]},"application/vnd.ecowin.filerequest":{"source":"iana"},"application/vnd.ecowin.fileupdate":{"source":"iana"},"application/vnd.ecowin.series":{"source":"iana"},"application/vnd.ecowin.seriesrequest":{"source":"iana"},"application/vnd.ecowin.seriesupdate":{"source":"iana"},"ap
plication/vnd.efi.img":{"source":"iana"},"application/vnd.efi.iso":{"source":"iana"},"application/vnd.emclient.accessrequest+xml":{"source":"iana","compressible":true},"application/vnd.enliven":{"source":"iana","extensions":["nml"]},"application/vnd.enphase.envoy":{"source":"iana"},"application/vnd.eprints.data+xml":{"source":"iana","compressible":true},"application/vnd.epson.esf":{"source":"iana","extensions":["esf"]},"application/vnd.epson.msf":{"source":"iana","extensions":["msf"]},"application/vnd.epson.quickanime":{"source":"iana","extensions":["qam"]},"application/vnd.epson.salt":{"source":"iana","extensions":["slt"]},"application/vnd.epson.ssf":{"source":"iana","extensions":["ssf"]},"application/vnd.ericsson.quickcall":{"source":"iana"},"application/vnd.espass-espass+zip":{"source":"iana","compressible":false},"application/vnd.eszigno3+xml":{"source":"iana","compressible":true,"extensions":["es3","et3"]},"application/vnd.etsi.aoc+xml":{"source":"iana","compressible":true},"application/vnd.etsi.asic-e+zip":{"source":"iana","compressible":false},"application/vnd.etsi.asic-s+zip":{"source":"iana","compressible":false},"application/vnd.etsi.cug+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvcommand+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvdiscovery+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvprofile+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsad-bc+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsad-cod+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsad-npvr+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvservice+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvsync+xml":{"source":"iana","compressible":true},"application/vnd.etsi.iptvueprofile+xml":{"source":"iana","compressible":true},"application/vnd.etsi.mcid+xml":{"source":"iana","compressible":true},"application/vnd.etsi.mheg5":{"source":"iana"},"application/vnd.etsi.overload-control-policy-dataset+xml":{"source":"iana","compressible":true},"application/vnd.etsi.pstn+xml":{"source":"iana","compressible":true},"application/vnd.etsi.sci+xml":{"source":"iana","compressible":true},"application/vnd.etsi.simservs+xml":{"source":"iana","compressible":true},"application/vnd.etsi.timestamp-token":{"source":"iana"},"application/vnd.etsi.tsl+xml":{"source":"iana","compressible":true},"application/vnd.etsi.tsl.der":{"source":"iana"},"application/vnd.eu.kasparian.car+json":{"source":"iana","compressible":true},"application/vnd.eudora.data":{"source":"iana"},"application/vnd.evolv.ecig.profile":{"source":"iana"},"application/vnd.evolv.ecig.settings":{"source":"iana"},"application/vnd.evolv.ecig.theme":{"source":"iana"},"application/vnd.exstream-empower+zip":{"source":"iana","compressible":false},"application/vnd.exstream-package":{"source":"iana"},"application/vnd.ezpix-album":{"source":"iana","extensions":["ez2"]},"application/vnd.ezpix-package":{"source":"iana","extensions":["ez3"]},"application/vnd.f-secure.mobile":{"source":"iana"},"application/vnd.familysearch.gedcom+zip":{"source":"iana","compressible":false},"application/vnd.fastcopy-disk-image":{"source":"iana"},"application/vnd.fdf":{"source":"iana","extensions":["fdf"]},"application/vnd.fdsn.mseed":{"source":"iana","extensions":["mseed"]},"application/vnd.fdsn.seed":{"source":"iana","extensions":["seed","dataless"]},"application/vnd.ffsns":{"source":"iana"},"application/vnd.ficlab.flb+zip":{"source":"iana
","compressible":false},"application/vnd.filmit.zfc":{"source":"iana"},"application/vnd.fints":{"source":"iana"},"application/vnd.firemonkeys.cloudcell":{"source":"iana"},"application/vnd.flographit":{"source":"iana","extensions":["gph"]},"application/vnd.fluxtime.clip":{"source":"iana","extensions":["ftc"]},"application/vnd.font-fontforge-sfd":{"source":"iana"},"application/vnd.framemaker":{"source":"iana","extensions":["fm","frame","maker","book"]},"application/vnd.frogans.fnc":{"source":"iana","extensions":["fnc"]},"application/vnd.frogans.ltf":{"source":"iana","extensions":["ltf"]},"application/vnd.fsc.weblaunch":{"source":"iana","extensions":["fsc"]},"application/vnd.fujifilm.fb.docuworks":{"source":"iana"},"application/vnd.fujifilm.fb.docuworks.binder":{"source":"iana"},"application/vnd.fujifilm.fb.docuworks.container":{"source":"iana"},"application/vnd.fujifilm.fb.jfi+xml":{"source":"iana","compressible":true},"application/vnd.fujitsu.oasys":{"source":"iana","extensions":["oas"]},"application/vnd.fujitsu.oasys2":{"source":"iana","extensions":["oa2"]},"application/vnd.fujitsu.oasys3":{"source":"iana","extensions":["oa3"]},"application/vnd.fujitsu.oasysgp":{"source":"iana","extensions":["fg5"]},"application/vnd.fujitsu.oasysprs":{"source":"iana","extensions":["bh2"]},"application/vnd.fujixerox.art-ex":{"source":"iana"},"application/vnd.fujixerox.art4":{"source":"iana"},"application/vnd.fujixerox.ddd":{"source":"iana","extensions":["ddd"]},"application/vnd.fujixerox.docuworks":{"source":"iana","extensions":["xdw"]},"application/vnd.fujixerox.docuworks.binder":{"source":"iana","extensions":["xbd"]},"application/vnd.fujixerox.docuworks.container":{"source":"iana"},"application/vnd.fujixerox.hbpl":{"source":"iana"},"application/vnd.fut-misnet":{"source":"iana"},"application/vnd.futoin+cbor":{"source":"iana"},"application/vnd.futoin+json":{"source":"iana","compressible":true},"application/vnd.fuzzysheet":{"source":"iana","extensions":["fzs"]},"application/vnd.genomatix.tuxedo":{"source":"iana","extensions":["txd"]},"application/vnd.gentics.grd+json":{"source":"iana","compressible":true},"application/vnd.geo+json":{"source":"iana","compressible":true},"application/vnd.geocube+xml":{"source":"iana","compressible":true},"application/vnd.geogebra.file":{"source":"iana","extensions":["ggb"]},"application/vnd.geogebra.slides":{"source":"iana"},"application/vnd.geogebra.tool":{"source":"iana","extensions":["ggt"]},"application/vnd.geometry-explorer":{"source":"iana","extensions":["gex","gre"]},"application/vnd.geonext":{"source":"iana","extensions":["gxt"]},"application/vnd.geoplan":{"source":"iana","extensions":["g2w"]},"application/vnd.geospace":{"source":"iana","extensions":["g3w"]},"application/vnd.gerber":{"source":"iana"},"application/vnd.globalplatform.card-content-mgt":{"source":"iana"},"application/vnd.globalplatform.card-content-mgt-response":{"source":"iana"},"application/vnd.gmx":{"source":"iana","extensions":["gmx"]},"application/vnd.google-apps.document":{"compressible":false,"extensions":["gdoc"]},"application/vnd.google-apps.presentation":{"compressible":false,"extensions":["gslides"]},"application/vnd.google-apps.spreadsheet":{"compressible":false,"extensions":["gsheet"]},"application/vnd.google-earth.kml+xml":{"source":"iana","compressible":true,"extensions":["kml"]},"application/vnd.google-earth.kmz":{"source":"iana","compressible":false,"extensions":["kmz"]},"application/vnd.gov.sk.e-form+xml":{"source":"iana","compressible":true},"application/vnd.gov.sk.e-form+zip":{"source":"
iana","compressible":false},"application/vnd.gov.sk.xmldatacontainer+xml":{"source":"iana","compressible":true},"application/vnd.grafeq":{"source":"iana","extensions":["gqf","gqs"]},"application/vnd.gridmp":{"source":"iana"},"application/vnd.groove-account":{"source":"iana","extensions":["gac"]},"application/vnd.groove-help":{"source":"iana","extensions":["ghf"]},"application/vnd.groove-identity-message":{"source":"iana","extensions":["gim"]},"application/vnd.groove-injector":{"source":"iana","extensions":["grv"]},"application/vnd.groove-tool-message":{"source":"iana","extensions":["gtm"]},"application/vnd.groove-tool-template":{"source":"iana","extensions":["tpl"]},"application/vnd.groove-vcard":{"source":"iana","extensions":["vcg"]},"application/vnd.hal+json":{"source":"iana","compressible":true},"application/vnd.hal+xml":{"source":"iana","compressible":true,"extensions":["hal"]},"application/vnd.handheld-entertainment+xml":{"source":"iana","compressible":true,"extensions":["zmm"]},"application/vnd.hbci":{"source":"iana","extensions":["hbci"]},"application/vnd.hc+json":{"source":"iana","compressible":true},"application/vnd.hcl-bireports":{"source":"iana"},"application/vnd.hdt":{"source":"iana"},"application/vnd.heroku+json":{"source":"iana","compressible":true},"application/vnd.hhe.lesson-player":{"source":"iana","extensions":["les"]},"application/vnd.hl7cda+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/vnd.hl7v2+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/vnd.hp-hpgl":{"source":"iana","extensions":["hpgl"]},"application/vnd.hp-hpid":{"source":"iana","extensions":["hpid"]},"application/vnd.hp-hps":{"source":"iana","extensions":["hps"]},"application/vnd.hp-jlyt":{"source":"iana","extensions":["jlt"]},"application/vnd.hp-pcl":{"source":"iana","extensions":["pcl"]},"application/vnd.hp-pclxl":{"source":"iana","extensions":["pclxl"]},"application/vnd.httphone":{"source":"iana"},"application/vnd.hydrostatix.sof-data":{"source":"iana","extensions":["sfd-hdstx"]},"application/vnd.hyper+json":{"source":"iana","compressible":true},"application/vnd.hyper-item+json":{"source":"iana","compressible":true},"application/vnd.hyperdrive+json":{"source":"iana","compressible":true},"application/vnd.hzn-3d-crossword":{"source":"iana"},"application/vnd.ibm.afplinedata":{"source":"iana"},"application/vnd.ibm.electronic-media":{"source":"iana"},"application/vnd.ibm.minipay":{"source":"iana","extensions":["mpy"]},"application/vnd.ibm.modcap":{"source":"iana","extensions":["afp","listafp","list3820"]},"application/vnd.ibm.rights-management":{"source":"iana","extensions":["irm"]},"application/vnd.ibm.secure-container":{"source":"iana","extensions":["sc"]},"application/vnd.iccprofile":{"source":"iana","extensions":["icc","icm"]},"application/vnd.ieee.1905":{"source":"iana"},"application/vnd.igloader":{"source":"iana","extensions":["igl"]},"application/vnd.imagemeter.folder+zip":{"source":"iana","compressible":false},"application/vnd.imagemeter.image+zip":{"source":"iana","compressible":false},"application/vnd.immervision-ivp":{"source":"iana","extensions":["ivp"]},"application/vnd.immervision-ivu":{"source":"iana","extensions":["ivu"]},"application/vnd.ims.imsccv1p1":{"source":"iana"},"application/vnd.ims.imsccv1p2":{"source":"iana"},"application/vnd.ims.imsccv1p3":{"source":"iana"},"application/vnd.ims.lis.v2.result+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolconsumerprofile+json":{"source":"iana","compressible":true},"applica
tion/vnd.ims.lti.v2.toolproxy+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolproxy.id+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolsettings+json":{"source":"iana","compressible":true},"application/vnd.ims.lti.v2.toolsettings.simple+json":{"source":"iana","compressible":true},"application/vnd.informedcontrol.rms+xml":{"source":"iana","compressible":true},"application/vnd.informix-visionary":{"source":"iana"},"application/vnd.infotech.project":{"source":"iana"},"application/vnd.infotech.project+xml":{"source":"iana","compressible":true},"application/vnd.innopath.wamp.notification":{"source":"iana"},"application/vnd.insors.igm":{"source":"iana","extensions":["igm"]},"application/vnd.intercon.formnet":{"source":"iana","extensions":["xpw","xpx"]},"application/vnd.intergeo":{"source":"iana","extensions":["i2g"]},"application/vnd.intertrust.digibox":{"source":"iana"},"application/vnd.intertrust.nncp":{"source":"iana"},"application/vnd.intu.qbo":{"source":"iana","extensions":["qbo"]},"application/vnd.intu.qfx":{"source":"iana","extensions":["qfx"]},"application/vnd.iptc.g2.catalogitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.conceptitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.knowledgeitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.newsitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.newsmessage+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.packageitem+xml":{"source":"iana","compressible":true},"application/vnd.iptc.g2.planningitem+xml":{"source":"iana","compressible":true},"application/vnd.ipunplugged.rcprofile":{"source":"iana","extensions":["rcprofile"]},"application/vnd.irepository.package+xml":{"source":"iana","compressible":true,"extensions":["irp"]},"application/vnd.is-xpr":{"source":"iana","extensions":["xpr"]},"application/vnd.isac.fcs":{"source":"iana","extensions":["fcs"]},"application/vnd.iso11783-10+zip":{"source":"iana","compressible":false},"application/vnd.jam":{"source":"iana","extensions":["jam"]},"application/vnd.japannet-directory-service":{"source":"iana"},"application/vnd.japannet-jpnstore-wakeup":{"source":"iana"},"application/vnd.japannet-payment-wakeup":{"source":"iana"},"application/vnd.japannet-registration":{"source":"iana"},"application/vnd.japannet-registration-wakeup":{"source":"iana"},"application/vnd.japannet-setstore-wakeup":{"source":"iana"},"application/vnd.japannet-verification":{"source":"iana"},"application/vnd.japannet-verification-wakeup":{"source":"iana"},"application/vnd.jcp.javame.midlet-rms":{"source":"iana","extensions":["rms"]},"application/vnd.jisp":{"source":"iana","extensions":["jisp"]},"application/vnd.joost.joda-archive":{"source":"iana","extensions":["joda"]},"application/vnd.jsk.isdn-ngn":{"source":"iana"},"application/vnd.kahootz":{"source":"iana","extensions":["ktz","ktr"]},"application/vnd.kde.karbon":{"source":"iana","extensions":["karbon"]},"application/vnd.kde.kchart":{"source":"iana","extensions":["chrt"]},"application/vnd.kde.kformula":{"source":"iana","extensions":["kfo"]},"application/vnd.kde.kivio":{"source":"iana","extensions":["flw"]},"application/vnd.kde.kontour":{"source":"iana","extensions":["kon"]},"application/vnd.kde.kpresenter":{"source":"iana","extensions":["kpr","kpt"]},"application/vnd.kde.kspread":{"source":"iana","extensions":["ksp"]},"application/vnd.kde.kword":{"source":"iana","extensions":["kwd","kwt"]},"application/vnd.kenameaapp":{"s
ource":"iana","extensions":["htke"]},"application/vnd.kidspiration":{"source":"iana","extensions":["kia"]},"application/vnd.kinar":{"source":"iana","extensions":["kne","knp"]},"application/vnd.koan":{"source":"iana","extensions":["skp","skd","skt","skm"]},"application/vnd.kodak-descriptor":{"source":"iana","extensions":["sse"]},"application/vnd.las":{"source":"iana"},"application/vnd.las.las+json":{"source":"iana","compressible":true},"application/vnd.las.las+xml":{"source":"iana","compressible":true,"extensions":["lasxml"]},"application/vnd.laszip":{"source":"iana"},"application/vnd.leap+json":{"source":"iana","compressible":true},"application/vnd.liberty-request+xml":{"source":"iana","compressible":true},"application/vnd.llamagraphics.life-balance.desktop":{"source":"iana","extensions":["lbd"]},"application/vnd.llamagraphics.life-balance.exchange+xml":{"source":"iana","compressible":true,"extensions":["lbe"]},"application/vnd.logipipe.circuit+zip":{"source":"iana","compressible":false},"application/vnd.loom":{"source":"iana"},"application/vnd.lotus-1-2-3":{"source":"iana","extensions":["123"]},"application/vnd.lotus-approach":{"source":"iana","extensions":["apr"]},"application/vnd.lotus-freelance":{"source":"iana","extensions":["pre"]},"application/vnd.lotus-notes":{"source":"iana","extensions":["nsf"]},"application/vnd.lotus-organizer":{"source":"iana","extensions":["org"]},"application/vnd.lotus-screencam":{"source":"iana","extensions":["scm"]},"application/vnd.lotus-wordpro":{"source":"iana","extensions":["lwp"]},"application/vnd.macports.portpkg":{"source":"iana","extensions":["portpkg"]},"application/vnd.mapbox-vector-tile":{"source":"iana","extensions":["mvt"]},"application/vnd.marlin.drm.actiontoken+xml":{"source":"iana","compressible":true},"application/vnd.marlin.drm.conftoken+xml":{"source":"iana","compressible":true},"application/vnd.marlin.drm.license+xml":{"source":"iana","compressible":true},"application/vnd.marlin.drm.mdcf":{"source":"iana"},"application/vnd.mason+json":{"source":"iana","compressible":true},"application/vnd.maxar.archive.3tz+zip":{"source":"iana","compressible":false},"application/vnd.maxmind.maxmind-db":{"source":"iana"},"application/vnd.mcd":{"source":"iana","extensions":["mcd"]},"application/vnd.medcalcdata":{"source":"iana","extensions":["mc1"]},"application/vnd.mediastation.cdkey":{"source":"iana","extensions":["cdkey"]},"application/vnd.meridian-slingshot":{"source":"iana"},"application/vnd.mfer":{"source":"iana","extensions":["mwf"]},"application/vnd.mfmp":{"source":"iana","extensions":["mfm"]},"application/vnd.micro+json":{"source":"iana","compressible":true},"application/vnd.micrografx.flo":{"source":"iana","extensions":["flo"]},"application/vnd.micrografx.igx":{"source":"iana","extensions":["igx"]},"application/vnd.microsoft.portable-executable":{"source":"iana"},"application/vnd.microsoft.windows.thumbnail-cache":{"source":"iana"},"application/vnd.miele+json":{"source":"iana","compressible":true},"application/vnd.mif":{"source":"iana","extensions":["mif"]},"application/vnd.minisoft-hp3000-save":{"source":"iana"},"application/vnd.mitsubishi.misty-guard.trustweb":{"source":"iana"},"application/vnd.mobius.daf":{"source":"iana","extensions":["daf"]},"application/vnd.mobius.dis":{"source":"iana","extensions":["dis"]},"application/vnd.mobius.mbk":{"source":"iana","extensions":["mbk"]},"application/vnd.mobius.mqy":{"source":"iana","extensions":["mqy"]},"application/vnd.mobius.msl":{"source":"iana","extensions":["msl"]},"application/vnd.mobius.plc":{"sou
rce":"iana","extensions":["plc"]},"application/vnd.mobius.txf":{"source":"iana","extensions":["txf"]},"application/vnd.mophun.application":{"source":"iana","extensions":["mpn"]},"application/vnd.mophun.certificate":{"source":"iana","extensions":["mpc"]},"application/vnd.motorola.flexsuite":{"source":"iana"},"application/vnd.motorola.flexsuite.adsi":{"source":"iana"},"application/vnd.motorola.flexsuite.fis":{"source":"iana"},"application/vnd.motorola.flexsuite.gotap":{"source":"iana"},"application/vnd.motorola.flexsuite.kmr":{"source":"iana"},"application/vnd.motorola.flexsuite.ttc":{"source":"iana"},"application/vnd.motorola.flexsuite.wem":{"source":"iana"},"application/vnd.motorola.iprm":{"source":"iana"},"application/vnd.mozilla.xul+xml":{"source":"iana","compressible":true,"extensions":["xul"]},"application/vnd.ms-3mfdocument":{"source":"iana"},"application/vnd.ms-artgalry":{"source":"iana","extensions":["cil"]},"application/vnd.ms-asf":{"source":"iana"},"application/vnd.ms-cab-compressed":{"source":"iana","extensions":["cab"]},"application/vnd.ms-color.iccprofile":{"source":"apache"},"application/vnd.ms-excel":{"source":"iana","compressible":false,"extensions":["xls","xlm","xla","xlc","xlt","xlw"]},"application/vnd.ms-excel.addin.macroenabled.12":{"source":"iana","extensions":["xlam"]},"application/vnd.ms-excel.sheet.binary.macroenabled.12":{"source":"iana","extensions":["xlsb"]},"application/vnd.ms-excel.sheet.macroenabled.12":{"source":"iana","extensions":["xlsm"]},"application/vnd.ms-excel.template.macroenabled.12":{"source":"iana","extensions":["xltm"]},"application/vnd.ms-fontobject":{"source":"iana","compressible":true,"extensions":["eot"]},"application/vnd.ms-htmlhelp":{"source":"iana","extensions":["chm"]},"application/vnd.ms-ims":{"source":"iana","extensions":["ims"]},"application/vnd.ms-lrm":{"source":"iana","extensions":["lrm"]},"application/vnd.ms-office.activex+xml":{"source":"iana","compressible":true},"application/vnd.ms-officetheme":{"source":"iana","extensions":["thmx"]},"application/vnd.ms-opentype":{"source":"apache","compressible":true},"application/vnd.ms-outlook":{"compressible":false,"extensions":["msg"]},"application/vnd.ms-package.obfuscated-opentype":{"source":"apache"},"application/vnd.ms-pki.seccat":{"source":"apache","extensions":["cat"]},"application/vnd.ms-pki.stl":{"source":"apache","extensions":["stl"]},"application/vnd.ms-playready.initiator+xml":{"source":"iana","compressible":true},"application/vnd.ms-powerpoint":{"source":"iana","compressible":false,"extensions":["ppt","pps","pot"]},"application/vnd.ms-powerpoint.addin.macroenabled.12":{"source":"iana","extensions":["ppam"]},"application/vnd.ms-powerpoint.presentation.macroenabled.12":{"source":"iana","extensions":["pptm"]},"application/vnd.ms-powerpoint.slide.macroenabled.12":{"source":"iana","extensions":["sldm"]},"application/vnd.ms-powerpoint.slideshow.macroenabled.12":{"source":"iana","extensions":["ppsm"]},"application/vnd.ms-powerpoint.template.macroenabled.12":{"source":"iana","extensions":["potm"]},"application/vnd.ms-printdevicecapabilities+xml":{"source":"iana","compressible":true},"application/vnd.ms-printing.printticket+xml":{"source":"apache","compressible":true},"application/vnd.ms-printschematicket+xml":{"source":"iana","compressible":true},"application/vnd.ms-project":{"source":"iana","extensions":["mpp","mpt"]},"application/vnd.ms-tnef":{"source":"iana"},"application/vnd.ms-windows.devicepairing":{"source":"iana"},"application/vnd.ms-windows.nwprinting.oob":{"source":"iana"},"applic
ation/vnd.ms-windows.printerpairing":{"source":"iana"},"application/vnd.ms-windows.wsd.oob":{"source":"iana"},"application/vnd.ms-wmdrm.lic-chlg-req":{"source":"iana"},"application/vnd.ms-wmdrm.lic-resp":{"source":"iana"},"application/vnd.ms-wmdrm.meter-chlg-req":{"source":"iana"},"application/vnd.ms-wmdrm.meter-resp":{"source":"iana"},"application/vnd.ms-word.document.macroenabled.12":{"source":"iana","extensions":["docm"]},"application/vnd.ms-word.template.macroenabled.12":{"source":"iana","extensions":["dotm"]},"application/vnd.ms-works":{"source":"iana","extensions":["wps","wks","wcm","wdb"]},"application/vnd.ms-wpl":{"source":"iana","extensions":["wpl"]},"application/vnd.ms-xpsdocument":{"source":"iana","compressible":false,"extensions":["xps"]},"application/vnd.msa-disk-image":{"source":"iana"},"application/vnd.mseq":{"source":"iana","extensions":["mseq"]},"application/vnd.msign":{"source":"iana"},"application/vnd.multiad.creator":{"source":"iana"},"application/vnd.multiad.creator.cif":{"source":"iana"},"application/vnd.music-niff":{"source":"iana"},"application/vnd.musician":{"source":"iana","extensions":["mus"]},"application/vnd.muvee.style":{"source":"iana","extensions":["msty"]},"application/vnd.mynfc":{"source":"iana","extensions":["taglet"]},"application/vnd.nacamar.ybrid+json":{"source":"iana","compressible":true},"application/vnd.ncd.control":{"source":"iana"},"application/vnd.ncd.reference":{"source":"iana"},"application/vnd.nearst.inv+json":{"source":"iana","compressible":true},"application/vnd.nebumind.line":{"source":"iana"},"application/vnd.nervana":{"source":"iana"},"application/vnd.netfpx":{"source":"iana"},"application/vnd.neurolanguage.nlu":{"source":"iana","extensions":["nlu"]},"application/vnd.nimn":{"source":"iana"},"application/vnd.nintendo.nitro.rom":{"source":"iana"},"application/vnd.nintendo.snes.rom":{"source":"iana"},"application/vnd.nitf":{"source":"iana","extensions":["ntf","nitf"]},"application/vnd.noblenet-directory":{"source":"iana","extensions":["nnd"]},"application/vnd.noblenet-sealer":{"source":"iana","extensions":["nns"]},"application/vnd.noblenet-web":{"source":"iana","extensions":["nnw"]},"application/vnd.nokia.catalogs":{"source":"iana"},"application/vnd.nokia.conml+wbxml":{"source":"iana"},"application/vnd.nokia.conml+xml":{"source":"iana","compressible":true},"application/vnd.nokia.iptv.config+xml":{"source":"iana","compressible":true},"application/vnd.nokia.isds-radio-presets":{"source":"iana"},"application/vnd.nokia.landmark+wbxml":{"source":"iana"},"application/vnd.nokia.landmark+xml":{"source":"iana","compressible":true},"application/vnd.nokia.landmarkcollection+xml":{"source":"iana","compressible":true},"application/vnd.nokia.n-gage.ac+xml":{"source":"iana","compressible":true,"extensions":["ac"]},"application/vnd.nokia.n-gage.data":{"source":"iana","extensions":["ngdat"]},"application/vnd.nokia.n-gage.symbian.install":{"source":"iana","extensions":["n-gage"]},"application/vnd.nokia.ncd":{"source":"iana"},"application/vnd.nokia.pcd+wbxml":{"source":"iana"},"application/vnd.nokia.pcd+xml":{"source":"iana","compressible":true},"application/vnd.nokia.radio-preset":{"source":"iana","extensions":["rpst"]},"application/vnd.nokia.radio-presets":{"source":"iana","extensions":["rpss"]},"application/vnd.novadigm.edm":{"source":"iana","extensions":["edm"]},"application/vnd.novadigm.edx":{"source":"iana","extensions":["edx"]},"application/vnd.novadigm.ext":{"source":"iana","extensions":["ext"]},"application/vnd.ntt-local.content-share":{"source":"iana"
},"application/vnd.ntt-local.file-transfer":{"source":"iana"},"application/vnd.ntt-local.ogw_remote-access":{"source":"iana"},"application/vnd.ntt-local.sip-ta_remote":{"source":"iana"},"application/vnd.ntt-local.sip-ta_tcp_stream":{"source":"iana"},"application/vnd.oasis.opendocument.chart":{"source":"iana","extensions":["odc"]},"application/vnd.oasis.opendocument.chart-template":{"source":"iana","extensions":["otc"]},"application/vnd.oasis.opendocument.database":{"source":"iana","extensions":["odb"]},"application/vnd.oasis.opendocument.formula":{"source":"iana","extensions":["odf"]},"application/vnd.oasis.opendocument.formula-template":{"source":"iana","extensions":["odft"]},"application/vnd.oasis.opendocument.graphics":{"source":"iana","compressible":false,"extensions":["odg"]},"application/vnd.oasis.opendocument.graphics-template":{"source":"iana","extensions":["otg"]},"application/vnd.oasis.opendocument.image":{"source":"iana","extensions":["odi"]},"application/vnd.oasis.opendocument.image-template":{"source":"iana","extensions":["oti"]},"application/vnd.oasis.opendocument.presentation":{"source":"iana","compressible":false,"extensions":["odp"]},"application/vnd.oasis.opendocument.presentation-template":{"source":"iana","extensions":["otp"]},"application/vnd.oasis.opendocument.spreadsheet":{"source":"iana","compressible":false,"extensions":["ods"]},"application/vnd.oasis.opendocument.spreadsheet-template":{"source":"iana","extensions":["ots"]},"application/vnd.oasis.opendocument.text":{"source":"iana","compressible":false,"extensions":["odt"]},"application/vnd.oasis.opendocument.text-master":{"source":"iana","extensions":["odm"]},"application/vnd.oasis.opendocument.text-template":{"source":"iana","extensions":["ott"]},"application/vnd.oasis.opendocument.text-web":{"source":"iana","extensions":["oth"]},"application/vnd.obn":{"source":"iana"},"application/vnd.ocf+cbor":{"source":"iana"},"application/vnd.oci.image.manifest.v1+json":{"source":"iana","compressible":true},"application/vnd.oftn.l10n+json":{"source":"iana","compressible":true},"application/vnd.oipf.contentaccessdownload+xml":{"source":"iana","compressible":true},"application/vnd.oipf.contentaccessstreaming+xml":{"source":"iana","compressible":true},"application/vnd.oipf.cspg-hexbinary":{"source":"iana"},"application/vnd.oipf.dae.svg+xml":{"source":"iana","compressible":true},"application/vnd.oipf.dae.xhtml+xml":{"source":"iana","compressible":true},"application/vnd.oipf.mippvcontrolmessage+xml":{"source":"iana","compressible":true},"application/vnd.oipf.pae.gem":{"source":"iana"},"application/vnd.oipf.spdiscovery+xml":{"source":"iana","compressible":true},"application/vnd.oipf.spdlist+xml":{"source":"iana","compressible":true},"application/vnd.oipf.ueprofile+xml":{"source":"iana","compressible":true},"application/vnd.oipf.userprofile+xml":{"source":"iana","compressible":true},"application/vnd.olpc-sugar":{"source":"iana","extensions":["xo"]},"application/vnd.oma-scws-config":{"source":"iana"},"application/vnd.oma-scws-http-request":{"source":"iana"},"application/vnd.oma-scws-http-response":{"source":"iana"},"application/vnd.oma.bcast.associated-procedure-parameter+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.drm-trigger+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.imd+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.ltkm":{"source":"iana"},"application/vnd.oma.bcast.notification+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.provision
ingtrigger":{"source":"iana"},"application/vnd.oma.bcast.sgboot":{"source":"iana"},"application/vnd.oma.bcast.sgdd+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.sgdu":{"source":"iana"},"application/vnd.oma.bcast.simple-symbol-container":{"source":"iana"},"application/vnd.oma.bcast.smartcard-trigger+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.sprov+xml":{"source":"iana","compressible":true},"application/vnd.oma.bcast.stkm":{"source":"iana"},"application/vnd.oma.cab-address-book+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-feature-handler+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-pcc+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-subs-invite+xml":{"source":"iana","compressible":true},"application/vnd.oma.cab-user-prefs+xml":{"source":"iana","compressible":true},"application/vnd.oma.dcd":{"source":"iana"},"application/vnd.oma.dcdc":{"source":"iana"},"application/vnd.oma.dd2+xml":{"source":"iana","compressible":true,"extensions":["dd2"]},"application/vnd.oma.drm.risd+xml":{"source":"iana","compressible":true},"application/vnd.oma.group-usage-list+xml":{"source":"iana","compressible":true},"application/vnd.oma.lwm2m+cbor":{"source":"iana"},"application/vnd.oma.lwm2m+json":{"source":"iana","compressible":true},"application/vnd.oma.lwm2m+tlv":{"source":"iana"},"application/vnd.oma.pal+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.detailed-progress-report+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.final-report+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.groups+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.invocation-descriptor+xml":{"source":"iana","compressible":true},"application/vnd.oma.poc.optimized-progress-report+xml":{"source":"iana","compressible":true},"application/vnd.oma.push":{"source":"iana"},"application/vnd.oma.scidm.messages+xml":{"source":"iana","compressible":true},"application/vnd.oma.xcap-directory+xml":{"source":"iana","compressible":true},"application/vnd.omads-email+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/vnd.omads-file+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/vnd.omads-folder+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/vnd.omaloc-supl-init":{"source":"iana"},"application/vnd.onepager":{"source":"iana"},"application/vnd.onepagertamp":{"source":"iana"},"application/vnd.onepagertamx":{"source":"iana"},"application/vnd.onepagertat":{"source":"iana"},"application/vnd.onepagertatp":{"source":"iana"},"application/vnd.onepagertatx":{"source":"iana"},"application/vnd.openblox.game+xml":{"source":"iana","compressible":true,"extensions":["obgx"]},"application/vnd.openblox.game-binary":{"source":"iana"},"application/vnd.openeye.oeb":{"source":"iana"},"application/vnd.openofficeorg.extension":{"source":"apache","extensions":["oxt"]},"application/vnd.openstreetmap.data+xml":{"source":"iana","compressible":true,"extensions":["osm"]},"application/vnd.opentimestamps.ots":{"source":"iana"},"application/vnd.openxmlformats-officedocument.custom-properties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.customxmlproperties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawing+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.chart+xml":{"source":"iana","comp
ressible":true},"application/vnd.openxmlformats-officedocument.drawingml.chartshapes+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramcolors+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramdata+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramlayout+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.drawingml.diagramstyle+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.extended-properties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.commentauthors+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.comments+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.handoutmaster+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.notesmaster+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.notesslide+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.presentation":{"source":"iana","compressible":false,"extensions":["pptx"]},"application/vnd.openxmlformats-officedocument.presentationml.presentation.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.presprops+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slide":{"source":"iana","extensions":["sldx"]},"application/vnd.openxmlformats-officedocument.presentationml.slide+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slidelayout+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slidemaster+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slideshow":{"source":"iana","extensions":["ppsx"]},"application/vnd.openxmlformats-officedocument.presentationml.slideshow.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.slideupdateinfo+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.tablestyles+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.tags+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.template":{"source":"iana","extensions":["potx"]},"application/vnd.openxmlformats-officedocument.presentationml.template.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.presentationml.viewprops+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.calcchain+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.chartsheet+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.comments+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.connections+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.dia
logsheet+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.externallink+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcachedefinition+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.pivotcacherecords+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.pivottable+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.querytable+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.revisionheaders+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.revisionlog+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.sharedstrings+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet":{"source":"iana","compressible":false,"extensions":["xlsx"]},"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.sheetmetadata+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.styles+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.table+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.tablesinglecells+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.template":{"source":"iana","extensions":["xltx"]},"application/vnd.openxmlformats-officedocument.spreadsheetml.template.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.usernames+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.volatiledependencies+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.spreadsheetml.worksheet+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.theme+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.themeoverride+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.vmldrawing":{"source":"iana"},"application/vnd.openxmlformats-officedocument.wordprocessingml.comments+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.document":{"source":"iana","compressible":false,"extensions":["docx"]},"application/vnd.openxmlformats-officedocument.wordprocessingml.document.glossary+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.endnotes+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.fonttable+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.footer+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.footnotes+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats
-officedocument.wordprocessingml.numbering+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.settings+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.styles+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.template":{"source":"iana","extensions":["dotx"]},"application/vnd.openxmlformats-officedocument.wordprocessingml.template.main+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-officedocument.wordprocessingml.websettings+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-package.core-properties+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-package.digital-signature-xmlsignature+xml":{"source":"iana","compressible":true},"application/vnd.openxmlformats-package.relationships+xml":{"source":"iana","compressible":true},"application/vnd.oracle.resource+json":{"source":"iana","compressible":true},"application/vnd.orange.indata":{"source":"iana"},"application/vnd.osa.netdeploy":{"source":"iana"},"application/vnd.osgeo.mapguide.package":{"source":"iana","extensions":["mgp"]},"application/vnd.osgi.bundle":{"source":"iana"},"application/vnd.osgi.dp":{"source":"iana","extensions":["dp"]},"application/vnd.osgi.subsystem":{"source":"iana","extensions":["esa"]},"application/vnd.otps.ct-kip+xml":{"source":"iana","compressible":true},"application/vnd.oxli.countgraph":{"source":"iana"},"application/vnd.pagerduty+json":{"source":"iana","compressible":true},"application/vnd.palm":{"source":"iana","extensions":["pdb","pqa","oprc"]},"application/vnd.panoply":{"source":"iana"},"application/vnd.paos.xml":{"source":"iana"},"application/vnd.patentdive":{"source":"iana"},"application/vnd.patientecommsdoc":{"source":"iana"},"application/vnd.pawaafile":{"source":"iana","extensions":["paw"]},"application/vnd.pcos":{"source":"iana"},"application/vnd.pg.format":{"source":"iana","extensions":["str"]},"application/vnd.pg.osasli":{"source":"iana","extensions":["ei6"]},"application/vnd.piaccess.application-licence":{"source":"iana"},"application/vnd.picsel":{"source":"iana","extensions":["efif"]},"application/vnd.pmi.widget":{"source":"iana","extensions":["wg"]},"application/vnd.poc.group-advertisement+xml":{"source":"iana","compressible":true},"application/vnd.pocketlearn":{"source":"iana","extensions":["plf"]},"application/vnd.powerbuilder6":{"source":"iana","extensions":["pbd"]},"application/vnd.powerbuilder6-s":{"source":"iana"},"application/vnd.powerbuilder7":{"source":"iana"},"application/vnd.powerbuilder7-s":{"source":"iana"},"application/vnd.powerbuilder75":{"source":"iana"},"application/vnd.powerbuilder75-s":{"source":"iana"},"application/vnd.preminet":{"source":"iana"},"application/vnd.previewsystems.box":{"source":"iana","extensions":["box"]},"application/vnd.proteus.magazine":{"source":"iana","extensions":["mgz"]},"application/vnd.psfs":{"source":"iana"},"application/vnd.publishare-delta-tree":{"source":"iana","extensions":["qps"]},"application/vnd.pvi.ptid1":{"source":"iana","extensions":["ptid"]},"application/vnd.pwg-multiplexed":{"source":"iana"},"application/vnd.pwg-xhtml-print+xml":{"source":"iana","compressible":true},"application/vnd.qualcomm.brew-app-res":{"source":"iana"},"application/vnd.quarantainenet":{"source":"iana"},"application/vnd.quark.quarkxpress":{"source":"iana","extensions":["qxd","qxt","qwd","qwt","qxl","qxb"]},"application/vnd.quobject-quo
xdocument":{"source":"iana"},"application/vnd.radisys.moml+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-conf+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-conn+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-dialog+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-audit-stream+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-conf+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-base+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-fax-detect+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-fax-sendrecv+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-group+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-speech+xml":{"source":"iana","compressible":true},"application/vnd.radisys.msml-dialog-transform+xml":{"source":"iana","compressible":true},"application/vnd.rainstor.data":{"source":"iana"},"application/vnd.rapid":{"source":"iana"},"application/vnd.rar":{"source":"iana","extensions":["rar"]},"application/vnd.realvnc.bed":{"source":"iana","extensions":["bed"]},"application/vnd.recordare.musicxml":{"source":"iana","extensions":["mxl"]},"application/vnd.recordare.musicxml+xml":{"source":"iana","compressible":true,"extensions":["musicxml"]},"application/vnd.renlearn.rlprint":{"source":"iana"},"application/vnd.resilient.logic":{"source":"iana"},"application/vnd.restful+json":{"source":"iana","compressible":true},"application/vnd.rig.cryptonote":{"source":"iana","extensions":["cryptonote"]},"application/vnd.rim.cod":{"source":"apache","extensions":["cod"]},"application/vnd.rn-realmedia":{"source":"apache","extensions":["rm"]},"application/vnd.rn-realmedia-vbr":{"source":"apache","extensions":["rmvb"]},"application/vnd.route66.link66+xml":{"source":"iana","compressible":true,"extensions":["link66"]},"application/vnd.rs-274x":{"source":"iana"},"application/vnd.ruckus.download":{"source":"iana"},"application/vnd.s3sms":{"source":"iana"},"application/vnd.sailingtracker.track":{"source":"iana","extensions":["st"]},"application/vnd.sar":{"source":"iana"},"application/vnd.sbm.cid":{"source":"iana"},"application/vnd.sbm.mid2":{"source":"iana"},"application/vnd.scribus":{"source":"iana"},"application/vnd.sealed.3df":{"source":"iana"},"application/vnd.sealed.csf":{"source":"iana"},"application/vnd.sealed.doc":{"source":"iana"},"application/vnd.sealed.eml":{"source":"iana"},"application/vnd.sealed.mht":{"source":"iana"},"application/vnd.sealed.net":{"source":"iana"},"application/vnd.sealed.ppt":{"source":"iana"},"application/vnd.sealed.tiff":{"source":"iana"},"application/vnd.sealed.xls":{"source":"iana"},"application/vnd.sealedmedia.softseal.html":{"source":"iana"},"application/vnd.sealedmedia.softseal.pdf":{"source":"iana"},"application/vnd.seemail":{"source":"iana","extensions":["see"]},"application/vnd.seis+json":{"source":"iana","compressible":true},"application/vnd.sema":{"source":"iana","extensions":["sema"]},"application/vnd.semd":{"source":"iana","extensions":["semd"]},"application/vnd.semf":{"source":"iana","extensions":["semf"]},"application/vnd.shade-save-file":{"source":"i
ana"},"application/vnd.shana.informed.formdata":{"source":"iana","extensions":["ifm"]},"application/vnd.shana.informed.formtemplate":{"source":"iana","extensions":["itp"]},"application/vnd.shana.informed.interchange":{"source":"iana","extensions":["iif"]},"application/vnd.shana.informed.package":{"source":"iana","extensions":["ipk"]},"application/vnd.shootproof+json":{"source":"iana","compressible":true},"application/vnd.shopkick+json":{"source":"iana","compressible":true},"application/vnd.shp":{"source":"iana"},"application/vnd.shx":{"source":"iana"},"application/vnd.sigrok.session":{"source":"iana"},"application/vnd.simtech-mindmapper":{"source":"iana","extensions":["twd","twds"]},"application/vnd.siren+json":{"source":"iana","compressible":true},"application/vnd.smaf":{"source":"iana","extensions":["mmf"]},"application/vnd.smart.notebook":{"source":"iana"},"application/vnd.smart.teacher":{"source":"iana","extensions":["teacher"]},"application/vnd.snesdev-page-table":{"source":"iana"},"application/vnd.software602.filler.form+xml":{"source":"iana","compressible":true,"extensions":["fo"]},"application/vnd.software602.filler.form-xml-zip":{"source":"iana"},"application/vnd.solent.sdkm+xml":{"source":"iana","compressible":true,"extensions":["sdkm","sdkd"]},"application/vnd.spotfire.dxp":{"source":"iana","extensions":["dxp"]},"application/vnd.spotfire.sfs":{"source":"iana","extensions":["sfs"]},"application/vnd.sqlite3":{"source":"iana"},"application/vnd.sss-cod":{"source":"iana"},"application/vnd.sss-dtf":{"source":"iana"},"application/vnd.sss-ntf":{"source":"iana"},"application/vnd.stardivision.calc":{"source":"apache","extensions":["sdc"]},"application/vnd.stardivision.draw":{"source":"apache","extensions":["sda"]},"application/vnd.stardivision.impress":{"source":"apache","extensions":["sdd"]},"application/vnd.stardivision.math":{"source":"apache","extensions":["smf"]},"application/vnd.stardivision.writer":{"source":"apache","extensions":["sdw","vor"]},"application/vnd.stardivision.writer-global":{"source":"apache","extensions":["sgl"]},"application/vnd.stepmania.package":{"source":"iana","extensions":["smzip"]},"application/vnd.stepmania.stepchart":{"source":"iana","extensions":["sm"]},"application/vnd.street-stream":{"source":"iana"},"application/vnd.sun.wadl+xml":{"source":"iana","compressible":true,"extensions":["wadl"]},"application/vnd.sun.xml.calc":{"source":"apache","extensions":["sxc"]},"application/vnd.sun.xml.calc.template":{"source":"apache","extensions":["stc"]},"application/vnd.sun.xml.draw":{"source":"apache","extensions":["sxd"]},"application/vnd.sun.xml.draw.template":{"source":"apache","extensions":["std"]},"application/vnd.sun.xml.impress":{"source":"apache","extensions":["sxi"]},"application/vnd.sun.xml.impress.template":{"source":"apache","extensions":["sti"]},"application/vnd.sun.xml.math":{"source":"apache","extensions":["sxm"]},"application/vnd.sun.xml.writer":{"source":"apache","extensions":["sxw"]},"application/vnd.sun.xml.writer.global":{"source":"apache","extensions":["sxg"]},"application/vnd.sun.xml.writer.template":{"source":"apache","extensions":["stw"]},"application/vnd.sus-calendar":{"source":"iana","extensions":["sus","susp"]},"application/vnd.svd":{"source":"iana","extensions":["svd"]},"application/vnd.swiftview-ics":{"source":"iana"},"application/vnd.sycle+xml":{"source":"iana","compressible":true},"application/vnd.syft+json":{"source":"iana","compressible":true},"application/vnd.symbian.install":{"source":"apache","extensions":["sis","sisx"]},"applicatio
n/vnd.syncml+xml":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["xsm"]},"application/vnd.syncml.dm+wbxml":{"source":"iana","charset":"UTF-8","extensions":["bdm"]},"application/vnd.syncml.dm+xml":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["xdm"]},"application/vnd.syncml.dm.notification":{"source":"iana"},"application/vnd.syncml.dmddf+wbxml":{"source":"iana"},"application/vnd.syncml.dmddf+xml":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["ddf"]},"application/vnd.syncml.dmtnds+wbxml":{"source":"iana"},"application/vnd.syncml.dmtnds+xml":{"source":"iana","charset":"UTF-8","compressible":true},"application/vnd.syncml.ds.notification":{"source":"iana"},"application/vnd.tableschema+json":{"source":"iana","compressible":true},"application/vnd.tao.intent-module-archive":{"source":"iana","extensions":["tao"]},"application/vnd.tcpdump.pcap":{"source":"iana","extensions":["pcap","cap","dmp"]},"application/vnd.think-cell.ppttc+json":{"source":"iana","compressible":true},"application/vnd.tmd.mediaflex.api+xml":{"source":"iana","compressible":true},"application/vnd.tml":{"source":"iana"},"application/vnd.tmobile-livetv":{"source":"iana","extensions":["tmo"]},"application/vnd.tri.onesource":{"source":"iana"},"application/vnd.trid.tpt":{"source":"iana","extensions":["tpt"]},"application/vnd.triscape.mxs":{"source":"iana","extensions":["mxs"]},"application/vnd.trueapp":{"source":"iana","extensions":["tra"]},"application/vnd.truedoc":{"source":"iana"},"application/vnd.ubisoft.webplayer":{"source":"iana"},"application/vnd.ufdl":{"source":"iana","extensions":["ufd","ufdl"]},"application/vnd.uiq.theme":{"source":"iana","extensions":["utz"]},"application/vnd.umajin":{"source":"iana","extensions":["umj"]},"application/vnd.unity":{"source":"iana","extensions":["unityweb"]},"application/vnd.uoml+xml":{"source":"iana","compressible":true,"extensions":["uoml"]},"application/vnd.uplanet.alert":{"source":"iana"},"application/vnd.uplanet.alert-wbxml":{"source":"iana"},"application/vnd.uplanet.bearer-choice":{"source":"iana"},"application/vnd.uplanet.bearer-choice-wbxml":{"source":"iana"},"application/vnd.uplanet.cacheop":{"source":"iana"},"application/vnd.uplanet.cacheop-wbxml":{"source":"iana"},"application/vnd.uplanet.channel":{"source":"iana"},"application/vnd.uplanet.channel-wbxml":{"source":"iana"},"application/vnd.uplanet.list":{"source":"iana"},"application/vnd.uplanet.list-wbxml":{"source":"iana"},"application/vnd.uplanet.listcmd":{"source":"iana"},"application/vnd.uplanet.listcmd-wbxml":{"source":"iana"},"application/vnd.uplanet.signal":{"source":"iana"},"application/vnd.uri-map":{"source":"iana"},"application/vnd.valve.source.material":{"source":"iana"},"application/vnd.vcx":{"source":"iana","extensions":["vcx"]},"application/vnd.vd-study":{"source":"iana"},"application/vnd.vectorworks":{"source":"iana"},"application/vnd.vel+json":{"source":"iana","compressible":true},"application/vnd.verimatrix.vcas":{"source":"iana"},"application/vnd.veritone.aion+json":{"source":"iana","compressible":true},"application/vnd.veryant.thin":{"source":"iana"},"application/vnd.ves.encrypted":{"source":"iana"},"application/vnd.vidsoft.vidconference":{"source":"iana"},"application/vnd.visio":{"source":"iana","extensions":["vsd","vst","vss","vsw"]},"application/vnd.visionary":{"source":"iana","extensions":["vis"]},"application/vnd.vividence.scriptfile":{"source":"iana"},"application/vnd.vsf":{"source":"iana","extensions":["vsf"]},"application/vnd.wap.sic":{"sour
ce":"iana"},"application/vnd.wap.slc":{"source":"iana"},"application/vnd.wap.wbxml":{"source":"iana","charset":"UTF-8","extensions":["wbxml"]},"application/vnd.wap.wmlc":{"source":"iana","extensions":["wmlc"]},"application/vnd.wap.wmlscriptc":{"source":"iana","extensions":["wmlsc"]},"application/vnd.webturbo":{"source":"iana","extensions":["wtb"]},"application/vnd.wfa.dpp":{"source":"iana"},"application/vnd.wfa.p2p":{"source":"iana"},"application/vnd.wfa.wsc":{"source":"iana"},"application/vnd.windows.devicepairing":{"source":"iana"},"application/vnd.wmc":{"source":"iana"},"application/vnd.wmf.bootstrap":{"source":"iana"},"application/vnd.wolfram.mathematica":{"source":"iana"},"application/vnd.wolfram.mathematica.package":{"source":"iana"},"application/vnd.wolfram.player":{"source":"iana","extensions":["nbp"]},"application/vnd.wordperfect":{"source":"iana","extensions":["wpd"]},"application/vnd.wqd":{"source":"iana","extensions":["wqd"]},"application/vnd.wrq-hp3000-labelled":{"source":"iana"},"application/vnd.wt.stf":{"source":"iana","extensions":["stf"]},"application/vnd.wv.csp+wbxml":{"source":"iana"},"application/vnd.wv.csp+xml":{"source":"iana","compressible":true},"application/vnd.wv.ssp+xml":{"source":"iana","compressible":true},"application/vnd.xacml+json":{"source":"iana","compressible":true},"application/vnd.xara":{"source":"iana","extensions":["xar"]},"application/vnd.xfdl":{"source":"iana","extensions":["xfdl"]},"application/vnd.xfdl.webform":{"source":"iana"},"application/vnd.xmi+xml":{"source":"iana","compressible":true},"application/vnd.xmpie.cpkg":{"source":"iana"},"application/vnd.xmpie.dpkg":{"source":"iana"},"application/vnd.xmpie.plan":{"source":"iana"},"application/vnd.xmpie.ppkg":{"source":"iana"},"application/vnd.xmpie.xlim":{"source":"iana"},"application/vnd.yamaha.hv-dic":{"source":"iana","extensions":["hvd"]},"application/vnd.yamaha.hv-script":{"source":"iana","extensions":["hvs"]},"application/vnd.yamaha.hv-voice":{"source":"iana","extensions":["hvp"]},"application/vnd.yamaha.openscoreformat":{"source":"iana","extensions":["osf"]},"application/vnd.yamaha.openscoreformat.osfpvg+xml":{"source":"iana","compressible":true,"extensions":["osfpvg"]},"application/vnd.yamaha.remote-setup":{"source":"iana"},"application/vnd.yamaha.smaf-audio":{"source":"iana","extensions":["saf"]},"application/vnd.yamaha.smaf-phrase":{"source":"iana","extensions":["spf"]},"application/vnd.yamaha.through-ngn":{"source":"iana"},"application/vnd.yamaha.tunnel-udpencap":{"source":"iana"},"application/vnd.yaoweme":{"source":"iana"},"application/vnd.yellowriver-custom-menu":{"source":"iana","extensions":["cmp"]},"application/vnd.youtube.yt":{"source":"iana"},"application/vnd.zul":{"source":"iana","extensions":["zir","zirz"]},"application/vnd.zzazz.deck+xml":{"source":"iana","compressible":true,"extensions":["zaz"]},"application/voicexml+xml":{"source":"iana","compressible":true,"extensions":["vxml"]},"application/voucher-cms+json":{"source":"iana","compressible":true},"application/vq-rtcpxr":{"source":"iana"},"application/wasm":{"source":"iana","compressible":true,"extensions":["wasm"]},"application/watcherinfo+xml":{"source":"iana","compressible":true,"extensions":["wif"]},"application/webpush-options+json":{"source":"iana","compressible":true},"application/whoispp-query":{"source":"iana"},"application/whoispp-response":{"source":"iana"},"application/widget":{"source":"iana","extensions":["wgt"]},"application/winhlp":{"source":"apache","extensions":["hlp"]},"application/wita":{"source":"iana"},"a
pplication/wordperfect5.1":{"source":"iana"},"application/wsdl+xml":{"source":"iana","compressible":true,"extensions":["wsdl"]},"application/wspolicy+xml":{"source":"iana","compressible":true,"extensions":["wspolicy"]},"application/x-7z-compressed":{"source":"apache","compressible":false,"extensions":["7z"]},"application/x-abiword":{"source":"apache","extensions":["abw"]},"application/x-ace-compressed":{"source":"apache","extensions":["ace"]},"application/x-amf":{"source":"apache"},"application/x-apple-diskimage":{"source":"apache","extensions":["dmg"]},"application/x-arj":{"compressible":false,"extensions":["arj"]},"application/x-authorware-bin":{"source":"apache","extensions":["aab","x32","u32","vox"]},"application/x-authorware-map":{"source":"apache","extensions":["aam"]},"application/x-authorware-seg":{"source":"apache","extensions":["aas"]},"application/x-bcpio":{"source":"apache","extensions":["bcpio"]},"application/x-bdoc":{"compressible":false,"extensions":["bdoc"]},"application/x-bittorrent":{"source":"apache","extensions":["torrent"]},"application/x-blorb":{"source":"apache","extensions":["blb","blorb"]},"application/x-bzip":{"source":"apache","compressible":false,"extensions":["bz"]},"application/x-bzip2":{"source":"apache","compressible":false,"extensions":["bz2","boz"]},"application/x-cbr":{"source":"apache","extensions":["cbr","cba","cbt","cbz","cb7"]},"application/x-cdlink":{"source":"apache","extensions":["vcd"]},"application/x-cfs-compressed":{"source":"apache","extensions":["cfs"]},"application/x-chat":{"source":"apache","extensions":["chat"]},"application/x-chess-pgn":{"source":"apache","extensions":["pgn"]},"application/x-chrome-extension":{"extensions":["crx"]},"application/x-cocoa":{"source":"nginx","extensions":["cco"]},"application/x-compress":{"source":"apache"},"application/x-conference":{"source":"apache","extensions":["nsc"]},"application/x-cpio":{"source":"apache","extensions":["cpio"]},"application/x-csh":{"source":"apache","extensions":["csh"]},"application/x-deb":{"compressible":false},"application/x-debian-package":{"source":"apache","extensions":["deb","udeb"]},"application/x-dgc-compressed":{"source":"apache","extensions":["dgc"]},"application/x-director":{"source":"apache","extensions":["dir","dcr","dxr","cst","cct","cxt","w3d","fgd","swa"]},"application/x-doom":{"source":"apache","extensions":["wad"]},"application/x-dtbncx+xml":{"source":"apache","compressible":true,"extensions":["ncx"]},"application/x-dtbook+xml":{"source":"apache","compressible":true,"extensions":["dtb"]},"application/x-dtbresource+xml":{"source":"apache","compressible":true,"extensions":["res"]},"application/x-dvi":{"source":"apache","compressible":false,"extensions":["dvi"]},"application/x-envoy":{"source":"apache","extensions":["evy"]},"application/x-eva":{"source":"apache","extensions":["eva"]},"application/x-font-bdf":{"source":"apache","extensions":["bdf"]},"application/x-font-dos":{"source":"apache"},"application/x-font-framemaker":{"source":"apache"},"application/x-font-ghostscript":{"source":"apache","extensions":["gsf"]},"application/x-font-libgrx":{"source":"apache"},"application/x-font-linux-psf":{"source":"apache","extensions":["psf"]},"application/x-font-pcf":{"source":"apache","extensions":["pcf"]},"application/x-font-snf":{"source":"apache","extensions":["snf"]},"application/x-font-speedo":{"source":"apache"},"application/x-font-sunos-news":{"source":"apache"},"application/x-font-type1":{"source":"apache","extensions":["pfa","pfb","pfm","afm"]},"application/x-font-vfont
":{"source":"apache"},"application/x-freearc":{"source":"apache","extensions":["arc"]},"application/x-futuresplash":{"source":"apache","extensions":["spl"]},"application/x-gca-compressed":{"source":"apache","extensions":["gca"]},"application/x-glulx":{"source":"apache","extensions":["ulx"]},"application/x-gnumeric":{"source":"apache","extensions":["gnumeric"]},"application/x-gramps-xml":{"source":"apache","extensions":["gramps"]},"application/x-gtar":{"source":"apache","extensions":["gtar"]},"application/x-gzip":{"source":"apache"},"application/x-hdf":{"source":"apache","extensions":["hdf"]},"application/x-httpd-php":{"compressible":true,"extensions":["php"]},"application/x-install-instructions":{"source":"apache","extensions":["install"]},"application/x-iso9660-image":{"source":"apache","extensions":["iso"]},"application/x-iwork-keynote-sffkey":{"extensions":["key"]},"application/x-iwork-numbers-sffnumbers":{"extensions":["numbers"]},"application/x-iwork-pages-sffpages":{"extensions":["pages"]},"application/x-java-archive-diff":{"source":"nginx","extensions":["jardiff"]},"application/x-java-jnlp-file":{"source":"apache","compressible":false,"extensions":["jnlp"]},"application/x-javascript":{"compressible":true},"application/x-keepass2":{"extensions":["kdbx"]},"application/x-latex":{"source":"apache","compressible":false,"extensions":["latex"]},"application/x-lua-bytecode":{"extensions":["luac"]},"application/x-lzh-compressed":{"source":"apache","extensions":["lzh","lha"]},"application/x-makeself":{"source":"nginx","extensions":["run"]},"application/x-mie":{"source":"apache","extensions":["mie"]},"application/x-mobipocket-ebook":{"source":"apache","extensions":["prc","mobi"]},"application/x-mpegurl":{"compressible":false},"application/x-ms-application":{"source":"apache","extensions":["application"]},"application/x-ms-shortcut":{"source":"apache","extensions":["lnk"]},"application/x-ms-wmd":{"source":"apache","extensions":["wmd"]},"application/x-ms-wmz":{"source":"apache","extensions":["wmz"]},"application/x-ms-xbap":{"source":"apache","extensions":["xbap"]},"application/x-msaccess":{"source":"apache","extensions":["mdb"]},"application/x-msbinder":{"source":"apache","extensions":["obd"]},"application/x-mscardfile":{"source":"apache","extensions":["crd"]},"application/x-msclip":{"source":"apache","extensions":["clp"]},"application/x-msdos-program":{"extensions":["exe"]},"application/x-msdownload":{"source":"apache","extensions":["exe","dll","com","bat","msi"]},"application/x-msmediaview":{"source":"apache","extensions":["mvb","m13","m14"]},"application/x-msmetafile":{"source":"apache","extensions":["wmf","wmz","emf","emz"]},"application/x-msmoney":{"source":"apache","extensions":["mny"]},"application/x-mspublisher":{"source":"apache","extensions":["pub"]},"application/x-msschedule":{"source":"apache","extensions":["scd"]},"application/x-msterminal":{"source":"apache","extensions":["trm"]},"application/x-mswrite":{"source":"apache","extensions":["wri"]},"application/x-netcdf":{"source":"apache","extensions":["nc","cdf"]},"application/x-ns-proxy-autoconfig":{"compressible":true,"extensions":["pac"]},"application/x-nzb":{"source":"apache","extensions":["nzb"]},"application/x-perl":{"source":"nginx","extensions":["pl","pm"]},"application/x-pilot":{"source":"nginx","extensions":["prc","pdb"]},"application/x-pkcs12":{"source":"apache","compressible":false,"extensions":["p12","pfx"]},"application/x-pkcs7-certificates":{"source":"apache","extensions":["p7b","spc"]},"application/x-pkcs7-certreqresp":
{"source":"apache","extensions":["p7r"]},"application/x-pki-message":{"source":"iana"},"application/x-rar-compressed":{"source":"apache","compressible":false,"extensions":["rar"]},"application/x-redhat-package-manager":{"source":"nginx","extensions":["rpm"]},"application/x-research-info-systems":{"source":"apache","extensions":["ris"]},"application/x-sea":{"source":"nginx","extensions":["sea"]},"application/x-sh":{"source":"apache","compressible":true,"extensions":["sh"]},"application/x-shar":{"source":"apache","extensions":["shar"]},"application/x-shockwave-flash":{"source":"apache","compressible":false,"extensions":["swf"]},"application/x-silverlight-app":{"source":"apache","extensions":["xap"]},"application/x-sql":{"source":"apache","extensions":["sql"]},"application/x-stuffit":{"source":"apache","compressible":false,"extensions":["sit"]},"application/x-stuffitx":{"source":"apache","extensions":["sitx"]},"application/x-subrip":{"source":"apache","extensions":["srt"]},"application/x-sv4cpio":{"source":"apache","extensions":["sv4cpio"]},"application/x-sv4crc":{"source":"apache","extensions":["sv4crc"]},"application/x-t3vm-image":{"source":"apache","extensions":["t3"]},"application/x-tads":{"source":"apache","extensions":["gam"]},"application/x-tar":{"source":"apache","compressible":true,"extensions":["tar"]},"application/x-tcl":{"source":"apache","extensions":["tcl","tk"]},"application/x-tex":{"source":"apache","extensions":["tex"]},"application/x-tex-tfm":{"source":"apache","extensions":["tfm"]},"application/x-texinfo":{"source":"apache","extensions":["texinfo","texi"]},"application/x-tgif":{"source":"apache","extensions":["obj"]},"application/x-ustar":{"source":"apache","extensions":["ustar"]},"application/x-virtualbox-hdd":{"compressible":true,"extensions":["hdd"]},"application/x-virtualbox-ova":{"compressible":true,"extensions":["ova"]},"application/x-virtualbox-ovf":{"compressible":true,"extensions":["ovf"]},"application/x-virtualbox-vbox":{"compressible":true,"extensions":["vbox"]},"application/x-virtualbox-vbox-extpack":{"compressible":false,"extensions":["vbox-extpack"]},"application/x-virtualbox-vdi":{"compressible":true,"extensions":["vdi"]},"application/x-virtualbox-vhd":{"compressible":true,"extensions":["vhd"]},"application/x-virtualbox-vmdk":{"compressible":true,"extensions":["vmdk"]},"application/x-wais-source":{"source":"apache","extensions":["src"]},"application/x-web-app-manifest+json":{"compressible":true,"extensions":["webapp"]},"application/x-www-form-urlencoded":{"source":"iana","compressible":true},"application/x-x509-ca-cert":{"source":"iana","extensions":["der","crt","pem"]},"application/x-x509-ca-ra-cert":{"source":"iana"},"application/x-x509-next-ca-cert":{"source":"iana"},"application/x-xfig":{"source":"apache","extensions":["fig"]},"application/x-xliff+xml":{"source":"apache","compressible":true,"extensions":["xlf"]},"application/x-xpinstall":{"source":"apache","compressible":false,"extensions":["xpi"]},"application/x-xz":{"source":"apache","extensions":["xz"]},"application/x-zmachine":{"source":"apache","extensions":["z1","z2","z3","z4","z5","z6","z7","z8"]},"application/x400-bp":{"source":"iana"},"application/xacml+xml":{"source":"iana","compressible":true},"application/xaml+xml":{"source":"apache","compressible":true,"extensions":["xaml"]},"application/xcap-att+xml":{"source":"iana","compressible":true,"extensions":["xav"]},"application/xcap-caps+xml":{"source":"iana","compressible":true,"extensions":["xca"]},"application/xcap-diff+xml":{"source":"iana","com
pressible":true,"extensions":["xdf"]},"application/xcap-el+xml":{"source":"iana","compressible":true,"extensions":["xel"]},"application/xcap-error+xml":{"source":"iana","compressible":true},"application/xcap-ns+xml":{"source":"iana","compressible":true,"extensions":["xns"]},"application/xcon-conference-info+xml":{"source":"iana","compressible":true},"application/xcon-conference-info-diff+xml":{"source":"iana","compressible":true},"application/xenc+xml":{"source":"iana","compressible":true,"extensions":["xenc"]},"application/xhtml+xml":{"source":"iana","compressible":true,"extensions":["xhtml","xht"]},"application/xhtml-voice+xml":{"source":"apache","compressible":true},"application/xliff+xml":{"source":"iana","compressible":true,"extensions":["xlf"]},"application/xml":{"source":"iana","compressible":true,"extensions":["xml","xsl","xsd","rng"]},"application/xml-dtd":{"source":"iana","compressible":true,"extensions":["dtd"]},"application/xml-external-parsed-entity":{"source":"iana"},"application/xml-patch+xml":{"source":"iana","compressible":true},"application/xmpp+xml":{"source":"iana","compressible":true},"application/xop+xml":{"source":"iana","compressible":true,"extensions":["xop"]},"application/xproc+xml":{"source":"apache","compressible":true,"extensions":["xpl"]},"application/xslt+xml":{"source":"iana","compressible":true,"extensions":["xsl","xslt"]},"application/xspf+xml":{"source":"apache","compressible":true,"extensions":["xspf"]},"application/xv+xml":{"source":"iana","compressible":true,"extensions":["mxml","xhvml","xvml","xvm"]},"application/yang":{"source":"iana","extensions":["yang"]},"application/yang-data+json":{"source":"iana","compressible":true},"application/yang-data+xml":{"source":"iana","compressible":true},"application/yang-patch+json":{"source":"iana","compressible":true},"application/yang-patch+xml":{"source":"iana","compressible":true},"application/yin+xml":{"source":"iana","compressible":true,"extensions":["yin"]},"application/zip":{"source":"iana","compressible":false,"extensions":["zip"]},"application/zlib":{"source":"iana"},"application/zstd":{"source":"iana"},"audio/1d-interleaved-parityfec":{"source":"iana"},"audio/32kadpcm":{"source":"iana"},"audio/3gpp":{"source":"iana","compressible":false,"extensions":["3gpp"]},"audio/3gpp2":{"source":"iana"},"audio/aac":{"source":"iana"},"audio/ac3":{"source":"iana"},"audio/adpcm":{"source":"apache","extensions":["adp"]},"audio/amr":{"source":"iana","extensions":["amr"]},"audio/amr-wb":{"source":"iana"},"audio/amr-wb+":{"source":"iana"},"audio/aptx":{"source":"iana"},"audio/asc":{"source":"iana"},"audio/atrac-advanced-lossless":{"source":"iana"},"audio/atrac-x":{"source":"iana"},"audio/atrac3":{"source":"iana"},"audio/basic":{"source":"iana","compressible":false,"extensions":["au","snd"]},"audio/bv16":{"source":"iana"},"audio/bv32":{"source":"iana"},"audio/clearmode":{"source":"iana"},"audio/cn":{"source":"iana"},"audio/dat12":{"source":"iana"},"audio/dls":{"source":"iana"},"audio/dsr-es201108":{"source":"iana"},"audio/dsr-es202050":{"source":"iana"},"audio/dsr-es202211":{"source":"iana"},"audio/dsr-es202212":{"source":"iana"},"audio/dv":{"source":"iana"},"audio/dvi4":{"source":"iana"},"audio/eac3":{"source":"iana"},"audio/encaprtp":{"source":"iana"},"audio/evrc":{"source":"iana"},"audio/evrc-qcp":{"source":"iana"},"audio/evrc0":{"source":"iana"},"audio/evrc1":{"source":"iana"},"audio/evrcb":{"source":"iana"},"audio/evrcb0":{"source":"iana"},"audio/evrcb1":{"source":"iana"},"audio/evrcnw":{"source":"iana"},"audio/evrcnw0":
{"source":"iana"},"audio/evrcnw1":{"source":"iana"},"audio/evrcwb":{"source":"iana"},"audio/evrcwb0":{"source":"iana"},"audio/evrcwb1":{"source":"iana"},"audio/evs":{"source":"iana"},"audio/flexfec":{"source":"iana"},"audio/fwdred":{"source":"iana"},"audio/g711-0":{"source":"iana"},"audio/g719":{"source":"iana"},"audio/g722":{"source":"iana"},"audio/g7221":{"source":"iana"},"audio/g723":{"source":"iana"},"audio/g726-16":{"source":"iana"},"audio/g726-24":{"source":"iana"},"audio/g726-32":{"source":"iana"},"audio/g726-40":{"source":"iana"},"audio/g728":{"source":"iana"},"audio/g729":{"source":"iana"},"audio/g7291":{"source":"iana"},"audio/g729d":{"source":"iana"},"audio/g729e":{"source":"iana"},"audio/gsm":{"source":"iana"},"audio/gsm-efr":{"source":"iana"},"audio/gsm-hr-08":{"source":"iana"},"audio/ilbc":{"source":"iana"},"audio/ip-mr_v2.5":{"source":"iana"},"audio/isac":{"source":"apache"},"audio/l16":{"source":"iana"},"audio/l20":{"source":"iana"},"audio/l24":{"source":"iana","compressible":false},"audio/l8":{"source":"iana"},"audio/lpc":{"source":"iana"},"audio/melp":{"source":"iana"},"audio/melp1200":{"source":"iana"},"audio/melp2400":{"source":"iana"},"audio/melp600":{"source":"iana"},"audio/mhas":{"source":"iana"},"audio/midi":{"source":"apache","extensions":["mid","midi","kar","rmi"]},"audio/mobile-xmf":{"source":"iana","extensions":["mxmf"]},"audio/mp3":{"compressible":false,"extensions":["mp3"]},"audio/mp4":{"source":"iana","compressible":false,"extensions":["m4a","mp4a"]},"audio/mp4a-latm":{"source":"iana"},"audio/mpa":{"source":"iana"},"audio/mpa-robust":{"source":"iana"},"audio/mpeg":{"source":"iana","compressible":false,"extensions":["mpga","mp2","mp2a","mp3","m2a","m3a"]},"audio/mpeg4-generic":{"source":"iana"},"audio/musepack":{"source":"apache"},"audio/ogg":{"source":"iana","compressible":false,"extensions":["oga","ogg","spx","opus"]},"audio/opus":{"source":"iana"},"audio/parityfec":{"source":"iana"},"audio/pcma":{"source":"iana"},"audio/pcma-wb":{"source":"iana"},"audio/pcmu":{"source":"iana"},"audio/pcmu-wb":{"source":"iana"},"audio/prs.sid":{"source":"iana"},"audio/qcelp":{"source":"iana"},"audio/raptorfec":{"source":"iana"},"audio/red":{"source":"iana"},"audio/rtp-enc-aescm128":{"source":"iana"},"audio/rtp-midi":{"source":"iana"},"audio/rtploopback":{"source":"iana"},"audio/rtx":{"source":"iana"},"audio/s3m":{"source":"apache","extensions":["s3m"]},"audio/scip":{"source":"iana"},"audio/silk":{"source":"apache","extensions":["sil"]},"audio/smv":{"source":"iana"},"audio/smv-qcp":{"source":"iana"},"audio/smv0":{"source":"iana"},"audio/sofa":{"source":"iana"},"audio/sp-midi":{"source":"iana"},"audio/speex":{"source":"iana"},"audio/t140c":{"source":"iana"},"audio/t38":{"source":"iana"},"audio/telephone-event":{"source":"iana"},"audio/tetra_acelp":{"source":"iana"},"audio/tetra_acelp_bb":{"source":"iana"},"audio/tone":{"source":"iana"},"audio/tsvcis":{"source":"iana"},"audio/uemclip":{"source":"iana"},"audio/ulpfec":{"source":"iana"},"audio/usac":{"source":"iana"},"audio/vdvi":{"source":"iana"},"audio/vmr-wb":{"source":"iana"},"audio/vnd.3gpp.iufp":{"source":"iana"},"audio/vnd.4sb":{"source":"iana"},"audio/vnd.audiokoz":{"source":"iana"},"audio/vnd.celp":{"source":"iana"},"audio/vnd.cisco.nse":{"source":"iana"},"audio/vnd.cmles.radio-events":{"source":"iana"},"audio/vnd.cns.anp1":{"source":"iana"},"audio/vnd.cns.inf1":{"source":"iana"},"audio/vnd.dece.audio":{"source":"iana","extensions":["uva","uvva"]},"audio/vnd.digital-winds":{"source":"iana","extensions":["eol"]},"audio/vnd
.dlna.adts":{"source":"iana"},"audio/vnd.dolby.heaac.1":{"source":"iana"},"audio/vnd.dolby.heaac.2":{"source":"iana"},"audio/vnd.dolby.mlp":{"source":"iana"},"audio/vnd.dolby.mps":{"source":"iana"},"audio/vnd.dolby.pl2":{"source":"iana"},"audio/vnd.dolby.pl2x":{"source":"iana"},"audio/vnd.dolby.pl2z":{"source":"iana"},"audio/vnd.dolby.pulse.1":{"source":"iana"},"audio/vnd.dra":{"source":"iana","extensions":["dra"]},"audio/vnd.dts":{"source":"iana","extensions":["dts"]},"audio/vnd.dts.hd":{"source":"iana","extensions":["dtshd"]},"audio/vnd.dts.uhd":{"source":"iana"},"audio/vnd.dvb.file":{"source":"iana"},"audio/vnd.everad.plj":{"source":"iana"},"audio/vnd.hns.audio":{"source":"iana"},"audio/vnd.lucent.voice":{"source":"iana","extensions":["lvp"]},"audio/vnd.ms-playready.media.pya":{"source":"iana","extensions":["pya"]},"audio/vnd.nokia.mobile-xmf":{"source":"iana"},"audio/vnd.nortel.vbk":{"source":"iana"},"audio/vnd.nuera.ecelp4800":{"source":"iana","extensions":["ecelp4800"]},"audio/vnd.nuera.ecelp7470":{"source":"iana","extensions":["ecelp7470"]},"audio/vnd.nuera.ecelp9600":{"source":"iana","extensions":["ecelp9600"]},"audio/vnd.octel.sbc":{"source":"iana"},"audio/vnd.presonus.multitrack":{"source":"iana"},"audio/vnd.qcelp":{"source":"iana"},"audio/vnd.rhetorex.32kadpcm":{"source":"iana"},"audio/vnd.rip":{"source":"iana","extensions":["rip"]},"audio/vnd.rn-realaudio":{"compressible":false},"audio/vnd.sealedmedia.softseal.mpeg":{"source":"iana"},"audio/vnd.vmx.cvsd":{"source":"iana"},"audio/vnd.wave":{"compressible":false},"audio/vorbis":{"source":"iana","compressible":false},"audio/vorbis-config":{"source":"iana"},"audio/wav":{"compressible":false,"extensions":["wav"]},"audio/wave":{"compressible":false,"extensions":["wav"]},"audio/webm":{"source":"apache","compressible":false,"extensions":["weba"]},"audio/x-aac":{"source":"apache","compressible":false,"extensions":["aac"]},"audio/x-aiff":{"source":"apache","extensions":["aif","aiff","aifc"]},"audio/x-caf":{"source":"apache","compressible":false,"extensions":["caf"]},"audio/x-flac":{"source":"apache","extensions":["flac"]},"audio/x-m4a":{"source":"nginx","extensions":["m4a"]},"audio/x-matroska":{"source":"apache","extensions":["mka"]},"audio/x-mpegurl":{"source":"apache","extensions":["m3u"]},"audio/x-ms-wax":{"source":"apache","extensions":["wax"]},"audio/x-ms-wma":{"source":"apache","extensions":["wma"]},"audio/x-pn-realaudio":{"source":"apache","extensions":["ram","ra"]},"audio/x-pn-realaudio-plugin":{"source":"apache","extensions":["rmp"]},"audio/x-realaudio":{"source":"nginx","extensions":["ra"]},"audio/x-tta":{"source":"apache"},"audio/x-wav":{"source":"apache","extensions":["wav"]},"audio/xm":{"source":"apache","extensions":["xm"]},"chemical/x-cdx":{"source":"apache","extensions":["cdx"]},"chemical/x-cif":{"source":"apache","extensions":["cif"]},"chemical/x-cmdf":{"source":"apache","extensions":["cmdf"]},"chemical/x-cml":{"source":"apache","extensions":["cml"]},"chemical/x-csml":{"source":"apache","extensions":["csml"]},"chemical/x-pdb":{"source":"apache"},"chemical/x-xyz":{"source":"apache","extensions":["xyz"]},"font/collection":{"source":"iana","extensions":["ttc"]},"font/otf":{"source":"iana","compressible":true,"extensions":["otf"]},"font/sfnt":{"source":"iana"},"font/ttf":{"source":"iana","compressible":true,"extensions":["ttf"]},"font/woff":{"source":"iana","extensions":["woff"]},"font/woff2":{"source":"iana","extensions":["woff2"]},"image/aces":{"source":"iana","extensions":["exr"]},"image/apng":{"compressible":false,"extens
ions":["apng"]},"image/avci":{"source":"iana","extensions":["avci"]},"image/avcs":{"source":"iana","extensions":["avcs"]},"image/avif":{"source":"iana","compressible":false,"extensions":["avif"]},"image/bmp":{"source":"iana","compressible":true,"extensions":["bmp"]},"image/cgm":{"source":"iana","extensions":["cgm"]},"image/dicom-rle":{"source":"iana","extensions":["drle"]},"image/emf":{"source":"iana","extensions":["emf"]},"image/fits":{"source":"iana","extensions":["fits"]},"image/g3fax":{"source":"iana","extensions":["g3"]},"image/gif":{"source":"iana","compressible":false,"extensions":["gif"]},"image/heic":{"source":"iana","extensions":["heic"]},"image/heic-sequence":{"source":"iana","extensions":["heics"]},"image/heif":{"source":"iana","extensions":["heif"]},"image/heif-sequence":{"source":"iana","extensions":["heifs"]},"image/hej2k":{"source":"iana","extensions":["hej2"]},"image/hsj2":{"source":"iana","extensions":["hsj2"]},"image/ief":{"source":"iana","extensions":["ief"]},"image/jls":{"source":"iana","extensions":["jls"]},"image/jp2":{"source":"iana","compressible":false,"extensions":["jp2","jpg2"]},"image/jpeg":{"source":"iana","compressible":false,"extensions":["jpeg","jpg","jpe"]},"image/jph":{"source":"iana","extensions":["jph"]},"image/jphc":{"source":"iana","extensions":["jhc"]},"image/jpm":{"source":"iana","compressible":false,"extensions":["jpm"]},"image/jpx":{"source":"iana","compressible":false,"extensions":["jpx","jpf"]},"image/jxr":{"source":"iana","extensions":["jxr"]},"image/jxra":{"source":"iana","extensions":["jxra"]},"image/jxrs":{"source":"iana","extensions":["jxrs"]},"image/jxs":{"source":"iana","extensions":["jxs"]},"image/jxsc":{"source":"iana","extensions":["jxsc"]},"image/jxsi":{"source":"iana","extensions":["jxsi"]},"image/jxss":{"source":"iana","extensions":["jxss"]},"image/ktx":{"source":"iana","extensions":["ktx"]},"image/ktx2":{"source":"iana","extensions":["ktx2"]},"image/naplps":{"source":"iana"},"image/pjpeg":{"compressible":false},"image/png":{"source":"iana","compressible":false,"extensions":["png"]},"image/prs.btif":{"source":"iana","extensions":["btif"]},"image/prs.pti":{"source":"iana","extensions":["pti"]},"image/pwg-raster":{"source":"iana"},"image/sgi":{"source":"apache","extensions":["sgi"]},"image/svg+xml":{"source":"iana","compressible":true,"extensions":["svg","svgz"]},"image/t38":{"source":"iana","extensions":["t38"]},"image/tiff":{"source":"iana","compressible":false,"extensions":["tif","tiff"]},"image/tiff-fx":{"source":"iana","extensions":["tfx"]},"image/vnd.adobe.photoshop":{"source":"iana","compressible":true,"extensions":["psd"]},"image/vnd.airzip.accelerator.azv":{"source":"iana","extensions":["azv"]},"image/vnd.cns.inf2":{"source":"iana"},"image/vnd.dece.graphic":{"source":"iana","extensions":["uvi","uvvi","uvg","uvvg"]},"image/vnd.djvu":{"source":"iana","extensions":["djvu","djv"]},"image/vnd.dvb.subtitle":{"source":"iana","extensions":["sub"]},"image/vnd.dwg":{"source":"iana","extensions":["dwg"]},"image/vnd.dxf":{"source":"iana","extensions":["dxf"]},"image/vnd.fastbidsheet":{"source":"iana","extensions":["fbs"]},"image/vnd.fpx":{"source":"iana","extensions":["fpx"]},"image/vnd.fst":{"source":"iana","extensions":["fst"]},"image/vnd.fujixerox.edmics-mmr":{"source":"iana","extensions":["mmr"]},"image/vnd.fujixerox.edmics-rlc":{"source":"iana","extensions":["rlc"]},"image/vnd.globalgraphics.pgb":{"source":"iana"},"image/vnd.microsoft.icon":{"source":"iana","compressible":true,"extensions":["ico"]},"image/vnd.mix":{"source":"iana"},"
image/vnd.mozilla.apng":{"source":"iana"},"image/vnd.ms-dds":{"compressible":true,"extensions":["dds"]},"image/vnd.ms-modi":{"source":"iana","extensions":["mdi"]},"image/vnd.ms-photo":{"source":"apache","extensions":["wdp"]},"image/vnd.net-fpx":{"source":"iana","extensions":["npx"]},"image/vnd.pco.b16":{"source":"iana","extensions":["b16"]},"image/vnd.radiance":{"source":"iana"},"image/vnd.sealed.png":{"source":"iana"},"image/vnd.sealedmedia.softseal.gif":{"source":"iana"},"image/vnd.sealedmedia.softseal.jpg":{"source":"iana"},"image/vnd.svf":{"source":"iana"},"image/vnd.tencent.tap":{"source":"iana","extensions":["tap"]},"image/vnd.valve.source.texture":{"source":"iana","extensions":["vtf"]},"image/vnd.wap.wbmp":{"source":"iana","extensions":["wbmp"]},"image/vnd.xiff":{"source":"iana","extensions":["xif"]},"image/vnd.zbrush.pcx":{"source":"iana","extensions":["pcx"]},"image/webp":{"source":"apache","extensions":["webp"]},"image/wmf":{"source":"iana","extensions":["wmf"]},"image/x-3ds":{"source":"apache","extensions":["3ds"]},"image/x-cmu-raster":{"source":"apache","extensions":["ras"]},"image/x-cmx":{"source":"apache","extensions":["cmx"]},"image/x-freehand":{"source":"apache","extensions":["fh","fhc","fh4","fh5","fh7"]},"image/x-icon":{"source":"apache","compressible":true,"extensions":["ico"]},"image/x-jng":{"source":"nginx","extensions":["jng"]},"image/x-mrsid-image":{"source":"apache","extensions":["sid"]},"image/x-ms-bmp":{"source":"nginx","compressible":true,"extensions":["bmp"]},"image/x-pcx":{"source":"apache","extensions":["pcx"]},"image/x-pict":{"source":"apache","extensions":["pic","pct"]},"image/x-portable-anymap":{"source":"apache","extensions":["pnm"]},"image/x-portable-bitmap":{"source":"apache","extensions":["pbm"]},"image/x-portable-graymap":{"source":"apache","extensions":["pgm"]},"image/x-portable-pixmap":{"source":"apache","extensions":["ppm"]},"image/x-rgb":{"source":"apache","extensions":["rgb"]},"image/x-tga":{"source":"apache","extensions":["tga"]},"image/x-xbitmap":{"source":"apache","extensions":["xbm"]},"image/x-xcf":{"compressible":false},"image/x-xpixmap":{"source":"apache","extensions":["xpm"]},"image/x-xwindowdump":{"source":"apache","extensions":["xwd"]},"message/cpim":{"source":"iana"},"message/delivery-status":{"source":"iana"},"message/disposition-notification":{"source":"iana","extensions":["disposition-notification"]},"message/external-body":{"source":"iana"},"message/feedback-report":{"source":"iana"},"message/global":{"source":"iana","extensions":["u8msg"]},"message/global-delivery-status":{"source":"iana","extensions":["u8dsn"]},"message/global-disposition-notification":{"source":"iana","extensions":["u8mdn"]},"message/global-headers":{"source":"iana","extensions":["u8hdr"]},"message/http":{"source":"iana","compressible":false},"message/imdn+xml":{"source":"iana","compressible":true},"message/news":{"source":"iana"},"message/partial":{"source":"iana","compressible":false},"message/rfc822":{"source":"iana","compressible":true,"extensions":["eml","mime"]},"message/s-http":{"source":"iana"},"message/sip":{"source":"iana"},"message/sipfrag":{"source":"iana"},"message/tracking-status":{"source":"iana"},"message/vnd.si.simp":{"source":"iana"},"message/vnd.wfa.wsc":{"source":"iana","extensions":["wsc"]},"model/3mf":{"source":"iana","extensions":["3mf"]},"model/e57":{"source":"iana"},"model/gltf+json":{"source":"iana","compressible":true,"extensions":["gltf"]},"model/gltf-binary":{"source":"iana","compressible":true,"extensions":["glb"]},"model/iges":{"sourc
e":"iana","compressible":false,"extensions":["igs","iges"]},"model/mesh":{"source":"iana","compressible":false,"extensions":["msh","mesh","silo"]},"model/mtl":{"source":"iana","extensions":["mtl"]},"model/obj":{"source":"iana","extensions":["obj"]},"model/step":{"source":"iana"},"model/step+xml":{"source":"iana","compressible":true,"extensions":["stpx"]},"model/step+zip":{"source":"iana","compressible":false,"extensions":["stpz"]},"model/step-xml+zip":{"source":"iana","compressible":false,"extensions":["stpxz"]},"model/stl":{"source":"iana","extensions":["stl"]},"model/vnd.collada+xml":{"source":"iana","compressible":true,"extensions":["dae"]},"model/vnd.dwf":{"source":"iana","extensions":["dwf"]},"model/vnd.flatland.3dml":{"source":"iana"},"model/vnd.gdl":{"source":"iana","extensions":["gdl"]},"model/vnd.gs-gdl":{"source":"apache"},"model/vnd.gs.gdl":{"source":"iana"},"model/vnd.gtw":{"source":"iana","extensions":["gtw"]},"model/vnd.moml+xml":{"source":"iana","compressible":true},"model/vnd.mts":{"source":"iana","extensions":["mts"]},"model/vnd.opengex":{"source":"iana","extensions":["ogex"]},"model/vnd.parasolid.transmit.binary":{"source":"iana","extensions":["x_b"]},"model/vnd.parasolid.transmit.text":{"source":"iana","extensions":["x_t"]},"model/vnd.pytha.pyox":{"source":"iana"},"model/vnd.rosette.annotated-data-model":{"source":"iana"},"model/vnd.sap.vds":{"source":"iana","extensions":["vds"]},"model/vnd.usdz+zip":{"source":"iana","compressible":false,"extensions":["usdz"]},"model/vnd.valve.source.compiled-map":{"source":"iana","extensions":["bsp"]},"model/vnd.vtu":{"source":"iana","extensions":["vtu"]},"model/vrml":{"source":"iana","compressible":false,"extensions":["wrl","vrml"]},"model/x3d+binary":{"source":"apache","compressible":false,"extensions":["x3db","x3dbz"]},"model/x3d+fastinfoset":{"source":"iana","extensions":["x3db"]},"model/x3d+vrml":{"source":"apache","compressible":false,"extensions":["x3dv","x3dvz"]},"model/x3d+xml":{"source":"iana","compressible":true,"extensions":["x3d","x3dz"]},"model/x3d-vrml":{"source":"iana","extensions":["x3dv"]},"multipart/alternative":{"source":"iana","compressible":false},"multipart/appledouble":{"source":"iana"},"multipart/byteranges":{"source":"iana"},"multipart/digest":{"source":"iana"},"multipart/encrypted":{"source":"iana","compressible":false},"multipart/form-data":{"source":"iana","compressible":false},"multipart/header-set":{"source":"iana"},"multipart/mixed":{"source":"iana"},"multipart/multilingual":{"source":"iana"},"multipart/parallel":{"source":"iana"},"multipart/related":{"source":"iana","compressible":false},"multipart/report":{"source":"iana"},"multipart/signed":{"source":"iana","compressible":false},"multipart/vnd.bint.med-plus":{"source":"iana"},"multipart/voice-message":{"source":"iana"},"multipart/x-mixed-replace":{"source":"iana"},"text/1d-interleaved-parityfec":{"source":"iana"},"text/cache-manifest":{"source":"iana","compressible":true,"extensions":["appcache","manifest"]},"text/calendar":{"source":"iana","extensions":["ics","ifb"]},"text/calender":{"compressible":true},"text/cmd":{"compressible":true},"text/coffeescript":{"extensions":["coffee","litcoffee"]},"text/cql":{"source":"iana"},"text/cql-expression":{"source":"iana"},"text/cql-identifier":{"source":"iana"},"text/css":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["css"]},"text/csv":{"source":"iana","compressible":true,"extensions":["csv"]},"text/csv-schema":{"source":"iana"},"text/directory":{"source":"iana"},"text/dns":{"source":"iana"
},"text/ecmascript":{"source":"iana"},"text/encaprtp":{"source":"iana"},"text/enriched":{"source":"iana"},"text/fhirpath":{"source":"iana"},"text/flexfec":{"source":"iana"},"text/fwdred":{"source":"iana"},"text/gff3":{"source":"iana"},"text/grammar-ref-list":{"source":"iana"},"text/html":{"source":"iana","compressible":true,"extensions":["html","htm","shtml"]},"text/jade":{"extensions":["jade"]},"text/javascript":{"source":"iana","compressible":true},"text/jcr-cnd":{"source":"iana"},"text/jsx":{"compressible":true,"extensions":["jsx"]},"text/less":{"compressible":true,"extensions":["less"]},"text/markdown":{"source":"iana","compressible":true,"extensions":["markdown","md"]},"text/mathml":{"source":"nginx","extensions":["mml"]},"text/mdx":{"compressible":true,"extensions":["mdx"]},"text/mizar":{"source":"iana"},"text/n3":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["n3"]},"text/parameters":{"source":"iana","charset":"UTF-8"},"text/parityfec":{"source":"iana"},"text/plain":{"source":"iana","compressible":true,"extensions":["txt","text","conf","def","list","log","in","ini"]},"text/provenance-notation":{"source":"iana","charset":"UTF-8"},"text/prs.fallenstein.rst":{"source":"iana"},"text/prs.lines.tag":{"source":"iana","extensions":["dsc"]},"text/prs.prop.logic":{"source":"iana"},"text/raptorfec":{"source":"iana"},"text/red":{"source":"iana"},"text/rfc822-headers":{"source":"iana"},"text/richtext":{"source":"iana","compressible":true,"extensions":["rtx"]},"text/rtf":{"source":"iana","compressible":true,"extensions":["rtf"]},"text/rtp-enc-aescm128":{"source":"iana"},"text/rtploopback":{"source":"iana"},"text/rtx":{"source":"iana"},"text/sgml":{"source":"iana","extensions":["sgml","sgm"]},"text/shaclc":{"source":"iana"},"text/shex":{"source":"iana","extensions":["shex"]},"text/slim":{"extensions":["slim","slm"]},"text/spdx":{"source":"iana","extensions":["spdx"]},"text/strings":{"source":"iana"},"text/stylus":{"extensions":["stylus","styl"]},"text/t140":{"source":"iana"},"text/tab-separated-values":{"source":"iana","compressible":true,"extensions":["tsv"]},"text/troff":{"source":"iana","extensions":["t","tr","roff","man","me","ms"]},"text/turtle":{"source":"iana","charset":"UTF-8","extensions":["ttl"]},"text/ulpfec":{"source":"iana"},"text/uri-list":{"source":"iana","compressible":true,"extensions":["uri","uris","urls"]},"text/vcard":{"source":"iana","compressible":true,"extensions":["vcard"]},"text/vnd.a":{"source":"iana"},"text/vnd.abc":{"source":"iana"},"text/vnd.ascii-art":{"source":"iana"},"text/vnd.curl":{"source":"iana","extensions":["curl"]},"text/vnd.curl.dcurl":{"source":"apache","extensions":["dcurl"]},"text/vnd.curl.mcurl":{"source":"apache","extensions":["mcurl"]},"text/vnd.curl.scurl":{"source":"apache","extensions":["scurl"]},"text/vnd.debian.copyright":{"source":"iana","charset":"UTF-8"},"text/vnd.dmclientscript":{"source":"iana"},"text/vnd.dvb.subtitle":{"source":"iana","extensions":["sub"]},"text/vnd.esmertec.theme-descriptor":{"source":"iana","charset":"UTF-8"},"text/vnd.familysearch.gedcom":{"source":"iana","extensions":["ged"]},"text/vnd.ficlab.flt":{"source":"iana"},"text/vnd.fly":{"source":"iana","extensions":["fly"]},"text/vnd.fmi.flexstor":{"source":"iana","extensions":["flx"]},"text/vnd.gml":{"source":"iana"},"text/vnd.graphviz":{"source":"iana","extensions":["gv"]},"text/vnd.hans":{"source":"iana"},"text/vnd.hgl":{"source":"iana"},"text/vnd.in3d.3dml":{"source":"iana","extensions":["3dml"]},"text/vnd.in3d.spot":{"source":"iana","extensions":["spo
t"]},"text/vnd.iptc.newsml":{"source":"iana"},"text/vnd.iptc.nitf":{"source":"iana"},"text/vnd.latex-z":{"source":"iana"},"text/vnd.motorola.reflex":{"source":"iana"},"text/vnd.ms-mediapackage":{"source":"iana"},"text/vnd.net2phone.commcenter.command":{"source":"iana"},"text/vnd.radisys.msml-basic-layout":{"source":"iana"},"text/vnd.senx.warpscript":{"source":"iana"},"text/vnd.si.uricatalogue":{"source":"iana"},"text/vnd.sosi":{"source":"iana"},"text/vnd.sun.j2me.app-descriptor":{"source":"iana","charset":"UTF-8","extensions":["jad"]},"text/vnd.trolltech.linguist":{"source":"iana","charset":"UTF-8"},"text/vnd.wap.si":{"source":"iana"},"text/vnd.wap.sl":{"source":"iana"},"text/vnd.wap.wml":{"source":"iana","extensions":["wml"]},"text/vnd.wap.wmlscript":{"source":"iana","extensions":["wmls"]},"text/vtt":{"source":"iana","charset":"UTF-8","compressible":true,"extensions":["vtt"]},"text/x-asm":{"source":"apache","extensions":["s","asm"]},"text/x-c":{"source":"apache","extensions":["c","cc","cxx","cpp","h","hh","dic"]},"text/x-component":{"source":"nginx","extensions":["htc"]},"text/x-fortran":{"source":"apache","extensions":["f","for","f77","f90"]},"text/x-gwt-rpc":{"compressible":true},"text/x-handlebars-template":{"extensions":["hbs"]},"text/x-java-source":{"source":"apache","extensions":["java"]},"text/x-jquery-tmpl":{"compressible":true},"text/x-lua":{"extensions":["lua"]},"text/x-markdown":{"compressible":true,"extensions":["mkd"]},"text/x-nfo":{"source":"apache","extensions":["nfo"]},"text/x-opml":{"source":"apache","extensions":["opml"]},"text/x-org":{"compressible":true,"extensions":["org"]},"text/x-pascal":{"source":"apache","extensions":["p","pas"]},"text/x-processing":{"compressible":true,"extensions":["pde"]},"text/x-sass":{"extensions":["sass"]},"text/x-scss":{"extensions":["scss"]},"text/x-setext":{"source":"apache","extensions":["etx"]},"text/x-sfv":{"source":"apache","extensions":["sfv"]},"text/x-suse-ymp":{"compressible":true,"extensions":["ymp"]},"text/x-uuencode":{"source":"apache","extensions":["uu"]},"text/x-vcalendar":{"source":"apache","extensions":["vcs"]},"text/x-vcard":{"source":"apache","extensions":["vcf"]},"text/xml":{"source":"iana","compressible":true,"extensions":["xml"]},"text/xml-external-parsed-entity":{"source":"iana"},"text/yaml":{"compressible":true,"extensions":["yaml","yml"]},"video/1d-interleaved-parityfec":{"source":"iana"},"video/3gpp":{"source":"iana","extensions":["3gp","3gpp"]},"video/3gpp-tt":{"source":"iana"},"video/3gpp2":{"source":"iana","extensions":["3g2"]},"video/av1":{"source":"iana"},"video/bmpeg":{"source":"iana"},"video/bt656":{"source":"iana"},"video/celb":{"source":"iana"},"video/dv":{"source":"iana"},"video/encaprtp":{"source":"iana"},"video/ffv1":{"source":"iana"},"video/flexfec":{"source":"iana"},"video/h261":{"source":"iana","extensions":["h261"]},"video/h263":{"source":"iana","extensions":["h263"]},"video/h263-1998":{"source":"iana"},"video/h263-2000":{"source":"iana"},"video/h264":{"source":"iana","extensions":["h264"]},"video/h264-rcdo":{"source":"iana"},"video/h264-svc":{"source":"iana"},"video/h265":{"source":"iana"},"video/iso.segment":{"source":"iana","extensions":["m4s"]},"video/jpeg":{"source":"iana","extensions":["jpgv"]},"video/jpeg2000":{"source":"iana"},"video/jpm":{"source":"apache","extensions":["jpm","jpgm"]},"video/jxsv":{"source":"iana"},"video/mj2":{"source":"iana","extensions":["mj2","mjp2"]},"video/mp1s":{"source":"iana"},"video/mp2p":{"source":"iana"},"video/mp2t":{"source":"iana","extensions":["ts"]},"video/mp
4":{"source":"iana","compressible":false,"extensions":["mp4","mp4v","mpg4"]},"video/mp4v-es":{"source":"iana"},"video/mpeg":{"source":"iana","compressible":false,"extensions":["mpeg","mpg","mpe","m1v","m2v"]},"video/mpeg4-generic":{"source":"iana"},"video/mpv":{"source":"iana"},"video/nv":{"source":"iana"},"video/ogg":{"source":"iana","compressible":false,"extensions":["ogv"]},"video/parityfec":{"source":"iana"},"video/pointer":{"source":"iana"},"video/quicktime":{"source":"iana","compressible":false,"extensions":["qt","mov"]},"video/raptorfec":{"source":"iana"},"video/raw":{"source":"iana"},"video/rtp-enc-aescm128":{"source":"iana"},"video/rtploopback":{"source":"iana"},"video/rtx":{"source":"iana"},"video/scip":{"source":"iana"},"video/smpte291":{"source":"iana"},"video/smpte292m":{"source":"iana"},"video/ulpfec":{"source":"iana"},"video/vc1":{"source":"iana"},"video/vc2":{"source":"iana"},"video/vnd.cctv":{"source":"iana"},"video/vnd.dece.hd":{"source":"iana","extensions":["uvh","uvvh"]},"video/vnd.dece.mobile":{"source":"iana","extensions":["uvm","uvvm"]},"video/vnd.dece.mp4":{"source":"iana"},"video/vnd.dece.pd":{"source":"iana","extensions":["uvp","uvvp"]},"video/vnd.dece.sd":{"source":"iana","extensions":["uvs","uvvs"]},"video/vnd.dece.video":{"source":"iana","extensions":["uvv","uvvv"]},"video/vnd.directv.mpeg":{"source":"iana"},"video/vnd.directv.mpeg-tts":{"source":"iana"},"video/vnd.dlna.mpeg-tts":{"source":"iana"},"video/vnd.dvb.file":{"source":"iana","extensions":["dvb"]},"video/vnd.fvt":{"source":"iana","extensions":["fvt"]},"video/vnd.hns.video":{"source":"iana"},"video/vnd.iptvforum.1dparityfec-1010":{"source":"iana"},"video/vnd.iptvforum.1dparityfec-2005":{"source":"iana"},"video/vnd.iptvforum.2dparityfec-1010":{"source":"iana"},"video/vnd.iptvforum.2dparityfec-2005":{"source":"iana"},"video/vnd.iptvforum.ttsavc":{"source":"iana"},"video/vnd.iptvforum.ttsmpeg2":{"source":"iana"},"video/vnd.motorola.video":{"source":"iana"},"video/vnd.motorola.videop":{"source":"iana"},"video/vnd.mpegurl":{"source":"iana","extensions":["mxu","m4u"]},"video/vnd.ms-playready.media.pyv":{"source":"iana","extensions":["pyv"]},"video/vnd.nokia.interleaved-multimedia":{"source":"iana"},"video/vnd.nokia.mp4vr":{"source":"iana"},"video/vnd.nokia.videovoip":{"source":"iana"},"video/vnd.objectvideo":{"source":"iana"},"video/vnd.radgamettools.bink":{"source":"iana"},"video/vnd.radgamettools.smacker":{"source":"iana"},"video/vnd.sealed.mpeg1":{"source":"iana"},"video/vnd.sealed.mpeg4":{"source":"iana"},"video/vnd.sealed.swf":{"source":"iana"},"video/vnd.sealedmedia.softseal.mov":{"source":"iana"},"video/vnd.uvvu.mp4":{"source":"iana","extensions":["uvu","uvvu"]},"video/vnd.vivo":{"source":"iana","extensions":["viv"]},"video/vnd.youtube.yt":{"source":"iana"},"video/vp8":{"source":"iana"},"video/vp9":{"source":"iana"},"video/webm":{"source":"apache","compressible":false,"extensions":["webm"]},"video/x-f4v":{"source":"apache","extensions":["f4v"]},"video/x-fli":{"source":"apache","extensions":["fli"]},"video/x-flv":{"source":"apache","compressible":false,"extensions":["flv"]},"video/x-m4v":{"source":"apache","extensions":["m4v"]},"video/x-matroska":{"source":"apache","compressible":false,"extensions":["mkv","mk3d","mks"]},"video/x-mng":{"source":"apache","extensions":["mng"]},"video/x-ms-asf":{"source":"apache","extensions":["asf","asx"]},"video/x-ms-vob":{"source":"apache","extensions":["vob"]},"video/x-ms-wm":{"source":"apache","extensions":["wm"]},"video/x-ms-wmv":{"source":"apache","compressible":false
,"extensions":["wmv"]},"video/x-ms-wmx":{"source":"apache","extensions":["wmx"]},"video/x-ms-wvx":{"source":"apache","extensions":["wvx"]},"video/x-msvideo":{"source":"apache","extensions":["avi"]},"video/x-sgi-movie":{"source":"apache","extensions":["movie"]},"video/x-smv":{"source":"apache","extensions":["smv"]},"x-conference/x-cooltalk":{"source":"apache","extensions":["ice"]},"x-shader/x-fragment":{"compressible":true},"x-shader/x-vertex":{"compressible":true}}'); /***/ }), /***/ 92472: /***/ ((module) => { "use strict"; module.exports = /*#__PURE__*/JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45,46],"valid"],[[47,47],"disallowed_STD3_valid"],[[48,57],"valid"],[[58,64],"disallowed_STD3_valid"],[[65,65],"mapped",[97]],[[66,66],"mapped",[98]],[[67,67],"mapped",[99]],[[68,68],"mapped",[100]],[[69,69],"mapped",[101]],[[70,70],"mapped",[102]],[[71,71],"mapped",[103]],[[72,72],"mapped",[104]],[[73,73],"mapped",[105]],[[74,74],"mapped",[106]],[[75,75],"mapped",[107]],[[76,76],"mapped",[108]],[[77,77],"mapped",[109]],[[78,78],"mapped",[110]],[[79,79],"mapped",[111]],[[80,80],"mapped",[112]],[[81,81],"mapped",[113]],[[82,82],"mapped",[114]],[[83,83],"mapped",[115]],[[84,84],"mapped",[116]],[[85,85],"mapped",[117]],[[86,86],"mapped",[118]],[[87,87],"mapped",[119]],[[88,88],"mapped",[120]],[[89,89],"mapped",[121]],[[90,90],"mapped",[122]],[[91,96],"disallowed_STD3_valid"],[[97,122],"valid"],[[123,127],"disallowed_STD3_valid"],[[128,159],"disallowed"],[[160,160],"disallowed_STD3_mapped",[32]],[[161,167],"valid",[],"NV8"],[[168,168],"disallowed_STD3_mapped",[32,776]],[[169,169],"valid",[],"NV8"],[[170,170],"mapped",[97]],[[171,172],"valid",[],"NV8"],[[173,173],"ignored"],[[174,174],"valid",[],"NV8"],[[175,175],"disallowed_STD3_mapped",[32,772]],[[176,177],"valid",[],"NV8"],[[178,178],"mapped",[50]],[[179,179],"mapped",[51]],[[180,180],"disallowed_STD3_mapped",[32,769]],[[181,181],"mapped",[956]],[[182,182],"valid",[],"NV8"],[[183,183],"valid"],[[184,184],"disallowed_STD3_mapped",[32,807]],[[185,185],"mapped",[49]],[[186,186],"mapped",[111]],[[187,187],"valid",[],"NV8"],[[188,188],"mapped",[49,8260,52]],[[189,189],"mapped",[49,8260,50]],[[190,190],"mapped",[51,8260,52]],[[191,191],"valid",[],"NV8"],[[192,192],"mapped",[224]],[[193,193],"mapped",[225]],[[194,194],"mapped",[226]],[[195,195],"mapped",[227]],[[196,196],"mapped",[228]],[[197,197],"mapped",[229]],[[198,198],"mapped",[230]],[[199,199],"mapped",[231]],[[200,200],"mapped",[232]],[[201,201],"mapped",[233]],[[202,202],"mapped",[234]],[[203,203],"mapped",[235]],[[204,204],"mapped",[236]],[[205,205],"mapped",[237]],[[206,206],"mapped",[238]],[[207,207],"mapped",[239]],[[208,208],"mapped",[240]],[[209,209],"mapped",[241]],[[210,210],"mapped",[242]],[[211,211],"mapped",[243]],[[212,212],"mapped",[244]],[[213,213],"mapped",[245]],[[214,214],"mapped",[246]],[[215,215],"valid",[],"NV8"],[[216,216],"mapped",[248]],[[217,217],"mapped",[249]],[[218,218],"mapped",[250]],[[219,219],"mapped",[251]],[[220,220],"mapped",[252]],[[221,221],"mapped",[253]],[[222,222],"mapped",[254]],[[223,223],"deviation",[115,115]],[[224,246],"valid"],[[247,247],"valid",[],"NV8"],[[248,255],"valid"],[[256,256],"mapped",[257]],[[257,257],"valid"],[[258,258],"mapped",[259]],[[259,259],"valid"],[[260,260],"mapped",[261]],[[261,261],"valid"],[[262,262],"mapped",[263]],[[263,263],"valid"],[[264,264],"mapped",[265]],[[265,265],"valid"],[[266,266],"mapped",[267]],[[267,267],"valid"],[[268,268],"mapped",[269]],[[269,269],"valid"],[[270,270],"mapped",[271]],[[271,271],"valid"],[[272
,272],"mapped",[273]],[[273,273],"valid"],[[274,274],"mapped",[275]],[[275,275],"valid"],[[276,276],"mapped",[277]],[[277,277],"valid"],[[278,278],"mapped",[279]],[[279,279],"valid"],[[280,280],"mapped",[281]],[[281,281],"valid"],[[282,282],"mapped",[283]],[[283,283],"valid"],[[284,284],"mapped",[285]],[[285,285],"valid"],[[286,286],"mapped",[287]],[[287,287],"valid"],[[288,288],"mapped",[289]],[[289,289],"valid"],[[290,290],"mapped",[291]],[[291,291],"valid"],[[292,292],"mapped",[293]],[[293,293],"valid"],[[294,294],"mapped",[295]],[[295,295],"valid"],[[296,296],"mapped",[297]],[[297,297],"valid"],[[298,298],"mapped",[299]],[[299,299],"valid"],[[300,300],"mapped",[301]],[[301,301],"valid"],[[302,302],"mapped",[303]],[[303,303],"valid"],[[304,304],"mapped",[105,775]],[[305,305],"valid"],[[306,307],"mapped",[105,106]],[[308,308],"mapped",[309]],[[309,309],"valid"],[[310,310],"mapped",[311]],[[311,312],"valid"],[[313,313],"mapped",[314]],[[314,314],"valid"],[[315,315],"mapped",[316]],[[316,316],"valid"],[[317,317],"mapped",[318]],[[318,318],"valid"],[[319,320],"mapped",[108,183]],[[321,321],"mapped",[322]],[[322,322],"valid"],[[323,323],"mapped",[324]],[[324,324],"valid"],[[325,325],"mapped",[326]],[[326,326],"valid"],[[327,327],"mapped",[328]],[[328,328],"valid"],[[329,329],"mapped",[700,110]],[[330,330],"mapped",[331]],[[331,331],"valid"],[[332,332],"mapped",[333]],[[333,333],"valid"],[[334,334],"mapped",[335]],[[335,335],"valid"],[[336,336],"mapped",[337]],[[337,337],"valid"],[[338,338],"mapped",[339]],[[339,339],"valid"],[[340,340],"mapped",[341]],[[341,341],"valid"],[[342,342],"mapped",[343]],[[343,343],"valid"],[[344,344],"mapped",[345]],[[345,345],"valid"],[[346,346],"mapped",[347]],[[347,347],"valid"],[[348,348],"mapped",[349]],[[349,349],"valid"],[[350,350],"mapped",[351]],[[351,351],"valid"],[[352,352],"mapped",[353]],[[353,353],"valid"],[[354,354],"mapped",[355]],[[355,355],"valid"],[[356,356],"mapped",[357]],[[357,357],"valid"],[[358,358],"mapped",[359]],[[359,359],"valid"],[[360,360],"mapped",[361]],[[361,361],"valid"],[[362,362],"mapped",[363]],[[363,363],"valid"],[[364,364],"mapped",[365]],[[365,365],"valid"],[[366,366],"mapped",[367]],[[367,367],"valid"],[[368,368],"mapped",[369]],[[369,369],"valid"],[[370,370],"mapped",[371]],[[371,371],"valid"],[[372,372],"mapped",[373]],[[373,373],"valid"],[[374,374],"mapped",[375]],[[375,375],"valid"],[[376,376],"mapped",[255]],[[377,377],"mapped",[378]],[[378,378],"valid"],[[379,379],"mapped",[380]],[[380,380],"valid"],[[381,381],"mapped",[382]],[[382,382],"valid"],[[383,383],"mapped",[115]],[[384,384],"valid"],[[385,385],"mapped",[595]],[[386,386],"mapped",[387]],[[387,387],"valid"],[[388,388],"mapped",[389]],[[389,389],"valid"],[[390,390],"mapped",[596]],[[391,391],"mapped",[392]],[[392,392],"valid"],[[393,393],"mapped",[598]],[[394,394],"mapped",[599]],[[395,395],"mapped",[396]],[[396,397],"valid"],[[398,398],"mapped",[477]],[[399,399],"mapped",[601]],[[400,400],"mapped",[603]],[[401,401],"mapped",[402]],[[402,402],"valid"],[[403,403],"mapped",[608]],[[404,404],"mapped",[611]],[[405,405],"valid"],[[406,406],"mapped",[617]],[[407,407],"mapped",[616]],[[408,408],"mapped",[409]],[[409,411],"valid"],[[412,412],"mapped",[623]],[[413,413],"mapped",[626]],[[414,414],"valid"],[[415,415],"mapped",[629]],[[416,416],"mapped",[417]],[[417,417],"valid"],[[418,418],"mapped",[419]],[[419,419],"valid"],[[420,420],"mapped",[421]],[[421,421],"valid"],[[422,422],"mapped",[640]],[[423,423],"mapped",[424]],[[424,424],"valid"],[[425,425],"mapped",[643]],[[4
26,427],"valid"],[[428,428],"mapped",[429]],[[429,429],"valid"],[[430,430],"mapped",[648]],[[431,431],"mapped",[432]],[[432,432],"valid"],[[433,433],"mapped",[650]],[[434,434],"mapped",[651]],[[435,435],"mapped",[436]],[[436,436],"valid"],[[437,437],"mapped",[438]],[[438,438],"valid"],[[439,439],"mapped",[658]],[[440,440],"mapped",[441]],[[441,443],"valid"],[[444,444],"mapped",[445]],[[445,451],"valid"],[[452,454],"mapped",[100,382]],[[455,457],"mapped",[108,106]],[[458,460],"mapped",[110,106]],[[461,461],"mapped",[462]],[[462,462],"valid"],[[463,463],"mapped",[464]],[[464,464],"valid"],[[465,465],"mapped",[466]],[[466,466],"valid"],[[467,467],"mapped",[468]],[[468,468],"valid"],[[469,469],"mapped",[470]],[[470,470],"valid"],[[471,471],"mapped",[472]],[[472,472],"valid"],[[473,473],"mapped",[474]],[[474,474],"valid"],[[475,475],"mapped",[476]],[[476,477],"valid"],[[478,478],"mapped",[479]],[[479,479],"valid"],[[480,480],"mapped",[481]],[[481,481],"valid"],[[482,482],"mapped",[483]],[[483,483],"valid"],[[484,484],"mapped",[485]],[[485,485],"valid"],[[486,486],"mapped",[487]],[[487,487],"valid"],[[488,488],"mapped",[489]],[[489,489],"valid"],[[490,490],"mapped",[491]],[[491,491],"valid"],[[492,492],"mapped",[493]],[[493,493],"valid"],[[494,494],"mapped",[495]],[[495,496],"valid"],[[497,499],"mapped",[100,122]],[[500,500],"mapped",[501]],[[501,501],"valid"],[[502,502],"mapped",[405]],[[503,503],"mapped",[447]],[[504,504],"mapped",[505]],[[505,505],"valid"],[[506,506],"mapped",[507]],[[507,507],"valid"],[[508,508],"mapped",[509]],[[509,509],"valid"],[[510,510],"mapped",[511]],[[511,511],"valid"],[[512,512],"mapped",[513]],[[513,513],"valid"],[[514,514],"mapped",[515]],[[515,515],"valid"],[[516,516],"mapped",[517]],[[517,517],"valid"],[[518,518],"mapped",[519]],[[519,519],"valid"],[[520,520],"mapped",[521]],[[521,521],"valid"],[[522,522],"mapped",[523]],[[523,523],"valid"],[[524,524],"mapped",[525]],[[525,525],"valid"],[[526,526],"mapped",[527]],[[527,527],"valid"],[[528,528],"mapped",[529]],[[529,529],"valid"],[[530,530],"mapped",[531]],[[531,531],"valid"],[[532,532],"mapped",[533]],[[533,533],"valid"],[[534,534],"mapped",[535]],[[535,535],"valid"],[[536,536],"mapped",[537]],[[537,537],"valid"],[[538,538],"mapped",[539]],[[539,539],"valid"],[[540,540],"mapped",[541]],[[541,541],"valid"],[[542,542],"mapped",[543]],[[543,543],"valid"],[[544,544],"mapped",[414]],[[545,545],"valid"],[[546,546],"mapped",[547]],[[547,547],"valid"],[[548,548],"mapped",[549]],[[549,549],"valid"],[[550,550],"mapped",[551]],[[551,551],"valid"],[[552,552],"mapped",[553]],[[553,553],"valid"],[[554,554],"mapped",[555]],[[555,555],"valid"],[[556,556],"mapped",[557]],[[557,557],"valid"],[[558,558],"mapped",[559]],[[559,559],"valid"],[[560,560],"mapped",[561]],[[561,561],"valid"],[[562,562],"mapped",[563]],[[563,563],"valid"],[[564,566],"valid"],[[567,569],"valid"],[[570,570],"mapped",[11365]],[[571,571],"mapped",[572]],[[572,572],"valid"],[[573,573],"mapped",[410]],[[574,574],"mapped",[11366]],[[575,576],"valid"],[[577,577],"mapped",[578]],[[578,578],"valid"],[[579,579],"mapped",[384]],[[580,580],"mapped",[649]],[[581,581],"mapped",[652]],[[582,582],"mapped",[583]],[[583,583],"valid"],[[584,584],"mapped",[585]],[[585,585],"valid"],[[586,586],"mapped",[587]],[[587,587],"valid"],[[588,588],"mapped",[589]],[[589,589],"valid"],[[590,590],"mapped",[591]],[[591,591],"valid"],[[592,680],"valid"],[[681,685],"valid"],[[686,687],"valid"],[[688,688],"mapped",[104]],[[689,689],"mapped",[614]],[[690,690],"mapped",[106]],[[691,691],"mapped
",[114]],[[692,692],"mapped",[633]],[[693,693],"mapped",[635]],[[694,694],"mapped",[641]],[[695,695],"mapped",[119]],[[696,696],"mapped",[121]],[[697,705],"valid"],[[706,709],"valid",[],"NV8"],[[710,721],"valid"],[[722,727],"valid",[],"NV8"],[[728,728],"disallowed_STD3_mapped",[32,774]],[[729,729],"disallowed_STD3_mapped",[32,775]],[[730,730],"disallowed_STD3_mapped",[32,778]],[[731,731],"disallowed_STD3_mapped",[32,808]],[[732,732],"disallowed_STD3_mapped",[32,771]],[[733,733],"disallowed_STD3_mapped",[32,779]],[[734,734],"valid",[],"NV8"],[[735,735],"valid",[],"NV8"],[[736,736],"mapped",[611]],[[737,737],"mapped",[108]],[[738,738],"mapped",[115]],[[739,739],"mapped",[120]],[[740,740],"mapped",[661]],[[741,745],"valid",[],"NV8"],[[746,747],"valid",[],"NV8"],[[748,748],"valid"],[[749,749],"valid",[],"NV8"],[[750,750],"valid"],[[751,767],"valid",[],"NV8"],[[768,831],"valid"],[[832,832],"mapped",[768]],[[833,833],"mapped",[769]],[[834,834],"valid"],[[835,835],"mapped",[787]],[[836,836],"mapped",[776,769]],[[837,837],"mapped",[953]],[[838,846],"valid"],[[847,847],"ignored"],[[848,855],"valid"],[[856,860],"valid"],[[861,863],"valid"],[[864,865],"valid"],[[866,866],"valid"],[[867,879],"valid"],[[880,880],"mapped",[881]],[[881,881],"valid"],[[882,882],"mapped",[883]],[[883,883],"valid"],[[884,884],"mapped",[697]],[[885,885],"valid"],[[886,886],"mapped",[887]],[[887,887],"valid"],[[888,889],"disallowed"],[[890,890],"disallowed_STD3_mapped",[32,953]],[[891,893],"valid"],[[894,894],"disallowed_STD3_mapped",[59]],[[895,895],"mapped",[1011]],[[896,899],"disallowed"],[[900,900],"disallowed_STD3_mapped",[32,769]],[[901,901],"disallowed_STD3_mapped",[32,776,769]],[[902,902],"mapped",[940]],[[903,903],"mapped",[183]],[[904,904],"mapped",[941]],[[905,905],"mapped",[942]],[[906,906],"mapped",[943]],[[907,907],"disallowed"],[[908,908],"mapped",[972]],[[909,909],"disallowed"],[[910,910],"mapped",[973]],[[911,911],"mapped",[974]],[[912,912],"valid"],[[913,913],"mapped",[945]],[[914,914],"mapped",[946]],[[915,915],"mapped",[947]],[[916,916],"mapped",[948]],[[917,917],"mapped",[949]],[[918,918],"mapped",[950]],[[919,919],"mapped",[951]],[[920,920],"mapped",[952]],[[921,921],"mapped",[953]],[[922,922],"mapped",[954]],[[923,923],"mapped",[955]],[[924,924],"mapped",[956]],[[925,925],"mapped",[957]],[[926,926],"mapped",[958]],[[927,927],"mapped",[959]],[[928,928],"mapped",[960]],[[929,929],"mapped",[961]],[[930,930],"disallowed"],[[931,931],"mapped",[963]],[[932,932],"mapped",[964]],[[933,933],"mapped",[965]],[[934,934],"mapped",[966]],[[935,935],"mapped",[967]],[[936,936],"mapped",[968]],[[937,937],"mapped",[969]],[[938,938],"mapped",[970]],[[939,939],"mapped",[971]],[[940,961],"valid"],[[962,962],"deviation",[963]],[[963,974],"valid"],[[975,975],"mapped",[983]],[[976,976],"mapped",[946]],[[977,977],"mapped",[952]],[[978,978],"mapped",[965]],[[979,979],"mapped",[973]],[[980,980],"mapped",[971]],[[981,981],"mapped",[966]],[[982,982],"mapped",[960]],[[983,983],"valid"],[[984,984],"mapped",[985]],[[985,985],"valid"],[[986,986],"mapped",[987]],[[987,987],"valid"],[[988,988],"mapped",[989]],[[989,989],"valid"],[[990,990],"mapped",[991]],[[991,991],"valid"],[[992,992],"mapped",[993]],[[993,993],"valid"],[[994,994],"mapped",[995]],[[995,995],"valid"],[[996,996],"mapped",[997]],[[997,997],"valid"],[[998,998],"mapped",[999]],[[999,999],"valid"],[[1000,1000],"mapped",[1001]],[[1001,1001],"valid"],[[1002,1002],"mapped",[1003]],[[1003,1003],"valid"],[[1004,1004],"mapped",[1005]],[[1005,1005],"valid"],[[1006,1006],"mapped",[100
7]],[[1007,1007],"valid"],[[1008,1008],"mapped",[954]],[[1009,1009],"mapped",[961]],[[1010,1010],"mapped",[963]],[[1011,1011],"valid"],[[1012,1012],"mapped",[952]],[[1013,1013],"mapped",[949]],[[1014,1014],"valid",[],"NV8"],[[1015,1015],"mapped",[1016]],[[1016,1016],"valid"],[[1017,1017],"mapped",[963]],[[1018,1018],"mapped",[1019]],[[1019,1019],"valid"],[[1020,1020],"valid"],[[1021,1021],"mapped",[891]],[[1022,1022],"mapped",[892]],[[1023,1023],"mapped",[893]],[[1024,1024],"mapped",[1104]],[[1025,1025],"mapped",[1105]],[[1026,1026],"mapped",[1106]],[[1027,1027],"mapped",[1107]],[[1028,1028],"mapped",[1108]],[[1029,1029],"mapped",[1109]],[[1030,1030],"mapped",[1110]],[[1031,1031],"mapped",[1111]],[[1032,1032],"mapped",[1112]],[[1033,1033],"mapped",[1113]],[[1034,1034],"mapped",[1114]],[[1035,1035],"mapped",[1115]],[[1036,1036],"mapped",[1116]],[[1037,1037],"mapped",[1117]],[[1038,1038],"mapped",[1118]],[[1039,1039],"mapped",[1119]],[[1040,1040],"mapped",[1072]],[[1041,1041],"mapped",[1073]],[[1042,1042],"mapped",[1074]],[[1043,1043],"mapped",[1075]],[[1044,1044],"mapped",[1076]],[[1045,1045],"mapped",[1077]],[[1046,1046],"mapped",[1078]],[[1047,1047],"mapped",[1079]],[[1048,1048],"mapped",[1080]],[[1049,1049],"mapped",[1081]],[[1050,1050],"mapped",[1082]],[[1051,1051],"mapped",[1083]],[[1052,1052],"mapped",[1084]],[[1053,1053],"mapped",[1085]],[[1054,1054],"mapped",[1086]],[[1055,1055],"mapped",[1087]],[[1056,1056],"mapped",[1088]],[[1057,1057],"mapped",[1089]],[[1058,1058],"mapped",[1090]],[[1059,1059],"mapped",[1091]],[[1060,1060],"mapped",[1092]],[[1061,1061],"mapped",[1093]],[[1062,1062],"mapped",[1094]],[[1063,1063],"mapped",[1095]],[[1064,1064],"mapped",[1096]],[[1065,1065],"mapped",[1097]],[[1066,1066],"mapped",[1098]],[[1067,1067],"mapped",[1099]],[[1068,1068],"mapped",[1100]],[[1069,1069],"mapped",[1101]],[[1070,1070],"mapped",[1102]],[[1071,1071],"mapped",[1103]],[[1072,1103],"valid"],[[1104,1104],"valid"],[[1105,1116],"valid"],[[1117,1117],"valid"],[[1118,1119],"valid"],[[1120,1120],"mapped",[1121]],[[1121,1121],"valid"],[[1122,1122],"mapped",[1123]],[[1123,1123],"valid"],[[1124,1124],"mapped",[1125]],[[1125,1125],"valid"],[[1126,1126],"mapped",[1127]],[[1127,1127],"valid"],[[1128,1128],"mapped",[1129]],[[1129,1129],"valid"],[[1130,1130],"mapped",[1131]],[[1131,1131],"valid"],[[1132,1132],"mapped",[1133]],[[1133,1133],"valid"],[[1134,1134],"mapped",[1135]],[[1135,1135],"valid"],[[1136,1136],"mapped",[1137]],[[1137,1137],"valid"],[[1138,1138],"mapped",[1139]],[[1139,1139],"valid"],[[1140,1140],"mapped",[1141]],[[1141,1141],"valid"],[[1142,1142],"mapped",[1143]],[[1143,1143],"valid"],[[1144,1144],"mapped",[1145]],[[1145,1145],"valid"],[[1146,1146],"mapped",[1147]],[[1147,1147],"valid"],[[1148,1148],"mapped",[1149]],[[1149,1149],"valid"],[[1150,1150],"mapped",[1151]],[[1151,1151],"valid"],[[1152,1152],"mapped",[1153]],[[1153,1153],"valid"],[[1154,1154],"valid",[],"NV8"],[[1155,1158],"valid"],[[1159,1159],"valid"],[[1160,1161],"valid",[],"NV8"],[[1162,1162],"mapped",[1163]],[[1163,1163],"valid"],[[1164,1164],"mapped",[1165]],[[1165,1165],"valid"],[[1166,1166],"mapped",[1167]],[[1167,1167],"valid"],[[1168,1168],"mapped",[1169]],[[1169,1169],"valid"],[[1170,1170],"mapped",[1171]],[[1171,1171],"valid"],[[1172,1172],"mapped",[1173]],[[1173,1173],"valid"],[[1174,1174],"mapped",[1175]],[[1175,1175],"valid"],[[1176,1176],"mapped",[1177]],[[1177,1177],"valid"],[[1178,1178],"mapped",[1179]],[[1179,1179],"valid"],[[1180,1180],"mapped",[1181]],[[1181,1181],"valid"],[[1182,1182],"mapped",[1183]]
,[[1183,1183],"valid"],[[1184,1184],"mapped",[1185]],[[1185,1185],"valid"],[[1186,1186],"mapped",[1187]],[[1187,1187],"valid"],[[1188,1188],"mapped",[1189]],[[1189,1189],"valid"],[[1190,1190],"mapped",[1191]],[[1191,1191],"valid"],[[1192,1192],"mapped",[1193]],[[1193,1193],"valid"],[[1194,1194],"mapped",[1195]],[[1195,1195],"valid"],[[1196,1196],"mapped",[1197]],[[1197,1197],"valid"],[[1198,1198],"mapped",[1199]],[[1199,1199],"valid"],[[1200,1200],"mapped",[1201]],[[1201,1201],"valid"],[[1202,1202],"mapped",[1203]],[[1203,1203],"valid"],[[1204,1204],"mapped",[1205]],[[1205,1205],"valid"],[[1206,1206],"mapped",[1207]],[[1207,1207],"valid"],[[1208,1208],"mapped",[1209]],[[1209,1209],"valid"],[[1210,1210],"mapped",[1211]],[[1211,1211],"valid"],[[1212,1212],"mapped",[1213]],[[1213,1213],"valid"],[[1214,1214],"mapped",[1215]],[[1215,1215],"valid"],[[1216,1216],"disallowed"],[[1217,1217],"mapped",[1218]],[[1218,1218],"valid"],[[1219,1219],"mapped",[1220]],[[1220,1220],"valid"],[[1221,1221],"mapped",[1222]],[[1222,1222],"valid"],[[1223,1223],"mapped",[1224]],[[1224,1224],"valid"],[[1225,1225],"mapped",[1226]],[[1226,1226],"valid"],[[1227,1227],"mapped",[1228]],[[1228,1228],"valid"],[[1229,1229],"mapped",[1230]],[[1230,1230],"valid"],[[1231,1231],"valid"],[[1232,1232],"mapped",[1233]],[[1233,1233],"valid"],[[1234,1234],"mapped",[1235]],[[1235,1235],"valid"],[[1236,1236],"mapped",[1237]],[[1237,1237],"valid"],[[1238,1238],"mapped",[1239]],[[1239,1239],"valid"],[[1240,1240],"mapped",[1241]],[[1241,1241],"valid"],[[1242,1242],"mapped",[1243]],[[1243,1243],"valid"],[[1244,1244],"mapped",[1245]],[[1245,1245],"valid"],[[1246,1246],"mapped",[1247]],[[1247,1247],"valid"],[[1248,1248],"mapped",[1249]],[[1249,1249],"valid"],[[1250,1250],"mapped",[1251]],[[1251,1251],"valid"],[[1252,1252],"mapped",[1253]],[[1253,1253],"valid"],[[1254,1254],"mapped",[1255]],[[1255,1255],"valid"],[[1256,1256],"mapped",[1257]],[[1257,1257],"valid"],[[1258,1258],"mapped",[1259]],[[1259,1259],"valid"],[[1260,1260],"mapped",[1261]],[[1261,1261],"valid"],[[1262,1262],"mapped",[1263]],[[1263,1263],"valid"],[[1264,1264],"mapped",[1265]],[[1265,1265],"valid"],[[1266,1266],"mapped",[1267]],[[1267,1267],"valid"],[[1268,1268],"mapped",[1269]],[[1269,1269],"valid"],[[1270,1270],"mapped",[1271]],[[1271,1271],"valid"],[[1272,1272],"mapped",[1273]],[[1273,1273],"valid"],[[1274,1274],"mapped",[1275]],[[1275,1275],"valid"],[[1276,1276],"mapped",[1277]],[[1277,1277],"valid"],[[1278,1278],"mapped",[1279]],[[1279,1279],"valid"],[[1280,1280],"mapped",[1281]],[[1281,1281],"valid"],[[1282,1282],"mapped",[1283]],[[1283,1283],"valid"],[[1284,1284],"mapped",[1285]],[[1285,1285],"valid"],[[1286,1286],"mapped",[1287]],[[1287,1287],"valid"],[[1288,1288],"mapped",[1289]],[[1289,1289],"valid"],[[1290,1290],"mapped",[1291]],[[1291,1291],"valid"],[[1292,1292],"mapped",[1293]],[[1293,1293],"valid"],[[1294,1294],"mapped",[1295]],[[1295,1295],"valid"],[[1296,1296],"mapped",[1297]],[[1297,1297],"valid"],[[1298,1298],"mapped",[1299]],[[1299,1299],"valid"],[[1300,1300],"mapped",[1301]],[[1301,1301],"valid"],[[1302,1302],"mapped",[1303]],[[1303,1303],"valid"],[[1304,1304],"mapped",[1305]],[[1305,1305],"valid"],[[1306,1306],"mapped",[1307]],[[1307,1307],"valid"],[[1308,1308],"mapped",[1309]],[[1309,1309],"valid"],[[1310,1310],"mapped",[1311]],[[1311,1311],"valid"],[[1312,1312],"mapped",[1313]],[[1313,1313],"valid"],[[1314,1314],"mapped",[1315]],[[1315,1315],"valid"],[[1316,1316],"mapped",[1317]],[[1317,1317],"valid"],[[1318,1318],"mapped",[1319]],[[1319,1319],"valid"]
,[[1320,1320],"mapped",[1321]],[[1321,1321],"valid"],[[1322,1322],"mapped",[1323]],[[1323,1323],"valid"],[[1324,1324],"mapped",[1325]],[[1325,1325],"valid"],[[1326,1326],"mapped",[1327]],[[1327,1327],"valid"],[[1328,1328],"disallowed"],[[1329,1329],"mapped",[1377]],[[1330,1330],"mapped",[1378]],[[1331,1331],"mapped",[1379]],[[1332,1332],"mapped",[1380]],[[1333,1333],"mapped",[1381]],[[1334,1334],"mapped",[1382]],[[1335,1335],"mapped",[1383]],[[1336,1336],"mapped",[1384]],[[1337,1337],"mapped",[1385]],[[1338,1338],"mapped",[1386]],[[1339,1339],"mapped",[1387]],[[1340,1340],"mapped",[1388]],[[1341,1341],"mapped",[1389]],[[1342,1342],"mapped",[1390]],[[1343,1343],"mapped",[1391]],[[1344,1344],"mapped",[1392]],[[1345,1345],"mapped",[1393]],[[1346,1346],"mapped",[1394]],[[1347,1347],"mapped",[1395]],[[1348,1348],"mapped",[1396]],[[1349,1349],"mapped",[1397]],[[1350,1350],"mapped",[1398]],[[1351,1351],"mapped",[1399]],[[1352,1352],"mapped",[1400]],[[1353,1353],"mapped",[1401]],[[1354,1354],"mapped",[1402]],[[1355,1355],"mapped",[1403]],[[1356,1356],"mapped",[1404]],[[1357,1357],"mapped",[1405]],[[1358,1358],"mapped",[1406]],[[1359,1359],"mapped",[1407]],[[1360,1360],"mapped",[1408]],[[1361,1361],"mapped",[1409]],[[1362,1362],"mapped",[1410]],[[1363,1363],"mapped",[1411]],[[1364,1364],"mapped",[1412]],[[1365,1365],"mapped",[1413]],[[1366,1366],"mapped",[1414]],[[1367,1368],"disallowed"],[[1369,1369],"valid"],[[1370,1375],"valid",[],"NV8"],[[1376,1376],"disallowed"],[[1377,1414],"valid"],[[1415,1415],"mapped",[1381,1410]],[[1416,1416],"disallowed"],[[1417,1417],"valid",[],"NV8"],[[1418,1418],"valid",[],"NV8"],[[1419,1420],"disallowed"],[[1421,1422],"valid",[],"NV8"],[[1423,1423],"valid",[],"NV8"],[[1424,1424],"disallowed"],[[1425,1441],"valid"],[[1442,1442],"valid"],[[1443,1455],"valid"],[[1456,1465],"valid"],[[1466,1466],"valid"],[[1467,1469],"valid"],[[1470,1470],"valid",[],"NV8"],[[1471,1471],"valid"],[[1472,1472],"valid",[],"NV8"],[[1473,1474],"valid"],[[1475,1475],"valid",[],"NV8"],[[1476,1476],"valid"],[[1477,1477],"valid"],[[1478,1478],"valid",[],"NV8"],[[1479,1479],"valid"],[[1480,1487],"disallowed"],[[1488,1514],"valid"],[[1515,1519],"disallowed"],[[1520,1524],"valid"],[[1525,1535],"disallowed"],[[1536,1539],"disallowed"],[[1540,1540],"disallowed"],[[1541,1541],"disallowed"],[[1542,1546],"valid",[],"NV8"],[[1547,1547],"valid",[],"NV8"],[[1548,1548],"valid",[],"NV8"],[[1549,1551],"valid",[],"NV8"],[[1552,1557],"valid"],[[1558,1562],"valid"],[[1563,1563],"valid",[],"NV8"],[[1564,1564],"disallowed"],[[1565,1565],"disallowed"],[[1566,1566],"valid",[],"NV8"],[[1567,1567],"valid",[],"NV8"],[[1568,1568],"valid"],[[1569,1594],"valid"],[[1595,1599],"valid"],[[1600,1600],"valid",[],"NV8"],[[1601,1618],"valid"],[[1619,1621],"valid"],[[1622,1624],"valid"],[[1625,1630],"valid"],[[1631,1631],"valid"],[[1632,1641],"valid"],[[1642,1645],"valid",[],"NV8"],[[1646,1647],"valid"],[[1648,1652],"valid"],[[1653,1653],"mapped",[1575,1652]],[[1654,1654],"mapped",[1608,1652]],[[1655,1655],"mapped",[1735,1652]],[[1656,1656],"mapped",[1610,1652]],[[1657,1719],"valid"],[[1720,1721],"valid"],[[1722,1726],"valid"],[[1727,1727],"valid"],[[1728,1742],"valid"],[[1743,1743],"valid"],[[1744,1747],"valid"],[[1748,1748],"valid",[],"NV8"],[[1749,1756],"valid"],[[1757,1757],"disallowed"],[[1758,1758],"valid",[],"NV8"],[[1759,1768],"valid"],[[1769,1769],"valid",[],"NV8"],[[1770,1773],"valid"],[[1774,1775],"valid"],[[1776,1785],"valid"],[[1786,1790],"valid"],[[1791,1791],"valid"],[[1792,1805],"valid",[],"NV8"],[[1806,1806],"disall
owed"],[[1807,1807],"disallowed"],[[1808,1836],"valid"],[[1837,1839],"valid"],[[1840,1866],"valid"],[[1867,1868],"disallowed"],[[1869,1871],"valid"],[[1872,1901],"valid"],[[1902,1919],"valid"],[[1920,1968],"valid"],[[1969,1969],"valid"],[[1970,1983],"disallowed"],[[1984,2037],"valid"],[[2038,2042],"valid",[],"NV8"],[[2043,2047],"disallowed"],[[2048,2093],"valid"],[[2094,2095],"disallowed"],[[2096,2110],"valid",[],"NV8"],[[2111,2111],"disallowed"],[[2112,2139],"valid"],[[2140,2141],"disallowed"],[[2142,2142],"valid",[],"NV8"],[[2143,2207],"disallowed"],[[2208,2208],"valid"],[[2209,2209],"valid"],[[2210,2220],"valid"],[[2221,2226],"valid"],[[2227,2228],"valid"],[[2229,2274],"disallowed"],[[2275,2275],"valid"],[[2276,2302],"valid"],[[2303,2303],"valid"],[[2304,2304],"valid"],[[2305,2307],"valid"],[[2308,2308],"valid"],[[2309,2361],"valid"],[[2362,2363],"valid"],[[2364,2381],"valid"],[[2382,2382],"valid"],[[2383,2383],"valid"],[[2384,2388],"valid"],[[2389,2389],"valid"],[[2390,2391],"valid"],[[2392,2392],"mapped",[2325,2364]],[[2393,2393],"mapped",[2326,2364]],[[2394,2394],"mapped",[2327,2364]],[[2395,2395],"mapped",[2332,2364]],[[2396,2396],"mapped",[2337,2364]],[[2397,2397],"mapped",[2338,2364]],[[2398,2398],"mapped",[2347,2364]],[[2399,2399],"mapped",[2351,2364]],[[2400,2403],"valid"],[[2404,2405],"valid",[],"NV8"],[[2406,2415],"valid"],[[2416,2416],"valid",[],"NV8"],[[2417,2418],"valid"],[[2419,2423],"valid"],[[2424,2424],"valid"],[[2425,2426],"valid"],[[2427,2428],"valid"],[[2429,2429],"valid"],[[2430,2431],"valid"],[[2432,2432],"valid"],[[2433,2435],"valid"],[[2436,2436],"disallowed"],[[2437,2444],"valid"],[[2445,2446],"disallowed"],[[2447,2448],"valid"],[[2449,2450],"disallowed"],[[2451,2472],"valid"],[[2473,2473],"disallowed"],[[2474,2480],"valid"],[[2481,2481],"disallowed"],[[2482,2482],"valid"],[[2483,2485],"disallowed"],[[2486,2489],"valid"],[[2490,2491],"disallowed"],[[2492,2492],"valid"],[[2493,2493],"valid"],[[2494,2500],"valid"],[[2501,2502],"disallowed"],[[2503,2504],"valid"],[[2505,2506],"disallowed"],[[2507,2509],"valid"],[[2510,2510],"valid"],[[2511,2518],"disallowed"],[[2519,2519],"valid"],[[2520,2523],"disallowed"],[[2524,2524],"mapped",[2465,2492]],[[2525,2525],"mapped",[2466,2492]],[[2526,2526],"disallowed"],[[2527,2527],"mapped",[2479,2492]],[[2528,2531],"valid"],[[2532,2533],"disallowed"],[[2534,2545],"valid"],[[2546,2554],"valid",[],"NV8"],[[2555,2555],"valid",[],"NV8"],[[2556,2560],"disallowed"],[[2561,2561],"valid"],[[2562,2562],"valid"],[[2563,2563],"valid"],[[2564,2564],"disallowed"],[[2565,2570],"valid"],[[2571,2574],"disallowed"],[[2575,2576],"valid"],[[2577,2578],"disallowed"],[[2579,2600],"valid"],[[2601,2601],"disallowed"],[[2602,2608],"valid"],[[2609,2609],"disallowed"],[[2610,2610],"valid"],[[2611,2611],"mapped",[2610,2620]],[[2612,2612],"disallowed"],[[2613,2613],"valid"],[[2614,2614],"mapped",[2616,2620]],[[2615,2615],"disallowed"],[[2616,2617],"valid"],[[2618,2619],"disallowed"],[[2620,2620],"valid"],[[2621,2621],"disallowed"],[[2622,2626],"valid"],[[2627,2630],"disallowed"],[[2631,2632],"valid"],[[2633,2634],"disallowed"],[[2635,2637],"valid"],[[2638,2640],"disallowed"],[[2641,2641],"valid"],[[2642,2648],"disallowed"],[[2649,2649],"mapped",[2582,2620]],[[2650,2650],"mapped",[2583,2620]],[[2651,2651],"mapped",[2588,2620]],[[2652,2652],"valid"],[[2653,2653],"disallowed"],[[2654,2654],"mapped",[2603,2620]],[[2655,2661],"disallowed"],[[2662,2676],"valid"],[[2677,2677],"valid"],[[2678,2688],"disallowed"],[[2689,2691],"valid"],[[2692,2692],"disallowed"],[[2693
,2699],"valid"],[[2700,2700],"valid"],[[2701,2701],"valid"],[[2702,2702],"disallowed"],[[2703,2705],"valid"],[[2706,2706],"disallowed"],[[2707,2728],"valid"],[[2729,2729],"disallowed"],[[2730,2736],"valid"],[[2737,2737],"disallowed"],[[2738,2739],"valid"],[[2740,2740],"disallowed"],[[2741,2745],"valid"],[[2746,2747],"disallowed"],[[2748,2757],"valid"],[[2758,2758],"disallowed"],[[2759,2761],"valid"],[[2762,2762],"disallowed"],[[2763,2765],"valid"],[[2766,2767],"disallowed"],[[2768,2768],"valid"],[[2769,2783],"disallowed"],[[2784,2784],"valid"],[[2785,2787],"valid"],[[2788,2789],"disallowed"],[[2790,2799],"valid"],[[2800,2800],"valid",[],"NV8"],[[2801,2801],"valid",[],"NV8"],[[2802,2808],"disallowed"],[[2809,2809],"valid"],[[2810,2816],"disallowed"],[[2817,2819],"valid"],[[2820,2820],"disallowed"],[[2821,2828],"valid"],[[2829,2830],"disallowed"],[[2831,2832],"valid"],[[2833,2834],"disallowed"],[[2835,2856],"valid"],[[2857,2857],"disallowed"],[[2858,2864],"valid"],[[2865,2865],"disallowed"],[[2866,2867],"valid"],[[2868,2868],"disallowed"],[[2869,2869],"valid"],[[2870,2873],"valid"],[[2874,2875],"disallowed"],[[2876,2883],"valid"],[[2884,2884],"valid"],[[2885,2886],"disallowed"],[[2887,2888],"valid"],[[2889,2890],"disallowed"],[[2891,2893],"valid"],[[2894,2901],"disallowed"],[[2902,2903],"valid"],[[2904,2907],"disallowed"],[[2908,2908],"mapped",[2849,2876]],[[2909,2909],"mapped",[2850,2876]],[[2910,2910],"disallowed"],[[2911,2913],"valid"],[[2914,2915],"valid"],[[2916,2917],"disallowed"],[[2918,2927],"valid"],[[2928,2928],"valid",[],"NV8"],[[2929,2929],"valid"],[[2930,2935],"valid",[],"NV8"],[[2936,2945],"disallowed"],[[2946,2947],"valid"],[[2948,2948],"disallowed"],[[2949,2954],"valid"],[[2955,2957],"disallowed"],[[2958,2960],"valid"],[[2961,2961],"disallowed"],[[2962,2965],"valid"],[[2966,2968],"disallowed"],[[2969,2970],"valid"],[[2971,2971],"disallowed"],[[2972,2972],"valid"],[[2973,2973],"disallowed"],[[2974,2975],"valid"],[[2976,2978],"disallowed"],[[2979,2980],"valid"],[[2981,2983],"disallowed"],[[2984,2986],"valid"],[[2987,2989],"disallowed"],[[2990,2997],"valid"],[[2998,2998],"valid"],[[2999,3001],"valid"],[[3002,3005],"disallowed"],[[3006,3010],"valid"],[[3011,3013],"disallowed"],[[3014,3016],"valid"],[[3017,3017],"disallowed"],[[3018,3021],"valid"],[[3022,3023],"disallowed"],[[3024,3024],"valid"],[[3025,3030],"disallowed"],[[3031,3031],"valid"],[[3032,3045],"disallowed"],[[3046,3046],"valid"],[[3047,3055],"valid"],[[3056,3058],"valid",[],"NV8"],[[3059,3066],"valid",[],"NV8"],[[3067,3071],"disallowed"],[[3072,3072],"valid"],[[3073,3075],"valid"],[[3076,3076],"disallowed"],[[3077,3084],"valid"],[[3085,3085],"disallowed"],[[3086,3088],"valid"],[[3089,3089],"disallowed"],[[3090,3112],"valid"],[[3113,3113],"disallowed"],[[3114,3123],"valid"],[[3124,3124],"valid"],[[3125,3129],"valid"],[[3130,3132],"disallowed"],[[3133,3133],"valid"],[[3134,3140],"valid"],[[3141,3141],"disallowed"],[[3142,3144],"valid"],[[3145,3145],"disallowed"],[[3146,3149],"valid"],[[3150,3156],"disallowed"],[[3157,3158],"valid"],[[3159,3159],"disallowed"],[[3160,3161],"valid"],[[3162,3162],"valid"],[[3163,3167],"disallowed"],[[3168,3169],"valid"],[[3170,3171],"valid"],[[3172,3173],"disallowed"],[[3174,3183],"valid"],[[3184,3191],"disallowed"],[[3192,3199],"valid",[],"NV8"],[[3200,3200],"disallowed"],[[3201,3201],"valid"],[[3202,3203],"valid"],[[3204,3204],"disallowed"],[[3205,3212],"valid"],[[3213,3213],"disallowed"],[[3214,3216],"valid"],[[3217,3217],"disallowed"],[[3218,3240],"valid"],[[3241,3241],"disallowed"],[[32
42,3251],"valid"],[[3252,3252],"disallowed"],[[3253,3257],"valid"],[[3258,3259],"disallowed"],[[3260,3261],"valid"],[[3262,3268],"valid"],[[3269,3269],"disallowed"],[[3270,3272],"valid"],[[3273,3273],"disallowed"],[[3274,3277],"valid"],[[3278,3284],"disallowed"],[[3285,3286],"valid"],[[3287,3293],"disallowed"],[[3294,3294],"valid"],[[3295,3295],"disallowed"],[[3296,3297],"valid"],[[3298,3299],"valid"],[[3300,3301],"disallowed"],[[3302,3311],"valid"],[[3312,3312],"disallowed"],[[3313,3314],"valid"],[[3315,3328],"disallowed"],[[3329,3329],"valid"],[[3330,3331],"valid"],[[3332,3332],"disallowed"],[[3333,3340],"valid"],[[3341,3341],"disallowed"],[[3342,3344],"valid"],[[3345,3345],"disallowed"],[[3346,3368],"valid"],[[3369,3369],"valid"],[[3370,3385],"valid"],[[3386,3386],"valid"],[[3387,3388],"disallowed"],[[3389,3389],"valid"],[[3390,3395],"valid"],[[3396,3396],"valid"],[[3397,3397],"disallowed"],[[3398,3400],"valid"],[[3401,3401],"disallowed"],[[3402,3405],"valid"],[[3406,3406],"valid"],[[3407,3414],"disallowed"],[[3415,3415],"valid"],[[3416,3422],"disallowed"],[[3423,3423],"valid"],[[3424,3425],"valid"],[[3426,3427],"valid"],[[3428,3429],"disallowed"],[[3430,3439],"valid"],[[3440,3445],"valid",[],"NV8"],[[3446,3448],"disallowed"],[[3449,3449],"valid",[],"NV8"],[[3450,3455],"valid"],[[3456,3457],"disallowed"],[[3458,3459],"valid"],[[3460,3460],"disallowed"],[[3461,3478],"valid"],[[3479,3481],"disallowed"],[[3482,3505],"valid"],[[3506,3506],"disallowed"],[[3507,3515],"valid"],[[3516,3516],"disallowed"],[[3517,3517],"valid"],[[3518,3519],"disallowed"],[[3520,3526],"valid"],[[3527,3529],"disallowed"],[[3530,3530],"valid"],[[3531,3534],"disallowed"],[[3535,3540],"valid"],[[3541,3541],"disallowed"],[[3542,3542],"valid"],[[3543,3543],"disallowed"],[[3544,3551],"valid"],[[3552,3557],"disallowed"],[[3558,3567],"valid"],[[3568,3569],"disallowed"],[[3570,3571],"valid"],[[3572,3572],"valid",[],"NV8"],[[3573,3584],"disallowed"],[[3585,3634],"valid"],[[3635,3635],"mapped",[3661,3634]],[[3636,3642],"valid"],[[3643,3646],"disallowed"],[[3647,3647],"valid",[],"NV8"],[[3648,3662],"valid"],[[3663,3663],"valid",[],"NV8"],[[3664,3673],"valid"],[[3674,3675],"valid",[],"NV8"],[[3676,3712],"disallowed"],[[3713,3714],"valid"],[[3715,3715],"disallowed"],[[3716,3716],"valid"],[[3717,3718],"disallowed"],[[3719,3720],"valid"],[[3721,3721],"disallowed"],[[3722,3722],"valid"],[[3723,3724],"disallowed"],[[3725,3725],"valid"],[[3726,3731],"disallowed"],[[3732,3735],"valid"],[[3736,3736],"disallowed"],[[3737,3743],"valid"],[[3744,3744],"disallowed"],[[3745,3747],"valid"],[[3748,3748],"disallowed"],[[3749,3749],"valid"],[[3750,3750],"disallowed"],[[3751,3751],"valid"],[[3752,3753],"disallowed"],[[3754,3755],"valid"],[[3756,3756],"disallowed"],[[3757,3762],"valid"],[[3763,3763],"mapped",[3789,3762]],[[3764,3769],"valid"],[[3770,3770],"disallowed"],[[3771,3773],"valid"],[[3774,3775],"disallowed"],[[3776,3780],"valid"],[[3781,3781],"disallowed"],[[3782,3782],"valid"],[[3783,3783],"disallowed"],[[3784,3789],"valid"],[[3790,3791],"disallowed"],[[3792,3801],"valid"],[[3802,3803],"disallowed"],[[3804,3804],"mapped",[3755,3737]],[[3805,3805],"mapped",[3755,3745]],[[3806,3807],"valid"],[[3808,3839],"disallowed"],[[3840,3840],"valid"],[[3841,3850],"valid",[],"NV8"],[[3851,3851],"valid"],[[3852,3852],"mapped",[3851]],[[3853,3863],"valid",[],"NV8"],[[3864,3865],"valid"],[[3866,3871],"valid",[],"NV8"],[[3872,3881],"valid"],[[3882,3892],"valid",[],"NV8"],[[3893,3893],"valid"],[[3894,3894],"valid",[],"NV8"],[[3895,3895],"valid"],[[3896,3896
],"valid",[],"NV8"],[[3897,3897],"valid"],[[3898,3901],"valid",[],"NV8"],[[3902,3906],"valid"],[[3907,3907],"mapped",[3906,4023]],[[3908,3911],"valid"],[[3912,3912],"disallowed"],[[3913,3916],"valid"],[[3917,3917],"mapped",[3916,4023]],[[3918,3921],"valid"],[[3922,3922],"mapped",[3921,4023]],[[3923,3926],"valid"],[[3927,3927],"mapped",[3926,4023]],[[3928,3931],"valid"],[[3932,3932],"mapped",[3931,4023]],[[3933,3944],"valid"],[[3945,3945],"mapped",[3904,4021]],[[3946,3946],"valid"],[[3947,3948],"valid"],[[3949,3952],"disallowed"],[[3953,3954],"valid"],[[3955,3955],"mapped",[3953,3954]],[[3956,3956],"valid"],[[3957,3957],"mapped",[3953,3956]],[[3958,3958],"mapped",[4018,3968]],[[3959,3959],"mapped",[4018,3953,3968]],[[3960,3960],"mapped",[4019,3968]],[[3961,3961],"mapped",[4019,3953,3968]],[[3962,3968],"valid"],[[3969,3969],"mapped",[3953,3968]],[[3970,3972],"valid"],[[3973,3973],"valid",[],"NV8"],[[3974,3979],"valid"],[[3980,3983],"valid"],[[3984,3986],"valid"],[[3987,3987],"mapped",[3986,4023]],[[3988,3989],"valid"],[[3990,3990],"valid"],[[3991,3991],"valid"],[[3992,3992],"disallowed"],[[3993,3996],"valid"],[[3997,3997],"mapped",[3996,4023]],[[3998,4001],"valid"],[[4002,4002],"mapped",[4001,4023]],[[4003,4006],"valid"],[[4007,4007],"mapped",[4006,4023]],[[4008,4011],"valid"],[[4012,4012],"mapped",[4011,4023]],[[4013,4013],"valid"],[[4014,4016],"valid"],[[4017,4023],"valid"],[[4024,4024],"valid"],[[4025,4025],"mapped",[3984,4021]],[[4026,4028],"valid"],[[4029,4029],"disallowed"],[[4030,4037],"valid",[],"NV8"],[[4038,4038],"valid"],[[4039,4044],"valid",[],"NV8"],[[4045,4045],"disallowed"],[[4046,4046],"valid",[],"NV8"],[[4047,4047],"valid",[],"NV8"],[[4048,4049],"valid",[],"NV8"],[[4050,4052],"valid",[],"NV8"],[[4053,4056],"valid",[],"NV8"],[[4057,4058],"valid",[],"NV8"],[[4059,4095],"disallowed"],[[4096,4129],"valid"],[[4130,4130],"valid"],[[4131,4135],"valid"],[[4136,4136],"valid"],[[4137,4138],"valid"],[[4139,4139],"valid"],[[4140,4146],"valid"],[[4147,4149],"valid"],[[4150,4153],"valid"],[[4154,4159],"valid"],[[4160,4169],"valid"],[[4170,4175],"valid",[],"NV8"],[[4176,4185],"valid"],[[4186,4249],"valid"],[[4250,4253],"valid"],[[4254,4255],"valid",[],"NV8"],[[4256,4293],"disallowed"],[[4294,4294],"disallowed"],[[4295,4295],"mapped",[11559]],[[4296,4300],"disallowed"],[[4301,4301],"mapped",[11565]],[[4302,4303],"disallowed"],[[4304,4342],"valid"],[[4343,4344],"valid"],[[4345,4346],"valid"],[[4347,4347],"valid",[],"NV8"],[[4348,4348],"mapped",[4316]],[[4349,4351],"valid"],[[4352,4441],"valid",[],"NV8"],[[4442,4446],"valid",[],"NV8"],[[4447,4448],"disallowed"],[[4449,4514],"valid",[],"NV8"],[[4515,4519],"valid",[],"NV8"],[[4520,4601],"valid",[],"NV8"],[[4602,4607],"valid",[],"NV8"],[[4608,4614],"valid"],[[4615,4615],"valid"],[[4616,4678],"valid"],[[4679,4679],"valid"],[[4680,4680],"valid"],[[4681,4681],"disallowed"],[[4682,4685],"valid"],[[4686,4687],"disallowed"],[[4688,4694],"valid"],[[4695,4695],"disallowed"],[[4696,4696],"valid"],[[4697,4697],"disallowed"],[[4698,4701],"valid"],[[4702,4703],"disallowed"],[[4704,4742],"valid"],[[4743,4743],"valid"],[[4744,4744],"valid"],[[4745,4745],"disallowed"],[[4746,4749],"valid"],[[4750,4751],"disallowed"],[[4752,4782],"valid"],[[4783,4783],"valid"],[[4784,4784],"valid"],[[4785,4785],"disallowed"],[[4786,4789],"valid"],[[4790,4791],"disallowed"],[[4792,4798],"valid"],[[4799,4799],"disallowed"],[[4800,4800],"valid"],[[4801,4801],"disallowed"],[[4802,4805],"valid"],[[4806,4807],"disallowed"],[[4808,4814],"valid"],[[4815,4815],"valid"],[[4816,4822],"vali
d"],[[4823,4823],"disallowed"],[[4824,4846],"valid"],[[4847,4847],"valid"],[[4848,4878],"valid"],[[4879,4879],"valid"],[[4880,4880],"valid"],[[4881,4881],"disallowed"],[[4882,4885],"valid"],[[4886,4887],"disallowed"],[[4888,4894],"valid"],[[4895,4895],"valid"],[[4896,4934],"valid"],[[4935,4935],"valid"],[[4936,4954],"valid"],[[4955,4956],"disallowed"],[[4957,4958],"valid"],[[4959,4959],"valid"],[[4960,4960],"valid",[],"NV8"],[[4961,4988],"valid",[],"NV8"],[[4989,4991],"disallowed"],[[4992,5007],"valid"],[[5008,5017],"valid",[],"NV8"],[[5018,5023],"disallowed"],[[5024,5108],"valid"],[[5109,5109],"valid"],[[5110,5111],"disallowed"],[[5112,5112],"mapped",[5104]],[[5113,5113],"mapped",[5105]],[[5114,5114],"mapped",[5106]],[[5115,5115],"mapped",[5107]],[[5116,5116],"mapped",[5108]],[[5117,5117],"mapped",[5109]],[[5118,5119],"disallowed"],[[5120,5120],"valid",[],"NV8"],[[5121,5740],"valid"],[[5741,5742],"valid",[],"NV8"],[[5743,5750],"valid"],[[5751,5759],"valid"],[[5760,5760],"disallowed"],[[5761,5786],"valid"],[[5787,5788],"valid",[],"NV8"],[[5789,5791],"disallowed"],[[5792,5866],"valid"],[[5867,5872],"valid",[],"NV8"],[[5873,5880],"valid"],[[5881,5887],"disallowed"],[[5888,5900],"valid"],[[5901,5901],"disallowed"],[[5902,5908],"valid"],[[5909,5919],"disallowed"],[[5920,5940],"valid"],[[5941,5942],"valid",[],"NV8"],[[5943,5951],"disallowed"],[[5952,5971],"valid"],[[5972,5983],"disallowed"],[[5984,5996],"valid"],[[5997,5997],"disallowed"],[[5998,6000],"valid"],[[6001,6001],"disallowed"],[[6002,6003],"valid"],[[6004,6015],"disallowed"],[[6016,6067],"valid"],[[6068,6069],"disallowed"],[[6070,6099],"valid"],[[6100,6102],"valid",[],"NV8"],[[6103,6103],"valid"],[[6104,6107],"valid",[],"NV8"],[[6108,6108],"valid"],[[6109,6109],"valid"],[[6110,6111],"disallowed"],[[6112,6121],"valid"],[[6122,6127],"disallowed"],[[6128,6137],"valid",[],"NV8"],[[6138,6143],"disallowed"],[[6144,6149],"valid",[],"NV8"],[[6150,6150],"disallowed"],[[6151,6154],"valid",[],"NV8"],[[6155,6157],"ignored"],[[6158,6158],"disallowed"],[[6159,6159],"disallowed"],[[6160,6169],"valid"],[[6170,6175],"disallowed"],[[6176,6263],"valid"],[[6264,6271],"disallowed"],[[6272,6313],"valid"],[[6314,6314],"valid"],[[6315,6319],"disallowed"],[[6320,6389],"valid"],[[6390,6399],"disallowed"],[[6400,6428],"valid"],[[6429,6430],"valid"],[[6431,6431],"disallowed"],[[6432,6443],"valid"],[[6444,6447],"disallowed"],[[6448,6459],"valid"],[[6460,6463],"disallowed"],[[6464,6464],"valid",[],"NV8"],[[6465,6467],"disallowed"],[[6468,6469],"valid",[],"NV8"],[[6470,6509],"valid"],[[6510,6511],"disallowed"],[[6512,6516],"valid"],[[6517,6527],"disallowed"],[[6528,6569],"valid"],[[6570,6571],"valid"],[[6572,6575],"disallowed"],[[6576,6601],"valid"],[[6602,6607],"disallowed"],[[6608,6617],"valid"],[[6618,6618],"valid",[],"XV8"],[[6619,6621],"disallowed"],[[6622,6623],"valid",[],"NV8"],[[6624,6655],"valid",[],"NV8"],[[6656,6683],"valid"],[[6684,6685],"disallowed"],[[6686,6687],"valid",[],"NV8"],[[6688,6750],"valid"],[[6751,6751],"disallowed"],[[6752,6780],"valid"],[[6781,6782],"disallowed"],[[6783,6793],"valid"],[[6794,6799],"disallowed"],[[6800,6809],"valid"],[[6810,6815],"disallowed"],[[6816,6822],"valid",[],"NV8"],[[6823,6823],"valid"],[[6824,6829],"valid",[],"NV8"],[[6830,6831],"disallowed"],[[6832,6845],"valid"],[[6846,6846],"valid",[],"NV8"],[[6847,6911],"disallowed"],[[6912,6987],"valid"],[[6988,6991],"disallowed"],[[6992,7001],"valid"],[[7002,7018],"valid",[],"NV8"],[[7019,7027],"valid"],[[7028,7036],"valid",[],"NV8"],[[7037,7039],"disallowed"],[[7040,7082],"
valid"],[[7083,7085],"valid"],[[7086,7097],"valid"],[[7098,7103],"valid"],[[7104,7155],"valid"],[[7156,7163],"disallowed"],[[7164,7167],"valid",[],"NV8"],[[7168,7223],"valid"],[[7224,7226],"disallowed"],[[7227,7231],"valid",[],"NV8"],[[7232,7241],"valid"],[[7242,7244],"disallowed"],[[7245,7293],"valid"],[[7294,7295],"valid",[],"NV8"],[[7296,7359],"disallowed"],[[7360,7367],"valid",[],"NV8"],[[7368,7375],"disallowed"],[[7376,7378],"valid"],[[7379,7379],"valid",[],"NV8"],[[7380,7410],"valid"],[[7411,7414],"valid"],[[7415,7415],"disallowed"],[[7416,7417],"valid"],[[7418,7423],"disallowed"],[[7424,7467],"valid"],[[7468,7468],"mapped",[97]],[[7469,7469],"mapped",[230]],[[7470,7470],"mapped",[98]],[[7471,7471],"valid"],[[7472,7472],"mapped",[100]],[[7473,7473],"mapped",[101]],[[7474,7474],"mapped",[477]],[[7475,7475],"mapped",[103]],[[7476,7476],"mapped",[104]],[[7477,7477],"mapped",[105]],[[7478,7478],"mapped",[106]],[[7479,7479],"mapped",[107]],[[7480,7480],"mapped",[108]],[[7481,7481],"mapped",[109]],[[7482,7482],"mapped",[110]],[[7483,7483],"valid"],[[7484,7484],"mapped",[111]],[[7485,7485],"mapped",[547]],[[7486,7486],"mapped",[112]],[[7487,7487],"mapped",[114]],[[7488,7488],"mapped",[116]],[[7489,7489],"mapped",[117]],[[7490,7490],"mapped",[119]],[[7491,7491],"mapped",[97]],[[7492,7492],"mapped",[592]],[[7493,7493],"mapped",[593]],[[7494,7494],"mapped",[7426]],[[7495,7495],"mapped",[98]],[[7496,7496],"mapped",[100]],[[7497,7497],"mapped",[101]],[[7498,7498],"mapped",[601]],[[7499,7499],"mapped",[603]],[[7500,7500],"mapped",[604]],[[7501,7501],"mapped",[103]],[[7502,7502],"valid"],[[7503,7503],"mapped",[107]],[[7504,7504],"mapped",[109]],[[7505,7505],"mapped",[331]],[[7506,7506],"mapped",[111]],[[7507,7507],"mapped",[596]],[[7508,7508],"mapped",[7446]],[[7509,7509],"mapped",[7447]],[[7510,7510],"mapped",[112]],[[7511,7511],"mapped",[116]],[[7512,7512],"mapped",[117]],[[7513,7513],"mapped",[7453]],[[7514,7514],"mapped",[623]],[[7515,7515],"mapped",[118]],[[7516,7516],"mapped",[7461]],[[7517,7517],"mapped",[946]],[[7518,7518],"mapped",[947]],[[7519,7519],"mapped",[948]],[[7520,7520],"mapped",[966]],[[7521,7521],"mapped",[967]],[[7522,7522],"mapped",[105]],[[7523,7523],"mapped",[114]],[[7524,7524],"mapped",[117]],[[7525,7525],"mapped",[118]],[[7526,7526],"mapped",[946]],[[7527,7527],"mapped",[947]],[[7528,7528],"mapped",[961]],[[7529,7529],"mapped",[966]],[[7530,7530],"mapped",[967]],[[7531,7531],"valid"],[[7532,7543],"valid"],[[7544,7544],"mapped",[1085]],[[7545,7578],"valid"],[[7579,7579],"mapped",[594]],[[7580,7580],"mapped",[99]],[[7581,7581],"mapped",[597]],[[7582,7582],"mapped",[240]],[[7583,7583],"mapped",[604]],[[7584,7584],"mapped",[102]],[[7585,7585],"mapped",[607]],[[7586,7586],"mapped",[609]],[[7587,7587],"mapped",[613]],[[7588,7588],"mapped",[616]],[[7589,7589],"mapped",[617]],[[7590,7590],"mapped",[618]],[[7591,7591],"mapped",[7547]],[[7592,7592],"mapped",[669]],[[7593,7593],"mapped",[621]],[[7594,7594],"mapped",[7557]],[[7595,7595],"mapped",[671]],[[7596,7596],"mapped",[625]],[[7597,7597],"mapped",[624]],[[7598,7598],"mapped",[626]],[[7599,7599],"mapped",[627]],[[7600,7600],"mapped",[628]],[[7601,7601],"mapped",[629]],[[7602,7602],"mapped",[632]],[[7603,7603],"mapped",[642]],[[7604,7604],"mapped",[643]],[[7605,7605],"mapped",[427]],[[7606,7606],"mapped",[649]],[[7607,7607],"mapped",[650]],[[7608,7608],"mapped",[7452]],[[7609,7609],"mapped",[651]],[[7610,7610],"mapped",[652]],[[7611,7611],"mapped",[122]],[[7612,7612],"mapped",[656]],[[7613,7613],"mapped",[657]],[[7614,7614],"mappe
d",[658]],[[7615,7615],"mapped",[952]],[[7616,7619],"valid"],[[7620,7626],"valid"],[[7627,7654],"valid"],[[7655,7669],"valid"],[[7670,7675],"disallowed"],[[7676,7676],"valid"],[[7677,7677],"valid"],[[7678,7679],"valid"],[[7680,7680],"mapped",[7681]],[[7681,7681],"valid"],[[7682,7682],"mapped",[7683]],[[7683,7683],"valid"],[[7684,7684],"mapped",[7685]],[[7685,7685],"valid"],[[7686,7686],"mapped",[7687]],[[7687,7687],"valid"],[[7688,7688],"mapped",[7689]],[[7689,7689],"valid"],[[7690,7690],"mapped",[7691]],[[7691,7691],"valid"],[[7692,7692],"mapped",[7693]],[[7693,7693],"valid"],[[7694,7694],"mapped",[7695]],[[7695,7695],"valid"],[[7696,7696],"mapped",[7697]],[[7697,7697],"valid"],[[7698,7698],"mapped",[7699]],[[7699,7699],"valid"],[[7700,7700],"mapped",[7701]],[[7701,7701],"valid"],[[7702,7702],"mapped",[7703]],[[7703,7703],"valid"],[[7704,7704],"mapped",[7705]],[[7705,7705],"valid"],[[7706,7706],"mapped",[7707]],[[7707,7707],"valid"],[[7708,7708],"mapped",[7709]],[[7709,7709],"valid"],[[7710,7710],"mapped",[7711]],[[7711,7711],"valid"],[[7712,7712],"mapped",[7713]],[[7713,7713],"valid"],[[7714,7714],"mapped",[7715]],[[7715,7715],"valid"],[[7716,7716],"mapped",[7717]],[[7717,7717],"valid"],[[7718,7718],"mapped",[7719]],[[7719,7719],"valid"],[[7720,7720],"mapped",[7721]],[[7721,7721],"valid"],[[7722,7722],"mapped",[7723]],[[7723,7723],"valid"],[[7724,7724],"mapped",[7725]],[[7725,7725],"valid"],[[7726,7726],"mapped",[7727]],[[7727,7727],"valid"],[[7728,7728],"mapped",[7729]],[[7729,7729],"valid"],[[7730,7730],"mapped",[7731]],[[7731,7731],"valid"],[[7732,7732],"mapped",[7733]],[[7733,7733],"valid"],[[7734,7734],"mapped",[7735]],[[7735,7735],"valid"],[[7736,7736],"mapped",[7737]],[[7737,7737],"valid"],[[7738,7738],"mapped",[7739]],[[7739,7739],"valid"],[[7740,7740],"mapped",[7741]],[[7741,7741],"valid"],[[7742,7742],"mapped",[7743]],[[7743,7743],"valid"],[[7744,7744],"mapped",[7745]],[[7745,7745],"valid"],[[7746,7746],"mapped",[7747]],[[7747,7747],"valid"],[[7748,7748],"mapped",[7749]],[[7749,7749],"valid"],[[7750,7750],"mapped",[7751]],[[7751,7751],"valid"],[[7752,7752],"mapped",[7753]],[[7753,7753],"valid"],[[7754,7754],"mapped",[7755]],[[7755,7755],"valid"],[[7756,7756],"mapped",[7757]],[[7757,7757],"valid"],[[7758,7758],"mapped",[7759]],[[7759,7759],"valid"],[[7760,7760],"mapped",[7761]],[[7761,7761],"valid"],[[7762,7762],"mapped",[7763]],[[7763,7763],"valid"],[[7764,7764],"mapped",[7765]],[[7765,7765],"valid"],[[7766,7766],"mapped",[7767]],[[7767,7767],"valid"],[[7768,7768],"mapped",[7769]],[[7769,7769],"valid"],[[7770,7770],"mapped",[7771]],[[7771,7771],"valid"],[[7772,7772],"mapped",[7773]],[[7773,7773],"valid"],[[7774,7774],"mapped",[7775]],[[7775,7775],"valid"],[[7776,7776],"mapped",[7777]],[[7777,7777],"valid"],[[7778,7778],"mapped",[7779]],[[7779,7779],"valid"],[[7780,7780],"mapped",[7781]],[[7781,7781],"valid"],[[7782,7782],"mapped",[7783]],[[7783,7783],"valid"],[[7784,7784],"mapped",[7785]],[[7785,7785],"valid"],[[7786,7786],"mapped",[7787]],[[7787,7787],"valid"],[[7788,7788],"mapped",[7789]],[[7789,7789],"valid"],[[7790,7790],"mapped",[7791]],[[7791,7791],"valid"],[[7792,7792],"mapped",[7793]],[[7793,7793],"valid"],[[7794,7794],"mapped",[7795]],[[7795,7795],"valid"],[[7796,7796],"mapped",[7797]],[[7797,7797],"valid"],[[7798,7798],"mapped",[7799]],[[7799,7799],"valid"],[[7800,7800],"mapped",[7801]],[[7801,7801],"valid"],[[7802,7802],"mapped",[7803]],[[7803,7803],"valid"],[[7804,7804],"mapped",[7805]],[[7805,7805],"valid"],[[7806,7806],"mapped",[7807]],[[7807,7807],"valid"],[[7808,
7808],"mapped",[7809]],[[7809,7809],"valid"],[[7810,7810],"mapped",[7811]],[[7811,7811],"valid"],[[7812,7812],"mapped",[7813]],[[7813,7813],"valid"],[[7814,7814],"mapped",[7815]],[[7815,7815],"valid"],[[7816,7816],"mapped",[7817]],[[7817,7817],"valid"],[[7818,7818],"mapped",[7819]],[[7819,7819],"valid"],[[7820,7820],"mapped",[7821]],[[7821,7821],"valid"],[[7822,7822],"mapped",[7823]],[[7823,7823],"valid"],[[7824,7824],"mapped",[7825]],[[7825,7825],"valid"],[[7826,7826],"mapped",[7827]],[[7827,7827],"valid"],[[7828,7828],"mapped",[7829]],[[7829,7833],"valid"],[[7834,7834],"mapped",[97,702]],[[7835,7835],"mapped",[7777]],[[7836,7837],"valid"],[[7838,7838],"mapped",[115,115]],[[7839,7839],"valid"],[[7840,7840],"mapped",[7841]],[[7841,7841],"valid"],[[7842,7842],"mapped",[7843]],[[7843,7843],"valid"],[[7844,7844],"mapped",[7845]],[[7845,7845],"valid"],[[7846,7846],"mapped",[7847]],[[7847,7847],"valid"],[[7848,7848],"mapped",[7849]],[[7849,7849],"valid"],[[7850,7850],"mapped",[7851]],[[7851,7851],"valid"],[[7852,7852],"mapped",[7853]],[[7853,7853],"valid"],[[7854,7854],"mapped",[7855]],[[7855,7855],"valid"],[[7856,7856],"mapped",[7857]],[[7857,7857],"valid"],[[7858,7858],"mapped",[7859]],[[7859,7859],"valid"],[[7860,7860],"mapped",[7861]],[[7861,7861],"valid"],[[7862,7862],"mapped",[7863]],[[7863,7863],"valid"],[[7864,7864],"mapped",[7865]],[[7865,7865],"valid"],[[7866,7866],"mapped",[7867]],[[7867,7867],"valid"],[[7868,7868],"mapped",[7869]],[[7869,7869],"valid"],[[7870,7870],"mapped",[7871]],[[7871,7871],"valid"],[[7872,7872],"mapped",[7873]],[[7873,7873],"valid"],[[7874,7874],"mapped",[7875]],[[7875,7875],"valid"],[[7876,7876],"mapped",[7877]],[[7877,7877],"valid"],[[7878,7878],"mapped",[7879]],[[7879,7879],"valid"],[[7880,7880],"mapped",[7881]],[[7881,7881],"valid"],[[7882,7882],"mapped",[7883]],[[7883,7883],"valid"],[[7884,7884],"mapped",[7885]],[[7885,7885],"valid"],[[7886,7886],"mapped",[7887]],[[7887,7887],"valid"],[[7888,7888],"mapped",[7889]],[[7889,7889],"valid"],[[7890,7890],"mapped",[7891]],[[7891,7891],"valid"],[[7892,7892],"mapped",[7893]],[[7893,7893],"valid"],[[7894,7894],"mapped",[7895]],[[7895,7895],"valid"],[[7896,7896],"mapped",[7897]],[[7897,7897],"valid"],[[7898,7898],"mapped",[7899]],[[7899,7899],"valid"],[[7900,7900],"mapped",[7901]],[[7901,7901],"valid"],[[7902,7902],"mapped",[7903]],[[7903,7903],"valid"],[[7904,7904],"mapped",[7905]],[[7905,7905],"valid"],[[7906,7906],"mapped",[7907]],[[7907,7907],"valid"],[[7908,7908],"mapped",[7909]],[[7909,7909],"valid"],[[7910,7910],"mapped",[7911]],[[7911,7911],"valid"],[[7912,7912],"mapped",[7913]],[[7913,7913],"valid"],[[7914,7914],"mapped",[7915]],[[7915,7915],"valid"],[[7916,7916],"mapped",[7917]],[[7917,7917],"valid"],[[7918,7918],"mapped",[7919]],[[7919,7919],"valid"],[[7920,7920],"mapped",[7921]],[[7921,7921],"valid"],[[7922,7922],"mapped",[7923]],[[7923,7923],"valid"],[[7924,7924],"mapped",[7925]],[[7925,7925],"valid"],[[7926,7926],"mapped",[7927]],[[7927,7927],"valid"],[[7928,7928],"mapped",[7929]],[[7929,7929],"valid"],[[7930,7930],"mapped",[7931]],[[7931,7931],"valid"],[[7932,7932],"mapped",[7933]],[[7933,7933],"valid"],[[7934,7934],"mapped",[7935]],[[7935,7935],"valid"],[[7936,7943],"valid"],[[7944,7944],"mapped",[7936]],[[7945,7945],"mapped",[7937]],[[7946,7946],"mapped",[7938]],[[7947,7947],"mapped",[7939]],[[7948,7948],"mapped",[7940]],[[7949,7949],"mapped",[7941]],[[7950,7950],"mapped",[7942]],[[7951,7951],"mapped",[7943]],[[7952,7957],"valid"],[[7958,7959],"disallowed"],[[7960,7960],"mapped",[7952]],[[7961,7961],"
mapped",[7953]],[[7962,7962],"mapped",[7954]],[[7963,7963],"mapped",[7955]],[[7964,7964],"mapped",[7956]],[[7965,7965],"mapped",[7957]],[[7966,7967],"disallowed"],[[7968,7975],"valid"],[[7976,7976],"mapped",[7968]],[[7977,7977],"mapped",[7969]],[[7978,7978],"mapped",[7970]],[[7979,7979],"mapped",[7971]],[[7980,7980],"mapped",[7972]],[[7981,7981],"mapped",[7973]],[[7982,7982],"mapped",[7974]],[[7983,7983],"mapped",[7975]],[[7984,7991],"valid"],[[7992,7992],"mapped",[7984]],[[7993,7993],"mapped",[7985]],[[7994,7994],"mapped",[7986]],[[7995,7995],"mapped",[7987]],[[7996,7996],"mapped",[7988]],[[7997,7997],"mapped",[7989]],[[7998,7998],"mapped",[7990]],[[7999,7999],"mapped",[7991]],[[8000,8005],"valid"],[[8006,8007],"disallowed"],[[8008,8008],"mapped",[8000]],[[8009,8009],"mapped",[8001]],[[8010,8010],"mapped",[8002]],[[8011,8011],"mapped",[8003]],[[8012,8012],"mapped",[8004]],[[8013,8013],"mapped",[8005]],[[8014,8015],"disallowed"],[[8016,8023],"valid"],[[8024,8024],"disallowed"],[[8025,8025],"mapped",[8017]],[[8026,8026],"disallowed"],[[8027,8027],"mapped",[8019]],[[8028,8028],"disallowed"],[[8029,8029],"mapped",[8021]],[[8030,8030],"disallowed"],[[8031,8031],"mapped",[8023]],[[8032,8039],"valid"],[[8040,8040],"mapped",[8032]],[[8041,8041],"mapped",[8033]],[[8042,8042],"mapped",[8034]],[[8043,8043],"mapped",[8035]],[[8044,8044],"mapped",[8036]],[[8045,8045],"mapped",[8037]],[[8046,8046],"mapped",[8038]],[[8047,8047],"mapped",[8039]],[[8048,8048],"valid"],[[8049,8049],"mapped",[940]],[[8050,8050],"valid"],[[8051,8051],"mapped",[941]],[[8052,8052],"valid"],[[8053,8053],"mapped",[942]],[[8054,8054],"valid"],[[8055,8055],"mapped",[943]],[[8056,8056],"valid"],[[8057,8057],"mapped",[972]],[[8058,8058],"valid"],[[8059,8059],"mapped",[973]],[[8060,8060],"valid"],[[8061,8061],"mapped",[974]],[[8062,8063],"disallowed"],[[8064,8064],"mapped",[7936,953]],[[8065,8065],"mapped",[7937,953]],[[8066,8066],"mapped",[7938,953]],[[8067,8067],"mapped",[7939,953]],[[8068,8068],"mapped",[7940,953]],[[8069,8069],"mapped",[7941,953]],[[8070,8070],"mapped",[7942,953]],[[8071,8071],"mapped",[7943,953]],[[8072,8072],"mapped",[7936,953]],[[8073,8073],"mapped",[7937,953]],[[8074,8074],"mapped",[7938,953]],[[8075,8075],"mapped",[7939,953]],[[8076,8076],"mapped",[7940,953]],[[8077,8077],"mapped",[7941,953]],[[8078,8078],"mapped",[7942,953]],[[8079,8079],"mapped",[7943,953]],[[8080,8080],"mapped",[7968,953]],[[8081,8081],"mapped",[7969,953]],[[8082,8082],"mapped",[7970,953]],[[8083,8083],"mapped",[7971,953]],[[8084,8084],"mapped",[7972,953]],[[8085,8085],"mapped",[7973,953]],[[8086,8086],"mapped",[7974,953]],[[8087,8087],"mapped",[7975,953]],[[8088,8088],"mapped",[7968,953]],[[8089,8089],"mapped",[7969,953]],[[8090,8090],"mapped",[7970,953]],[[8091,8091],"mapped",[7971,953]],[[8092,8092],"mapped",[7972,953]],[[8093,8093],"mapped",[7973,953]],[[8094,8094],"mapped",[7974,953]],[[8095,8095],"mapped",[7975,953]],[[8096,8096],"mapped",[8032,953]],[[8097,8097],"mapped",[8033,953]],[[8098,8098],"mapped",[8034,953]],[[8099,8099],"mapped",[8035,953]],[[8100,8100],"mapped",[8036,953]],[[8101,8101],"mapped",[8037,953]],[[8102,8102],"mapped",[8038,953]],[[8103,8103],"mapped",[8039,953]],[[8104,8104],"mapped",[8032,953]],[[8105,8105],"mapped",[8033,953]],[[8106,8106],"mapped",[8034,953]],[[8107,8107],"mapped",[8035,953]],[[8108,8108],"mapped",[8036,953]],[[8109,8109],"mapped",[8037,953]],[[8110,8110],"mapped",[8038,953]],[[8111,8111],"mapped",[8039,953]],[[8112,8113],"valid"],[[8114,8114],"mapped",[8048,953]],[[8115,8115],"mapped",[945,9
53]],[[8116,8116],"mapped",[940,953]],[[8117,8117],"disallowed"],[[8118,8118],"valid"],[[8119,8119],"mapped",[8118,953]],[[8120,8120],"mapped",[8112]],[[8121,8121],"mapped",[8113]],[[8122,8122],"mapped",[8048]],[[8123,8123],"mapped",[940]],[[8124,8124],"mapped",[945,953]],[[8125,8125],"disallowed_STD3_mapped",[32,787]],[[8126,8126],"mapped",[953]],[[8127,8127],"disallowed_STD3_mapped",[32,787]],[[8128,8128],"disallowed_STD3_mapped",[32,834]],[[8129,8129],"disallowed_STD3_mapped",[32,776,834]],[[8130,8130],"mapped",[8052,953]],[[8131,8131],"mapped",[951,953]],[[8132,8132],"mapped",[942,953]],[[8133,8133],"disallowed"],[[8134,8134],"valid"],[[8135,8135],"mapped",[8134,953]],[[8136,8136],"mapped",[8050]],[[8137,8137],"mapped",[941]],[[8138,8138],"mapped",[8052]],[[8139,8139],"mapped",[942]],[[8140,8140],"mapped",[951,953]],[[8141,8141],"disallowed_STD3_mapped",[32,787,768]],[[8142,8142],"disallowed_STD3_mapped",[32,787,769]],[[8143,8143],"disallowed_STD3_mapped",[32,787,834]],[[8144,8146],"valid"],[[8147,8147],"mapped",[912]],[[8148,8149],"disallowed"],[[8150,8151],"valid"],[[8152,8152],"mapped",[8144]],[[8153,8153],"mapped",[8145]],[[8154,8154],"mapped",[8054]],[[8155,8155],"mapped",[943]],[[8156,8156],"disallowed"],[[8157,8157],"disallowed_STD3_mapped",[32,788,768]],[[8158,8158],"disallowed_STD3_mapped",[32,788,769]],[[8159,8159],"disallowed_STD3_mapped",[32,788,834]],[[8160,8162],"valid"],[[8163,8163],"mapped",[944]],[[8164,8167],"valid"],[[8168,8168],"mapped",[8160]],[[8169,8169],"mapped",[8161]],[[8170,8170],"mapped",[8058]],[[8171,8171],"mapped",[973]],[[8172,8172],"mapped",[8165]],[[8173,8173],"disallowed_STD3_mapped",[32,776,768]],[[8174,8174],"disallowed_STD3_mapped",[32,776,769]],[[8175,8175],"disallowed_STD3_mapped",[96]],[[8176,8177],"disallowed"],[[8178,8178],"mapped",[8060,953]],[[8179,8179],"mapped",[969,953]],[[8180,8180],"mapped",[974,953]],[[8181,8181],"disallowed"],[[8182,8182],"valid"],[[8183,8183],"mapped",[8182,953]],[[8184,8184],"mapped",[8056]],[[8185,8185],"mapped",[972]],[[8186,8186],"mapped",[8060]],[[8187,8187],"mapped",[974]],[[8188,8188],"mapped",[969,953]],[[8189,8189],"disallowed_STD3_mapped",[32,769]],[[8190,8190],"disallowed_STD3_mapped",[32,788]],[[8191,8191],"disallowed"],[[8192,8202],"disallowed_STD3_mapped",[32]],[[8203,8203],"ignored"],[[8204,8205],"deviation",[]],[[8206,8207],"disallowed"],[[8208,8208],"valid",[],"NV8"],[[8209,8209],"mapped",[8208]],[[8210,8214],"valid",[],"NV8"],[[8215,8215],"disallowed_STD3_mapped",[32,819]],[[8216,8227],"valid",[],"NV8"],[[8228,8230],"disallowed"],[[8231,8231],"valid",[],"NV8"],[[8232,8238],"disallowed"],[[8239,8239],"disallowed_STD3_mapped",[32]],[[8240,8242],"valid",[],"NV8"],[[8243,8243],"mapped",[8242,8242]],[[8244,8244],"mapped",[8242,8242,8242]],[[8245,8245],"valid",[],"NV8"],[[8246,8246],"mapped",[8245,8245]],[[8247,8247],"mapped",[8245,8245,8245]],[[8248,8251],"valid",[],"NV8"],[[8252,8252],"disallowed_STD3_mapped",[33,33]],[[8253,8253],"valid",[],"NV8"],[[8254,8254],"disallowed_STD3_mapped",[32,773]],[[8255,8262],"valid",[],"NV8"],[[8263,8263],"disallowed_STD3_mapped",[63,63]],[[8264,8264],"disallowed_STD3_mapped",[63,33]],[[8265,8265],"disallowed_STD3_mapped",[33,63]],[[8266,8269],"valid",[],"NV8"],[[8270,8274],"valid",[],"NV8"],[[8275,8276],"valid",[],"NV8"],[[8277,8278],"valid",[],"NV8"],[[8279,8279],"mapped",[8242,8242,8242,8242]],[[8280,8286],"valid",[],"NV8"],[[8287,8287],"disallowed_STD3_mapped",[32]],[[8288,8288],"ignored"],[[8289,8291],"disallowed"],[[8292,8292],"ignored"],[[8293,8293],"disallowed"],
[[8294,8297],"disallowed"],[[8298,8303],"disallowed"],[[8304,8304],"mapped",[48]],[[8305,8305],"mapped",[105]],[[8306,8307],"disallowed"],[[8308,8308],"mapped",[52]],[[8309,8309],"mapped",[53]],[[8310,8310],"mapped",[54]],[[8311,8311],"mapped",[55]],[[8312,8312],"mapped",[56]],[[8313,8313],"mapped",[57]],[[8314,8314],"disallowed_STD3_mapped",[43]],[[8315,8315],"mapped",[8722]],[[8316,8316],"disallowed_STD3_mapped",[61]],[[8317,8317],"disallowed_STD3_mapped",[40]],[[8318,8318],"disallowed_STD3_mapped",[41]],[[8319,8319],"mapped",[110]],[[8320,8320],"mapped",[48]],[[8321,8321],"mapped",[49]],[[8322,8322],"mapped",[50]],[[8323,8323],"mapped",[51]],[[8324,8324],"mapped",[52]],[[8325,8325],"mapped",[53]],[[8326,8326],"mapped",[54]],[[8327,8327],"mapped",[55]],[[8328,8328],"mapped",[56]],[[8329,8329],"mapped",[57]],[[8330,8330],"disallowed_STD3_mapped",[43]],[[8331,8331],"mapped",[8722]],[[8332,8332],"disallowed_STD3_mapped",[61]],[[8333,8333],"disallowed_STD3_mapped",[40]],[[8334,8334],"disallowed_STD3_mapped",[41]],[[8335,8335],"disallowed"],[[8336,8336],"mapped",[97]],[[8337,8337],"mapped",[101]],[[8338,8338],"mapped",[111]],[[8339,8339],"mapped",[120]],[[8340,8340],"mapped",[601]],[[8341,8341],"mapped",[104]],[[8342,8342],"mapped",[107]],[[8343,8343],"mapped",[108]],[[8344,8344],"mapped",[109]],[[8345,8345],"mapped",[110]],[[8346,8346],"mapped",[112]],[[8347,8347],"mapped",[115]],[[8348,8348],"mapped",[116]],[[8349,8351],"disallowed"],[[8352,8359],"valid",[],"NV8"],[[8360,8360],"mapped",[114,115]],[[8361,8362],"valid",[],"NV8"],[[8363,8363],"valid",[],"NV8"],[[8364,8364],"valid",[],"NV8"],[[8365,8367],"valid",[],"NV8"],[[8368,8369],"valid",[],"NV8"],[[8370,8373],"valid",[],"NV8"],[[8374,8376],"valid",[],"NV8"],[[8377,8377],"valid",[],"NV8"],[[8378,8378],"valid",[],"NV8"],[[8379,8381],"valid",[],"NV8"],[[8382,8382],"valid",[],"NV8"],[[8383,8399],"disallowed"],[[8400,8417],"valid",[],"NV8"],[[8418,8419],"valid",[],"NV8"],[[8420,8426],"valid",[],"NV8"],[[8427,8427],"valid",[],"NV8"],[[8428,8431],"valid",[],"NV8"],[[8432,8432],"valid",[],"NV8"],[[8433,8447],"disallowed"],[[8448,8448],"disallowed_STD3_mapped",[97,47,99]],[[8449,8449],"disallowed_STD3_mapped",[97,47,115]],[[8450,8450],"mapped",[99]],[[8451,8451],"mapped",[176,99]],[[8452,8452],"valid",[],"NV8"],[[8453,8453],"disallowed_STD3_mapped",[99,47,111]],[[8454,8454],"disallowed_STD3_mapped",[99,47,117]],[[8455,8455],"mapped",[603]],[[8456,8456],"valid",[],"NV8"],[[8457,8457],"mapped",[176,102]],[[8458,8458],"mapped",[103]],[[8459,8462],"mapped",[104]],[[8463,8463],"mapped",[295]],[[8464,8465],"mapped",[105]],[[8466,8467],"mapped",[108]],[[8468,8468],"valid",[],"NV8"],[[8469,8469],"mapped",[110]],[[8470,8470],"mapped",[110,111]],[[8471,8472],"valid",[],"NV8"],[[8473,8473],"mapped",[112]],[[8474,8474],"mapped",[113]],[[8475,8477],"mapped",[114]],[[8478,8479],"valid",[],"NV8"],[[8480,8480],"mapped",[115,109]],[[8481,8481],"mapped",[116,101,108]],[[8482,8482],"mapped",[116,109]],[[8483,8483],"valid",[],"NV8"],[[8484,8484],"mapped",[122]],[[8485,8485],"valid",[],"NV8"],[[8486,8486],"mapped",[969]],[[8487,8487],"valid",[],"NV8"],[[8488,8488],"mapped",[122]],[[8489,8489],"valid",[],"NV8"],[[8490,8490],"mapped",[107]],[[8491,8491],"mapped",[229]],[[8492,8492],"mapped",[98]],[[8493,8493],"mapped",[99]],[[8494,8494],"valid",[],"NV8"],[[8495,8496],"mapped",[101]],[[8497,8497],"mapped",[102]],[[8498,8498],"disallowed"],[[8499,8499],"mapped",[109]],[[8500,8500],"mapped",[111]],[[8501,8501],"mapped",[1488]],[[8502,8502],"mapped",[1489]],[[8503,8503],"map
ped",[1490]],[[8504,8504],"mapped",[1491]],[[8505,8505],"mapped",[105]],[[8506,8506],"valid",[],"NV8"],[[8507,8507],"mapped",[102,97,120]],[[8508,8508],"mapped",[960]],[[8509,8510],"mapped",[947]],[[8511,8511],"mapped",[960]],[[8512,8512],"mapped",[8721]],[[8513,8516],"valid",[],"NV8"],[[8517,8518],"mapped",[100]],[[8519,8519],"mapped",[101]],[[8520,8520],"mapped",[105]],[[8521,8521],"mapped",[106]],[[8522,8523],"valid",[],"NV8"],[[8524,8524],"valid",[],"NV8"],[[8525,8525],"valid",[],"NV8"],[[8526,8526],"valid"],[[8527,8527],"valid",[],"NV8"],[[8528,8528],"mapped",[49,8260,55]],[[8529,8529],"mapped",[49,8260,57]],[[8530,8530],"mapped",[49,8260,49,48]],[[8531,8531],"mapped",[49,8260,51]],[[8532,8532],"mapped",[50,8260,51]],[[8533,8533],"mapped",[49,8260,53]],[[8534,8534],"mapped",[50,8260,53]],[[8535,8535],"mapped",[51,8260,53]],[[8536,8536],"mapped",[52,8260,53]],[[8537,8537],"mapped",[49,8260,54]],[[8538,8538],"mapped",[53,8260,54]],[[8539,8539],"mapped",[49,8260,56]],[[8540,8540],"mapped",[51,8260,56]],[[8541,8541],"mapped",[53,8260,56]],[[8542,8542],"mapped",[55,8260,56]],[[8543,8543],"mapped",[49,8260]],[[8544,8544],"mapped",[105]],[[8545,8545],"mapped",[105,105]],[[8546,8546],"mapped",[105,105,105]],[[8547,8547],"mapped",[105,118]],[[8548,8548],"mapped",[118]],[[8549,8549],"mapped",[118,105]],[[8550,8550],"mapped",[118,105,105]],[[8551,8551],"mapped",[118,105,105,105]],[[8552,8552],"mapped",[105,120]],[[8553,8553],"mapped",[120]],[[8554,8554],"mapped",[120,105]],[[8555,8555],"mapped",[120,105,105]],[[8556,8556],"mapped",[108]],[[8557,8557],"mapped",[99]],[[8558,8558],"mapped",[100]],[[8559,8559],"mapped",[109]],[[8560,8560],"mapped",[105]],[[8561,8561],"mapped",[105,105]],[[8562,8562],"mapped",[105,105,105]],[[8563,8563],"mapped",[105,118]],[[8564,8564],"mapped",[118]],[[8565,8565],"mapped",[118,105]],[[8566,8566],"mapped",[118,105,105]],[[8567,8567],"mapped",[118,105,105,105]],[[8568,8568],"mapped",[105,120]],[[8569,8569],"mapped",[120]],[[8570,8570],"mapped",[120,105]],[[8571,8571],"mapped",[120,105,105]],[[8572,8572],"mapped",[108]],[[8573,8573],"mapped",[99]],[[8574,8574],"mapped",[100]],[[8575,8575],"mapped",[109]],[[8576,8578],"valid",[],"NV8"],[[8579,8579],"disallowed"],[[8580,8580],"valid"],[[8581,8584],"valid",[],"NV8"],[[8585,8585],"mapped",[48,8260,51]],[[8586,8587],"valid",[],"NV8"],[[8588,8591],"disallowed"],[[8592,8682],"valid",[],"NV8"],[[8683,8691],"valid",[],"NV8"],[[8692,8703],"valid",[],"NV8"],[[8704,8747],"valid",[],"NV8"],[[8748,8748],"mapped",[8747,8747]],[[8749,8749],"mapped",[8747,8747,8747]],[[8750,8750],"valid",[],"NV8"],[[8751,8751],"mapped",[8750,8750]],[[8752,8752],"mapped",[8750,8750,8750]],[[8753,8799],"valid",[],"NV8"],[[8800,8800],"disallowed_STD3_valid"],[[8801,8813],"valid",[],"NV8"],[[8814,8815],"disallowed_STD3_valid"],[[8816,8945],"valid",[],"NV8"],[[8946,8959],"valid",[],"NV8"],[[8960,8960],"valid",[],"NV8"],[[8961,8961],"valid",[],"NV8"],[[8962,9000],"valid",[],"NV8"],[[9001,9001],"mapped",[12296]],[[9002,9002],"mapped",[12297]],[[9003,9082],"valid",[],"NV8"],[[9083,9083],"valid",[],"NV8"],[[9084,9084],"valid",[],"NV8"],[[9085,9114],"valid",[],"NV8"],[[9115,9166],"valid",[],"NV8"],[[9167,9168],"valid",[],"NV8"],[[9169,9179],"valid",[],"NV8"],[[9180,9191],"valid",[],"NV8"],[[9192,9192],"valid",[],"NV8"],[[9193,9203],"valid",[],"NV8"],[[9204,9210],"valid",[],"NV8"],[[9211,9215],"disallowed"],[[9216,9252],"valid",[],"NV8"],[[9253,9254],"valid",[],"NV8"],[[9255,9279],"disallowed"],[[9280,9290],"valid",[],"NV8"],[[9291,9311],"disallowed"],[[9312,9312]
,"mapped",[49]],[[9313,9313],"mapped",[50]],[[9314,9314],"mapped",[51]],[[9315,9315],"mapped",[52]],[[9316,9316],"mapped",[53]],[[9317,9317],"mapped",[54]],[[9318,9318],"mapped",[55]],[[9319,9319],"mapped",[56]],[[9320,9320],"mapped",[57]],[[9321,9321],"mapped",[49,48]],[[9322,9322],"mapped",[49,49]],[[9323,9323],"mapped",[49,50]],[[9324,9324],"mapped",[49,51]],[[9325,9325],"mapped",[49,52]],[[9326,9326],"mapped",[49,53]],[[9327,9327],"mapped",[49,54]],[[9328,9328],"mapped",[49,55]],[[9329,9329],"mapped",[49,56]],[[9330,9330],"mapped",[49,57]],[[9331,9331],"mapped",[50,48]],[[9332,9332],"disallowed_STD3_mapped",[40,49,41]],[[9333,9333],"disallowed_STD3_mapped",[40,50,41]],[[9334,9334],"disallowed_STD3_mapped",[40,51,41]],[[9335,9335],"disallowed_STD3_mapped",[40,52,41]],[[9336,9336],"disallowed_STD3_mapped",[40,53,41]],[[9337,9337],"disallowed_STD3_mapped",[40,54,41]],[[9338,9338],"disallowed_STD3_mapped",[40,55,41]],[[9339,9339],"disallowed_STD3_mapped",[40,56,41]],[[9340,9340],"disallowed_STD3_mapped",[40,57,41]],[[9341,9341],"disallowed_STD3_mapped",[40,49,48,41]],[[9342,9342],"disallowed_STD3_mapped",[40,49,49,41]],[[9343,9343],"disallowed_STD3_mapped",[40,49,50,41]],[[9344,9344],"disallowed_STD3_mapped",[40,49,51,41]],[[9345,9345],"disallowed_STD3_mapped",[40,49,52,41]],[[9346,9346],"disallowed_STD3_mapped",[40,49,53,41]],[[9347,9347],"disallowed_STD3_mapped",[40,49,54,41]],[[9348,9348],"disallowed_STD3_mapped",[40,49,55,41]],[[9349,9349],"disallowed_STD3_mapped",[40,49,56,41]],[[9350,9350],"disallowed_STD3_mapped",[40,49,57,41]],[[9351,9351],"disallowed_STD3_mapped",[40,50,48,41]],[[9352,9371],"disallowed"],[[9372,9372],"disallowed_STD3_mapped",[40,97,41]],[[9373,9373],"disallowed_STD3_mapped",[40,98,41]],[[9374,9374],"disallowed_STD3_mapped",[40,99,41]],[[9375,9375],"disallowed_STD3_mapped",[40,100,41]],[[9376,9376],"disallowed_STD3_mapped",[40,101,41]],[[9377,9377],"disallowed_STD3_mapped",[40,102,41]],[[9378,9378],"disallowed_STD3_mapped",[40,103,41]],[[9379,9379],"disallowed_STD3_mapped",[40,104,41]],[[9380,9380],"disallowed_STD3_mapped",[40,105,41]],[[9381,9381],"disallowed_STD3_mapped",[40,106,41]],[[9382,9382],"disallowed_STD3_mapped",[40,107,41]],[[9383,9383],"disallowed_STD3_mapped",[40,108,41]],[[9384,9384],"disallowed_STD3_mapped",[40,109,41]],[[9385,9385],"disallowed_STD3_mapped",[40,110,41]],[[9386,9386],"disallowed_STD3_mapped",[40,111,41]],[[9387,9387],"disallowed_STD3_mapped",[40,112,41]],[[9388,9388],"disallowed_STD3_mapped",[40,113,41]],[[9389,9389],"disallowed_STD3_mapped",[40,114,41]],[[9390,9390],"disallowed_STD3_mapped",[40,115,41]],[[9391,9391],"disallowed_STD3_mapped",[40,116,41]],[[9392,9392],"disallowed_STD3_mapped",[40,117,41]],[[9393,9393],"disallowed_STD3_mapped",[40,118,41]],[[9394,9394],"disallowed_STD3_mapped",[40,119,41]],[[9395,9395],"disallowed_STD3_mapped",[40,120,41]],[[9396,9396],"disallowed_STD3_mapped",[40,121,41]],[[9397,9397],"disallowed_STD3_mapped",[40,122,41]],[[9398,9398],"mapped",[97]],[[9399,9399],"mapped",[98]],[[9400,9400],"mapped",[99]],[[9401,9401],"mapped",[100]],[[9402,9402],"mapped",[101]],[[9403,9403],"mapped",[102]],[[9404,9404],"mapped",[103]],[[9405,9405],"mapped",[104]],[[9406,9406],"mapped",[105]],[[9407,9407],"mapped",[106]],[[9408,9408],"mapped",[107]],[[9409,9409],"mapped",[108]],[[9410,9410],"mapped",[109]],[[9411,9411],"mapped",[110]],[[9412,9412],"mapped",[111]],[[9413,9413],"mapped",[112]],[[9414,9414],"mapped",[113]],[[9415,9415],"mapped",[114]],[[9416,9416],"mapped",[115]],[[9417,9417],"mapped",[116]],[[9418,9418],"
mapped",[117]],[[9419,9419],"mapped",[118]],[[9420,9420],"mapped",[119]],[[9421,9421],"mapped",[120]],[[9422,9422],"mapped",[121]],[[9423,9423],"mapped",[122]],[[9424,9424],"mapped",[97]],[[9425,9425],"mapped",[98]],[[9426,9426],"mapped",[99]],[[9427,9427],"mapped",[100]],[[9428,9428],"mapped",[101]],[[9429,9429],"mapped",[102]],[[9430,9430],"mapped",[103]],[[9431,9431],"mapped",[104]],[[9432,9432],"mapped",[105]],[[9433,9433],"mapped",[106]],[[9434,9434],"mapped",[107]],[[9435,9435],"mapped",[108]],[[9436,9436],"mapped",[109]],[[9437,9437],"mapped",[110]],[[9438,9438],"mapped",[111]],[[9439,9439],"mapped",[112]],[[9440,9440],"mapped",[113]],[[9441,9441],"mapped",[114]],[[9442,9442],"mapped",[115]],[[9443,9443],"mapped",[116]],[[9444,9444],"mapped",[117]],[[9445,9445],"mapped",[118]],[[9446,9446],"mapped",[119]],[[9447,9447],"mapped",[120]],[[9448,9448],"mapped",[121]],[[9449,9449],"mapped",[122]],[[9450,9450],"mapped",[48]],[[9451,9470],"valid",[],"NV8"],[[9471,9471],"valid",[],"NV8"],[[9472,9621],"valid",[],"NV8"],[[9622,9631],"valid",[],"NV8"],[[9632,9711],"valid",[],"NV8"],[[9712,9719],"valid",[],"NV8"],[[9720,9727],"valid",[],"NV8"],[[9728,9747],"valid",[],"NV8"],[[9748,9749],"valid",[],"NV8"],[[9750,9751],"valid",[],"NV8"],[[9752,9752],"valid",[],"NV8"],[[9753,9753],"valid",[],"NV8"],[[9754,9839],"valid",[],"NV8"],[[9840,9841],"valid",[],"NV8"],[[9842,9853],"valid",[],"NV8"],[[9854,9855],"valid",[],"NV8"],[[9856,9865],"valid",[],"NV8"],[[9866,9873],"valid",[],"NV8"],[[9874,9884],"valid",[],"NV8"],[[9885,9885],"valid",[],"NV8"],[[9886,9887],"valid",[],"NV8"],[[9888,9889],"valid",[],"NV8"],[[9890,9905],"valid",[],"NV8"],[[9906,9906],"valid",[],"NV8"],[[9907,9916],"valid",[],"NV8"],[[9917,9919],"valid",[],"NV8"],[[9920,9923],"valid",[],"NV8"],[[9924,9933],"valid",[],"NV8"],[[9934,9934],"valid",[],"NV8"],[[9935,9953],"valid",[],"NV8"],[[9954,9954],"valid",[],"NV8"],[[9955,9955],"valid",[],"NV8"],[[9956,9959],"valid",[],"NV8"],[[9960,9983],"valid",[],"NV8"],[[9984,9984],"valid",[],"NV8"],[[9985,9988],"valid",[],"NV8"],[[9989,9989],"valid",[],"NV8"],[[9990,9993],"valid",[],"NV8"],[[9994,9995],"valid",[],"NV8"],[[9996,10023],"valid",[],"NV8"],[[10024,10024],"valid",[],"NV8"],[[10025,10059],"valid",[],"NV8"],[[10060,10060],"valid",[],"NV8"],[[10061,10061],"valid",[],"NV8"],[[10062,10062],"valid",[],"NV8"],[[10063,10066],"valid",[],"NV8"],[[10067,10069],"valid",[],"NV8"],[[10070,10070],"valid",[],"NV8"],[[10071,10071],"valid",[],"NV8"],[[10072,10078],"valid",[],"NV8"],[[10079,10080],"valid",[],"NV8"],[[10081,10087],"valid",[],"NV8"],[[10088,10101],"valid",[],"NV8"],[[10102,10132],"valid",[],"NV8"],[[10133,10135],"valid",[],"NV8"],[[10136,10159],"valid",[],"NV8"],[[10160,10160],"valid",[],"NV8"],[[10161,10174],"valid",[],"NV8"],[[10175,10175],"valid",[],"NV8"],[[10176,10182],"valid",[],"NV8"],[[10183,10186],"valid",[],"NV8"],[[10187,10187],"valid",[],"NV8"],[[10188,10188],"valid",[],"NV8"],[[10189,10189],"valid",[],"NV8"],[[10190,10191],"valid",[],"NV8"],[[10192,10219],"valid",[],"NV8"],[[10220,10223],"valid",[],"NV8"],[[10224,10239],"valid",[],"NV8"],[[10240,10495],"valid",[],"NV8"],[[10496,10763],"valid",[],"NV8"],[[10764,10764],"mapped",[8747,8747,8747,8747]],[[10765,10867],"valid",[],"NV8"],[[10868,10868],"disallowed_STD3_mapped",[58,58,61]],[[10869,10869],"disallowed_STD3_mapped",[61,61]],[[10870,10870],"disallowed_STD3_mapped",[61,61,61]],[[10871,10971],"valid",[],"NV8"],[[10972,10972],"mapped",[10973,824]],[[10973,11007],"valid",[],"NV8"],[[11008,11021],"valid",[],"NV8"],[[11022,11027],"
valid",[],"NV8"],[[11028,11034],"valid",[],"NV8"],[[11035,11039],"valid",[],"NV8"],[[11040,11043],"valid",[],"NV8"],[[11044,11084],"valid",[],"NV8"],[[11085,11087],"valid",[],"NV8"],[[11088,11092],"valid",[],"NV8"],[[11093,11097],"valid",[],"NV8"],[[11098,11123],"valid",[],"NV8"],[[11124,11125],"disallowed"],[[11126,11157],"valid",[],"NV8"],[[11158,11159],"disallowed"],[[11160,11193],"valid",[],"NV8"],[[11194,11196],"disallowed"],[[11197,11208],"valid",[],"NV8"],[[11209,11209],"disallowed"],[[11210,11217],"valid",[],"NV8"],[[11218,11243],"disallowed"],[[11244,11247],"valid",[],"NV8"],[[11248,11263],"disallowed"],[[11264,11264],"mapped",[11312]],[[11265,11265],"mapped",[11313]],[[11266,11266],"mapped",[11314]],[[11267,11267],"mapped",[11315]],[[11268,11268],"mapped",[11316]],[[11269,11269],"mapped",[11317]],[[11270,11270],"mapped",[11318]],[[11271,11271],"mapped",[11319]],[[11272,11272],"mapped",[11320]],[[11273,11273],"mapped",[11321]],[[11274,11274],"mapped",[11322]],[[11275,11275],"mapped",[11323]],[[11276,11276],"mapped",[11324]],[[11277,11277],"mapped",[11325]],[[11278,11278],"mapped",[11326]],[[11279,11279],"mapped",[11327]],[[11280,11280],"mapped",[11328]],[[11281,11281],"mapped",[11329]],[[11282,11282],"mapped",[11330]],[[11283,11283],"mapped",[11331]],[[11284,11284],"mapped",[11332]],[[11285,11285],"mapped",[11333]],[[11286,11286],"mapped",[11334]],[[11287,11287],"mapped",[11335]],[[11288,11288],"mapped",[11336]],[[11289,11289],"mapped",[11337]],[[11290,11290],"mapped",[11338]],[[11291,11291],"mapped",[11339]],[[11292,11292],"mapped",[11340]],[[11293,11293],"mapped",[11341]],[[11294,11294],"mapped",[11342]],[[11295,11295],"mapped",[11343]],[[11296,11296],"mapped",[11344]],[[11297,11297],"mapped",[11345]],[[11298,11298],"mapped",[11346]],[[11299,11299],"mapped",[11347]],[[11300,11300],"mapped",[11348]],[[11301,11301],"mapped",[11349]],[[11302,11302],"mapped",[11350]],[[11303,11303],"mapped",[11351]],[[11304,11304],"mapped",[11352]],[[11305,11305],"mapped",[11353]],[[11306,11306],"mapped",[11354]],[[11307,11307],"mapped",[11355]],[[11308,11308],"mapped",[11356]],[[11309,11309],"mapped",[11357]],[[11310,11310],"mapped",[11358]],[[11311,11311],"disallowed"],[[11312,11358],"valid"],[[11359,11359],"disallowed"],[[11360,11360],"mapped",[11361]],[[11361,11361],"valid"],[[11362,11362],"mapped",[619]],[[11363,11363],"mapped",[7549]],[[11364,11364],"mapped",[637]],[[11365,11366],"valid"],[[11367,11367],"mapped",[11368]],[[11368,11368],"valid"],[[11369,11369],"mapped",[11370]],[[11370,11370],"valid"],[[11371,11371],"mapped",[11372]],[[11372,11372],"valid"],[[11373,11373],"mapped",[593]],[[11374,11374],"mapped",[625]],[[11375,11375],"mapped",[592]],[[11376,11376],"mapped",[594]],[[11377,11377],"valid"],[[11378,11378],"mapped",[11379]],[[11379,11379],"valid"],[[11380,11380],"valid"],[[11381,11381],"mapped",[11382]],[[11382,11383],"valid"],[[11384,11387],"valid"],[[11388,11388],"mapped",[106]],[[11389,11389],"mapped",[118]],[[11390,11390],"mapped",[575]],[[11391,11391],"mapped",[576]],[[11392,11392],"mapped",[11393]],[[11393,11393],"valid"],[[11394,11394],"mapped",[11395]],[[11395,11395],"valid"],[[11396,11396],"mapped",[11397]],[[11397,11397],"valid"],[[11398,11398],"mapped",[11399]],[[11399,11399],"valid"],[[11400,11400],"mapped",[11401]],[[11401,11401],"valid"],[[11402,11402],"mapped",[11403]],[[11403,11403],"valid"],[[11404,11404],"mapped",[11405]],[[11405,11405],"valid"],[[11406,11406],"mapped",[11407]],[[11407,11407],"valid"],[[11408,11408],"mapped",[11409]],[[11409,11409],"valid"],[[11410,1
1410],"mapped",[11411]],[[11411,11411],"valid"],[[11412,11412],"mapped",[11413]],[[11413,11413],"valid"],[[11414,11414],"mapped",[11415]],[[11415,11415],"valid"],[[11416,11416],"mapped",[11417]],[[11417,11417],"valid"],[[11418,11418],"mapped",[11419]],[[11419,11419],"valid"],[[11420,11420],"mapped",[11421]],[[11421,11421],"valid"],[[11422,11422],"mapped",[11423]],[[11423,11423],"valid"],[[11424,11424],"mapped",[11425]],[[11425,11425],"valid"],[[11426,11426],"mapped",[11427]],[[11427,11427],"valid"],[[11428,11428],"mapped",[11429]],[[11429,11429],"valid"],[[11430,11430],"mapped",[11431]],[[11431,11431],"valid"],[[11432,11432],"mapped",[11433]],[[11433,11433],"valid"],[[11434,11434],"mapped",[11435]],[[11435,11435],"valid"],[[11436,11436],"mapped",[11437]],[[11437,11437],"valid"],[[11438,11438],"mapped",[11439]],[[11439,11439],"valid"],[[11440,11440],"mapped",[11441]],[[11441,11441],"valid"],[[11442,11442],"mapped",[11443]],[[11443,11443],"valid"],[[11444,11444],"mapped",[11445]],[[11445,11445],"valid"],[[11446,11446],"mapped",[11447]],[[11447,11447],"valid"],[[11448,11448],"mapped",[11449]],[[11449,11449],"valid"],[[11450,11450],"mapped",[11451]],[[11451,11451],"valid"],[[11452,11452],"mapped",[11453]],[[11453,11453],"valid"],[[11454,11454],"mapped",[11455]],[[11455,11455],"valid"],[[11456,11456],"mapped",[11457]],[[11457,11457],"valid"],[[11458,11458],"mapped",[11459]],[[11459,11459],"valid"],[[11460,11460],"mapped",[11461]],[[11461,11461],"valid"],[[11462,11462],"mapped",[11463]],[[11463,11463],"valid"],[[11464,11464],"mapped",[11465]],[[11465,11465],"valid"],[[11466,11466],"mapped",[11467]],[[11467,11467],"valid"],[[11468,11468],"mapped",[11469]],[[11469,11469],"valid"],[[11470,11470],"mapped",[11471]],[[11471,11471],"valid"],[[11472,11472],"mapped",[11473]],[[11473,11473],"valid"],[[11474,11474],"mapped",[11475]],[[11475,11475],"valid"],[[11476,11476],"mapped",[11477]],[[11477,11477],"valid"],[[11478,11478],"mapped",[11479]],[[11479,11479],"valid"],[[11480,11480],"mapped",[11481]],[[11481,11481],"valid"],[[11482,11482],"mapped",[11483]],[[11483,11483],"valid"],[[11484,11484],"mapped",[11485]],[[11485,11485],"valid"],[[11486,11486],"mapped",[11487]],[[11487,11487],"valid"],[[11488,11488],"mapped",[11489]],[[11489,11489],"valid"],[[11490,11490],"mapped",[11491]],[[11491,11492],"valid"],[[11493,11498],"valid",[],"NV8"],[[11499,11499],"mapped",[11500]],[[11500,11500],"valid"],[[11501,11501],"mapped",[11502]],[[11502,11505],"valid"],[[11506,11506],"mapped",[11507]],[[11507,11507],"valid"],[[11508,11512],"disallowed"],[[11513,11519],"valid",[],"NV8"],[[11520,11557],"valid"],[[11558,11558],"disallowed"],[[11559,11559],"valid"],[[11560,11564],"disallowed"],[[11565,11565],"valid"],[[11566,11567],"disallowed"],[[11568,11621],"valid"],[[11622,11623],"valid"],[[11624,11630],"disallowed"],[[11631,11631],"mapped",[11617]],[[11632,11632],"valid",[],"NV8"],[[11633,11646],"disallowed"],[[11647,11647],"valid"],[[11648,11670],"valid"],[[11671,11679],"disallowed"],[[11680,11686],"valid"],[[11687,11687],"disallowed"],[[11688,11694],"valid"],[[11695,11695],"disallowed"],[[11696,11702],"valid"],[[11703,11703],"disallowed"],[[11704,11710],"valid"],[[11711,11711],"disallowed"],[[11712,11718],"valid"],[[11719,11719],"disallowed"],[[11720,11726],"valid"],[[11727,11727],"disallowed"],[[11728,11734],"valid"],[[11735,11735],"disallowed"],[[11736,11742],"valid"],[[11743,11743],"disallowed"],[[11744,11775],"valid"],[[11776,11799],"valid",[],"NV8"],[[11800,11803],"valid",[],"NV8"],[[11804,11805],"valid",[],"NV8"],[[1180
6,11822],"valid",[],"NV8"],[[11823,11823],"valid"],[[11824,11824],"valid",[],"NV8"],[[11825,11825],"valid",[],"NV8"],[[11826,11835],"valid",[],"NV8"],[[11836,11842],"valid",[],"NV8"],[[11843,11903],"disallowed"],[[11904,11929],"valid",[],"NV8"],[[11930,11930],"disallowed"],[[11931,11934],"valid",[],"NV8"],[[11935,11935],"mapped",[27597]],[[11936,12018],"valid",[],"NV8"],[[12019,12019],"mapped",[40863]],[[12020,12031],"disallowed"],[[12032,12032],"mapped",[19968]],[[12033,12033],"mapped",[20008]],[[12034,12034],"mapped",[20022]],[[12035,12035],"mapped",[20031]],[[12036,12036],"mapped",[20057]],[[12037,12037],"mapped",[20101]],[[12038,12038],"mapped",[20108]],[[12039,12039],"mapped",[20128]],[[12040,12040],"mapped",[20154]],[[12041,12041],"mapped",[20799]],[[12042,12042],"mapped",[20837]],[[12043,12043],"mapped",[20843]],[[12044,12044],"mapped",[20866]],[[12045,12045],"mapped",[20886]],[[12046,12046],"mapped",[20907]],[[12047,12047],"mapped",[20960]],[[12048,12048],"mapped",[20981]],[[12049,12049],"mapped",[20992]],[[12050,12050],"mapped",[21147]],[[12051,12051],"mapped",[21241]],[[12052,12052],"mapped",[21269]],[[12053,12053],"mapped",[21274]],[[12054,12054],"mapped",[21304]],[[12055,12055],"mapped",[21313]],[[12056,12056],"mapped",[21340]],[[12057,12057],"mapped",[21353]],[[12058,12058],"mapped",[21378]],[[12059,12059],"mapped",[21430]],[[12060,12060],"mapped",[21448]],[[12061,12061],"mapped",[21475]],[[12062,12062],"mapped",[22231]],[[12063,12063],"mapped",[22303]],[[12064,12064],"mapped",[22763]],[[12065,12065],"mapped",[22786]],[[12066,12066],"mapped",[22794]],[[12067,12067],"mapped",[22805]],[[12068,12068],"mapped",[22823]],[[12069,12069],"mapped",[22899]],[[12070,12070],"mapped",[23376]],[[12071,12071],"mapped",[23424]],[[12072,12072],"mapped",[23544]],[[12073,12073],"mapped",[23567]],[[12074,12074],"mapped",[23586]],[[12075,12075],"mapped",[23608]],[[12076,12076],"mapped",[23662]],[[12077,12077],"mapped",[23665]],[[12078,12078],"mapped",[24027]],[[12079,12079],"mapped",[24037]],[[12080,12080],"mapped",[24049]],[[12081,12081],"mapped",[24062]],[[12082,12082],"mapped",[24178]],[[12083,12083],"mapped",[24186]],[[12084,12084],"mapped",[24191]],[[12085,12085],"mapped",[24308]],[[12086,12086],"mapped",[24318]],[[12087,12087],"mapped",[24331]],[[12088,12088],"mapped",[24339]],[[12089,12089],"mapped",[24400]],[[12090,12090],"mapped",[24417]],[[12091,12091],"mapped",[24435]],[[12092,12092],"mapped",[24515]],[[12093,12093],"mapped",[25096]],[[12094,12094],"mapped",[25142]],[[12095,12095],"mapped",[25163]],[[12096,12096],"mapped",[25903]],[[12097,12097],"mapped",[25908]],[[12098,12098],"mapped",[25991]],[[12099,12099],"mapped",[26007]],[[12100,12100],"mapped",[26020]],[[12101,12101],"mapped",[26041]],[[12102,12102],"mapped",[26080]],[[12103,12103],"mapped",[26085]],[[12104,12104],"mapped",[26352]],[[12105,12105],"mapped",[26376]],[[12106,12106],"mapped",[26408]],[[12107,12107],"mapped",[27424]],[[12108,12108],"mapped",[27490]],[[12109,12109],"mapped",[27513]],[[12110,12110],"mapped",[27571]],[[12111,12111],"mapped",[27595]],[[12112,12112],"mapped",[27604]],[[12113,12113],"mapped",[27611]],[[12114,12114],"mapped",[27663]],[[12115,12115],"mapped",[27668]],[[12116,12116],"mapped",[27700]],[[12117,12117],"mapped",[28779]],[[12118,12118],"mapped",[29226]],[[12119,12119],"mapped",[29238]],[[12120,12120],"mapped",[29243]],[[12121,12121],"mapped",[29247]],[[12122,12122],"mapped",[29255]],[[12123,12123],"mapped",[29273]],[[12124,12124],"mapped",[29275]],[[12125,12125],"mapped",[29356]],[[12126,12126],"ma
pped",[29572]],[[12127,12127],"mapped",[29577]],[[12128,12128],"mapped",[29916]],[[12129,12129],"mapped",[29926]],[[12130,12130],"mapped",[29976]],[[12131,12131],"mapped",[29983]],[[12132,12132],"mapped",[29992]],[[12133,12133],"mapped",[30000]],[[12134,12134],"mapped",[30091]],[[12135,12135],"mapped",[30098]],[[12136,12136],"mapped",[30326]],[[12137,12137],"mapped",[30333]],[[12138,12138],"mapped",[30382]],[[12139,12139],"mapped",[30399]],[[12140,12140],"mapped",[30446]],[[12141,12141],"mapped",[30683]],[[12142,12142],"mapped",[30690]],[[12143,12143],"mapped",[30707]],[[12144,12144],"mapped",[31034]],[[12145,12145],"mapped",[31160]],[[12146,12146],"mapped",[31166]],[[12147,12147],"mapped",[31348]],[[12148,12148],"mapped",[31435]],[[12149,12149],"mapped",[31481]],[[12150,12150],"mapped",[31859]],[[12151,12151],"mapped",[31992]],[[12152,12152],"mapped",[32566]],[[12153,12153],"mapped",[32593]],[[12154,12154],"mapped",[32650]],[[12155,12155],"mapped",[32701]],[[12156,12156],"mapped",[32769]],[[12157,12157],"mapped",[32780]],[[12158,12158],"mapped",[32786]],[[12159,12159],"mapped",[32819]],[[12160,12160],"mapped",[32895]],[[12161,12161],"mapped",[32905]],[[12162,12162],"mapped",[33251]],[[12163,12163],"mapped",[33258]],[[12164,12164],"mapped",[33267]],[[12165,12165],"mapped",[33276]],[[12166,12166],"mapped",[33292]],[[12167,12167],"mapped",[33307]],[[12168,12168],"mapped",[33311]],[[12169,12169],"mapped",[33390]],[[12170,12170],"mapped",[33394]],[[12171,12171],"mapped",[33400]],[[12172,12172],"mapped",[34381]],[[12173,12173],"mapped",[34411]],[[12174,12174],"mapped",[34880]],[[12175,12175],"mapped",[34892]],[[12176,12176],"mapped",[34915]],[[12177,12177],"mapped",[35198]],[[12178,12178],"mapped",[35211]],[[12179,12179],"mapped",[35282]],[[12180,12180],"mapped",[35328]],[[12181,12181],"mapped",[35895]],[[12182,12182],"mapped",[35910]],[[12183,12183],"mapped",[35925]],[[12184,12184],"mapped",[35960]],[[12185,12185],"mapped",[35997]],[[12186,12186],"mapped",[36196]],[[12187,12187],"mapped",[36208]],[[12188,12188],"mapped",[36275]],[[12189,12189],"mapped",[36523]],[[12190,12190],"mapped",[36554]],[[12191,12191],"mapped",[36763]],[[12192,12192],"mapped",[36784]],[[12193,12193],"mapped",[36789]],[[12194,12194],"mapped",[37009]],[[12195,12195],"mapped",[37193]],[[12196,12196],"mapped",[37318]],[[12197,12197],"mapped",[37324]],[[12198,12198],"mapped",[37329]],[[12199,12199],"mapped",[38263]],[[12200,12200],"mapped",[38272]],[[12201,12201],"mapped",[38428]],[[12202,12202],"mapped",[38582]],[[12203,12203],"mapped",[38585]],[[12204,12204],"mapped",[38632]],[[12205,12205],"mapped",[38737]],[[12206,12206],"mapped",[38750]],[[12207,12207],"mapped",[38754]],[[12208,12208],"mapped",[38761]],[[12209,12209],"mapped",[38859]],[[12210,12210],"mapped",[38893]],[[12211,12211],"mapped",[38899]],[[12212,12212],"mapped",[38913]],[[12213,12213],"mapped",[39080]],[[12214,12214],"mapped",[39131]],[[12215,12215],"mapped",[39135]],[[12216,12216],"mapped",[39318]],[[12217,12217],"mapped",[39321]],[[12218,12218],"mapped",[39340]],[[12219,12219],"mapped",[39592]],[[12220,12220],"mapped",[39640]],[[12221,12221],"mapped",[39647]],[[12222,12222],"mapped",[39717]],[[12223,12223],"mapped",[39727]],[[12224,12224],"mapped",[39730]],[[12225,12225],"mapped",[39740]],[[12226,12226],"mapped",[39770]],[[12227,12227],"mapped",[40165]],[[12228,12228],"mapped",[40565]],[[12229,12229],"mapped",[40575]],[[12230,12230],"mapped",[40613]],[[12231,12231],"mapped",[40635]],[[12232,12232],"mapped",[40643]],[[12233,12233],"mapped",[40653]],[[12234,1
2234],"mapped",[40657]],[[12235,12235],"mapped",[40697]],[[12236,12236],"mapped",[40701]],[[12237,12237],"mapped",[40718]],[[12238,12238],"mapped",[40723]],[[12239,12239],"mapped",[40736]],[[12240,12240],"mapped",[40763]],[[12241,12241],"mapped",[40778]],[[12242,12242],"mapped",[40786]],[[12243,12243],"mapped",[40845]],[[12244,12244],"mapped",[40860]],[[12245,12245],"mapped",[40864]],[[12246,12271],"disallowed"],[[12272,12283],"disallowed"],[[12284,12287],"disallowed"],[[12288,12288],"disallowed_STD3_mapped",[32]],[[12289,12289],"valid",[],"NV8"],[[12290,12290],"mapped",[46]],[[12291,12292],"valid",[],"NV8"],[[12293,12295],"valid"],[[12296,12329],"valid",[],"NV8"],[[12330,12333],"valid"],[[12334,12341],"valid",[],"NV8"],[[12342,12342],"mapped",[12306]],[[12343,12343],"valid",[],"NV8"],[[12344,12344],"mapped",[21313]],[[12345,12345],"mapped",[21316]],[[12346,12346],"mapped",[21317]],[[12347,12347],"valid",[],"NV8"],[[12348,12348],"valid"],[[12349,12349],"valid",[],"NV8"],[[12350,12350],"valid",[],"NV8"],[[12351,12351],"valid",[],"NV8"],[[12352,12352],"disallowed"],[[12353,12436],"valid"],[[12437,12438],"valid"],[[12439,12440],"disallowed"],[[12441,12442],"valid"],[[12443,12443],"disallowed_STD3_mapped",[32,12441]],[[12444,12444],"disallowed_STD3_mapped",[32,12442]],[[12445,12446],"valid"],[[12447,12447],"mapped",[12424,12426]],[[12448,12448],"valid",[],"NV8"],[[12449,12542],"valid"],[[12543,12543],"mapped",[12467,12488]],[[12544,12548],"disallowed"],[[12549,12588],"valid"],[[12589,12589],"valid"],[[12590,12592],"disallowed"],[[12593,12593],"mapped",[4352]],[[12594,12594],"mapped",[4353]],[[12595,12595],"mapped",[4522]],[[12596,12596],"mapped",[4354]],[[12597,12597],"mapped",[4524]],[[12598,12598],"mapped",[4525]],[[12599,12599],"mapped",[4355]],[[12600,12600],"mapped",[4356]],[[12601,12601],"mapped",[4357]],[[12602,12602],"mapped",[4528]],[[12603,12603],"mapped",[4529]],[[12604,12604],"mapped",[4530]],[[12605,12605],"mapped",[4531]],[[12606,12606],"mapped",[4532]],[[12607,12607],"mapped",[4533]],[[12608,12608],"mapped",[4378]],[[12609,12609],"mapped",[4358]],[[12610,12610],"mapped",[4359]],[[12611,12611],"mapped",[4360]],[[12612,12612],"mapped",[4385]],[[12613,12613],"mapped",[4361]],[[12614,12614],"mapped",[4362]],[[12615,12615],"mapped",[4363]],[[12616,12616],"mapped",[4364]],[[12617,12617],"mapped",[4365]],[[12618,12618],"mapped",[4366]],[[12619,12619],"mapped",[4367]],[[12620,12620],"mapped",[4368]],[[12621,12621],"mapped",[4369]],[[12622,12622],"mapped",[4370]],[[12623,12623],"mapped",[4449]],[[12624,12624],"mapped",[4450]],[[12625,12625],"mapped",[4451]],[[12626,12626],"mapped",[4452]],[[12627,12627],"mapped",[4453]],[[12628,12628],"mapped",[4454]],[[12629,12629],"mapped",[4455]],[[12630,12630],"mapped",[4456]],[[12631,12631],"mapped",[4457]],[[12632,12632],"mapped",[4458]],[[12633,12633],"mapped",[4459]],[[12634,12634],"mapped",[4460]],[[12635,12635],"mapped",[4461]],[[12636,12636],"mapped",[4462]],[[12637,12637],"mapped",[4463]],[[12638,12638],"mapped",[4464]],[[12639,12639],"mapped",[4465]],[[12640,12640],"mapped",[4466]],[[12641,12641],"mapped",[4467]],[[12642,12642],"mapped",[4468]],[[12643,12643],"mapped",[4469]],[[12644,12644],"disallowed"],[[12645,12645],"mapped",[4372]],[[12646,12646],"mapped",[4373]],[[12647,12647],"mapped",[4551]],[[12648,12648],"mapped",[4552]],[[12649,12649],"mapped",[4556]],[[12650,12650],"mapped",[4558]],[[12651,12651],"mapped",[4563]],[[12652,12652],"mapped",[4567]],[[12653,12653],"mapped",[4569]],[[12654,12654],"mapped",[4380]],[[12655,12655],"mapped",
[4573]],[[12656,12656],"mapped",[4575]],[[12657,12657],"mapped",[4381]],[[12658,12658],"mapped",[4382]],[[12659,12659],"mapped",[4384]],[[12660,12660],"mapped",[4386]],[[12661,12661],"mapped",[4387]],[[12662,12662],"mapped",[4391]],[[12663,12663],"mapped",[4393]],[[12664,12664],"mapped",[4395]],[[12665,12665],"mapped",[4396]],[[12666,12666],"mapped",[4397]],[[12667,12667],"mapped",[4398]],[[12668,12668],"mapped",[4399]],[[12669,12669],"mapped",[4402]],[[12670,12670],"mapped",[4406]],[[12671,12671],"mapped",[4416]],[[12672,12672],"mapped",[4423]],[[12673,12673],"mapped",[4428]],[[12674,12674],"mapped",[4593]],[[12675,12675],"mapped",[4594]],[[12676,12676],"mapped",[4439]],[[12677,12677],"mapped",[4440]],[[12678,12678],"mapped",[4441]],[[12679,12679],"mapped",[4484]],[[12680,12680],"mapped",[4485]],[[12681,12681],"mapped",[4488]],[[12682,12682],"mapped",[4497]],[[12683,12683],"mapped",[4498]],[[12684,12684],"mapped",[4500]],[[12685,12685],"mapped",[4510]],[[12686,12686],"mapped",[4513]],[[12687,12687],"disallowed"],[[12688,12689],"valid",[],"NV8"],[[12690,12690],"mapped",[19968]],[[12691,12691],"mapped",[20108]],[[12692,12692],"mapped",[19977]],[[12693,12693],"mapped",[22235]],[[12694,12694],"mapped",[19978]],[[12695,12695],"mapped",[20013]],[[12696,12696],"mapped",[19979]],[[12697,12697],"mapped",[30002]],[[12698,12698],"mapped",[20057]],[[12699,12699],"mapped",[19993]],[[12700,12700],"mapped",[19969]],[[12701,12701],"mapped",[22825]],[[12702,12702],"mapped",[22320]],[[12703,12703],"mapped",[20154]],[[12704,12727],"valid"],[[12728,12730],"valid"],[[12731,12735],"disallowed"],[[12736,12751],"valid",[],"NV8"],[[12752,12771],"valid",[],"NV8"],[[12772,12783],"disallowed"],[[12784,12799],"valid"],[[12800,12800],"disallowed_STD3_mapped",[40,4352,41]],[[12801,12801],"disallowed_STD3_mapped",[40,4354,41]],[[12802,12802],"disallowed_STD3_mapped",[40,4355,41]],[[12803,12803],"disallowed_STD3_mapped",[40,4357,41]],[[12804,12804],"disallowed_STD3_mapped",[40,4358,41]],[[12805,12805],"disallowed_STD3_mapped",[40,4359,41]],[[12806,12806],"disallowed_STD3_mapped",[40,4361,41]],[[12807,12807],"disallowed_STD3_mapped",[40,4363,41]],[[12808,12808],"disallowed_STD3_mapped",[40,4364,41]],[[12809,12809],"disallowed_STD3_mapped",[40,4366,41]],[[12810,12810],"disallowed_STD3_mapped",[40,4367,41]],[[12811,12811],"disallowed_STD3_mapped",[40,4368,41]],[[12812,12812],"disallowed_STD3_mapped",[40,4369,41]],[[12813,12813],"disallowed_STD3_mapped",[40,4370,41]],[[12814,12814],"disallowed_STD3_mapped",[40,44032,41]],[[12815,12815],"disallowed_STD3_mapped",[40,45208,41]],[[12816,12816],"disallowed_STD3_mapped",[40,45796,41]],[[12817,12817],"disallowed_STD3_mapped",[40,46972,41]],[[12818,12818],"disallowed_STD3_mapped",[40,47560,41]],[[12819,12819],"disallowed_STD3_mapped",[40,48148,41]],[[12820,12820],"disallowed_STD3_mapped",[40,49324,41]],[[12821,12821],"disallowed_STD3_mapped",[40,50500,41]],[[12822,12822],"disallowed_STD3_mapped",[40,51088,41]],[[12823,12823],"disallowed_STD3_mapped",[40,52264,41]],[[12824,12824],"disallowed_STD3_mapped",[40,52852,41]],[[12825,12825],"disallowed_STD3_mapped",[40,53440,41]],[[12826,12826],"disallowed_STD3_mapped",[40,54028,41]],[[12827,12827],"disallowed_STD3_mapped",[40,54616,41]],[[12828,12828],"disallowed_STD3_mapped",[40,51452,41]],[[12829,12829],"disallowed_STD3_mapped",[40,50724,51204,41]],[[12830,12830],"disallowed_STD3_mapped",[40,50724,54980,41]],[[12831,12831],"disallowed"],[[12832,12832],"disallowed_STD3_mapped",[40,19968,41]],[[12833,12833],"disallowed_STD3_mapped",[40,2010
8,41]],[[12834,12834],"disallowed_STD3_mapped",[40,19977,41]],[[12835,12835],"disallowed_STD3_mapped",[40,22235,41]],[[12836,12836],"disallowed_STD3_mapped",[40,20116,41]],[[12837,12837],"disallowed_STD3_mapped",[40,20845,41]],[[12838,12838],"disallowed_STD3_mapped",[40,19971,41]],[[12839,12839],"disallowed_STD3_mapped",[40,20843,41]],[[12840,12840],"disallowed_STD3_mapped",[40,20061,41]],[[12841,12841],"disallowed_STD3_mapped",[40,21313,41]],[[12842,12842],"disallowed_STD3_mapped",[40,26376,41]],[[12843,12843],"disallowed_STD3_mapped",[40,28779,41]],[[12844,12844],"disallowed_STD3_mapped",[40,27700,41]],[[12845,12845],"disallowed_STD3_mapped",[40,26408,41]],[[12846,12846],"disallowed_STD3_mapped",[40,37329,41]],[[12847,12847],"disallowed_STD3_mapped",[40,22303,41]],[[12848,12848],"disallowed_STD3_mapped",[40,26085,41]],[[12849,12849],"disallowed_STD3_mapped",[40,26666,41]],[[12850,12850],"disallowed_STD3_mapped",[40,26377,41]],[[12851,12851],"disallowed_STD3_mapped",[40,31038,41]],[[12852,12852],"disallowed_STD3_mapped",[40,21517,41]],[[12853,12853],"disallowed_STD3_mapped",[40,29305,41]],[[12854,12854],"disallowed_STD3_mapped",[40,36001,41]],[[12855,12855],"disallowed_STD3_mapped",[40,31069,41]],[[12856,12856],"disallowed_STD3_mapped",[40,21172,41]],[[12857,12857],"disallowed_STD3_mapped",[40,20195,41]],[[12858,12858],"disallowed_STD3_mapped",[40,21628,41]],[[12859,12859],"disallowed_STD3_mapped",[40,23398,41]],[[12860,12860],"disallowed_STD3_mapped",[40,30435,41]],[[12861,12861],"disallowed_STD3_mapped",[40,20225,41]],[[12862,12862],"disallowed_STD3_mapped",[40,36039,41]],[[12863,12863],"disallowed_STD3_mapped",[40,21332,41]],[[12864,12864],"disallowed_STD3_mapped",[40,31085,41]],[[12865,12865],"disallowed_STD3_mapped",[40,20241,41]],[[12866,12866],"disallowed_STD3_mapped",[40,33258,41]],[[12867,12867],"disallowed_STD3_mapped",[40,33267,41]],[[12868,12868],"mapped",[21839]],[[12869,12869],"mapped",[24188]],[[12870,12870],"mapped",[25991]],[[12871,12871],"mapped",[31631]],[[12872,12879],"valid",[],"NV8"],[[12880,12880],"mapped",[112,116,101]],[[12881,12881],"mapped",[50,49]],[[12882,12882],"mapped",[50,50]],[[12883,12883],"mapped",[50,51]],[[12884,12884],"mapped",[50,52]],[[12885,12885],"mapped",[50,53]],[[12886,12886],"mapped",[50,54]],[[12887,12887],"mapped",[50,55]],[[12888,12888],"mapped",[50,56]],[[12889,12889],"mapped",[50,57]],[[12890,12890],"mapped",[51,48]],[[12891,12891],"mapped",[51,49]],[[12892,12892],"mapped",[51,50]],[[12893,12893],"mapped",[51,51]],[[12894,12894],"mapped",[51,52]],[[12895,12895],"mapped",[51,53]],[[12896,12896],"mapped",[4352]],[[12897,12897],"mapped",[4354]],[[12898,12898],"mapped",[4355]],[[12899,12899],"mapped",[4357]],[[12900,12900],"mapped",[4358]],[[12901,12901],"mapped",[4359]],[[12902,12902],"mapped",[4361]],[[12903,12903],"mapped",[4363]],[[12904,12904],"mapped",[4364]],[[12905,12905],"mapped",[4366]],[[12906,12906],"mapped",[4367]],[[12907,12907],"mapped",[4368]],[[12908,12908],"mapped",[4369]],[[12909,12909],"mapped",[4370]],[[12910,12910],"mapped",[44032]],[[12911,12911],"mapped",[45208]],[[12912,12912],"mapped",[45796]],[[12913,12913],"mapped",[46972]],[[12914,12914],"mapped",[47560]],[[12915,12915],"mapped",[48148]],[[12916,12916],"mapped",[49324]],[[12917,12917],"mapped",[50500]],[[12918,12918],"mapped",[51088]],[[12919,12919],"mapped",[52264]],[[12920,12920],"mapped",[52852]],[[12921,12921],"mapped",[53440]],[[12922,12922],"mapped",[54028]],[[12923,12923],"mapped",[54616]],[[12924,12924],"mapped",[52280,44256]],[[12925,12925],"mapped",[51452
,51032]],[[12926,12926],"mapped",[50864]],[[12927,12927],"valid",[],"NV8"],[[12928,12928],"mapped",[19968]],[[12929,12929],"mapped",[20108]],[[12930,12930],"mapped",[19977]],[[12931,12931],"mapped",[22235]],[[12932,12932],"mapped",[20116]],[[12933,12933],"mapped",[20845]],[[12934,12934],"mapped",[19971]],[[12935,12935],"mapped",[20843]],[[12936,12936],"mapped",[20061]],[[12937,12937],"mapped",[21313]],[[12938,12938],"mapped",[26376]],[[12939,12939],"mapped",[28779]],[[12940,12940],"mapped",[27700]],[[12941,12941],"mapped",[26408]],[[12942,12942],"mapped",[37329]],[[12943,12943],"mapped",[22303]],[[12944,12944],"mapped",[26085]],[[12945,12945],"mapped",[26666]],[[12946,12946],"mapped",[26377]],[[12947,12947],"mapped",[31038]],[[12948,12948],"mapped",[21517]],[[12949,12949],"mapped",[29305]],[[12950,12950],"mapped",[36001]],[[12951,12951],"mapped",[31069]],[[12952,12952],"mapped",[21172]],[[12953,12953],"mapped",[31192]],[[12954,12954],"mapped",[30007]],[[12955,12955],"mapped",[22899]],[[12956,12956],"mapped",[36969]],[[12957,12957],"mapped",[20778]],[[12958,12958],"mapped",[21360]],[[12959,12959],"mapped",[27880]],[[12960,12960],"mapped",[38917]],[[12961,12961],"mapped",[20241]],[[12962,12962],"mapped",[20889]],[[12963,12963],"mapped",[27491]],[[12964,12964],"mapped",[19978]],[[12965,12965],"mapped",[20013]],[[12966,12966],"mapped",[19979]],[[12967,12967],"mapped",[24038]],[[12968,12968],"mapped",[21491]],[[12969,12969],"mapped",[21307]],[[12970,12970],"mapped",[23447]],[[12971,12971],"mapped",[23398]],[[12972,12972],"mapped",[30435]],[[12973,12973],"mapped",[20225]],[[12974,12974],"mapped",[36039]],[[12975,12975],"mapped",[21332]],[[12976,12976],"mapped",[22812]],[[12977,12977],"mapped",[51,54]],[[12978,12978],"mapped",[51,55]],[[12979,12979],"mapped",[51,56]],[[12980,12980],"mapped",[51,57]],[[12981,12981],"mapped",[52,48]],[[12982,12982],"mapped",[52,49]],[[12983,12983],"mapped",[52,50]],[[12984,12984],"mapped",[52,51]],[[12985,12985],"mapped",[52,52]],[[12986,12986],"mapped",[52,53]],[[12987,12987],"mapped",[52,54]],[[12988,12988],"mapped",[52,55]],[[12989,12989],"mapped",[52,56]],[[12990,12990],"mapped",[52,57]],[[12991,12991],"mapped",[53,48]],[[12992,12992],"mapped",[49,26376]],[[12993,12993],"mapped",[50,26376]],[[12994,12994],"mapped",[51,26376]],[[12995,12995],"mapped",[52,26376]],[[12996,12996],"mapped",[53,26376]],[[12997,12997],"mapped",[54,26376]],[[12998,12998],"mapped",[55,26376]],[[12999,12999],"mapped",[56,26376]],[[13000,13000],"mapped",[57,26376]],[[13001,13001],"mapped",[49,48,26376]],[[13002,13002],"mapped",[49,49,26376]],[[13003,13003],"mapped",[49,50,26376]],[[13004,13004],"mapped",[104,103]],[[13005,13005],"mapped",[101,114,103]],[[13006,13006],"mapped",[101,118]],[[13007,13007],"mapped",[108,116,100]],[[13008,13008],"mapped",[12450]],[[13009,13009],"mapped",[12452]],[[13010,13010],"mapped",[12454]],[[13011,13011],"mapped",[12456]],[[13012,13012],"mapped",[12458]],[[13013,13013],"mapped",[12459]],[[13014,13014],"mapped",[12461]],[[13015,13015],"mapped",[12463]],[[13016,13016],"mapped",[12465]],[[13017,13017],"mapped",[12467]],[[13018,13018],"mapped",[12469]],[[13019,13019],"mapped",[12471]],[[13020,13020],"mapped",[12473]],[[13021,13021],"mapped",[12475]],[[13022,13022],"mapped",[12477]],[[13023,13023],"mapped",[12479]],[[13024,13024],"mapped",[12481]],[[13025,13025],"mapped",[12484]],[[13026,13026],"mapped",[12486]],[[13027,13027],"mapped",[12488]],[[13028,13028],"mapped",[12490]],[[13029,13029],"mapped",[12491]],[[13030,13030],"mapped",[12492]],[[13031,13031],"mapp
ed",[12493]],[[13032,13032],"mapped",[12494]],[[13033,13033],"mapped",[12495]],[[13034,13034],"mapped",[12498]],[[13035,13035],"mapped",[12501]],[[13036,13036],"mapped",[12504]],[[13037,13037],"mapped",[12507]],[[13038,13038],"mapped",[12510]],[[13039,13039],"mapped",[12511]],[[13040,13040],"mapped",[12512]],[[13041,13041],"mapped",[12513]],[[13042,13042],"mapped",[12514]],[[13043,13043],"mapped",[12516]],[[13044,13044],"mapped",[12518]],[[13045,13045],"mapped",[12520]],[[13046,13046],"mapped",[12521]],[[13047,13047],"mapped",[12522]],[[13048,13048],"mapped",[12523]],[[13049,13049],"mapped",[12524]],[[13050,13050],"mapped",[12525]],[[13051,13051],"mapped",[12527]],[[13052,13052],"mapped",[12528]],[[13053,13053],"mapped",[12529]],[[13054,13054],"mapped",[12530]],[[13055,13055],"disallowed"],[[13056,13056],"mapped",[12450,12497,12540,12488]],[[13057,13057],"mapped",[12450,12523,12501,12449]],[[13058,13058],"mapped",[12450,12531,12506,12450]],[[13059,13059],"mapped",[12450,12540,12523]],[[13060,13060],"mapped",[12452,12491,12531,12464]],[[13061,13061],"mapped",[12452,12531,12481]],[[13062,13062],"mapped",[12454,12457,12531]],[[13063,13063],"mapped",[12456,12473,12463,12540,12489]],[[13064,13064],"mapped",[12456,12540,12459,12540]],[[13065,13065],"mapped",[12458,12531,12473]],[[13066,13066],"mapped",[12458,12540,12512]],[[13067,13067],"mapped",[12459,12452,12522]],[[13068,13068],"mapped",[12459,12521,12483,12488]],[[13069,13069],"mapped",[12459,12525,12522,12540]],[[13070,13070],"mapped",[12460,12525,12531]],[[13071,13071],"mapped",[12460,12531,12510]],[[13072,13072],"mapped",[12462,12460]],[[13073,13073],"mapped",[12462,12491,12540]],[[13074,13074],"mapped",[12461,12517,12522,12540]],[[13075,13075],"mapped",[12462,12523,12480,12540]],[[13076,13076],"mapped",[12461,12525]],[[13077,13077],"mapped",[12461,12525,12464,12521,12512]],[[13078,13078],"mapped",[12461,12525,12513,12540,12488,12523]],[[13079,13079],"mapped",[12461,12525,12527,12483,12488]],[[13080,13080],"mapped",[12464,12521,12512]],[[13081,13081],"mapped",[12464,12521,12512,12488,12531]],[[13082,13082],"mapped",[12463,12523,12476,12452,12525]],[[13083,13083],"mapped",[12463,12525,12540,12493]],[[13084,13084],"mapped",[12465,12540,12473]],[[13085,13085],"mapped",[12467,12523,12490]],[[13086,13086],"mapped",[12467,12540,12509]],[[13087,13087],"mapped",[12469,12452,12463,12523]],[[13088,13088],"mapped",[12469,12531,12481,12540,12512]],[[13089,13089],"mapped",[12471,12522,12531,12464]],[[13090,13090],"mapped",[12475,12531,12481]],[[13091,13091],"mapped",[12475,12531,12488]],[[13092,13092],"mapped",[12480,12540,12473]],[[13093,13093],"mapped",[12487,12471]],[[13094,13094],"mapped",[12489,12523]],[[13095,13095],"mapped",[12488,12531]],[[13096,13096],"mapped",[12490,12494]],[[13097,13097],"mapped",[12494,12483,12488]],[[13098,13098],"mapped",[12495,12452,12484]],[[13099,13099],"mapped",[12497,12540,12475,12531,12488]],[[13100,13100],"mapped",[12497,12540,12484]],[[13101,13101],"mapped",[12496,12540,12524,12523]],[[13102,13102],"mapped",[12500,12450,12473,12488,12523]],[[13103,13103],"mapped",[12500,12463,12523]],[[13104,13104],"mapped",[12500,12467]],[[13105,13105],"mapped",[12499,12523]],[[13106,13106],"mapped",[12501,12449,12521,12483,12489]],[[13107,13107],"mapped",[12501,12451,12540,12488]],[[13108,13108],"mapped",[12502,12483,12471,12455,12523]],[[13109,13109],"mapped",[12501,12521,12531]],[[13110,13110],"mapped",[12504,12463,12479,12540,12523]],[[13111,13111],"mapped",[12506,12477]],[[13112,13112],"mapped",[12506,12491,12498]],[[13113,1
3113],"mapped",[12504,12523,12484]],[[13114,13114],"mapped",[12506,12531,12473]],[[13115,13115],"mapped",[12506,12540,12472]],[[13116,13116],"mapped",[12505,12540,12479]],[[13117,13117],"mapped",[12509,12452,12531,12488]],[[13118,13118],"mapped",[12508,12523,12488]],[[13119,13119],"mapped",[12507,12531]],[[13120,13120],"mapped",[12509,12531,12489]],[[13121,13121],"mapped",[12507,12540,12523]],[[13122,13122],"mapped",[12507,12540,12531]],[[13123,13123],"mapped",[12510,12452,12463,12525]],[[13124,13124],"mapped",[12510,12452,12523]],[[13125,13125],"mapped",[12510,12483,12495]],[[13126,13126],"mapped",[12510,12523,12463]],[[13127,13127],"mapped",[12510,12531,12471,12519,12531]],[[13128,13128],"mapped",[12511,12463,12525,12531]],[[13129,13129],"mapped",[12511,12522]],[[13130,13130],"mapped",[12511,12522,12496,12540,12523]],[[13131,13131],"mapped",[12513,12460]],[[13132,13132],"mapped",[12513,12460,12488,12531]],[[13133,13133],"mapped",[12513,12540,12488,12523]],[[13134,13134],"mapped",[12516,12540,12489]],[[13135,13135],"mapped",[12516,12540,12523]],[[13136,13136],"mapped",[12518,12450,12531]],[[13137,13137],"mapped",[12522,12483,12488,12523]],[[13138,13138],"mapped",[12522,12521]],[[13139,13139],"mapped",[12523,12500,12540]],[[13140,13140],"mapped",[12523,12540,12502,12523]],[[13141,13141],"mapped",[12524,12512]],[[13142,13142],"mapped",[12524,12531,12488,12466,12531]],[[13143,13143],"mapped",[12527,12483,12488]],[[13144,13144],"mapped",[48,28857]],[[13145,13145],"mapped",[49,28857]],[[13146,13146],"mapped",[50,28857]],[[13147,13147],"mapped",[51,28857]],[[13148,13148],"mapped",[52,28857]],[[13149,13149],"mapped",[53,28857]],[[13150,13150],"mapped",[54,28857]],[[13151,13151],"mapped",[55,28857]],[[13152,13152],"mapped",[56,28857]],[[13153,13153],"mapped",[57,28857]],[[13154,13154],"mapped",[49,48,28857]],[[13155,13155],"mapped",[49,49,28857]],[[13156,13156],"mapped",[49,50,28857]],[[13157,13157],"mapped",[49,51,28857]],[[13158,13158],"mapped",[49,52,28857]],[[13159,13159],"mapped",[49,53,28857]],[[13160,13160],"mapped",[49,54,28857]],[[13161,13161],"mapped",[49,55,28857]],[[13162,13162],"mapped",[49,56,28857]],[[13163,13163],"mapped",[49,57,28857]],[[13164,13164],"mapped",[50,48,28857]],[[13165,13165],"mapped",[50,49,28857]],[[13166,13166],"mapped",[50,50,28857]],[[13167,13167],"mapped",[50,51,28857]],[[13168,13168],"mapped",[50,52,28857]],[[13169,13169],"mapped",[104,112,97]],[[13170,13170],"mapped",[100,97]],[[13171,13171],"mapped",[97,117]],[[13172,13172],"mapped",[98,97,114]],[[13173,13173],"mapped",[111,118]],[[13174,13174],"mapped",[112,99]],[[13175,13175],"mapped",[100,109]],[[13176,13176],"mapped",[100,109,50]],[[13177,13177],"mapped",[100,109,51]],[[13178,13178],"mapped",[105,117]],[[13179,13179],"mapped",[24179,25104]],[[13180,13180],"mapped",[26157,21644]],[[13181,13181],"mapped",[22823,27491]],[[13182,13182],"mapped",[26126,27835]],[[13183,13183],"mapped",[26666,24335,20250,31038]],[[13184,13184],"mapped",[112,97]],[[13185,13185],"mapped",[110,97]],[[13186,13186],"mapped",[956,97]],[[13187,13187],"mapped",[109,97]],[[13188,13188],"mapped",[107,97]],[[13189,13189],"mapped",[107,98]],[[13190,13190],"mapped",[109,98]],[[13191,13191],"mapped",[103,98]],[[13192,13192],"mapped",[99,97,108]],[[13193,13193],"mapped",[107,99,97,108]],[[13194,13194],"mapped",[112,102]],[[13195,13195],"mapped",[110,102]],[[13196,13196],"mapped",[956,102]],[[13197,13197],"mapped",[956,103]],[[13198,13198],"mapped",[109,103]],[[13199,13199],"mapped",[107,103]],[[13200,13200],"mapped",[104,122]],[[13201,13201],"
mapped",[107,104,122]],[[13202,13202],"mapped",[109,104,122]],[[13203,13203],"mapped",[103,104,122]],[[13204,13204],"mapped",[116,104,122]],[[13205,13205],"mapped",[956,108]],[[13206,13206],"mapped",[109,108]],[[13207,13207],"mapped",[100,108]],[[13208,13208],"mapped",[107,108]],[[13209,13209],"mapped",[102,109]],[[13210,13210],"mapped",[110,109]],[[13211,13211],"mapped",[956,109]],[[13212,13212],"mapped",[109,109]],[[13213,13213],"mapped",[99,109]],[[13214,13214],"mapped",[107,109]],[[13215,13215],"mapped",[109,109,50]],[[13216,13216],"mapped",[99,109,50]],[[13217,13217],"mapped",[109,50]],[[13218,13218],"mapped",[107,109,50]],[[13219,13219],"mapped",[109,109,51]],[[13220,13220],"mapped",[99,109,51]],[[13221,13221],"mapped",[109,51]],[[13222,13222],"mapped",[107,109,51]],[[13223,13223],"mapped",[109,8725,115]],[[13224,13224],"mapped",[109,8725,115,50]],[[13225,13225],"mapped",[112,97]],[[13226,13226],"mapped",[107,112,97]],[[13227,13227],"mapped",[109,112,97]],[[13228,13228],"mapped",[103,112,97]],[[13229,13229],"mapped",[114,97,100]],[[13230,13230],"mapped",[114,97,100,8725,115]],[[13231,13231],"mapped",[114,97,100,8725,115,50]],[[13232,13232],"mapped",[112,115]],[[13233,13233],"mapped",[110,115]],[[13234,13234],"mapped",[956,115]],[[13235,13235],"mapped",[109,115]],[[13236,13236],"mapped",[112,118]],[[13237,13237],"mapped",[110,118]],[[13238,13238],"mapped",[956,118]],[[13239,13239],"mapped",[109,118]],[[13240,13240],"mapped",[107,118]],[[13241,13241],"mapped",[109,118]],[[13242,13242],"mapped",[112,119]],[[13243,13243],"mapped",[110,119]],[[13244,13244],"mapped",[956,119]],[[13245,13245],"mapped",[109,119]],[[13246,13246],"mapped",[107,119]],[[13247,13247],"mapped",[109,119]],[[13248,13248],"mapped",[107,969]],[[13249,13249],"mapped",[109,969]],[[13250,13250],"disallowed"],[[13251,13251],"mapped",[98,113]],[[13252,13252],"mapped",[99,99]],[[13253,13253],"mapped",[99,100]],[[13254,13254],"mapped",[99,8725,107,103]],[[13255,13255],"disallowed"],[[13256,13256],"mapped",[100,98]],[[13257,13257],"mapped",[103,121]],[[13258,13258],"mapped",[104,97]],[[13259,13259],"mapped",[104,112]],[[13260,13260],"mapped",[105,110]],[[13261,13261],"mapped",[107,107]],[[13262,13262],"mapped",[107,109]],[[13263,13263],"mapped",[107,116]],[[13264,13264],"mapped",[108,109]],[[13265,13265],"mapped",[108,110]],[[13266,13266],"mapped",[108,111,103]],[[13267,13267],"mapped",[108,120]],[[13268,13268],"mapped",[109,98]],[[13269,13269],"mapped",[109,105,108]],[[13270,13270],"mapped",[109,111,108]],[[13271,13271],"mapped",[112,104]],[[13272,13272],"disallowed"],[[13273,13273],"mapped",[112,112,109]],[[13274,13274],"mapped",[112,114]],[[13275,13275],"mapped",[115,114]],[[13276,13276],"mapped",[115,118]],[[13277,13277],"mapped",[119,98]],[[13278,13278],"mapped",[118,8725,109]],[[13279,13279],"mapped",[97,8725,109]],[[13280,13280],"mapped",[49,26085]],[[13281,13281],"mapped",[50,26085]],[[13282,13282],"mapped",[51,26085]],[[13283,13283],"mapped",[52,26085]],[[13284,13284],"mapped",[53,26085]],[[13285,13285],"mapped",[54,26085]],[[13286,13286],"mapped",[55,26085]],[[13287,13287],"mapped",[56,26085]],[[13288,13288],"mapped",[57,26085]],[[13289,13289],"mapped",[49,48,26085]],[[13290,13290],"mapped",[49,49,26085]],[[13291,13291],"mapped",[49,50,26085]],[[13292,13292],"mapped",[49,51,26085]],[[13293,13293],"mapped",[49,52,26085]],[[13294,13294],"mapped",[49,53,26085]],[[13295,13295],"mapped",[49,54,26085]],[[13296,13296],"mapped",[49,55,26085]],[[13297,13297],"mapped",[49,56,26085]],[[13298,13298],"mapped",[49,57,26085]],[[132
99,13299],"mapped",[50,48,26085]],[[13300,13300],"mapped",[50,49,26085]],[[13301,13301],"mapped",[50,50,26085]],[[13302,13302],"mapped",[50,51,26085]],[[13303,13303],"mapped",[50,52,26085]],[[13304,13304],"mapped",[50,53,26085]],[[13305,13305],"mapped",[50,54,26085]],[[13306,13306],"mapped",[50,55,26085]],[[13307,13307],"mapped",[50,56,26085]],[[13308,13308],"mapped",[50,57,26085]],[[13309,13309],"mapped",[51,48,26085]],[[13310,13310],"mapped",[51,49,26085]],[[13311,13311],"mapped",[103,97,108]],[[13312,19893],"valid"],[[19894,19903],"disallowed"],[[19904,19967],"valid",[],"NV8"],[[19968,40869],"valid"],[[40870,40891],"valid"],[[40892,40899],"valid"],[[40900,40907],"valid"],[[40908,40908],"valid"],[[40909,40917],"valid"],[[40918,40959],"disallowed"],[[40960,42124],"valid"],[[42125,42127],"disallowed"],[[42128,42145],"valid",[],"NV8"],[[42146,42147],"valid",[],"NV8"],[[42148,42163],"valid",[],"NV8"],[[42164,42164],"valid",[],"NV8"],[[42165,42176],"valid",[],"NV8"],[[42177,42177],"valid",[],"NV8"],[[42178,42180],"valid",[],"NV8"],[[42181,42181],"valid",[],"NV8"],[[42182,42182],"valid",[],"NV8"],[[42183,42191],"disallowed"],[[42192,42237],"valid"],[[42238,42239],"valid",[],"NV8"],[[42240,42508],"valid"],[[42509,42511],"valid",[],"NV8"],[[42512,42539],"valid"],[[42540,42559],"disallowed"],[[42560,42560],"mapped",[42561]],[[42561,42561],"valid"],[[42562,42562],"mapped",[42563]],[[42563,42563],"valid"],[[42564,42564],"mapped",[42565]],[[42565,42565],"valid"],[[42566,42566],"mapped",[42567]],[[42567,42567],"valid"],[[42568,42568],"mapped",[42569]],[[42569,42569],"valid"],[[42570,42570],"mapped",[42571]],[[42571,42571],"valid"],[[42572,42572],"mapped",[42573]],[[42573,42573],"valid"],[[42574,42574],"mapped",[42575]],[[42575,42575],"valid"],[[42576,42576],"mapped",[42577]],[[42577,42577],"valid"],[[42578,42578],"mapped",[42579]],[[42579,42579],"valid"],[[42580,42580],"mapped",[42581]],[[42581,42581],"valid"],[[42582,42582],"mapped",[42583]],[[42583,42583],"valid"],[[42584,42584],"mapped",[42585]],[[42585,42585],"valid"],[[42586,42586],"mapped",[42587]],[[42587,42587],"valid"],[[42588,42588],"mapped",[42589]],[[42589,42589],"valid"],[[42590,42590],"mapped",[42591]],[[42591,42591],"valid"],[[42592,42592],"mapped",[42593]],[[42593,42593],"valid"],[[42594,42594],"mapped",[42595]],[[42595,42595],"valid"],[[42596,42596],"mapped",[42597]],[[42597,42597],"valid"],[[42598,42598],"mapped",[42599]],[[42599,42599],"valid"],[[42600,42600],"mapped",[42601]],[[42601,42601],"valid"],[[42602,42602],"mapped",[42603]],[[42603,42603],"valid"],[[42604,42604],"mapped",[42605]],[[42605,42607],"valid"],[[42608,42611],"valid",[],"NV8"],[[42612,42619],"valid"],[[42620,42621],"valid"],[[42622,42622],"valid",[],"NV8"],[[42623,42623],"valid"],[[42624,42624],"mapped",[42625]],[[42625,42625],"valid"],[[42626,42626],"mapped",[42627]],[[42627,42627],"valid"],[[42628,42628],"mapped",[42629]],[[42629,42629],"valid"],[[42630,42630],"mapped",[42631]],[[42631,42631],"valid"],[[42632,42632],"mapped",[42633]],[[42633,42633],"valid"],[[42634,42634],"mapped",[42635]],[[42635,42635],"valid"],[[42636,42636],"mapped",[42637]],[[42637,42637],"valid"],[[42638,42638],"mapped",[42639]],[[42639,42639],"valid"],[[42640,42640],"mapped",[42641]],[[42641,42641],"valid"],[[42642,42642],"mapped",[42643]],[[42643,42643],"valid"],[[42644,42644],"mapped",[42645]],[[42645,42645],"valid"],[[42646,42646],"mapped",[42647]],[[42647,42647],"valid"],[[42648,42648],"mapped",[42649]],[[42649,42649],"valid"],[[42650,42650],"mapped",[42651]],[[42651,42651],"valid"],[[
42652,42652],"mapped",[1098]],[[42653,42653],"mapped",[1100]],[[42654,42654],"valid"],[[42655,42655],"valid"],[[42656,42725],"valid"],[[42726,42735],"valid",[],"NV8"],[[42736,42737],"valid"],[[42738,42743],"valid",[],"NV8"],[[42744,42751],"disallowed"],[[42752,42774],"valid",[],"NV8"],[[42775,42778],"valid"],[[42779,42783],"valid"],[[42784,42785],"valid",[],"NV8"],[[42786,42786],"mapped",[42787]],[[42787,42787],"valid"],[[42788,42788],"mapped",[42789]],[[42789,42789],"valid"],[[42790,42790],"mapped",[42791]],[[42791,42791],"valid"],[[42792,42792],"mapped",[42793]],[[42793,42793],"valid"],[[42794,42794],"mapped",[42795]],[[42795,42795],"valid"],[[42796,42796],"mapped",[42797]],[[42797,42797],"valid"],[[42798,42798],"mapped",[42799]],[[42799,42801],"valid"],[[42802,42802],"mapped",[42803]],[[42803,42803],"valid"],[[42804,42804],"mapped",[42805]],[[42805,42805],"valid"],[[42806,42806],"mapped",[42807]],[[42807,42807],"valid"],[[42808,42808],"mapped",[42809]],[[42809,42809],"valid"],[[42810,42810],"mapped",[42811]],[[42811,42811],"valid"],[[42812,42812],"mapped",[42813]],[[42813,42813],"valid"],[[42814,42814],"mapped",[42815]],[[42815,42815],"valid"],[[42816,42816],"mapped",[42817]],[[42817,42817],"valid"],[[42818,42818],"mapped",[42819]],[[42819,42819],"valid"],[[42820,42820],"mapped",[42821]],[[42821,42821],"valid"],[[42822,42822],"mapped",[42823]],[[42823,42823],"valid"],[[42824,42824],"mapped",[42825]],[[42825,42825],"valid"],[[42826,42826],"mapped",[42827]],[[42827,42827],"valid"],[[42828,42828],"mapped",[42829]],[[42829,42829],"valid"],[[42830,42830],"mapped",[42831]],[[42831,42831],"valid"],[[42832,42832],"mapped",[42833]],[[42833,42833],"valid"],[[42834,42834],"mapped",[42835]],[[42835,42835],"valid"],[[42836,42836],"mapped",[42837]],[[42837,42837],"valid"],[[42838,42838],"mapped",[42839]],[[42839,42839],"valid"],[[42840,42840],"mapped",[42841]],[[42841,42841],"valid"],[[42842,42842],"mapped",[42843]],[[42843,42843],"valid"],[[42844,42844],"mapped",[42845]],[[42845,42845],"valid"],[[42846,42846],"mapped",[42847]],[[42847,42847],"valid"],[[42848,42848],"mapped",[42849]],[[42849,42849],"valid"],[[42850,42850],"mapped",[42851]],[[42851,42851],"valid"],[[42852,42852],"mapped",[42853]],[[42853,42853],"valid"],[[42854,42854],"mapped",[42855]],[[42855,42855],"valid"],[[42856,42856],"mapped",[42857]],[[42857,42857],"valid"],[[42858,42858],"mapped",[42859]],[[42859,42859],"valid"],[[42860,42860],"mapped",[42861]],[[42861,42861],"valid"],[[42862,42862],"mapped",[42863]],[[42863,42863],"valid"],[[42864,42864],"mapped",[42863]],[[42865,42872],"valid"],[[42873,42873],"mapped",[42874]],[[42874,42874],"valid"],[[42875,42875],"mapped",[42876]],[[42876,42876],"valid"],[[42877,42877],"mapped",[7545]],[[42878,42878],"mapped",[42879]],[[42879,42879],"valid"],[[42880,42880],"mapped",[42881]],[[42881,42881],"valid"],[[42882,42882],"mapped",[42883]],[[42883,42883],"valid"],[[42884,42884],"mapped",[42885]],[[42885,42885],"valid"],[[42886,42886],"mapped",[42887]],[[42887,42888],"valid"],[[42889,42890],"valid",[],"NV8"],[[42891,42891],"mapped",[42892]],[[42892,42892],"valid"],[[42893,42893],"mapped",[613]],[[42894,42894],"valid"],[[42895,42895],"valid"],[[42896,42896],"mapped",[42897]],[[42897,42897],"valid"],[[42898,42898],"mapped",[42899]],[[42899,42899],"valid"],[[42900,42901],"valid"],[[42902,42902],"mapped",[42903]],[[42903,42903],"valid"],[[42904,42904],"mapped",[42905]],[[42905,42905],"valid"],[[42906,42906],"mapped",[42907]],[[42907,42907],"valid"],[[42908,42908],"mapped",[42909]],[[42909,42909],"valid"]
,[[42910,42910],"mapped",[42911]],[[42911,42911],"valid"],[[42912,42912],"mapped",[42913]],[[42913,42913],"valid"],[[42914,42914],"mapped",[42915]],[[42915,42915],"valid"],[[42916,42916],"mapped",[42917]],[[42917,42917],"valid"],[[42918,42918],"mapped",[42919]],[[42919,42919],"valid"],[[42920,42920],"mapped",[42921]],[[42921,42921],"valid"],[[42922,42922],"mapped",[614]],[[42923,42923],"mapped",[604]],[[42924,42924],"mapped",[609]],[[42925,42925],"mapped",[620]],[[42926,42927],"disallowed"],[[42928,42928],"mapped",[670]],[[42929,42929],"mapped",[647]],[[42930,42930],"mapped",[669]],[[42931,42931],"mapped",[43859]],[[42932,42932],"mapped",[42933]],[[42933,42933],"valid"],[[42934,42934],"mapped",[42935]],[[42935,42935],"valid"],[[42936,42998],"disallowed"],[[42999,42999],"valid"],[[43000,43000],"mapped",[295]],[[43001,43001],"mapped",[339]],[[43002,43002],"valid"],[[43003,43007],"valid"],[[43008,43047],"valid"],[[43048,43051],"valid",[],"NV8"],[[43052,43055],"disallowed"],[[43056,43065],"valid",[],"NV8"],[[43066,43071],"disallowed"],[[43072,43123],"valid"],[[43124,43127],"valid",[],"NV8"],[[43128,43135],"disallowed"],[[43136,43204],"valid"],[[43205,43213],"disallowed"],[[43214,43215],"valid",[],"NV8"],[[43216,43225],"valid"],[[43226,43231],"disallowed"],[[43232,43255],"valid"],[[43256,43258],"valid",[],"NV8"],[[43259,43259],"valid"],[[43260,43260],"valid",[],"NV8"],[[43261,43261],"valid"],[[43262,43263],"disallowed"],[[43264,43309],"valid"],[[43310,43311],"valid",[],"NV8"],[[43312,43347],"valid"],[[43348,43358],"disallowed"],[[43359,43359],"valid",[],"NV8"],[[43360,43388],"valid",[],"NV8"],[[43389,43391],"disallowed"],[[43392,43456],"valid"],[[43457,43469],"valid",[],"NV8"],[[43470,43470],"disallowed"],[[43471,43481],"valid"],[[43482,43485],"disallowed"],[[43486,43487],"valid",[],"NV8"],[[43488,43518],"valid"],[[43519,43519],"disallowed"],[[43520,43574],"valid"],[[43575,43583],"disallowed"],[[43584,43597],"valid"],[[43598,43599],"disallowed"],[[43600,43609],"valid"],[[43610,43611],"disallowed"],[[43612,43615],"valid",[],"NV8"],[[43616,43638],"valid"],[[43639,43641],"valid",[],"NV8"],[[43642,43643],"valid"],[[43644,43647],"valid"],[[43648,43714],"valid"],[[43715,43738],"disallowed"],[[43739,43741],"valid"],[[43742,43743],"valid",[],"NV8"],[[43744,43759],"valid"],[[43760,43761],"valid",[],"NV8"],[[43762,43766],"valid"],[[43767,43776],"disallowed"],[[43777,43782],"valid"],[[43783,43784],"disallowed"],[[43785,43790],"valid"],[[43791,43792],"disallowed"],[[43793,43798],"valid"],[[43799,43807],"disallowed"],[[43808,43814],"valid"],[[43815,43815],"disallowed"],[[43816,43822],"valid"],[[43823,43823],"disallowed"],[[43824,43866],"valid"],[[43867,43867],"valid",[],"NV8"],[[43868,43868],"mapped",[42791]],[[43869,43869],"mapped",[43831]],[[43870,43870],"mapped",[619]],[[43871,43871],"mapped",[43858]],[[43872,43875],"valid"],[[43876,43877],"valid"],[[43878,43887],"disallowed"],[[43888,43888],"mapped",[5024]],[[43889,43889],"mapped",[5025]],[[43890,43890],"mapped",[5026]],[[43891,43891],"mapped",[5027]],[[43892,43892],"mapped",[5028]],[[43893,43893],"mapped",[5029]],[[43894,43894],"mapped",[5030]],[[43895,43895],"mapped",[5031]],[[43896,43896],"mapped",[5032]],[[43897,43897],"mapped",[5033]],[[43898,43898],"mapped",[5034]],[[43899,43899],"mapped",[5035]],[[43900,43900],"mapped",[5036]],[[43901,43901],"mapped",[5037]],[[43902,43902],"mapped",[5038]],[[43903,43903],"mapped",[5039]],[[43904,43904],"mapped",[5040]],[[43905,43905],"mapped",[5041]],[[43906,43906],"mapped",[5042]],[[43907,43907],"mapped",[5043]],
[[43908,43908],"mapped",[5044]],[[43909,43909],"mapped",[5045]],[[43910,43910],"mapped",[5046]],[[43911,43911],"mapped",[5047]],[[43912,43912],"mapped",[5048]],[[43913,43913],"mapped",[5049]],[[43914,43914],"mapped",[5050]],[[43915,43915],"mapped",[5051]],[[43916,43916],"mapped",[5052]],[[43917,43917],"mapped",[5053]],[[43918,43918],"mapped",[5054]],[[43919,43919],"mapped",[5055]],[[43920,43920],"mapped",[5056]],[[43921,43921],"mapped",[5057]],[[43922,43922],"mapped",[5058]],[[43923,43923],"mapped",[5059]],[[43924,43924],"mapped",[5060]],[[43925,43925],"mapped",[5061]],[[43926,43926],"mapped",[5062]],[[43927,43927],"mapped",[5063]],[[43928,43928],"mapped",[5064]],[[43929,43929],"mapped",[5065]],[[43930,43930],"mapped",[5066]],[[43931,43931],"mapped",[5067]],[[43932,43932],"mapped",[5068]],[[43933,43933],"mapped",[5069]],[[43934,43934],"mapped",[5070]],[[43935,43935],"mapped",[5071]],[[43936,43936],"mapped",[5072]],[[43937,43937],"mapped",[5073]],[[43938,43938],"mapped",[5074]],[[43939,43939],"mapped",[5075]],[[43940,43940],"mapped",[5076]],[[43941,43941],"mapped",[5077]],[[43942,43942],"mapped",[5078]],[[43943,43943],"mapped",[5079]],[[43944,43944],"mapped",[5080]],[[43945,43945],"mapped",[5081]],[[43946,43946],"mapped",[5082]],[[43947,43947],"mapped",[5083]],[[43948,43948],"mapped",[5084]],[[43949,43949],"mapped",[5085]],[[43950,43950],"mapped",[5086]],[[43951,43951],"mapped",[5087]],[[43952,43952],"mapped",[5088]],[[43953,43953],"mapped",[5089]],[[43954,43954],"mapped",[5090]],[[43955,43955],"mapped",[5091]],[[43956,43956],"mapped",[5092]],[[43957,43957],"mapped",[5093]],[[43958,43958],"mapped",[5094]],[[43959,43959],"mapped",[5095]],[[43960,43960],"mapped",[5096]],[[43961,43961],"mapped",[5097]],[[43962,43962],"mapped",[5098]],[[43963,43963],"mapped",[5099]],[[43964,43964],"mapped",[5100]],[[43965,43965],"mapped",[5101]],[[43966,43966],"mapped",[5102]],[[43967,43967],"mapped",[5103]],[[43968,44010],"valid"],[[44011,44011],"valid",[],"NV8"],[[44012,44013],"valid"],[[44014,44015],"disallowed"],[[44016,44025],"valid"],[[44026,44031],"disallowed"],[[44032,55203],"valid"],[[55204,55215],"disallowed"],[[55216,55238],"valid",[],"NV8"],[[55239,55242],"disallowed"],[[55243,55291],"valid",[],"NV8"],[[55292,55295],"disallowed"],[[55296,57343],"disallowed"],[[57344,63743],"disallowed"],[[63744,63744],"mapped",[35912]],[[63745,63745],"mapped",[26356]],[[63746,63746],"mapped",[36554]],[[63747,63747],"mapped",[36040]],[[63748,63748],"mapped",[28369]],[[63749,63749],"mapped",[20018]],[[63750,63750],"mapped",[21477]],[[63751,63752],"mapped",[40860]],[[63753,63753],"mapped",[22865]],[[63754,63754],"mapped",[37329]],[[63755,63755],"mapped",[21895]],[[63756,63756],"mapped",[22856]],[[63757,63757],"mapped",[25078]],[[63758,63758],"mapped",[30313]],[[63759,63759],"mapped",[32645]],[[63760,63760],"mapped",[34367]],[[63761,63761],"mapped",[34746]],[[63762,63762],"mapped",[35064]],[[63763,63763],"mapped",[37007]],[[63764,63764],"mapped",[27138]],[[63765,63765],"mapped",[27931]],[[63766,63766],"mapped",[28889]],[[63767,63767],"mapped",[29662]],[[63768,63768],"mapped",[33853]],[[63769,63769],"mapped",[37226]],[[63770,63770],"mapped",[39409]],[[63771,63771],"mapped",[20098]],[[63772,63772],"mapped",[21365]],[[63773,63773],"mapped",[27396]],[[63774,63774],"mapped",[29211]],[[63775,63775],"mapped",[34349]],[[63776,63776],"mapped",[40478]],[[63777,63777],"mapped",[23888]],[[63778,63778],"mapped",[28651]],[[63779,63779],"mapped",[34253]],[[63780,63780],"mapped",[35172]],[[63781,63781],"mapped",[25289]],[[63782,63782],"
mapped",[33240]],[[63783,63783],"mapped",[34847]],[[63784,63784],"mapped",[24266]],[[63785,63785],"mapped",[26391]],[[63786,63786],"mapped",[28010]],[[63787,63787],"mapped",[29436]],[[63788,63788],"mapped",[37070]],[[63789,63789],"mapped",[20358]],[[63790,63790],"mapped",[20919]],[[63791,63791],"mapped",[21214]],[[63792,63792],"mapped",[25796]],[[63793,63793],"mapped",[27347]],[[63794,63794],"mapped",[29200]],[[63795,63795],"mapped",[30439]],[[63796,63796],"mapped",[32769]],[[63797,63797],"mapped",[34310]],[[63798,63798],"mapped",[34396]],[[63799,63799],"mapped",[36335]],[[63800,63800],"mapped",[38706]],[[63801,63801],"mapped",[39791]],[[63802,63802],"mapped",[40442]],[[63803,63803],"mapped",[30860]],[[63804,63804],"mapped",[31103]],[[63805,63805],"mapped",[32160]],[[63806,63806],"mapped",[33737]],[[63807,63807],"mapped",[37636]],[[63808,63808],"mapped",[40575]],[[63809,63809],"mapped",[35542]],[[63810,63810],"mapped",[22751]],[[63811,63811],"mapped",[24324]],[[63812,63812],"mapped",[31840]],[[63813,63813],"mapped",[32894]],[[63814,63814],"mapped",[29282]],[[63815,63815],"mapped",[30922]],[[63816,63816],"mapped",[36034]],[[63817,63817],"mapped",[38647]],[[63818,63818],"mapped",[22744]],[[63819,63819],"mapped",[23650]],[[63820,63820],"mapped",[27155]],[[63821,63821],"mapped",[28122]],[[63822,63822],"mapped",[28431]],[[63823,63823],"mapped",[32047]],[[63824,63824],"mapped",[32311]],[[63825,63825],"mapped",[38475]],[[63826,63826],"mapped",[21202]],[[63827,63827],"mapped",[32907]],[[63828,63828],"mapped",[20956]],[[63829,63829],"mapped",[20940]],[[63830,63830],"mapped",[31260]],[[63831,63831],"mapped",[32190]],[[63832,63832],"mapped",[33777]],[[63833,63833],"mapped",[38517]],[[63834,63834],"mapped",[35712]],[[63835,63835],"mapped",[25295]],[[63836,63836],"mapped",[27138]],[[63837,63837],"mapped",[35582]],[[63838,63838],"mapped",[20025]],[[63839,63839],"mapped",[23527]],[[63840,63840],"mapped",[24594]],[[63841,63841],"mapped",[29575]],[[63842,63842],"mapped",[30064]],[[63843,63843],"mapped",[21271]],[[63844,63844],"mapped",[30971]],[[63845,63845],"mapped",[20415]],[[63846,63846],"mapped",[24489]],[[63847,63847],"mapped",[19981]],[[63848,63848],"mapped",[27852]],[[63849,63849],"mapped",[25976]],[[63850,63850],"mapped",[32034]],[[63851,63851],"mapped",[21443]],[[63852,63852],"mapped",[22622]],[[63853,63853],"mapped",[30465]],[[63854,63854],"mapped",[33865]],[[63855,63855],"mapped",[35498]],[[63856,63856],"mapped",[27578]],[[63857,63857],"mapped",[36784]],[[63858,63858],"mapped",[27784]],[[63859,63859],"mapped",[25342]],[[63860,63860],"mapped",[33509]],[[63861,63861],"mapped",[25504]],[[63862,63862],"mapped",[30053]],[[63863,63863],"mapped",[20142]],[[63864,63864],"mapped",[20841]],[[63865,63865],"mapped",[20937]],[[63866,63866],"mapped",[26753]],[[63867,63867],"mapped",[31975]],[[63868,63868],"mapped",[33391]],[[63869,63869],"mapped",[35538]],[[63870,63870],"mapped",[37327]],[[63871,63871],"mapped",[21237]],[[63872,63872],"mapped",[21570]],[[63873,63873],"mapped",[22899]],[[63874,63874],"mapped",[24300]],[[63875,63875],"mapped",[26053]],[[63876,63876],"mapped",[28670]],[[63877,63877],"mapped",[31018]],[[63878,63878],"mapped",[38317]],[[63879,63879],"mapped",[39530]],[[63880,63880],"mapped",[40599]],[[63881,63881],"mapped",[40654]],[[63882,63882],"mapped",[21147]],[[63883,63883],"mapped",[26310]],[[63884,63884],"mapped",[27511]],[[63885,63885],"mapped",[36706]],[[63886,63886],"mapped",[24180]],[[63887,63887],"mapped",[24976]],[[63888,63888],"mapped",[25088]],[[63889,63889],"mapped",[25754]],[[63890
,63890],"mapped",[28451]],[[63891,63891],"mapped",[29001]],[[63892,63892],"mapped",[29833]],[[63893,63893],"mapped",[31178]],[[63894,63894],"mapped",[32244]],[[63895,63895],"mapped",[32879]],[[63896,63896],"mapped",[36646]],[[63897,63897],"mapped",[34030]],[[63898,63898],"mapped",[36899]],[[63899,63899],"mapped",[37706]],[[63900,63900],"mapped",[21015]],[[63901,63901],"mapped",[21155]],[[63902,63902],"mapped",[21693]],[[63903,63903],"mapped",[28872]],[[63904,63904],"mapped",[35010]],[[63905,63905],"mapped",[35498]],[[63906,63906],"mapped",[24265]],[[63907,63907],"mapped",[24565]],[[63908,63908],"mapped",[25467]],[[63909,63909],"mapped",[27566]],[[63910,63910],"mapped",[31806]],[[63911,63911],"mapped",[29557]],[[63912,63912],"mapped",[20196]],[[63913,63913],"mapped",[22265]],[[63914,63914],"mapped",[23527]],[[63915,63915],"mapped",[23994]],[[63916,63916],"mapped",[24604]],[[63917,63917],"mapped",[29618]],[[63918,63918],"mapped",[29801]],[[63919,63919],"mapped",[32666]],[[63920,63920],"mapped",[32838]],[[63921,63921],"mapped",[37428]],[[63922,63922],"mapped",[38646]],[[63923,63923],"mapped",[38728]],[[63924,63924],"mapped",[38936]],[[63925,63925],"mapped",[20363]],[[63926,63926],"mapped",[31150]],[[63927,63927],"mapped",[37300]],[[63928,63928],"mapped",[38584]],[[63929,63929],"mapped",[24801]],[[63930,63930],"mapped",[20102]],[[63931,63931],"mapped",[20698]],[[63932,63932],"mapped",[23534]],[[63933,63933],"mapped",[23615]],[[63934,63934],"mapped",[26009]],[[63935,63935],"mapped",[27138]],[[63936,63936],"mapped",[29134]],[[63937,63937],"mapped",[30274]],[[63938,63938],"mapped",[34044]],[[63939,63939],"mapped",[36988]],[[63940,63940],"mapped",[40845]],[[63941,63941],"mapped",[26248]],[[63942,63942],"mapped",[38446]],[[63943,63943],"mapped",[21129]],[[63944,63944],"mapped",[26491]],[[63945,63945],"mapped",[26611]],[[63946,63946],"mapped",[27969]],[[63947,63947],"mapped",[28316]],[[63948,63948],"mapped",[29705]],[[63949,63949],"mapped",[30041]],[[63950,63950],"mapped",[30827]],[[63951,63951],"mapped",[32016]],[[63952,63952],"mapped",[39006]],[[63953,63953],"mapped",[20845]],[[63954,63954],"mapped",[25134]],[[63955,63955],"mapped",[38520]],[[63956,63956],"mapped",[20523]],[[63957,63957],"mapped",[23833]],[[63958,63958],"mapped",[28138]],[[63959,63959],"mapped",[36650]],[[63960,63960],"mapped",[24459]],[[63961,63961],"mapped",[24900]],[[63962,63962],"mapped",[26647]],[[63963,63963],"mapped",[29575]],[[63964,63964],"mapped",[38534]],[[63965,63965],"mapped",[21033]],[[63966,63966],"mapped",[21519]],[[63967,63967],"mapped",[23653]],[[63968,63968],"mapped",[26131]],[[63969,63969],"mapped",[26446]],[[63970,63970],"mapped",[26792]],[[63971,63971],"mapped",[27877]],[[63972,63972],"mapped",[29702]],[[63973,63973],"mapped",[30178]],[[63974,63974],"mapped",[32633]],[[63975,63975],"mapped",[35023]],[[63976,63976],"mapped",[35041]],[[63977,63977],"mapped",[37324]],[[63978,63978],"mapped",[38626]],[[63979,63979],"mapped",[21311]],[[63980,63980],"mapped",[28346]],[[63981,63981],"mapped",[21533]],[[63982,63982],"mapped",[29136]],[[63983,63983],"mapped",[29848]],[[63984,63984],"mapped",[34298]],[[63985,63985],"mapped",[38563]],[[63986,63986],"mapped",[40023]],[[63987,63987],"mapped",[40607]],[[63988,63988],"mapped",[26519]],[[63989,63989],"mapped",[28107]],[[63990,63990],"mapped",[33256]],[[63991,63991],"mapped",[31435]],[[63992,63992],"mapped",[31520]],[[63993,63993],"mapped",[31890]],[[63994,63994],"mapped",[29376]],[[63995,63995],"mapped",[28825]],[[63996,63996],"mapped",[35672]],[[63997,63997],"mapped",[20160]
],[[63998,63998],"mapped",[33590]],[[63999,63999],"mapped",[21050]],[[64000,64000],"mapped",[20999]],[[64001,64001],"mapped",[24230]],[[64002,64002],"mapped",[25299]],[[64003,64003],"mapped",[31958]],[[64004,64004],"mapped",[23429]],[[64005,64005],"mapped",[27934]],[[64006,64006],"mapped",[26292]],[[64007,64007],"mapped",[36667]],[[64008,64008],"mapped",[34892]],[[64009,64009],"mapped",[38477]],[[64010,64010],"mapped",[35211]],[[64011,64011],"mapped",[24275]],[[64012,64012],"mapped",[20800]],[[64013,64013],"mapped",[21952]],[[64014,64015],"valid"],[[64016,64016],"mapped",[22618]],[[64017,64017],"valid"],[[64018,64018],"mapped",[26228]],[[64019,64020],"valid"],[[64021,64021],"mapped",[20958]],[[64022,64022],"mapped",[29482]],[[64023,64023],"mapped",[30410]],[[64024,64024],"mapped",[31036]],[[64025,64025],"mapped",[31070]],[[64026,64026],"mapped",[31077]],[[64027,64027],"mapped",[31119]],[[64028,64028],"mapped",[38742]],[[64029,64029],"mapped",[31934]],[[64030,64030],"mapped",[32701]],[[64031,64031],"valid"],[[64032,64032],"mapped",[34322]],[[64033,64033],"valid"],[[64034,64034],"mapped",[35576]],[[64035,64036],"valid"],[[64037,64037],"mapped",[36920]],[[64038,64038],"mapped",[37117]],[[64039,64041],"valid"],[[64042,64042],"mapped",[39151]],[[64043,64043],"mapped",[39164]],[[64044,64044],"mapped",[39208]],[[64045,64045],"mapped",[40372]],[[64046,64046],"mapped",[37086]],[[64047,64047],"mapped",[38583]],[[64048,64048],"mapped",[20398]],[[64049,64049],"mapped",[20711]],[[64050,64050],"mapped",[20813]],[[64051,64051],"mapped",[21193]],[[64052,64052],"mapped",[21220]],[[64053,64053],"mapped",[21329]],[[64054,64054],"mapped",[21917]],[[64055,64055],"mapped",[22022]],[[64056,64056],"mapped",[22120]],[[64057,64057],"mapped",[22592]],[[64058,64058],"mapped",[22696]],[[64059,64059],"mapped",[23652]],[[64060,64060],"mapped",[23662]],[[64061,64061],"mapped",[24724]],[[64062,64062],"mapped",[24936]],[[64063,64063],"mapped",[24974]],[[64064,64064],"mapped",[25074]],[[64065,64065],"mapped",[25935]],[[64066,64066],"mapped",[26082]],[[64067,64067],"mapped",[26257]],[[64068,64068],"mapped",[26757]],[[64069,64069],"mapped",[28023]],[[64070,64070],"mapped",[28186]],[[64071,64071],"mapped",[28450]],[[64072,64072],"mapped",[29038]],[[64073,64073],"mapped",[29227]],[[64074,64074],"mapped",[29730]],[[64075,64075],"mapped",[30865]],[[64076,64076],"mapped",[31038]],[[64077,64077],"mapped",[31049]],[[64078,64078],"mapped",[31048]],[[64079,64079],"mapped",[31056]],[[64080,64080],"mapped",[31062]],[[64081,64081],"mapped",[31069]],[[64082,64082],"mapped",[31117]],[[64083,64083],"mapped",[31118]],[[64084,64084],"mapped",[31296]],[[64085,64085],"mapped",[31361]],[[64086,64086],"mapped",[31680]],[[64087,64087],"mapped",[32244]],[[64088,64088],"mapped",[32265]],[[64089,64089],"mapped",[32321]],[[64090,64090],"mapped",[32626]],[[64091,64091],"mapped",[32773]],[[64092,64092],"mapped",[33261]],[[64093,64094],"mapped",[33401]],[[64095,64095],"mapped",[33879]],[[64096,64096],"mapped",[35088]],[[64097,64097],"mapped",[35222]],[[64098,64098],"mapped",[35585]],[[64099,64099],"mapped",[35641]],[[64100,64100],"mapped",[36051]],[[64101,64101],"mapped",[36104]],[[64102,64102],"mapped",[36790]],[[64103,64103],"mapped",[36920]],[[64104,64104],"mapped",[38627]],[[64105,64105],"mapped",[38911]],[[64106,64106],"mapped",[38971]],[[64107,64107],"mapped",[24693]],[[64108,64108],"mapped",[148206]],[[64109,64109],"mapped",[33304]],[[64110,64111],"disallowed"],[[64112,64112],"mapped",[20006]],[[64113,64113],"mapped",[20917]],[[64114,64114],"mapped
",[20840]],[[64115,64115],"mapped",[20352]],[[64116,64116],"mapped",[20805]],[[64117,64117],"mapped",[20864]],[[64118,64118],"mapped",[21191]],[[64119,64119],"mapped",[21242]],[[64120,64120],"mapped",[21917]],[[64121,64121],"mapped",[21845]],[[64122,64122],"mapped",[21913]],[[64123,64123],"mapped",[21986]],[[64124,64124],"mapped",[22618]],[[64125,64125],"mapped",[22707]],[[64126,64126],"mapped",[22852]],[[64127,64127],"mapped",[22868]],[[64128,64128],"mapped",[23138]],[[64129,64129],"mapped",[23336]],[[64130,64130],"mapped",[24274]],[[64131,64131],"mapped",[24281]],[[64132,64132],"mapped",[24425]],[[64133,64133],"mapped",[24493]],[[64134,64134],"mapped",[24792]],[[64135,64135],"mapped",[24910]],[[64136,64136],"mapped",[24840]],[[64137,64137],"mapped",[24974]],[[64138,64138],"mapped",[24928]],[[64139,64139],"mapped",[25074]],[[64140,64140],"mapped",[25140]],[[64141,64141],"mapped",[25540]],[[64142,64142],"mapped",[25628]],[[64143,64143],"mapped",[25682]],[[64144,64144],"mapped",[25942]],[[64145,64145],"mapped",[26228]],[[64146,64146],"mapped",[26391]],[[64147,64147],"mapped",[26395]],[[64148,64148],"mapped",[26454]],[[64149,64149],"mapped",[27513]],[[64150,64150],"mapped",[27578]],[[64151,64151],"mapped",[27969]],[[64152,64152],"mapped",[28379]],[[64153,64153],"mapped",[28363]],[[64154,64154],"mapped",[28450]],[[64155,64155],"mapped",[28702]],[[64156,64156],"mapped",[29038]],[[64157,64157],"mapped",[30631]],[[64158,64158],"mapped",[29237]],[[64159,64159],"mapped",[29359]],[[64160,64160],"mapped",[29482]],[[64161,64161],"mapped",[29809]],[[64162,64162],"mapped",[29958]],[[64163,64163],"mapped",[30011]],[[64164,64164],"mapped",[30237]],[[64165,64165],"mapped",[30239]],[[64166,64166],"mapped",[30410]],[[64167,64167],"mapped",[30427]],[[64168,64168],"mapped",[30452]],[[64169,64169],"mapped",[30538]],[[64170,64170],"mapped",[30528]],[[64171,64171],"mapped",[30924]],[[64172,64172],"mapped",[31409]],[[64173,64173],"mapped",[31680]],[[64174,64174],"mapped",[31867]],[[64175,64175],"mapped",[32091]],[[64176,64176],"mapped",[32244]],[[64177,64177],"mapped",[32574]],[[64178,64178],"mapped",[32773]],[[64179,64179],"mapped",[33618]],[[64180,64180],"mapped",[33775]],[[64181,64181],"mapped",[34681]],[[64182,64182],"mapped",[35137]],[[64183,64183],"mapped",[35206]],[[64184,64184],"mapped",[35222]],[[64185,64185],"mapped",[35519]],[[64186,64186],"mapped",[35576]],[[64187,64187],"mapped",[35531]],[[64188,64188],"mapped",[35585]],[[64189,64189],"mapped",[35582]],[[64190,64190],"mapped",[35565]],[[64191,64191],"mapped",[35641]],[[64192,64192],"mapped",[35722]],[[64193,64193],"mapped",[36104]],[[64194,64194],"mapped",[36664]],[[64195,64195],"mapped",[36978]],[[64196,64196],"mapped",[37273]],[[64197,64197],"mapped",[37494]],[[64198,64198],"mapped",[38524]],[[64199,64199],"mapped",[38627]],[[64200,64200],"mapped",[38742]],[[64201,64201],"mapped",[38875]],[[64202,64202],"mapped",[38911]],[[64203,64203],"mapped",[38923]],[[64204,64204],"mapped",[38971]],[[64205,64205],"mapped",[39698]],[[64206,64206],"mapped",[40860]],[[64207,64207],"mapped",[141386]],[[64208,64208],"mapped",[141380]],[[64209,64209],"mapped",[144341]],[[64210,64210],"mapped",[15261]],[[64211,64211],"mapped",[16408]],[[64212,64212],"mapped",[16441]],[[64213,64213],"mapped",[152137]],[[64214,64214],"mapped",[154832]],[[64215,64215],"mapped",[163539]],[[64216,64216],"mapped",[40771]],[[64217,64217],"mapped",[40846]],[[64218,64255],"disallowed"],[[64256,64256],"mapped",[102,102]],[[64257,64257],"mapped",[102,105]],[[64258,64258],"mapped",[102,108]],[[642
59,64259],"mapped",[102,102,105]],[[64260,64260],"mapped",[102,102,108]],[[64261,64262],"mapped",[115,116]],[[64263,64274],"disallowed"],[[64275,64275],"mapped",[1396,1398]],[[64276,64276],"mapped",[1396,1381]],[[64277,64277],"mapped",[1396,1387]],[[64278,64278],"mapped",[1406,1398]],[[64279,64279],"mapped",[1396,1389]],[[64280,64284],"disallowed"],[[64285,64285],"mapped",[1497,1460]],[[64286,64286],"valid"],[[64287,64287],"mapped",[1522,1463]],[[64288,64288],"mapped",[1506]],[[64289,64289],"mapped",[1488]],[[64290,64290],"mapped",[1491]],[[64291,64291],"mapped",[1492]],[[64292,64292],"mapped",[1499]],[[64293,64293],"mapped",[1500]],[[64294,64294],"mapped",[1501]],[[64295,64295],"mapped",[1512]],[[64296,64296],"mapped",[1514]],[[64297,64297],"disallowed_STD3_mapped",[43]],[[64298,64298],"mapped",[1513,1473]],[[64299,64299],"mapped",[1513,1474]],[[64300,64300],"mapped",[1513,1468,1473]],[[64301,64301],"mapped",[1513,1468,1474]],[[64302,64302],"mapped",[1488,1463]],[[64303,64303],"mapped",[1488,1464]],[[64304,64304],"mapped",[1488,1468]],[[64305,64305],"mapped",[1489,1468]],[[64306,64306],"mapped",[1490,1468]],[[64307,64307],"mapped",[1491,1468]],[[64308,64308],"mapped",[1492,1468]],[[64309,64309],"mapped",[1493,1468]],[[64310,64310],"mapped",[1494,1468]],[[64311,64311],"disallowed"],[[64312,64312],"mapped",[1496,1468]],[[64313,64313],"mapped",[1497,1468]],[[64314,64314],"mapped",[1498,1468]],[[64315,64315],"mapped",[1499,1468]],[[64316,64316],"mapped",[1500,1468]],[[64317,64317],"disallowed"],[[64318,64318],"mapped",[1502,1468]],[[64319,64319],"disallowed"],[[64320,64320],"mapped",[1504,1468]],[[64321,64321],"mapped",[1505,1468]],[[64322,64322],"disallowed"],[[64323,64323],"mapped",[1507,1468]],[[64324,64324],"mapped",[1508,1468]],[[64325,64325],"disallowed"],[[64326,64326],"mapped",[1510,1468]],[[64327,64327],"mapped",[1511,1468]],[[64328,64328],"mapped",[1512,1468]],[[64329,64329],"mapped",[1513,1468]],[[64330,64330],"mapped",[1514,1468]],[[64331,64331],"mapped",[1493,1465]],[[64332,64332],"mapped",[1489,1471]],[[64333,64333],"mapped",[1499,1471]],[[64334,64334],"mapped",[1508,1471]],[[64335,64335],"mapped",[1488,1500]],[[64336,64337],"mapped",[1649]],[[64338,64341],"mapped",[1659]],[[64342,64345],"mapped",[1662]],[[64346,64349],"mapped",[1664]],[[64350,64353],"mapped",[1658]],[[64354,64357],"mapped",[1663]],[[64358,64361],"mapped",[1657]],[[64362,64365],"mapped",[1700]],[[64366,64369],"mapped",[1702]],[[64370,64373],"mapped",[1668]],[[64374,64377],"mapped",[1667]],[[64378,64381],"mapped",[1670]],[[64382,64385],"mapped",[1671]],[[64386,64387],"mapped",[1677]],[[64388,64389],"mapped",[1676]],[[64390,64391],"mapped",[1678]],[[64392,64393],"mapped",[1672]],[[64394,64395],"mapped",[1688]],[[64396,64397],"mapped",[1681]],[[64398,64401],"mapped",[1705]],[[64402,64405],"mapped",[1711]],[[64406,64409],"mapped",[1715]],[[64410,64413],"mapped",[1713]],[[64414,64415],"mapped",[1722]],[[64416,64419],"mapped",[1723]],[[64420,64421],"mapped",[1728]],[[64422,64425],"mapped",[1729]],[[64426,64429],"mapped",[1726]],[[64430,64431],"mapped",[1746]],[[64432,64433],"mapped",[1747]],[[64434,64449],"valid",[],"NV8"],[[64450,64466],"disallowed"],[[64467,64470],"mapped",[1709]],[[64471,64472],"mapped",[1735]],[[64473,64474],"mapped",[1734]],[[64475,64476],"mapped",[1736]],[[64477,64477],"mapped",[1735,1652]],[[64478,64479],"mapped",[1739]],[[64480,64481],"mapped",[1733]],[[64482,64483],"mapped",[1737]],[[64484,64487],"mapped",[1744]],[[64488,64489],"mapped",[1609]],[[64490,64491],"mapped",[1574,1575]],[[64492,6449
3],"mapped",[1574,1749]],[[64494,64495],"mapped",[1574,1608]],[[64496,64497],"mapped",[1574,1735]],[[64498,64499],"mapped",[1574,1734]],[[64500,64501],"mapped",[1574,1736]],[[64502,64504],"mapped",[1574,1744]],[[64505,64507],"mapped",[1574,1609]],[[64508,64511],"mapped",[1740]],[[64512,64512],"mapped",[1574,1580]],[[64513,64513],"mapped",[1574,1581]],[[64514,64514],"mapped",[1574,1605]],[[64515,64515],"mapped",[1574,1609]],[[64516,64516],"mapped",[1574,1610]],[[64517,64517],"mapped",[1576,1580]],[[64518,64518],"mapped",[1576,1581]],[[64519,64519],"mapped",[1576,1582]],[[64520,64520],"mapped",[1576,1605]],[[64521,64521],"mapped",[1576,1609]],[[64522,64522],"mapped",[1576,1610]],[[64523,64523],"mapped",[1578,1580]],[[64524,64524],"mapped",[1578,1581]],[[64525,64525],"mapped",[1578,1582]],[[64526,64526],"mapped",[1578,1605]],[[64527,64527],"mapped",[1578,1609]],[[64528,64528],"mapped",[1578,1610]],[[64529,64529],"mapped",[1579,1580]],[[64530,64530],"mapped",[1579,1605]],[[64531,64531],"mapped",[1579,1609]],[[64532,64532],"mapped",[1579,1610]],[[64533,64533],"mapped",[1580,1581]],[[64534,64534],"mapped",[1580,1605]],[[64535,64535],"mapped",[1581,1580]],[[64536,64536],"mapped",[1581,1605]],[[64537,64537],"mapped",[1582,1580]],[[64538,64538],"mapped",[1582,1581]],[[64539,64539],"mapped",[1582,1605]],[[64540,64540],"mapped",[1587,1580]],[[64541,64541],"mapped",[1587,1581]],[[64542,64542],"mapped",[1587,1582]],[[64543,64543],"mapped",[1587,1605]],[[64544,64544],"mapped",[1589,1581]],[[64545,64545],"mapped",[1589,1605]],[[64546,64546],"mapped",[1590,1580]],[[64547,64547],"mapped",[1590,1581]],[[64548,64548],"mapped",[1590,1582]],[[64549,64549],"mapped",[1590,1605]],[[64550,64550],"mapped",[1591,1581]],[[64551,64551],"mapped",[1591,1605]],[[64552,64552],"mapped",[1592,1605]],[[64553,64553],"mapped",[1593,1580]],[[64554,64554],"mapped",[1593,1605]],[[64555,64555],"mapped",[1594,1580]],[[64556,64556],"mapped",[1594,1605]],[[64557,64557],"mapped",[1601,1580]],[[64558,64558],"mapped",[1601,1581]],[[64559,64559],"mapped",[1601,1582]],[[64560,64560],"mapped",[1601,1605]],[[64561,64561],"mapped",[1601,1609]],[[64562,64562],"mapped",[1601,1610]],[[64563,64563],"mapped",[1602,1581]],[[64564,64564],"mapped",[1602,1605]],[[64565,64565],"mapped",[1602,1609]],[[64566,64566],"mapped",[1602,1610]],[[64567,64567],"mapped",[1603,1575]],[[64568,64568],"mapped",[1603,1580]],[[64569,64569],"mapped",[1603,1581]],[[64570,64570],"mapped",[1603,1582]],[[64571,64571],"mapped",[1603,1604]],[[64572,64572],"mapped",[1603,1605]],[[64573,64573],"mapped",[1603,1609]],[[64574,64574],"mapped",[1603,1610]],[[64575,64575],"mapped",[1604,1580]],[[64576,64576],"mapped",[1604,1581]],[[64577,64577],"mapped",[1604,1582]],[[64578,64578],"mapped",[1604,1605]],[[64579,64579],"mapped",[1604,1609]],[[64580,64580],"mapped",[1604,1610]],[[64581,64581],"mapped",[1605,1580]],[[64582,64582],"mapped",[1605,1581]],[[64583,64583],"mapped",[1605,1582]],[[64584,64584],"mapped",[1605,1605]],[[64585,64585],"mapped",[1605,1609]],[[64586,64586],"mapped",[1605,1610]],[[64587,64587],"mapped",[1606,1580]],[[64588,64588],"mapped",[1606,1581]],[[64589,64589],"mapped",[1606,1582]],[[64590,64590],"mapped",[1606,1605]],[[64591,64591],"mapped",[1606,1609]],[[64592,64592],"mapped",[1606,1610]],[[64593,64593],"mapped",[1607,1580]],[[64594,64594],"mapped",[1607,1605]],[[64595,64595],"mapped",[1607,1609]],[[64596,64596],"mapped",[1607,1610]],[[64597,64597],"mapped",[1610,1580]],[[64598,64598],"mapped",[1610,1581]],[[64599,64599],"mapped",[1610,1582]],[[64600,64600],"mapp
ed",[1610,1605]],[[64601,64601],"mapped",[1610,1609]],[[64602,64602],"mapped",[1610,1610]],[[64603,64603],"mapped",[1584,1648]],[[64604,64604],"mapped",[1585,1648]],[[64605,64605],"mapped",[1609,1648]],[[64606,64606],"disallowed_STD3_mapped",[32,1612,1617]],[[64607,64607],"disallowed_STD3_mapped",[32,1613,1617]],[[64608,64608],"disallowed_STD3_mapped",[32,1614,1617]],[[64609,64609],"disallowed_STD3_mapped",[32,1615,1617]],[[64610,64610],"disallowed_STD3_mapped",[32,1616,1617]],[[64611,64611],"disallowed_STD3_mapped",[32,1617,1648]],[[64612,64612],"mapped",[1574,1585]],[[64613,64613],"mapped",[1574,1586]],[[64614,64614],"mapped",[1574,1605]],[[64615,64615],"mapped",[1574,1606]],[[64616,64616],"mapped",[1574,1609]],[[64617,64617],"mapped",[1574,1610]],[[64618,64618],"mapped",[1576,1585]],[[64619,64619],"mapped",[1576,1586]],[[64620,64620],"mapped",[1576,1605]],[[64621,64621],"mapped",[1576,1606]],[[64622,64622],"mapped",[1576,1609]],[[64623,64623],"mapped",[1576,1610]],[[64624,64624],"mapped",[1578,1585]],[[64625,64625],"mapped",[1578,1586]],[[64626,64626],"mapped",[1578,1605]],[[64627,64627],"mapped",[1578,1606]],[[64628,64628],"mapped",[1578,1609]],[[64629,64629],"mapped",[1578,1610]],[[64630,64630],"mapped",[1579,1585]],[[64631,64631],"mapped",[1579,1586]],[[64632,64632],"mapped",[1579,1605]],[[64633,64633],"mapped",[1579,1606]],[[64634,64634],"mapped",[1579,1609]],[[64635,64635],"mapped",[1579,1610]],[[64636,64636],"mapped",[1601,1609]],[[64637,64637],"mapped",[1601,1610]],[[64638,64638],"mapped",[1602,1609]],[[64639,64639],"mapped",[1602,1610]],[[64640,64640],"mapped",[1603,1575]],[[64641,64641],"mapped",[1603,1604]],[[64642,64642],"mapped",[1603,1605]],[[64643,64643],"mapped",[1603,1609]],[[64644,64644],"mapped",[1603,1610]],[[64645,64645],"mapped",[1604,1605]],[[64646,64646],"mapped",[1604,1609]],[[64647,64647],"mapped",[1604,1610]],[[64648,64648],"mapped",[1605,1575]],[[64649,64649],"mapped",[1605,1605]],[[64650,64650],"mapped",[1606,1585]],[[64651,64651],"mapped",[1606,1586]],[[64652,64652],"mapped",[1606,1605]],[[64653,64653],"mapped",[1606,1606]],[[64654,64654],"mapped",[1606,1609]],[[64655,64655],"mapped",[1606,1610]],[[64656,64656],"mapped",[1609,1648]],[[64657,64657],"mapped",[1610,1585]],[[64658,64658],"mapped",[1610,1586]],[[64659,64659],"mapped",[1610,1605]],[[64660,64660],"mapped",[1610,1606]],[[64661,64661],"mapped",[1610,1609]],[[64662,64662],"mapped",[1610,1610]],[[64663,64663],"mapped",[1574,1580]],[[64664,64664],"mapped",[1574,1581]],[[64665,64665],"mapped",[1574,1582]],[[64666,64666],"mapped",[1574,1605]],[[64667,64667],"mapped",[1574,1607]],[[64668,64668],"mapped",[1576,1580]],[[64669,64669],"mapped",[1576,1581]],[[64670,64670],"mapped",[1576,1582]],[[64671,64671],"mapped",[1576,1605]],[[64672,64672],"mapped",[1576,1607]],[[64673,64673],"mapped",[1578,1580]],[[64674,64674],"mapped",[1578,1581]],[[64675,64675],"mapped",[1578,1582]],[[64676,64676],"mapped",[1578,1605]],[[64677,64677],"mapped",[1578,1607]],[[64678,64678],"mapped",[1579,1605]],[[64679,64679],"mapped",[1580,1581]],[[64680,64680],"mapped",[1580,1605]],[[64681,64681],"mapped",[1581,1580]],[[64682,64682],"mapped",[1581,1605]],[[64683,64683],"mapped",[1582,1580]],[[64684,64684],"mapped",[1582,1605]],[[64685,64685],"mapped",[1587,1580]],[[64686,64686],"mapped",[1587,1581]],[[64687,64687],"mapped",[1587,1582]],[[64688,64688],"mapped",[1587,1605]],[[64689,64689],"mapped",[1589,1581]],[[64690,64690],"mapped",[1589,1582]],[[64691,64691],"mapped",[1589,1605]],[[64692,64692],"mapped",[1590,1580]],[[64693,64693],"mapp
ed",[1590,1581]],[[64694,64694],"mapped",[1590,1582]],[[64695,64695],"mapped",[1590,1605]],[[64696,64696],"mapped",[1591,1581]],[[64697,64697],"mapped",[1592,1605]],[[64698,64698],"mapped",[1593,1580]],[[64699,64699],"mapped",[1593,1605]],[[64700,64700],"mapped",[1594,1580]],[[64701,64701],"mapped",[1594,1605]],[[64702,64702],"mapped",[1601,1580]],[[64703,64703],"mapped",[1601,1581]],[[64704,64704],"mapped",[1601,1582]],[[64705,64705],"mapped",[1601,1605]],[[64706,64706],"mapped",[1602,1581]],[[64707,64707],"mapped",[1602,1605]],[[64708,64708],"mapped",[1603,1580]],[[64709,64709],"mapped",[1603,1581]],[[64710,64710],"mapped",[1603,1582]],[[64711,64711],"mapped",[1603,1604]],[[64712,64712],"mapped",[1603,1605]],[[64713,64713],"mapped",[1604,1580]],[[64714,64714],"mapped",[1604,1581]],[[64715,64715],"mapped",[1604,1582]],[[64716,64716],"mapped",[1604,1605]],[[64717,64717],"mapped",[1604,1607]],[[64718,64718],"mapped",[1605,1580]],[[64719,64719],"mapped",[1605,1581]],[[64720,64720],"mapped",[1605,1582]],[[64721,64721],"mapped",[1605,1605]],[[64722,64722],"mapped",[1606,1580]],[[64723,64723],"mapped",[1606,1581]],[[64724,64724],"mapped",[1606,1582]],[[64725,64725],"mapped",[1606,1605]],[[64726,64726],"mapped",[1606,1607]],[[64727,64727],"mapped",[1607,1580]],[[64728,64728],"mapped",[1607,1605]],[[64729,64729],"mapped",[1607,1648]],[[64730,64730],"mapped",[1610,1580]],[[64731,64731],"mapped",[1610,1581]],[[64732,64732],"mapped",[1610,1582]],[[64733,64733],"mapped",[1610,1605]],[[64734,64734],"mapped",[1610,1607]],[[64735,64735],"mapped",[1574,1605]],[[64736,64736],"mapped",[1574,1607]],[[64737,64737],"mapped",[1576,1605]],[[64738,64738],"mapped",[1576,1607]],[[64739,64739],"mapped",[1578,1605]],[[64740,64740],"mapped",[1578,1607]],[[64741,64741],"mapped",[1579,1605]],[[64742,64742],"mapped",[1579,1607]],[[64743,64743],"mapped",[1587,1605]],[[64744,64744],"mapped",[1587,1607]],[[64745,64745],"mapped",[1588,1605]],[[64746,64746],"mapped",[1588,1607]],[[64747,64747],"mapped",[1603,1604]],[[64748,64748],"mapped",[1603,1605]],[[64749,64749],"mapped",[1604,1605]],[[64750,64750],"mapped",[1606,1605]],[[64751,64751],"mapped",[1606,1607]],[[64752,64752],"mapped",[1610,1605]],[[64753,64753],"mapped",[1610,1607]],[[64754,64754],"mapped",[1600,1614,1617]],[[64755,64755],"mapped",[1600,1615,1617]],[[64756,64756],"mapped",[1600,1616,1617]],[[64757,64757],"mapped",[1591,1609]],[[64758,64758],"mapped",[1591,1610]],[[64759,64759],"mapped",[1593,1609]],[[64760,64760],"mapped",[1593,1610]],[[64761,64761],"mapped",[1594,1609]],[[64762,64762],"mapped",[1594,1610]],[[64763,64763],"mapped",[1587,1609]],[[64764,64764],"mapped",[1587,1610]],[[64765,64765],"mapped",[1588,1609]],[[64766,64766],"mapped",[1588,1610]],[[64767,64767],"mapped",[1581,1609]],[[64768,64768],"mapped",[1581,1610]],[[64769,64769],"mapped",[1580,1609]],[[64770,64770],"mapped",[1580,1610]],[[64771,64771],"mapped",[1582,1609]],[[64772,64772],"mapped",[1582,1610]],[[64773,64773],"mapped",[1589,1609]],[[64774,64774],"mapped",[1589,1610]],[[64775,64775],"mapped",[1590,1609]],[[64776,64776],"mapped",[1590,1610]],[[64777,64777],"mapped",[1588,1580]],[[64778,64778],"mapped",[1588,1581]],[[64779,64779],"mapped",[1588,1582]],[[64780,64780],"mapped",[1588,1605]],[[64781,64781],"mapped",[1588,1585]],[[64782,64782],"mapped",[1587,1585]],[[64783,64783],"mapped",[1589,1585]],[[64784,64784],"mapped",[1590,1585]],[[64785,64785],"mapped",[1591,1609]],[[64786,64786],"mapped",[1591,1610]],[[64787,64787],"mapped",[1593,1609]],[[64788,64788],"mapped",[1593,1610]],[[64789,
64789],"mapped",[1594,1609]],[[64790,64790],"mapped",[1594,1610]],[[64791,64791],"mapped",[1587,1609]],[[64792,64792],"mapped",[1587,1610]],[[64793,64793],"mapped",[1588,1609]],[[64794,64794],"mapped",[1588,1610]],[[64795,64795],"mapped",[1581,1609]],[[64796,64796],"mapped",[1581,1610]],[[64797,64797],"mapped",[1580,1609]],[[64798,64798],"mapped",[1580,1610]],[[64799,64799],"mapped",[1582,1609]],[[64800,64800],"mapped",[1582,1610]],[[64801,64801],"mapped",[1589,1609]],[[64802,64802],"mapped",[1589,1610]],[[64803,64803],"mapped",[1590,1609]],[[64804,64804],"mapped",[1590,1610]],[[64805,64805],"mapped",[1588,1580]],[[64806,64806],"mapped",[1588,1581]],[[64807,64807],"mapped",[1588,1582]],[[64808,64808],"mapped",[1588,1605]],[[64809,64809],"mapped",[1588,1585]],[[64810,64810],"mapped",[1587,1585]],[[64811,64811],"mapped",[1589,1585]],[[64812,64812],"mapped",[1590,1585]],[[64813,64813],"mapped",[1588,1580]],[[64814,64814],"mapped",[1588,1581]],[[64815,64815],"mapped",[1588,1582]],[[64816,64816],"mapped",[1588,1605]],[[64817,64817],"mapped",[1587,1607]],[[64818,64818],"mapped",[1588,1607]],[[64819,64819],"mapped",[1591,1605]],[[64820,64820],"mapped",[1587,1580]],[[64821,64821],"mapped",[1587,1581]],[[64822,64822],"mapped",[1587,1582]],[[64823,64823],"mapped",[1588,1580]],[[64824,64824],"mapped",[1588,1581]],[[64825,64825],"mapped",[1588,1582]],[[64826,64826],"mapped",[1591,1605]],[[64827,64827],"mapped",[1592,1605]],[[64828,64829],"mapped",[1575,1611]],[[64830,64831],"valid",[],"NV8"],[[64832,64847],"disallowed"],[[64848,64848],"mapped",[1578,1580,1605]],[[64849,64850],"mapped",[1578,1581,1580]],[[64851,64851],"mapped",[1578,1581,1605]],[[64852,64852],"mapped",[1578,1582,1605]],[[64853,64853],"mapped",[1578,1605,1580]],[[64854,64854],"mapped",[1578,1605,1581]],[[64855,64855],"mapped",[1578,1605,1582]],[[64856,64857],"mapped",[1580,1605,1581]],[[64858,64858],"mapped",[1581,1605,1610]],[[64859,64859],"mapped",[1581,1605,1609]],[[64860,64860],"mapped",[1587,1581,1580]],[[64861,64861],"mapped",[1587,1580,1581]],[[64862,64862],"mapped",[1587,1580,1609]],[[64863,64864],"mapped",[1587,1605,1581]],[[64865,64865],"mapped",[1587,1605,1580]],[[64866,64867],"mapped",[1587,1605,1605]],[[64868,64869],"mapped",[1589,1581,1581]],[[64870,64870],"mapped",[1589,1605,1605]],[[64871,64872],"mapped",[1588,1581,1605]],[[64873,64873],"mapped",[1588,1580,1610]],[[64874,64875],"mapped",[1588,1605,1582]],[[64876,64877],"mapped",[1588,1605,1605]],[[64878,64878],"mapped",[1590,1581,1609]],[[64879,64880],"mapped",[1590,1582,1605]],[[64881,64882],"mapped",[1591,1605,1581]],[[64883,64883],"mapped",[1591,1605,1605]],[[64884,64884],"mapped",[1591,1605,1610]],[[64885,64885],"mapped",[1593,1580,1605]],[[64886,64887],"mapped",[1593,1605,1605]],[[64888,64888],"mapped",[1593,1605,1609]],[[64889,64889],"mapped",[1594,1605,1605]],[[64890,64890],"mapped",[1594,1605,1610]],[[64891,64891],"mapped",[1594,1605,1609]],[[64892,64893],"mapped",[1601,1582,1605]],[[64894,64894],"mapped",[1602,1605,1581]],[[64895,64895],"mapped",[1602,1605,1605]],[[64896,64896],"mapped",[1604,1581,1605]],[[64897,64897],"mapped",[1604,1581,1610]],[[64898,64898],"mapped",[1604,1581,1609]],[[64899,64900],"mapped",[1604,1580,1580]],[[64901,64902],"mapped",[1604,1582,1605]],[[64903,64904],"mapped",[1604,1605,1581]],[[64905,64905],"mapped",[1605,1581,1580]],[[64906,64906],"mapped",[1605,1581,1605]],[[64907,64907],"mapped",[1605,1581,1610]],[[64908,64908],"mapped",[1605,1580,1581]],[[64909,64909],"mapped",[1605,1580,1605]],[[64910,64910],"mapped",[1605,1582,1580]],[[649
11,64911],"mapped",[1605,1582,1605]],[[64912,64913],"disallowed"],[[64914,64914],"mapped",[1605,1580,1582]],[[64915,64915],"mapped",[1607,1605,1580]],[[64916,64916],"mapped",[1607,1605,1605]],[[64917,64917],"mapped",[1606,1581,1605]],[[64918,64918],"mapped",[1606,1581,1609]],[[64919,64920],"mapped",[1606,1580,1605]],[[64921,64921],"mapped",[1606,1580,1609]],[[64922,64922],"mapped",[1606,1605,1610]],[[64923,64923],"mapped",[1606,1605,1609]],[[64924,64925],"mapped",[1610,1605,1605]],[[64926,64926],"mapped",[1576,1582,1610]],[[64927,64927],"mapped",[1578,1580,1610]],[[64928,64928],"mapped",[1578,1580,1609]],[[64929,64929],"mapped",[1578,1582,1610]],[[64930,64930],"mapped",[1578,1582,1609]],[[64931,64931],"mapped",[1578,1605,1610]],[[64932,64932],"mapped",[1578,1605,1609]],[[64933,64933],"mapped",[1580,1605,1610]],[[64934,64934],"mapped",[1580,1581,1609]],[[64935,64935],"mapped",[1580,1605,1609]],[[64936,64936],"mapped",[1587,1582,1609]],[[64937,64937],"mapped",[1589,1581,1610]],[[64938,64938],"mapped",[1588,1581,1610]],[[64939,64939],"mapped",[1590,1581,1610]],[[64940,64940],"mapped",[1604,1580,1610]],[[64941,64941],"mapped",[1604,1605,1610]],[[64942,64942],"mapped",[1610,1581,1610]],[[64943,64943],"mapped",[1610,1580,1610]],[[64944,64944],"mapped",[1610,1605,1610]],[[64945,64945],"mapped",[1605,1605,1610]],[[64946,64946],"mapped",[1602,1605,1610]],[[64947,64947],"mapped",[1606,1581,1610]],[[64948,64948],"mapped",[1602,1605,1581]],[[64949,64949],"mapped",[1604,1581,1605]],[[64950,64950],"mapped",[1593,1605,1610]],[[64951,64951],"mapped",[1603,1605,1610]],[[64952,64952],"mapped",[1606,1580,1581]],[[64953,64953],"mapped",[1605,1582,1610]],[[64954,64954],"mapped",[1604,1580,1605]],[[64955,64955],"mapped",[1603,1605,1605]],[[64956,64956],"mapped",[1604,1580,1605]],[[64957,64957],"mapped",[1606,1580,1581]],[[64958,64958],"mapped",[1580,1581,1610]],[[64959,64959],"mapped",[1581,1580,1610]],[[64960,64960],"mapped",[1605,1580,1610]],[[64961,64961],"mapped",[1601,1605,1610]],[[64962,64962],"mapped",[1576,1581,1610]],[[64963,64963],"mapped",[1603,1605,1605]],[[64964,64964],"mapped",[1593,1580,1605]],[[64965,64965],"mapped",[1589,1605,1605]],[[64966,64966],"mapped",[1587,1582,1610]],[[64967,64967],"mapped",[1606,1580,1610]],[[64968,64975],"disallowed"],[[64976,65007],"disallowed"],[[65008,65008],"mapped",[1589,1604,1746]],[[65009,65009],"mapped",[1602,1604,1746]],[[65010,65010],"mapped",[1575,1604,1604,1607]],[[65011,65011],"mapped",[1575,1603,1576,1585]],[[65012,65012],"mapped",[1605,1581,1605,1583]],[[65013,65013],"mapped",[1589,1604,1593,1605]],[[65014,65014],"mapped",[1585,1587,1608,1604]],[[65015,65015],"mapped",[1593,1604,1610,1607]],[[65016,65016],"mapped",[1608,1587,1604,1605]],[[65017,65017],"mapped",[1589,1604,1609]],[[65018,65018],"disallowed_STD3_mapped",[1589,1604,1609,32,1575,1604,1604,1607,32,1593,1604,1610,1607,32,1608,1587,1604,1605]],[[65019,65019],"disallowed_STD3_mapped",[1580,1604,32,1580,1604,1575,1604,1607]],[[65020,65020],"mapped",[1585,1740,1575,1604]],[[65021,65021],"valid",[],"NV8"],[[65022,65023],"disallowed"],[[65024,65039],"ignored"],[[65040,65040],"disallowed_STD3_mapped",[44]],[[65041,65041],"mapped",[12289]],[[65042,65042],"disallowed"],[[65043,65043],"disallowed_STD3_mapped",[58]],[[65044,65044],"disallowed_STD3_mapped",[59]],[[65045,65045],"disallowed_STD3_mapped",[33]],[[65046,65046],"disallowed_STD3_mapped",[63]],[[65047,65047],"mapped",[12310]],[[65048,65048],"mapped",[12311]],[[65049,65049],"disallowed"],[[65050,65055],"disallowed"],[[65056,65059],"valid"],[[65060,6
5062],"valid"],[[65063,65069],"valid"],[[65070,65071],"valid"],[[65072,65072],"disallowed"],[[65073,65073],"mapped",[8212]],[[65074,65074],"mapped",[8211]],[[65075,65076],"disallowed_STD3_mapped",[95]],[[65077,65077],"disallowed_STD3_mapped",[40]],[[65078,65078],"disallowed_STD3_mapped",[41]],[[65079,65079],"disallowed_STD3_mapped",[123]],[[65080,65080],"disallowed_STD3_mapped",[125]],[[65081,65081],"mapped",[12308]],[[65082,65082],"mapped",[12309]],[[65083,65083],"mapped",[12304]],[[65084,65084],"mapped",[12305]],[[65085,65085],"mapped",[12298]],[[65086,65086],"mapped",[12299]],[[65087,65087],"mapped",[12296]],[[65088,65088],"mapped",[12297]],[[65089,65089],"mapped",[12300]],[[65090,65090],"mapped",[12301]],[[65091,65091],"mapped",[12302]],[[65092,65092],"mapped",[12303]],[[65093,65094],"valid",[],"NV8"],[[65095,65095],"disallowed_STD3_mapped",[91]],[[65096,65096],"disallowed_STD3_mapped",[93]],[[65097,65100],"disallowed_STD3_mapped",[32,773]],[[65101,65103],"disallowed_STD3_mapped",[95]],[[65104,65104],"disallowed_STD3_mapped",[44]],[[65105,65105],"mapped",[12289]],[[65106,65106],"disallowed"],[[65107,65107],"disallowed"],[[65108,65108],"disallowed_STD3_mapped",[59]],[[65109,65109],"disallowed_STD3_mapped",[58]],[[65110,65110],"disallowed_STD3_mapped",[63]],[[65111,65111],"disallowed_STD3_mapped",[33]],[[65112,65112],"mapped",[8212]],[[65113,65113],"disallowed_STD3_mapped",[40]],[[65114,65114],"disallowed_STD3_mapped",[41]],[[65115,65115],"disallowed_STD3_mapped",[123]],[[65116,65116],"disallowed_STD3_mapped",[125]],[[65117,65117],"mapped",[12308]],[[65118,65118],"mapped",[12309]],[[65119,65119],"disallowed_STD3_mapped",[35]],[[65120,65120],"disallowed_STD3_mapped",[38]],[[65121,65121],"disallowed_STD3_mapped",[42]],[[65122,65122],"disallowed_STD3_mapped",[43]],[[65123,65123],"mapped",[45]],[[65124,65124],"disallowed_STD3_mapped",[60]],[[65125,65125],"disallowed_STD3_mapped",[62]],[[65126,65126],"disallowed_STD3_mapped",[61]],[[65127,65127],"disallowed"],[[65128,65128],"disallowed_STD3_mapped",[92]],[[65129,65129],"disallowed_STD3_mapped",[36]],[[65130,65130],"disallowed_STD3_mapped",[37]],[[65131,65131],"disallowed_STD3_mapped",[64]],[[65132,65135],"disallowed"],[[65136,65136],"disallowed_STD3_mapped",[32,1611]],[[65137,65137],"mapped",[1600,1611]],[[65138,65138],"disallowed_STD3_mapped",[32,1612]],[[65139,65139],"valid"],[[65140,65140],"disallowed_STD3_mapped",[32,1613]],[[65141,65141],"disallowed"],[[65142,65142],"disallowed_STD3_mapped",[32,1614]],[[65143,65143],"mapped",[1600,1614]],[[65144,65144],"disallowed_STD3_mapped",[32,1615]],[[65145,65145],"mapped",[1600,1615]],[[65146,65146],"disallowed_STD3_mapped",[32,1616]],[[65147,65147],"mapped",[1600,1616]],[[65148,65148],"disallowed_STD3_mapped",[32,1617]],[[65149,65149],"mapped",[1600,1617]],[[65150,65150],"disallowed_STD3_mapped",[32,1618]],[[65151,65151],"mapped",[1600,1618]],[[65152,65152],"mapped",[1569]],[[65153,65154],"mapped",[1570]],[[65155,65156],"mapped",[1571]],[[65157,65158],"mapped",[1572]],[[65159,65160],"mapped",[1573]],[[65161,65164],"mapped",[1574]],[[65165,65166],"mapped",[1575]],[[65167,65170],"mapped",[1576]],[[65171,65172],"mapped",[1577]],[[65173,65176],"mapped",[1578]],[[65177,65180],"mapped",[1579]],[[65181,65184],"mapped",[1580]],[[65185,65188],"mapped",[1581]],[[65189,65192],"mapped",[1582]],[[65193,65194],"mapped",[1583]],[[65195,65196],"mapped",[1584]],[[65197,65198],"mapped",[1585]],[[65199,65200],"mapped",[1586]],[[65201,65204],"mapped",[1587]],[[65205,65208],"mapped",[1588]],[[65209,65212],"mapped",[158
9]],[[65213,65216],"mapped",[1590]],[[65217,65220],"mapped",[1591]],[[65221,65224],"mapped",[1592]],[[65225,65228],"mapped",[1593]],[[65229,65232],"mapped",[1594]],[[65233,65236],"mapped",[1601]],[[65237,65240],"mapped",[1602]],[[65241,65244],"mapped",[1603]],[[65245,65248],"mapped",[1604]],[[65249,65252],"mapped",[1605]],[[65253,65256],"mapped",[1606]],[[65257,65260],"mapped",[1607]],[[65261,65262],"mapped",[1608]],[[65263,65264],"mapped",[1609]],[[65265,65268],"mapped",[1610]],[[65269,65270],"mapped",[1604,1570]],[[65271,65272],"mapped",[1604,1571]],[[65273,65274],"mapped",[1604,1573]],[[65275,65276],"mapped",[1604,1575]],[[65277,65278],"disallowed"],[[65279,65279],"ignored"],[[65280,65280],"disallowed"],[[65281,65281],"disallowed_STD3_mapped",[33]],[[65282,65282],"disallowed_STD3_mapped",[34]],[[65283,65283],"disallowed_STD3_mapped",[35]],[[65284,65284],"disallowed_STD3_mapped",[36]],[[65285,65285],"disallowed_STD3_mapped",[37]],[[65286,65286],"disallowed_STD3_mapped",[38]],[[65287,65287],"disallowed_STD3_mapped",[39]],[[65288,65288],"disallowed_STD3_mapped",[40]],[[65289,65289],"disallowed_STD3_mapped",[41]],[[65290,65290],"disallowed_STD3_mapped",[42]],[[65291,65291],"disallowed_STD3_mapped",[43]],[[65292,65292],"disallowed_STD3_mapped",[44]],[[65293,65293],"mapped",[45]],[[65294,65294],"mapped",[46]],[[65295,65295],"disallowed_STD3_mapped",[47]],[[65296,65296],"mapped",[48]],[[65297,65297],"mapped",[49]],[[65298,65298],"mapped",[50]],[[65299,65299],"mapped",[51]],[[65300,65300],"mapped",[52]],[[65301,65301],"mapped",[53]],[[65302,65302],"mapped",[54]],[[65303,65303],"mapped",[55]],[[65304,65304],"mapped",[56]],[[65305,65305],"mapped",[57]],[[65306,65306],"disallowed_STD3_mapped",[58]],[[65307,65307],"disallowed_STD3_mapped",[59]],[[65308,65308],"disallowed_STD3_mapped",[60]],[[65309,65309],"disallowed_STD3_mapped",[61]],[[65310,65310],"disallowed_STD3_mapped",[62]],[[65311,65311],"disallowed_STD3_mapped",[63]],[[65312,65312],"disallowed_STD3_mapped",[64]],[[65313,65313],"mapped",[97]],[[65314,65314],"mapped",[98]],[[65315,65315],"mapped",[99]],[[65316,65316],"mapped",[100]],[[65317,65317],"mapped",[101]],[[65318,65318],"mapped",[102]],[[65319,65319],"mapped",[103]],[[65320,65320],"mapped",[104]],[[65321,65321],"mapped",[105]],[[65322,65322],"mapped",[106]],[[65323,65323],"mapped",[107]],[[65324,65324],"mapped",[108]],[[65325,65325],"mapped",[109]],[[65326,65326],"mapped",[110]],[[65327,65327],"mapped",[111]],[[65328,65328],"mapped",[112]],[[65329,65329],"mapped",[113]],[[65330,65330],"mapped",[114]],[[65331,65331],"mapped",[115]],[[65332,65332],"mapped",[116]],[[65333,65333],"mapped",[117]],[[65334,65334],"mapped",[118]],[[65335,65335],"mapped",[119]],[[65336,65336],"mapped",[120]],[[65337,65337],"mapped",[121]],[[65338,65338],"mapped",[122]],[[65339,65339],"disallowed_STD3_mapped",[91]],[[65340,65340],"disallowed_STD3_mapped",[92]],[[65341,65341],"disallowed_STD3_mapped",[93]],[[65342,65342],"disallowed_STD3_mapped",[94]],[[65343,65343],"disallowed_STD3_mapped",[95]],[[65344,65344],"disallowed_STD3_mapped",[96]],[[65345,65345],"mapped",[97]],[[65346,65346],"mapped",[98]],[[65347,65347],"mapped",[99]],[[65348,65348],"mapped",[100]],[[65349,65349],"mapped",[101]],[[65350,65350],"mapped",[102]],[[65351,65351],"mapped",[103]],[[65352,65352],"mapped",[104]],[[65353,65353],"mapped",[105]],[[65354,65354],"mapped",[106]],[[65355,65355],"mapped",[107]],[[65356,65356],"mapped",[108]],[[65357,65357],"mapped",[109]],[[65358,65358],"mapped",[110]],[[65359,65359],"mapped",[111]],[[65360,65360],"ma
pped",[112]],[[65361,65361],"mapped",[113]],[[65362,65362],"mapped",[114]],[[65363,65363],"mapped",[115]],[[65364,65364],"mapped",[116]],[[65365,65365],"mapped",[117]],[[65366,65366],"mapped",[118]],[[65367,65367],"mapped",[119]],[[65368,65368],"mapped",[120]],[[65369,65369],"mapped",[121]],[[65370,65370],"mapped",[122]],[[65371,65371],"disallowed_STD3_mapped",[123]],[[65372,65372],"disallowed_STD3_mapped",[124]],[[65373,65373],"disallowed_STD3_mapped",[125]],[[65374,65374],"disallowed_STD3_mapped",[126]],[[65375,65375],"mapped",[10629]],[[65376,65376],"mapped",[10630]],[[65377,65377],"mapped",[46]],[[65378,65378],"mapped",[12300]],[[65379,65379],"mapped",[12301]],[[65380,65380],"mapped",[12289]],[[65381,65381],"mapped",[12539]],[[65382,65382],"mapped",[12530]],[[65383,65383],"mapped",[12449]],[[65384,65384],"mapped",[12451]],[[65385,65385],"mapped",[12453]],[[65386,65386],"mapped",[12455]],[[65387,65387],"mapped",[12457]],[[65388,65388],"mapped",[12515]],[[65389,65389],"mapped",[12517]],[[65390,65390],"mapped",[12519]],[[65391,65391],"mapped",[12483]],[[65392,65392],"mapped",[12540]],[[65393,65393],"mapped",[12450]],[[65394,65394],"mapped",[12452]],[[65395,65395],"mapped",[12454]],[[65396,65396],"mapped",[12456]],[[65397,65397],"mapped",[12458]],[[65398,65398],"mapped",[12459]],[[65399,65399],"mapped",[12461]],[[65400,65400],"mapped",[12463]],[[65401,65401],"mapped",[12465]],[[65402,65402],"mapped",[12467]],[[65403,65403],"mapped",[12469]],[[65404,65404],"mapped",[12471]],[[65405,65405],"mapped",[12473]],[[65406,65406],"mapped",[12475]],[[65407,65407],"mapped",[12477]],[[65408,65408],"mapped",[12479]],[[65409,65409],"mapped",[12481]],[[65410,65410],"mapped",[12484]],[[65411,65411],"mapped",[12486]],[[65412,65412],"mapped",[12488]],[[65413,65413],"mapped",[12490]],[[65414,65414],"mapped",[12491]],[[65415,65415],"mapped",[12492]],[[65416,65416],"mapped",[12493]],[[65417,65417],"mapped",[12494]],[[65418,65418],"mapped",[12495]],[[65419,65419],"mapped",[12498]],[[65420,65420],"mapped",[12501]],[[65421,65421],"mapped",[12504]],[[65422,65422],"mapped",[12507]],[[65423,65423],"mapped",[12510]],[[65424,65424],"mapped",[12511]],[[65425,65425],"mapped",[12512]],[[65426,65426],"mapped",[12513]],[[65427,65427],"mapped",[12514]],[[65428,65428],"mapped",[12516]],[[65429,65429],"mapped",[12518]],[[65430,65430],"mapped",[12520]],[[65431,65431],"mapped",[12521]],[[65432,65432],"mapped",[12522]],[[65433,65433],"mapped",[12523]],[[65434,65434],"mapped",[12524]],[[65435,65435],"mapped",[12525]],[[65436,65436],"mapped",[12527]],[[65437,65437],"mapped",[12531]],[[65438,65438],"mapped",[12441]],[[65439,65439],"mapped",[12442]],[[65440,65440],"disallowed"],[[65441,65441],"mapped",[4352]],[[65442,65442],"mapped",[4353]],[[65443,65443],"mapped",[4522]],[[65444,65444],"mapped",[4354]],[[65445,65445],"mapped",[4524]],[[65446,65446],"mapped",[4525]],[[65447,65447],"mapped",[4355]],[[65448,65448],"mapped",[4356]],[[65449,65449],"mapped",[4357]],[[65450,65450],"mapped",[4528]],[[65451,65451],"mapped",[4529]],[[65452,65452],"mapped",[4530]],[[65453,65453],"mapped",[4531]],[[65454,65454],"mapped",[4532]],[[65455,65455],"mapped",[4533]],[[65456,65456],"mapped",[4378]],[[65457,65457],"mapped",[4358]],[[65458,65458],"mapped",[4359]],[[65459,65459],"mapped",[4360]],[[65460,65460],"mapped",[4385]],[[65461,65461],"mapped",[4361]],[[65462,65462],"mapped",[4362]],[[65463,65463],"mapped",[4363]],[[65464,65464],"mapped",[4364]],[[65465,65465],"mapped",[4365]],[[65466,65466],"mapped",[4366]],[[65467,65467],"mapped",[4367]],[[65468,6
5468],"mapped",[4368]],[[65469,65469],"mapped",[4369]],[[65470,65470],"mapped",[4370]],[[65471,65473],"disallowed"],[[65474,65474],"mapped",[4449]],[[65475,65475],"mapped",[4450]],[[65476,65476],"mapped",[4451]],[[65477,65477],"mapped",[4452]],[[65478,65478],"mapped",[4453]],[[65479,65479],"mapped",[4454]],[[65480,65481],"disallowed"],[[65482,65482],"mapped",[4455]],[[65483,65483],"mapped",[4456]],[[65484,65484],"mapped",[4457]],[[65485,65485],"mapped",[4458]],[[65486,65486],"mapped",[4459]],[[65487,65487],"mapped",[4460]],[[65488,65489],"disallowed"],[[65490,65490],"mapped",[4461]],[[65491,65491],"mapped",[4462]],[[65492,65492],"mapped",[4463]],[[65493,65493],"mapped",[4464]],[[65494,65494],"mapped",[4465]],[[65495,65495],"mapped",[4466]],[[65496,65497],"disallowed"],[[65498,65498],"mapped",[4467]],[[65499,65499],"mapped",[4468]],[[65500,65500],"mapped",[4469]],[[65501,65503],"disallowed"],[[65504,65504],"mapped",[162]],[[65505,65505],"mapped",[163]],[[65506,65506],"mapped",[172]],[[65507,65507],"disallowed_STD3_mapped",[32,772]],[[65508,65508],"mapped",[166]],[[65509,65509],"mapped",[165]],[[65510,65510],"mapped",[8361]],[[65511,65511],"disallowed"],[[65512,65512],"mapped",[9474]],[[65513,65513],"mapped",[8592]],[[65514,65514],"mapped",[8593]],[[65515,65515],"mapped",[8594]],[[65516,65516],"mapped",[8595]],[[65517,65517],"mapped",[9632]],[[65518,65518],"mapped",[9675]],[[65519,65528],"disallowed"],[[65529,65531],"disallowed"],[[65532,65532],"disallowed"],[[65533,65533],"disallowed"],[[65534,65535],"disallowed"],[[65536,65547],"valid"],[[65548,65548],"disallowed"],[[65549,65574],"valid"],[[65575,65575],"disallowed"],[[65576,65594],"valid"],[[65595,65595],"disallowed"],[[65596,65597],"valid"],[[65598,65598],"disallowed"],[[65599,65613],"valid"],[[65614,65615],"disallowed"],[[65616,65629],"valid"],[[65630,65663],"disallowed"],[[65664,65786],"valid"],[[65787,65791],"disallowed"],[[65792,65794],"valid",[],"NV8"],[[65795,65798],"disallowed"],[[65799,65843],"valid",[],"NV8"],[[65844,65846],"disallowed"],[[65847,65855],"valid",[],"NV8"],[[65856,65930],"valid",[],"NV8"],[[65931,65932],"valid",[],"NV8"],[[65933,65935],"disallowed"],[[65936,65947],"valid",[],"NV8"],[[65948,65951],"disallowed"],[[65952,65952],"valid",[],"NV8"],[[65953,65999],"disallowed"],[[66000,66044],"valid",[],"NV8"],[[66045,66045],"valid"],[[66046,66175],"disallowed"],[[66176,66204],"valid"],[[66205,66207],"disallowed"],[[66208,66256],"valid"],[[66257,66271],"disallowed"],[[66272,66272],"valid"],[[66273,66299],"valid",[],"NV8"],[[66300,66303],"disallowed"],[[66304,66334],"valid"],[[66335,66335],"valid"],[[66336,66339],"valid",[],"NV8"],[[66340,66351],"disallowed"],[[66352,66368],"valid"],[[66369,66369],"valid",[],"NV8"],[[66370,66377],"valid"],[[66378,66378],"valid",[],"NV8"],[[66379,66383],"disallowed"],[[66384,66426],"valid"],[[66427,66431],"disallowed"],[[66432,66461],"valid"],[[66462,66462],"disallowed"],[[66463,66463],"valid",[],"NV8"],[[66464,66499],"valid"],[[66500,66503],"disallowed"],[[66504,66511],"valid"],[[66512,66517],"valid",[],"NV8"],[[66518,66559],"disallowed"],[[66560,66560],"mapped",[66600]],[[66561,66561],"mapped",[66601]],[[66562,66562],"mapped",[66602]],[[66563,66563],"mapped",[66603]],[[66564,66564],"mapped",[66604]],[[66565,66565],"mapped",[66605]],[[66566,66566],"mapped",[66606]],[[66567,66567],"mapped",[66607]],[[66568,66568],"mapped",[66608]],[[66569,66569],"mapped",[66609]],[[66570,66570],"mapped",[66610]],[[66571,66571],"mapped",[66611]],[[66572,66572],"mapped",[66612]],[[66573,66573],"mapped",[66613]
],[[66574,66574],"mapped",[66614]],[[66575,66575],"mapped",[66615]],[[66576,66576],"mapped",[66616]],[[66577,66577],"mapped",[66617]],[[66578,66578],"mapped",[66618]],[[66579,66579],"mapped",[66619]],[[66580,66580],"mapped",[66620]],[[66581,66581],"mapped",[66621]],[[66582,66582],"mapped",[66622]],[[66583,66583],"mapped",[66623]],[[66584,66584],"mapped",[66624]],[[66585,66585],"mapped",[66625]],[[66586,66586],"mapped",[66626]],[[66587,66587],"mapped",[66627]],[[66588,66588],"mapped",[66628]],[[66589,66589],"mapped",[66629]],[[66590,66590],"mapped",[66630]],[[66591,66591],"mapped",[66631]],[[66592,66592],"mapped",[66632]],[[66593,66593],"mapped",[66633]],[[66594,66594],"mapped",[66634]],[[66595,66595],"mapped",[66635]],[[66596,66596],"mapped",[66636]],[[66597,66597],"mapped",[66637]],[[66598,66598],"mapped",[66638]],[[66599,66599],"mapped",[66639]],[[66600,66637],"valid"],[[66638,66717],"valid"],[[66718,66719],"disallowed"],[[66720,66729],"valid"],[[66730,66815],"disallowed"],[[66816,66855],"valid"],[[66856,66863],"disallowed"],[[66864,66915],"valid"],[[66916,66926],"disallowed"],[[66927,66927],"valid",[],"NV8"],[[66928,67071],"disallowed"],[[67072,67382],"valid"],[[67383,67391],"disallowed"],[[67392,67413],"valid"],[[67414,67423],"disallowed"],[[67424,67431],"valid"],[[67432,67583],"disallowed"],[[67584,67589],"valid"],[[67590,67591],"disallowed"],[[67592,67592],"valid"],[[67593,67593],"disallowed"],[[67594,67637],"valid"],[[67638,67638],"disallowed"],[[67639,67640],"valid"],[[67641,67643],"disallowed"],[[67644,67644],"valid"],[[67645,67646],"disallowed"],[[67647,67647],"valid"],[[67648,67669],"valid"],[[67670,67670],"disallowed"],[[67671,67679],"valid",[],"NV8"],[[67680,67702],"valid"],[[67703,67711],"valid",[],"NV8"],[[67712,67742],"valid"],[[67743,67750],"disallowed"],[[67751,67759],"valid",[],"NV8"],[[67760,67807],"disallowed"],[[67808,67826],"valid"],[[67827,67827],"disallowed"],[[67828,67829],"valid"],[[67830,67834],"disallowed"],[[67835,67839],"valid",[],"NV8"],[[67840,67861],"valid"],[[67862,67865],"valid",[],"NV8"],[[67866,67867],"valid",[],"NV8"],[[67868,67870],"disallowed"],[[67871,67871],"valid",[],"NV8"],[[67872,67897],"valid"],[[67898,67902],"disallowed"],[[67903,67903],"valid",[],"NV8"],[[67904,67967],"disallowed"],[[67968,68023],"valid"],[[68024,68027],"disallowed"],[[68028,68029],"valid",[],"NV8"],[[68030,68031],"valid"],[[68032,68047],"valid",[],"NV8"],[[68048,68049],"disallowed"],[[68050,68095],"valid",[],"NV8"],[[68096,68099],"valid"],[[68100,68100],"disallowed"],[[68101,68102],"valid"],[[68103,68107],"disallowed"],[[68108,68115],"valid"],[[68116,68116],"disallowed"],[[68117,68119],"valid"],[[68120,68120],"disallowed"],[[68121,68147],"valid"],[[68148,68151],"disallowed"],[[68152,68154],"valid"],[[68155,68158],"disallowed"],[[68159,68159],"valid"],[[68160,68167],"valid",[],"NV8"],[[68168,68175],"disallowed"],[[68176,68184],"valid",[],"NV8"],[[68185,68191],"disallowed"],[[68192,68220],"valid"],[[68221,68223],"valid",[],"NV8"],[[68224,68252],"valid"],[[68253,68255],"valid",[],"NV8"],[[68256,68287],"disallowed"],[[68288,68295],"valid"],[[68296,68296],"valid",[],"NV8"],[[68297,68326],"valid"],[[68327,68330],"disallowed"],[[68331,68342],"valid",[],"NV8"],[[68343,68351],"disallowed"],[[68352,68405],"valid"],[[68406,68408],"disallowed"],[[68409,68415],"valid",[],"NV8"],[[68416,68437],"valid"],[[68438,68439],"disallowed"],[[68440,68447],"valid",[],"NV8"],[[68448,68466],"valid"],[[68467,68471],"disallowed"],[[68472,68479],"valid",[],"NV8"],[[68480,68497],"valid"],[[68498,68504],"d
isallowed"],[[68505,68508],"valid",[],"NV8"],[[68509,68520],"disallowed"],[[68521,68527],"valid",[],"NV8"],[[68528,68607],"disallowed"],[[68608,68680],"valid"],[[68681,68735],"disallowed"],[[68736,68736],"mapped",[68800]],[[68737,68737],"mapped",[68801]],[[68738,68738],"mapped",[68802]],[[68739,68739],"mapped",[68803]],[[68740,68740],"mapped",[68804]],[[68741,68741],"mapped",[68805]],[[68742,68742],"mapped",[68806]],[[68743,68743],"mapped",[68807]],[[68744,68744],"mapped",[68808]],[[68745,68745],"mapped",[68809]],[[68746,68746],"mapped",[68810]],[[68747,68747],"mapped",[68811]],[[68748,68748],"mapped",[68812]],[[68749,68749],"mapped",[68813]],[[68750,68750],"mapped",[68814]],[[68751,68751],"mapped",[68815]],[[68752,68752],"mapped",[68816]],[[68753,68753],"mapped",[68817]],[[68754,68754],"mapped",[68818]],[[68755,68755],"mapped",[68819]],[[68756,68756],"mapped",[68820]],[[68757,68757],"mapped",[68821]],[[68758,68758],"mapped",[68822]],[[68759,68759],"mapped",[68823]],[[68760,68760],"mapped",[68824]],[[68761,68761],"mapped",[68825]],[[68762,68762],"mapped",[68826]],[[68763,68763],"mapped",[68827]],[[68764,68764],"mapped",[68828]],[[68765,68765],"mapped",[68829]],[[68766,68766],"mapped",[68830]],[[68767,68767],"mapped",[68831]],[[68768,68768],"mapped",[68832]],[[68769,68769],"mapped",[68833]],[[68770,68770],"mapped",[68834]],[[68771,68771],"mapped",[68835]],[[68772,68772],"mapped",[68836]],[[68773,68773],"mapped",[68837]],[[68774,68774],"mapped",[68838]],[[68775,68775],"mapped",[68839]],[[68776,68776],"mapped",[68840]],[[68777,68777],"mapped",[68841]],[[68778,68778],"mapped",[68842]],[[68779,68779],"mapped",[68843]],[[68780,68780],"mapped",[68844]],[[68781,68781],"mapped",[68845]],[[68782,68782],"mapped",[68846]],[[68783,68783],"mapped",[68847]],[[68784,68784],"mapped",[68848]],[[68785,68785],"mapped",[68849]],[[68786,68786],"mapped",[68850]],[[68787,68799],"disallowed"],[[68800,68850],"valid"],[[68851,68857],"disallowed"],[[68858,68863],"valid",[],"NV8"],[[68864,69215],"disallowed"],[[69216,69246],"valid",[],"NV8"],[[69247,69631],"disallowed"],[[69632,69702],"valid"],[[69703,69709],"valid",[],"NV8"],[[69710,69713],"disallowed"],[[69714,69733],"valid",[],"NV8"],[[69734,69743],"valid"],[[69744,69758],"disallowed"],[[69759,69759],"valid"],[[69760,69818],"valid"],[[69819,69820],"valid",[],"NV8"],[[69821,69821],"disallowed"],[[69822,69825],"valid",[],"NV8"],[[69826,69839],"disallowed"],[[69840,69864],"valid"],[[69865,69871],"disallowed"],[[69872,69881],"valid"],[[69882,69887],"disallowed"],[[69888,69940],"valid"],[[69941,69941],"disallowed"],[[69942,69951],"valid"],[[69952,69955],"valid",[],"NV8"],[[69956,69967],"disallowed"],[[69968,70003],"valid"],[[70004,70005],"valid",[],"NV8"],[[70006,70006],"valid"],[[70007,70015],"disallowed"],[[70016,70084],"valid"],[[70085,70088],"valid",[],"NV8"],[[70089,70089],"valid",[],"NV8"],[[70090,70092],"valid"],[[70093,70093],"valid",[],"NV8"],[[70094,70095],"disallowed"],[[70096,70105],"valid"],[[70106,70106],"valid"],[[70107,70107],"valid",[],"NV8"],[[70108,70108],"valid"],[[70109,70111],"valid",[],"NV8"],[[70112,70112],"disallowed"],[[70113,70132],"valid",[],"NV8"],[[70133,70143],"disallowed"],[[70144,70161],"valid"],[[70162,70162],"disallowed"],[[70163,70199],"valid"],[[70200,70205],"valid",[],"NV8"],[[70206,70271],"disallowed"],[[70272,70278],"valid"],[[70279,70279],"disallowed"],[[70280,70280],"valid"],[[70281,70281],"disallowed"],[[70282,70285],"valid"],[[70286,70286],"disallowed"],[[70287,70301],"valid"],[[70302,70302],"disallowed"],[[70303,70312],"valid"
],[[70313,70313],"valid",[],"NV8"],[[70314,70319],"disallowed"],[[70320,70378],"valid"],[[70379,70383],"disallowed"],[[70384,70393],"valid"],[[70394,70399],"disallowed"],[[70400,70400],"valid"],[[70401,70403],"valid"],[[70404,70404],"disallowed"],[[70405,70412],"valid"],[[70413,70414],"disallowed"],[[70415,70416],"valid"],[[70417,70418],"disallowed"],[[70419,70440],"valid"],[[70441,70441],"disallowed"],[[70442,70448],"valid"],[[70449,70449],"disallowed"],[[70450,70451],"valid"],[[70452,70452],"disallowed"],[[70453,70457],"valid"],[[70458,70459],"disallowed"],[[70460,70468],"valid"],[[70469,70470],"disallowed"],[[70471,70472],"valid"],[[70473,70474],"disallowed"],[[70475,70477],"valid"],[[70478,70479],"disallowed"],[[70480,70480],"valid"],[[70481,70486],"disallowed"],[[70487,70487],"valid"],[[70488,70492],"disallowed"],[[70493,70499],"valid"],[[70500,70501],"disallowed"],[[70502,70508],"valid"],[[70509,70511],"disallowed"],[[70512,70516],"valid"],[[70517,70783],"disallowed"],[[70784,70853],"valid"],[[70854,70854],"valid",[],"NV8"],[[70855,70855],"valid"],[[70856,70863],"disallowed"],[[70864,70873],"valid"],[[70874,71039],"disallowed"],[[71040,71093],"valid"],[[71094,71095],"disallowed"],[[71096,71104],"valid"],[[71105,71113],"valid",[],"NV8"],[[71114,71127],"valid",[],"NV8"],[[71128,71133],"valid"],[[71134,71167],"disallowed"],[[71168,71232],"valid"],[[71233,71235],"valid",[],"NV8"],[[71236,71236],"valid"],[[71237,71247],"disallowed"],[[71248,71257],"valid"],[[71258,71295],"disallowed"],[[71296,71351],"valid"],[[71352,71359],"disallowed"],[[71360,71369],"valid"],[[71370,71423],"disallowed"],[[71424,71449],"valid"],[[71450,71452],"disallowed"],[[71453,71467],"valid"],[[71468,71471],"disallowed"],[[71472,71481],"valid"],[[71482,71487],"valid",[],"NV8"],[[71488,71839],"disallowed"],[[71840,71840],"mapped",[71872]],[[71841,71841],"mapped",[71873]],[[71842,71842],"mapped",[71874]],[[71843,71843],"mapped",[71875]],[[71844,71844],"mapped",[71876]],[[71845,71845],"mapped",[71877]],[[71846,71846],"mapped",[71878]],[[71847,71847],"mapped",[71879]],[[71848,71848],"mapped",[71880]],[[71849,71849],"mapped",[71881]],[[71850,71850],"mapped",[71882]],[[71851,71851],"mapped",[71883]],[[71852,71852],"mapped",[71884]],[[71853,71853],"mapped",[71885]],[[71854,71854],"mapped",[71886]],[[71855,71855],"mapped",[71887]],[[71856,71856],"mapped",[71888]],[[71857,71857],"mapped",[71889]],[[71858,71858],"mapped",[71890]],[[71859,71859],"mapped",[71891]],[[71860,71860],"mapped",[71892]],[[71861,71861],"mapped",[71893]],[[71862,71862],"mapped",[71894]],[[71863,71863],"mapped",[71895]],[[71864,71864],"mapped",[71896]],[[71865,71865],"mapped",[71897]],[[71866,71866],"mapped",[71898]],[[71867,71867],"mapped",[71899]],[[71868,71868],"mapped",[71900]],[[71869,71869],"mapped",[71901]],[[71870,71870],"mapped",[71902]],[[71871,71871],"mapped",[71903]],[[71872,71913],"valid"],[[71914,71922],"valid",[],"NV8"],[[71923,71934],"disallowed"],[[71935,71935],"valid"],[[71936,72383],"disallowed"],[[72384,72440],"valid"],[[72441,73727],"disallowed"],[[73728,74606],"valid"],[[74607,74648],"valid"],[[74649,74649],"valid"],[[74650,74751],"disallowed"],[[74752,74850],"valid",[],"NV8"],[[74851,74862],"valid",[],"NV8"],[[74863,74863],"disallowed"],[[74864,74867],"valid",[],"NV8"],[[74868,74868],"valid",[],"NV8"],[[74869,74879],"disallowed"],[[74880,75075],"valid"],[[75076,77823],"disallowed"],[[77824,78894],"valid"],[[78895,82943],"disallowed"],[[82944,83526],"valid"],[[83527,92159],"disallowed"],[[92160,92728],"valid"],[[92729,92735],"disallow
ed"],[[92736,92766],"valid"],[[92767,92767],"disallowed"],[[92768,92777],"valid"],[[92778,92781],"disallowed"],[[92782,92783],"valid",[],"NV8"],[[92784,92879],"disallowed"],[[92880,92909],"valid"],[[92910,92911],"disallowed"],[[92912,92916],"valid"],[[92917,92917],"valid",[],"NV8"],[[92918,92927],"disallowed"],[[92928,92982],"valid"],[[92983,92991],"valid",[],"NV8"],[[92992,92995],"valid"],[[92996,92997],"valid",[],"NV8"],[[92998,93007],"disallowed"],[[93008,93017],"valid"],[[93018,93018],"disallowed"],[[93019,93025],"valid",[],"NV8"],[[93026,93026],"disallowed"],[[93027,93047],"valid"],[[93048,93052],"disallowed"],[[93053,93071],"valid"],[[93072,93951],"disallowed"],[[93952,94020],"valid"],[[94021,94031],"disallowed"],[[94032,94078],"valid"],[[94079,94094],"disallowed"],[[94095,94111],"valid"],[[94112,110591],"disallowed"],[[110592,110593],"valid"],[[110594,113663],"disallowed"],[[113664,113770],"valid"],[[113771,113775],"disallowed"],[[113776,113788],"valid"],[[113789,113791],"disallowed"],[[113792,113800],"valid"],[[113801,113807],"disallowed"],[[113808,113817],"valid"],[[113818,113819],"disallowed"],[[113820,113820],"valid",[],"NV8"],[[113821,113822],"valid"],[[113823,113823],"valid",[],"NV8"],[[113824,113827],"ignored"],[[113828,118783],"disallowed"],[[118784,119029],"valid",[],"NV8"],[[119030,119039],"disallowed"],[[119040,119078],"valid",[],"NV8"],[[119079,119080],"disallowed"],[[119081,119081],"valid",[],"NV8"],[[119082,119133],"valid",[],"NV8"],[[119134,119134],"mapped",[119127,119141]],[[119135,119135],"mapped",[119128,119141]],[[119136,119136],"mapped",[119128,119141,119150]],[[119137,119137],"mapped",[119128,119141,119151]],[[119138,119138],"mapped",[119128,119141,119152]],[[119139,119139],"mapped",[119128,119141,119153]],[[119140,119140],"mapped",[119128,119141,119154]],[[119141,119154],"valid",[],"NV8"],[[119155,119162],"disallowed"],[[119163,119226],"valid",[],"NV8"],[[119227,119227],"mapped",[119225,119141]],[[119228,119228],"mapped",[119226,119141]],[[119229,119229],"mapped",[119225,119141,119150]],[[119230,119230],"mapped",[119226,119141,119150]],[[119231,119231],"mapped",[119225,119141,119151]],[[119232,119232],"mapped",[119226,119141,119151]],[[119233,119261],"valid",[],"NV8"],[[119262,119272],"valid",[],"NV8"],[[119273,119295],"disallowed"],[[119296,119365],"valid",[],"NV8"],[[119366,119551],"disallowed"],[[119552,119638],"valid",[],"NV8"],[[119639,119647],"disallowed"],[[119648,119665],"valid",[],"NV8"],[[119666,119807],"disallowed"],[[119808,119808],"mapped",[97]],[[119809,119809],"mapped",[98]],[[119810,119810],"mapped",[99]],[[119811,119811],"mapped",[100]],[[119812,119812],"mapped",[101]],[[119813,119813],"mapped",[102]],[[119814,119814],"mapped",[103]],[[119815,119815],"mapped",[104]],[[119816,119816],"mapped",[105]],[[119817,119817],"mapped",[106]],[[119818,119818],"mapped",[107]],[[119819,119819],"mapped",[108]],[[119820,119820],"mapped",[109]],[[119821,119821],"mapped",[110]],[[119822,119822],"mapped",[111]],[[119823,119823],"mapped",[112]],[[119824,119824],"mapped",[113]],[[119825,119825],"mapped",[114]],[[119826,119826],"mapped",[115]],[[119827,119827],"mapped",[116]],[[119828,119828],"mapped",[117]],[[119829,119829],"mapped",[118]],[[119830,119830],"mapped",[119]],[[119831,119831],"mapped",[120]],[[119832,119832],"mapped",[121]],[[119833,119833],"mapped",[122]],[[119834,119834],"mapped",[97]],[[119835,119835],"mapped",[98]],[[119836,119836],"mapped",[99]],[[119837,119837],"mapped",[100]],[[119838,119838],"mapped",[101]],[[119839,119839],"mapped",[102]],[[119
840,119840],"mapped",[103]],[[119841,119841],"mapped",[104]],[[119842,119842],"mapped",[105]],[[119843,119843],"mapped",[106]],[[119844,119844],"mapped",[107]],[[119845,119845],"mapped",[108]],[[119846,119846],"mapped",[109]],[[119847,119847],"mapped",[110]],[[119848,119848],"mapped",[111]],[[119849,119849],"mapped",[112]],[[119850,119850],"mapped",[113]],[[119851,119851],"mapped",[114]],[[119852,119852],"mapped",[115]],[[119853,119853],"mapped",[116]],[[119854,119854],"mapped",[117]],[[119855,119855],"mapped",[118]],[[119856,119856],"mapped",[119]],[[119857,119857],"mapped",[120]],[[119858,119858],"mapped",[121]],[[119859,119859],"mapped",[122]],[[119860,119860],"mapped",[97]],[[119861,119861],"mapped",[98]],[[119862,119862],"mapped",[99]],[[119863,119863],"mapped",[100]],[[119864,119864],"mapped",[101]],[[119865,119865],"mapped",[102]],[[119866,119866],"mapped",[103]],[[119867,119867],"mapped",[104]],[[119868,119868],"mapped",[105]],[[119869,119869],"mapped",[106]],[[119870,119870],"mapped",[107]],[[119871,119871],"mapped",[108]],[[119872,119872],"mapped",[109]],[[119873,119873],"mapped",[110]],[[119874,119874],"mapped",[111]],[[119875,119875],"mapped",[112]],[[119876,119876],"mapped",[113]],[[119877,119877],"mapped",[114]],[[119878,119878],"mapped",[115]],[[119879,119879],"mapped",[116]],[[119880,119880],"mapped",[117]],[[119881,119881],"mapped",[118]],[[119882,119882],"mapped",[119]],[[119883,119883],"mapped",[120]],[[119884,119884],"mapped",[121]],[[119885,119885],"mapped",[122]],[[119886,119886],"mapped",[97]],[[119887,119887],"mapped",[98]],[[119888,119888],"mapped",[99]],[[119889,119889],"mapped",[100]],[[119890,119890],"mapped",[101]],[[119891,119891],"mapped",[102]],[[119892,119892],"mapped",[103]],[[119893,119893],"disallowed"],[[119894,119894],"mapped",[105]],[[119895,119895],"mapped",[106]],[[119896,119896],"mapped",[107]],[[119897,119897],"mapped",[108]],[[119898,119898],"mapped",[109]],[[119899,119899],"mapped",[110]],[[119900,119900],"mapped",[111]],[[119901,119901],"mapped",[112]],[[119902,119902],"mapped",[113]],[[119903,119903],"mapped",[114]],[[119904,119904],"mapped",[115]],[[119905,119905],"mapped",[116]],[[119906,119906],"mapped",[117]],[[119907,119907],"mapped",[118]],[[119908,119908],"mapped",[119]],[[119909,119909],"mapped",[120]],[[119910,119910],"mapped",[121]],[[119911,119911],"mapped",[122]],[[119912,119912],"mapped",[97]],[[119913,119913],"mapped",[98]],[[119914,119914],"mapped",[99]],[[119915,119915],"mapped",[100]],[[119916,119916],"mapped",[101]],[[119917,119917],"mapped",[102]],[[119918,119918],"mapped",[103]],[[119919,119919],"mapped",[104]],[[119920,119920],"mapped",[105]],[[119921,119921],"mapped",[106]],[[119922,119922],"mapped",[107]],[[119923,119923],"mapped",[108]],[[119924,119924],"mapped",[109]],[[119925,119925],"mapped",[110]],[[119926,119926],"mapped",[111]],[[119927,119927],"mapped",[112]],[[119928,119928],"mapped",[113]],[[119929,119929],"mapped",[114]],[[119930,119930],"mapped",[115]],[[119931,119931],"mapped",[116]],[[119932,119932],"mapped",[117]],[[119933,119933],"mapped",[118]],[[119934,119934],"mapped",[119]],[[119935,119935],"mapped",[120]],[[119936,119936],"mapped",[121]],[[119937,119937],"mapped",[122]],[[119938,119938],"mapped",[97]],[[119939,119939],"mapped",[98]],[[119940,119940],"mapped",[99]],[[119941,119941],"mapped",[100]],[[119942,119942],"mapped",[101]],[[119943,119943],"mapped",[102]],[[119944,119944],"mapped",[103]],[[119945,119945],"mapped",[104]],[[119946,119946],"mapped",[105]],[[119947,119947],"mapped",[106]],[[119948,1
19948],"mapped",[107]],[[119949,119949],"mapped",[108]],[[119950,119950],"mapped",[109]],[[119951,119951],"mapped",[110]],[[119952,119952],"mapped",[111]],[[119953,119953],"mapped",[112]],[[119954,119954],"mapped",[113]],[[119955,119955],"mapped",[114]],[[119956,119956],"mapped",[115]],[[119957,119957],"mapped",[116]],[[119958,119958],"mapped",[117]],[[119959,119959],"mapped",[118]],[[119960,119960],"mapped",[119]],[[119961,119961],"mapped",[120]],[[119962,119962],"mapped",[121]],[[119963,119963],"mapped",[122]],[[119964,119964],"mapped",[97]],[[119965,119965],"disallowed"],[[119966,119966],"mapped",[99]],[[119967,119967],"mapped",[100]],[[119968,119969],"disallowed"],[[119970,119970],"mapped",[103]],[[119971,119972],"disallowed"],[[119973,119973],"mapped",[106]],[[119974,119974],"mapped",[107]],[[119975,119976],"disallowed"],[[119977,119977],"mapped",[110]],[[119978,119978],"mapped",[111]],[[119979,119979],"mapped",[112]],[[119980,119980],"mapped",[113]],[[119981,119981],"disallowed"],[[119982,119982],"mapped",[115]],[[119983,119983],"mapped",[116]],[[119984,119984],"mapped",[117]],[[119985,119985],"mapped",[118]],[[119986,119986],"mapped",[119]],[[119987,119987],"mapped",[120]],[[119988,119988],"mapped",[121]],[[119989,119989],"mapped",[122]],[[119990,119990],"mapped",[97]],[[119991,119991],"mapped",[98]],[[119992,119992],"mapped",[99]],[[119993,119993],"mapped",[100]],[[119994,119994],"disallowed"],[[119995,119995],"mapped",[102]],[[119996,119996],"disallowed"],[[119997,119997],"mapped",[104]],[[119998,119998],"mapped",[105]],[[119999,119999],"mapped",[106]],[[120000,120000],"mapped",[107]],[[120001,120001],"mapped",[108]],[[120002,120002],"mapped",[109]],[[120003,120003],"mapped",[110]],[[120004,120004],"disallowed"],[[120005,120005],"mapped",[112]],[[120006,120006],"mapped",[113]],[[120007,120007],"mapped",[114]],[[120008,120008],"mapped",[115]],[[120009,120009],"mapped",[116]],[[120010,120010],"mapped",[117]],[[120011,120011],"mapped",[118]],[[120012,120012],"mapped",[119]],[[120013,120013],"mapped",[120]],[[120014,120014],"mapped",[121]],[[120015,120015],"mapped",[122]],[[120016,120016],"mapped",[97]],[[120017,120017],"mapped",[98]],[[120018,120018],"mapped",[99]],[[120019,120019],"mapped",[100]],[[120020,120020],"mapped",[101]],[[120021,120021],"mapped",[102]],[[120022,120022],"mapped",[103]],[[120023,120023],"mapped",[104]],[[120024,120024],"mapped",[105]],[[120025,120025],"mapped",[106]],[[120026,120026],"mapped",[107]],[[120027,120027],"mapped",[108]],[[120028,120028],"mapped",[109]],[[120029,120029],"mapped",[110]],[[120030,120030],"mapped",[111]],[[120031,120031],"mapped",[112]],[[120032,120032],"mapped",[113]],[[120033,120033],"mapped",[114]],[[120034,120034],"mapped",[115]],[[120035,120035],"mapped",[116]],[[120036,120036],"mapped",[117]],[[120037,120037],"mapped",[118]],[[120038,120038],"mapped",[119]],[[120039,120039],"mapped",[120]],[[120040,120040],"mapped",[121]],[[120041,120041],"mapped",[122]],[[120042,120042],"mapped",[97]],[[120043,120043],"mapped",[98]],[[120044,120044],"mapped",[99]],[[120045,120045],"mapped",[100]],[[120046,120046],"mapped",[101]],[[120047,120047],"mapped",[102]],[[120048,120048],"mapped",[103]],[[120049,120049],"mapped",[104]],[[120050,120050],"mapped",[105]],[[120051,120051],"mapped",[106]],[[120052,120052],"mapped",[107]],[[120053,120053],"mapped",[108]],[[120054,120054],"mapped",[109]],[[120055,120055],"mapped",[110]],[[120056,120056],"mapped",[111]],[[120057,120057],"mapped",[112]],[[120058,120058],"mapped",[113]],[[120059,120059],"mapped",[1
14]],[[120060,120060],"mapped",[115]],[[120061,120061],"mapped",[116]],[[120062,120062],"mapped",[117]],[[120063,120063],"mapped",[118]],[[120064,120064],"mapped",[119]],[[120065,120065],"mapped",[120]],[[120066,120066],"mapped",[121]],[[120067,120067],"mapped",[122]],[[120068,120068],"mapped",[97]],[[120069,120069],"mapped",[98]],[[120070,120070],"disallowed"],[[120071,120071],"mapped",[100]],[[120072,120072],"mapped",[101]],[[120073,120073],"mapped",[102]],[[120074,120074],"mapped",[103]],[[120075,120076],"disallowed"],[[120077,120077],"mapped",[106]],[[120078,120078],"mapped",[107]],[[120079,120079],"mapped",[108]],[[120080,120080],"mapped",[109]],[[120081,120081],"mapped",[110]],[[120082,120082],"mapped",[111]],[[120083,120083],"mapped",[112]],[[120084,120084],"mapped",[113]],[[120085,120085],"disallowed"],[[120086,120086],"mapped",[115]],[[120087,120087],"mapped",[116]],[[120088,120088],"mapped",[117]],[[120089,120089],"mapped",[118]],[[120090,120090],"mapped",[119]],[[120091,120091],"mapped",[120]],[[120092,120092],"mapped",[121]],[[120093,120093],"disallowed"],[[120094,120094],"mapped",[97]],[[120095,120095],"mapped",[98]],[[120096,120096],"mapped",[99]],[[120097,120097],"mapped",[100]],[[120098,120098],"mapped",[101]],[[120099,120099],"mapped",[102]],[[120100,120100],"mapped",[103]],[[120101,120101],"mapped",[104]],[[120102,120102],"mapped",[105]],[[120103,120103],"mapped",[106]],[[120104,120104],"mapped",[107]],[[120105,120105],"mapped",[108]],[[120106,120106],"mapped",[109]],[[120107,120107],"mapped",[110]],[[120108,120108],"mapped",[111]],[[120109,120109],"mapped",[112]],[[120110,120110],"mapped",[113]],[[120111,120111],"mapped",[114]],[[120112,120112],"mapped",[115]],[[120113,120113],"mapped",[116]],[[120114,120114],"mapped",[117]],[[120115,120115],"mapped",[118]],[[120116,120116],"mapped",[119]],[[120117,120117],"mapped",[120]],[[120118,120118],"mapped",[121]],[[120119,120119],"mapped",[122]],[[120120,120120],"mapped",[97]],[[120121,120121],"mapped",[98]],[[120122,120122],"disallowed"],[[120123,120123],"mapped",[100]],[[120124,120124],"mapped",[101]],[[120125,120125],"mapped",[102]],[[120126,120126],"mapped",[103]],[[120127,120127],"disallowed"],[[120128,120128],"mapped",[105]],[[120129,120129],"mapped",[106]],[[120130,120130],"mapped",[107]],[[120131,120131],"mapped",[108]],[[120132,120132],"mapped",[109]],[[120133,120133],"disallowed"],[[120134,120134],"mapped",[111]],[[120135,120137],"disallowed"],[[120138,120138],"mapped",[115]],[[120139,120139],"mapped",[116]],[[120140,120140],"mapped",[117]],[[120141,120141],"mapped",[118]],[[120142,120142],"mapped",[119]],[[120143,120143],"mapped",[120]],[[120144,120144],"mapped",[121]],[[120145,120145],"disallowed"],[[120146,120146],"mapped",[97]],[[120147,120147],"mapped",[98]],[[120148,120148],"mapped",[99]],[[120149,120149],"mapped",[100]],[[120150,120150],"mapped",[101]],[[120151,120151],"mapped",[102]],[[120152,120152],"mapped",[103]],[[120153,120153],"mapped",[104]],[[120154,120154],"mapped",[105]],[[120155,120155],"mapped",[106]],[[120156,120156],"mapped",[107]],[[120157,120157],"mapped",[108]],[[120158,120158],"mapped",[109]],[[120159,120159],"mapped",[110]],[[120160,120160],"mapped",[111]],[[120161,120161],"mapped",[112]],[[120162,120162],"mapped",[113]],[[120163,120163],"mapped",[114]],[[120164,120164],"mapped",[115]],[[120165,120165],"mapped",[116]],[[120166,120166],"mapped",[117]],[[120167,120167],"mapped",[118]],[[120168,120168],"mapped",[119]],[[120169,120169],"mapped",[120]],[[120170,120170],"mapped",[121]],[[120171,12017
1],"mapped",[122]],[[120172,120172],"mapped",[97]],[[120173,120173],"mapped",[98]],[[120174,120174],"mapped",[99]],[[120175,120175],"mapped",[100]],[[120176,120176],"mapped",[101]],[[120177,120177],"mapped",[102]],[[120178,120178],"mapped",[103]],[[120179,120179],"mapped",[104]],[[120180,120180],"mapped",[105]],[[120181,120181],"mapped",[106]],[[120182,120182],"mapped",[107]],[[120183,120183],"mapped",[108]],[[120184,120184],"mapped",[109]],[[120185,120185],"mapped",[110]],[[120186,120186],"mapped",[111]],[[120187,120187],"mapped",[112]],[[120188,120188],"mapped",[113]],[[120189,120189],"mapped",[114]],[[120190,120190],"mapped",[115]],[[120191,120191],"mapped",[116]],[[120192,120192],"mapped",[117]],[[120193,120193],"mapped",[118]],[[120194,120194],"mapped",[119]],[[120195,120195],"mapped",[120]],[[120196,120196],"mapped",[121]],[[120197,120197],"mapped",[122]],[[120198,120198],"mapped",[97]],[[120199,120199],"mapped",[98]],[[120200,120200],"mapped",[99]],[[120201,120201],"mapped",[100]],[[120202,120202],"mapped",[101]],[[120203,120203],"mapped",[102]],[[120204,120204],"mapped",[103]],[[120205,120205],"mapped",[104]],[[120206,120206],"mapped",[105]],[[120207,120207],"mapped",[106]],[[120208,120208],"mapped",[107]],[[120209,120209],"mapped",[108]],[[120210,120210],"mapped",[109]],[[120211,120211],"mapped",[110]],[[120212,120212],"mapped",[111]],[[120213,120213],"mapped",[112]],[[120214,120214],"mapped",[113]],[[120215,120215],"mapped",[114]],[[120216,120216],"mapped",[115]],[[120217,120217],"mapped",[116]],[[120218,120218],"mapped",[117]],[[120219,120219],"mapped",[118]],[[120220,120220],"mapped",[119]],[[120221,120221],"mapped",[120]],[[120222,120222],"mapped",[121]],[[120223,120223],"mapped",[122]],[[120224,120224],"mapped",[97]],[[120225,120225],"mapped",[98]],[[120226,120226],"mapped",[99]],[[120227,120227],"mapped",[100]],[[120228,120228],"mapped",[101]],[[120229,120229],"mapped",[102]],[[120230,120230],"mapped",[103]],[[120231,120231],"mapped",[104]],[[120232,120232],"mapped",[105]],[[120233,120233],"mapped",[106]],[[120234,120234],"mapped",[107]],[[120235,120235],"mapped",[108]],[[120236,120236],"mapped",[109]],[[120237,120237],"mapped",[110]],[[120238,120238],"mapped",[111]],[[120239,120239],"mapped",[112]],[[120240,120240],"mapped",[113]],[[120241,120241],"mapped",[114]],[[120242,120242],"mapped",[115]],[[120243,120243],"mapped",[116]],[[120244,120244],"mapped",[117]],[[120245,120245],"mapped",[118]],[[120246,120246],"mapped",[119]],[[120247,120247],"mapped",[120]],[[120248,120248],"mapped",[121]],[[120249,120249],"mapped",[122]],[[120250,120250],"mapped",[97]],[[120251,120251],"mapped",[98]],[[120252,120252],"mapped",[99]],[[120253,120253],"mapped",[100]],[[120254,120254],"mapped",[101]],[[120255,120255],"mapped",[102]],[[120256,120256],"mapped",[103]],[[120257,120257],"mapped",[104]],[[120258,120258],"mapped",[105]],[[120259,120259],"mapped",[106]],[[120260,120260],"mapped",[107]],[[120261,120261],"mapped",[108]],[[120262,120262],"mapped",[109]],[[120263,120263],"mapped",[110]],[[120264,120264],"mapped",[111]],[[120265,120265],"mapped",[112]],[[120266,120266],"mapped",[113]],[[120267,120267],"mapped",[114]],[[120268,120268],"mapped",[115]],[[120269,120269],"mapped",[116]],[[120270,120270],"mapped",[117]],[[120271,120271],"mapped",[118]],[[120272,120272],"mapped",[119]],[[120273,120273],"mapped",[120]],[[120274,120274],"mapped",[121]],[[120275,120275],"mapped",[122]],[[120276,120276],"mapped",[97]],[[120277,120277],"mapped",[98]],[[120278,120278],"mapped",[99]],[[120279,120279],"ma
pped",[100]],[[120280,120280],"mapped",[101]],[[120281,120281],"mapped",[102]],[[120282,120282],"mapped",[103]],[[120283,120283],"mapped",[104]],[[120284,120284],"mapped",[105]],[[120285,120285],"mapped",[106]],[[120286,120286],"mapped",[107]],[[120287,120287],"mapped",[108]],[[120288,120288],"mapped",[109]],[[120289,120289],"mapped",[110]],[[120290,120290],"mapped",[111]],[[120291,120291],"mapped",[112]],[[120292,120292],"mapped",[113]],[[120293,120293],"mapped",[114]],[[120294,120294],"mapped",[115]],[[120295,120295],"mapped",[116]],[[120296,120296],"mapped",[117]],[[120297,120297],"mapped",[118]],[[120298,120298],"mapped",[119]],[[120299,120299],"mapped",[120]],[[120300,120300],"mapped",[121]],[[120301,120301],"mapped",[122]],[[120302,120302],"mapped",[97]],[[120303,120303],"mapped",[98]],[[120304,120304],"mapped",[99]],[[120305,120305],"mapped",[100]],[[120306,120306],"mapped",[101]],[[120307,120307],"mapped",[102]],[[120308,120308],"mapped",[103]],[[120309,120309],"mapped",[104]],[[120310,120310],"mapped",[105]],[[120311,120311],"mapped",[106]],[[120312,120312],"mapped",[107]],[[120313,120313],"mapped",[108]],[[120314,120314],"mapped",[109]],[[120315,120315],"mapped",[110]],[[120316,120316],"mapped",[111]],[[120317,120317],"mapped",[112]],[[120318,120318],"mapped",[113]],[[120319,120319],"mapped",[114]],[[120320,120320],"mapped",[115]],[[120321,120321],"mapped",[116]],[[120322,120322],"mapped",[117]],[[120323,120323],"mapped",[118]],[[120324,120324],"mapped",[119]],[[120325,120325],"mapped",[120]],[[120326,120326],"mapped",[121]],[[120327,120327],"mapped",[122]],[[120328,120328],"mapped",[97]],[[120329,120329],"mapped",[98]],[[120330,120330],"mapped",[99]],[[120331,120331],"mapped",[100]],[[120332,120332],"mapped",[101]],[[120333,120333],"mapped",[102]],[[120334,120334],"mapped",[103]],[[120335,120335],"mapped",[104]],[[120336,120336],"mapped",[105]],[[120337,120337],"mapped",[106]],[[120338,120338],"mapped",[107]],[[120339,120339],"mapped",[108]],[[120340,120340],"mapped",[109]],[[120341,120341],"mapped",[110]],[[120342,120342],"mapped",[111]],[[120343,120343],"mapped",[112]],[[120344,120344],"mapped",[113]],[[120345,120345],"mapped",[114]],[[120346,120346],"mapped",[115]],[[120347,120347],"mapped",[116]],[[120348,120348],"mapped",[117]],[[120349,120349],"mapped",[118]],[[120350,120350],"mapped",[119]],[[120351,120351],"mapped",[120]],[[120352,120352],"mapped",[121]],[[120353,120353],"mapped",[122]],[[120354,120354],"mapped",[97]],[[120355,120355],"mapped",[98]],[[120356,120356],"mapped",[99]],[[120357,120357],"mapped",[100]],[[120358,120358],"mapped",[101]],[[120359,120359],"mapped",[102]],[[120360,120360],"mapped",[103]],[[120361,120361],"mapped",[104]],[[120362,120362],"mapped",[105]],[[120363,120363],"mapped",[106]],[[120364,120364],"mapped",[107]],[[120365,120365],"mapped",[108]],[[120366,120366],"mapped",[109]],[[120367,120367],"mapped",[110]],[[120368,120368],"mapped",[111]],[[120369,120369],"mapped",[112]],[[120370,120370],"mapped",[113]],[[120371,120371],"mapped",[114]],[[120372,120372],"mapped",[115]],[[120373,120373],"mapped",[116]],[[120374,120374],"mapped",[117]],[[120375,120375],"mapped",[118]],[[120376,120376],"mapped",[119]],[[120377,120377],"mapped",[120]],[[120378,120378],"mapped",[121]],[[120379,120379],"mapped",[122]],[[120380,120380],"mapped",[97]],[[120381,120381],"mapped",[98]],[[120382,120382],"mapped",[99]],[[120383,120383],"mapped",[100]],[[120384,120384],"mapped",[101]],[[120385,120385],"mapped",[102]],[[120386,120386],"mapped",[103]],[[120387,120387],"mappe
d",[104]],[[120388,120388],"mapped",[105]],[[120389,120389],"mapped",[106]],[[120390,120390],"mapped",[107]],[[120391,120391],"mapped",[108]],[[120392,120392],"mapped",[109]],[[120393,120393],"mapped",[110]],[[120394,120394],"mapped",[111]],[[120395,120395],"mapped",[112]],[[120396,120396],"mapped",[113]],[[120397,120397],"mapped",[114]],[[120398,120398],"mapped",[115]],[[120399,120399],"mapped",[116]],[[120400,120400],"mapped",[117]],[[120401,120401],"mapped",[118]],[[120402,120402],"mapped",[119]],[[120403,120403],"mapped",[120]],[[120404,120404],"mapped",[121]],[[120405,120405],"mapped",[122]],[[120406,120406],"mapped",[97]],[[120407,120407],"mapped",[98]],[[120408,120408],"mapped",[99]],[[120409,120409],"mapped",[100]],[[120410,120410],"mapped",[101]],[[120411,120411],"mapped",[102]],[[120412,120412],"mapped",[103]],[[120413,120413],"mapped",[104]],[[120414,120414],"mapped",[105]],[[120415,120415],"mapped",[106]],[[120416,120416],"mapped",[107]],[[120417,120417],"mapped",[108]],[[120418,120418],"mapped",[109]],[[120419,120419],"mapped",[110]],[[120420,120420],"mapped",[111]],[[120421,120421],"mapped",[112]],[[120422,120422],"mapped",[113]],[[120423,120423],"mapped",[114]],[[120424,120424],"mapped",[115]],[[120425,120425],"mapped",[116]],[[120426,120426],"mapped",[117]],[[120427,120427],"mapped",[118]],[[120428,120428],"mapped",[119]],[[120429,120429],"mapped",[120]],[[120430,120430],"mapped",[121]],[[120431,120431],"mapped",[122]],[[120432,120432],"mapped",[97]],[[120433,120433],"mapped",[98]],[[120434,120434],"mapped",[99]],[[120435,120435],"mapped",[100]],[[120436,120436],"mapped",[101]],[[120437,120437],"mapped",[102]],[[120438,120438],"mapped",[103]],[[120439,120439],"mapped",[104]],[[120440,120440],"mapped",[105]],[[120441,120441],"mapped",[106]],[[120442,120442],"mapped",[107]],[[120443,120443],"mapped",[108]],[[120444,120444],"mapped",[109]],[[120445,120445],"mapped",[110]],[[120446,120446],"mapped",[111]],[[120447,120447],"mapped",[112]],[[120448,120448],"mapped",[113]],[[120449,120449],"mapped",[114]],[[120450,120450],"mapped",[115]],[[120451,120451],"mapped",[116]],[[120452,120452],"mapped",[117]],[[120453,120453],"mapped",[118]],[[120454,120454],"mapped",[119]],[[120455,120455],"mapped",[120]],[[120456,120456],"mapped",[121]],[[120457,120457],"mapped",[122]],[[120458,120458],"mapped",[97]],[[120459,120459],"mapped",[98]],[[120460,120460],"mapped",[99]],[[120461,120461],"mapped",[100]],[[120462,120462],"mapped",[101]],[[120463,120463],"mapped",[102]],[[120464,120464],"mapped",[103]],[[120465,120465],"mapped",[104]],[[120466,120466],"mapped",[105]],[[120467,120467],"mapped",[106]],[[120468,120468],"mapped",[107]],[[120469,120469],"mapped",[108]],[[120470,120470],"mapped",[109]],[[120471,120471],"mapped",[110]],[[120472,120472],"mapped",[111]],[[120473,120473],"mapped",[112]],[[120474,120474],"mapped",[113]],[[120475,120475],"mapped",[114]],[[120476,120476],"mapped",[115]],[[120477,120477],"mapped",[116]],[[120478,120478],"mapped",[117]],[[120479,120479],"mapped",[118]],[[120480,120480],"mapped",[119]],[[120481,120481],"mapped",[120]],[[120482,120482],"mapped",[121]],[[120483,120483],"mapped",[122]],[[120484,120484],"mapped",[305]],[[120485,120485],"mapped",[567]],[[120486,120487],"disallowed"],[[120488,120488],"mapped",[945]],[[120489,120489],"mapped",[946]],[[120490,120490],"mapped",[947]],[[120491,120491],"mapped",[948]],[[120492,120492],"mapped",[949]],[[120493,120493],"mapped",[950]],[[120494,120494],"mapped",[951]],[[120495,120495],"mapped",[952]],[[120496,120496],"mapped"
,[953]],[[120497,120497],"mapped",[954]],[[120498,120498],"mapped",[955]],[[120499,120499],"mapped",[956]],[[120500,120500],"mapped",[957]],[[120501,120501],"mapped",[958]],[[120502,120502],"mapped",[959]],[[120503,120503],"mapped",[960]],[[120504,120504],"mapped",[961]],[[120505,120505],"mapped",[952]],[[120506,120506],"mapped",[963]],[[120507,120507],"mapped",[964]],[[120508,120508],"mapped",[965]],[[120509,120509],"mapped",[966]],[[120510,120510],"mapped",[967]],[[120511,120511],"mapped",[968]],[[120512,120512],"mapped",[969]],[[120513,120513],"mapped",[8711]],[[120514,120514],"mapped",[945]],[[120515,120515],"mapped",[946]],[[120516,120516],"mapped",[947]],[[120517,120517],"mapped",[948]],[[120518,120518],"mapped",[949]],[[120519,120519],"mapped",[950]],[[120520,120520],"mapped",[951]],[[120521,120521],"mapped",[952]],[[120522,120522],"mapped",[953]],[[120523,120523],"mapped",[954]],[[120524,120524],"mapped",[955]],[[120525,120525],"mapped",[956]],[[120526,120526],"mapped",[957]],[[120527,120527],"mapped",[958]],[[120528,120528],"mapped",[959]],[[120529,120529],"mapped",[960]],[[120530,120530],"mapped",[961]],[[120531,120532],"mapped",[963]],[[120533,120533],"mapped",[964]],[[120534,120534],"mapped",[965]],[[120535,120535],"mapped",[966]],[[120536,120536],"mapped",[967]],[[120537,120537],"mapped",[968]],[[120538,120538],"mapped",[969]],[[120539,120539],"mapped",[8706]],[[120540,120540],"mapped",[949]],[[120541,120541],"mapped",[952]],[[120542,120542],"mapped",[954]],[[120543,120543],"mapped",[966]],[[120544,120544],"mapped",[961]],[[120545,120545],"mapped",[960]],[[120546,120546],"mapped",[945]],[[120547,120547],"mapped",[946]],[[120548,120548],"mapped",[947]],[[120549,120549],"mapped",[948]],[[120550,120550],"mapped",[949]],[[120551,120551],"mapped",[950]],[[120552,120552],"mapped",[951]],[[120553,120553],"mapped",[952]],[[120554,120554],"mapped",[953]],[[120555,120555],"mapped",[954]],[[120556,120556],"mapped",[955]],[[120557,120557],"mapped",[956]],[[120558,120558],"mapped",[957]],[[120559,120559],"mapped",[958]],[[120560,120560],"mapped",[959]],[[120561,120561],"mapped",[960]],[[120562,120562],"mapped",[961]],[[120563,120563],"mapped",[952]],[[120564,120564],"mapped",[963]],[[120565,120565],"mapped",[964]],[[120566,120566],"mapped",[965]],[[120567,120567],"mapped",[966]],[[120568,120568],"mapped",[967]],[[120569,120569],"mapped",[968]],[[120570,120570],"mapped",[969]],[[120571,120571],"mapped",[8711]],[[120572,120572],"mapped",[945]],[[120573,120573],"mapped",[946]],[[120574,120574],"mapped",[947]],[[120575,120575],"mapped",[948]],[[120576,120576],"mapped",[949]],[[120577,120577],"mapped",[950]],[[120578,120578],"mapped",[951]],[[120579,120579],"mapped",[952]],[[120580,120580],"mapped",[953]],[[120581,120581],"mapped",[954]],[[120582,120582],"mapped",[955]],[[120583,120583],"mapped",[956]],[[120584,120584],"mapped",[957]],[[120585,120585],"mapped",[958]],[[120586,120586],"mapped",[959]],[[120587,120587],"mapped",[960]],[[120588,120588],"mapped",[961]],[[120589,120590],"mapped",[963]],[[120591,120591],"mapped",[964]],[[120592,120592],"mapped",[965]],[[120593,120593],"mapped",[966]],[[120594,120594],"mapped",[967]],[[120595,120595],"mapped",[968]],[[120596,120596],"mapped",[969]],[[120597,120597],"mapped",[8706]],[[120598,120598],"mapped",[949]],[[120599,120599],"mapped",[952]],[[120600,120600],"mapped",[954]],[[120601,120601],"mapped",[966]],[[120602,120602],"mapped",[961]],[[120603,120603],"mapped",[960]],[[120604,120604],"mapped",[945]],[[120605,120605],"mapped",[946]],[[120606,120
606],"mapped",[947]],[[120607,120607],"mapped",[948]],[[120608,120608],"mapped",[949]],[[120609,120609],"mapped",[950]],[[120610,120610],"mapped",[951]],[[120611,120611],"mapped",[952]],[[120612,120612],"mapped",[953]],[[120613,120613],"mapped",[954]],[[120614,120614],"mapped",[955]],[[120615,120615],"mapped",[956]],[[120616,120616],"mapped",[957]],[[120617,120617],"mapped",[958]],[[120618,120618],"mapped",[959]],[[120619,120619],"mapped",[960]],[[120620,120620],"mapped",[961]],[[120621,120621],"mapped",[952]],[[120622,120622],"mapped",[963]],[[120623,120623],"mapped",[964]],[[120624,120624],"mapped",[965]],[[120625,120625],"mapped",[966]],[[120626,120626],"mapped",[967]],[[120627,120627],"mapped",[968]],[[120628,120628],"mapped",[969]],[[120629,120629],"mapped",[8711]],[[120630,120630],"mapped",[945]],[[120631,120631],"mapped",[946]],[[120632,120632],"mapped",[947]],[[120633,120633],"mapped",[948]],[[120634,120634],"mapped",[949]],[[120635,120635],"mapped",[950]],[[120636,120636],"mapped",[951]],[[120637,120637],"mapped",[952]],[[120638,120638],"mapped",[953]],[[120639,120639],"mapped",[954]],[[120640,120640],"mapped",[955]],[[120641,120641],"mapped",[956]],[[120642,120642],"mapped",[957]],[[120643,120643],"mapped",[958]],[[120644,120644],"mapped",[959]],[[120645,120645],"mapped",[960]],[[120646,120646],"mapped",[961]],[[120647,120648],"mapped",[963]],[[120649,120649],"mapped",[964]],[[120650,120650],"mapped",[965]],[[120651,120651],"mapped",[966]],[[120652,120652],"mapped",[967]],[[120653,120653],"mapped",[968]],[[120654,120654],"mapped",[969]],[[120655,120655],"mapped",[8706]],[[120656,120656],"mapped",[949]],[[120657,120657],"mapped",[952]],[[120658,120658],"mapped",[954]],[[120659,120659],"mapped",[966]],[[120660,120660],"mapped",[961]],[[120661,120661],"mapped",[960]],[[120662,120662],"mapped",[945]],[[120663,120663],"mapped",[946]],[[120664,120664],"mapped",[947]],[[120665,120665],"mapped",[948]],[[120666,120666],"mapped",[949]],[[120667,120667],"mapped",[950]],[[120668,120668],"mapped",[951]],[[120669,120669],"mapped",[952]],[[120670,120670],"mapped",[953]],[[120671,120671],"mapped",[954]],[[120672,120672],"mapped",[955]],[[120673,120673],"mapped",[956]],[[120674,120674],"mapped",[957]],[[120675,120675],"mapped",[958]],[[120676,120676],"mapped",[959]],[[120677,120677],"mapped",[960]],[[120678,120678],"mapped",[961]],[[120679,120679],"mapped",[952]],[[120680,120680],"mapped",[963]],[[120681,120681],"mapped",[964]],[[120682,120682],"mapped",[965]],[[120683,120683],"mapped",[966]],[[120684,120684],"mapped",[967]],[[120685,120685],"mapped",[968]],[[120686,120686],"mapped",[969]],[[120687,120687],"mapped",[8711]],[[120688,120688],"mapped",[945]],[[120689,120689],"mapped",[946]],[[120690,120690],"mapped",[947]],[[120691,120691],"mapped",[948]],[[120692,120692],"mapped",[949]],[[120693,120693],"mapped",[950]],[[120694,120694],"mapped",[951]],[[120695,120695],"mapped",[952]],[[120696,120696],"mapped",[953]],[[120697,120697],"mapped",[954]],[[120698,120698],"mapped",[955]],[[120699,120699],"mapped",[956]],[[120700,120700],"mapped",[957]],[[120701,120701],"mapped",[958]],[[120702,120702],"mapped",[959]],[[120703,120703],"mapped",[960]],[[120704,120704],"mapped",[961]],[[120705,120706],"mapped",[963]],[[120707,120707],"mapped",[964]],[[120708,120708],"mapped",[965]],[[120709,120709],"mapped",[966]],[[120710,120710],"mapped",[967]],[[120711,120711],"mapped",[968]],[[120712,120712],"mapped",[969]],[[120713,120713],"mapped",[8706]],[[120714,120714],"mapped",[949]],[[120715,120715],"mapped",[952]]
,[[120716,120716],"mapped",[954]],[[120717,120717],"mapped",[966]],[[120718,120718],"mapped",[961]],[[120719,120719],"mapped",[960]],[[120720,120720],"mapped",[945]],[[120721,120721],"mapped",[946]],[[120722,120722],"mapped",[947]],[[120723,120723],"mapped",[948]],[[120724,120724],"mapped",[949]],[[120725,120725],"mapped",[950]],[[120726,120726],"mapped",[951]],[[120727,120727],"mapped",[952]],[[120728,120728],"mapped",[953]],[[120729,120729],"mapped",[954]],[[120730,120730],"mapped",[955]],[[120731,120731],"mapped",[956]],[[120732,120732],"mapped",[957]],[[120733,120733],"mapped",[958]],[[120734,120734],"mapped",[959]],[[120735,120735],"mapped",[960]],[[120736,120736],"mapped",[961]],[[120737,120737],"mapped",[952]],[[120738,120738],"mapped",[963]],[[120739,120739],"mapped",[964]],[[120740,120740],"mapped",[965]],[[120741,120741],"mapped",[966]],[[120742,120742],"mapped",[967]],[[120743,120743],"mapped",[968]],[[120744,120744],"mapped",[969]],[[120745,120745],"mapped",[8711]],[[120746,120746],"mapped",[945]],[[120747,120747],"mapped",[946]],[[120748,120748],"mapped",[947]],[[120749,120749],"mapped",[948]],[[120750,120750],"mapped",[949]],[[120751,120751],"mapped",[950]],[[120752,120752],"mapped",[951]],[[120753,120753],"mapped",[952]],[[120754,120754],"mapped",[953]],[[120755,120755],"mapped",[954]],[[120756,120756],"mapped",[955]],[[120757,120757],"mapped",[956]],[[120758,120758],"mapped",[957]],[[120759,120759],"mapped",[958]],[[120760,120760],"mapped",[959]],[[120761,120761],"mapped",[960]],[[120762,120762],"mapped",[961]],[[120763,120764],"mapped",[963]],[[120765,120765],"mapped",[964]],[[120766,120766],"mapped",[965]],[[120767,120767],"mapped",[966]],[[120768,120768],"mapped",[967]],[[120769,120769],"mapped",[968]],[[120770,120770],"mapped",[969]],[[120771,120771],"mapped",[8706]],[[120772,120772],"mapped",[949]],[[120773,120773],"mapped",[952]],[[120774,120774],"mapped",[954]],[[120775,120775],"mapped",[966]],[[120776,120776],"mapped",[961]],[[120777,120777],"mapped",[960]],[[120778,120779],"mapped",[989]],[[120780,120781],"disallowed"],[[120782,120782],"mapped",[48]],[[120783,120783],"mapped",[49]],[[120784,120784],"mapped",[50]],[[120785,120785],"mapped",[51]],[[120786,120786],"mapped",[52]],[[120787,120787],"mapped",[53]],[[120788,120788],"mapped",[54]],[[120789,120789],"mapped",[55]],[[120790,120790],"mapped",[56]],[[120791,120791],"mapped",[57]],[[120792,120792],"mapped",[48]],[[120793,120793],"mapped",[49]],[[120794,120794],"mapped",[50]],[[120795,120795],"mapped",[51]],[[120796,120796],"mapped",[52]],[[120797,120797],"mapped",[53]],[[120798,120798],"mapped",[54]],[[120799,120799],"mapped",[55]],[[120800,120800],"mapped",[56]],[[120801,120801],"mapped",[57]],[[120802,120802],"mapped",[48]],[[120803,120803],"mapped",[49]],[[120804,120804],"mapped",[50]],[[120805,120805],"mapped",[51]],[[120806,120806],"mapped",[52]],[[120807,120807],"mapped",[53]],[[120808,120808],"mapped",[54]],[[120809,120809],"mapped",[55]],[[120810,120810],"mapped",[56]],[[120811,120811],"mapped",[57]],[[120812,120812],"mapped",[48]],[[120813,120813],"mapped",[49]],[[120814,120814],"mapped",[50]],[[120815,120815],"mapped",[51]],[[120816,120816],"mapped",[52]],[[120817,120817],"mapped",[53]],[[120818,120818],"mapped",[54]],[[120819,120819],"mapped",[55]],[[120820,120820],"mapped",[56]],[[120821,120821],"mapped",[57]],[[120822,120822],"mapped",[48]],[[120823,120823],"mapped",[49]],[[120824,120824],"mapped",[50]],[[120825,120825],"mapped",[51]],[[120826,120826],"mapped",[52]],[[120827,120827],"mapped",[53]],[[1
20828,120828],"mapped",[54]],[[120829,120829],"mapped",[55]],[[120830,120830],"mapped",[56]],[[120831,120831],"mapped",[57]],[[120832,121343],"valid",[],"NV8"],[[121344,121398],"valid"],[[121399,121402],"valid",[],"NV8"],[[121403,121452],"valid"],[[121453,121460],"valid",[],"NV8"],[[121461,121461],"valid"],[[121462,121475],"valid",[],"NV8"],[[121476,121476],"valid"],[[121477,121483],"valid",[],"NV8"],[[121484,121498],"disallowed"],[[121499,121503],"valid"],[[121504,121504],"disallowed"],[[121505,121519],"valid"],[[121520,124927],"disallowed"],[[124928,125124],"valid"],[[125125,125126],"disallowed"],[[125127,125135],"valid",[],"NV8"],[[125136,125142],"valid"],[[125143,126463],"disallowed"],[[126464,126464],"mapped",[1575]],[[126465,126465],"mapped",[1576]],[[126466,126466],"mapped",[1580]],[[126467,126467],"mapped",[1583]],[[126468,126468],"disallowed"],[[126469,126469],"mapped",[1608]],[[126470,126470],"mapped",[1586]],[[126471,126471],"mapped",[1581]],[[126472,126472],"mapped",[1591]],[[126473,126473],"mapped",[1610]],[[126474,126474],"mapped",[1603]],[[126475,126475],"mapped",[1604]],[[126476,126476],"mapped",[1605]],[[126477,126477],"mapped",[1606]],[[126478,126478],"mapped",[1587]],[[126479,126479],"mapped",[1593]],[[126480,126480],"mapped",[1601]],[[126481,126481],"mapped",[1589]],[[126482,126482],"mapped",[1602]],[[126483,126483],"mapped",[1585]],[[126484,126484],"mapped",[1588]],[[126485,126485],"mapped",[1578]],[[126486,126486],"mapped",[1579]],[[126487,126487],"mapped",[1582]],[[126488,126488],"mapped",[1584]],[[126489,126489],"mapped",[1590]],[[126490,126490],"mapped",[1592]],[[126491,126491],"mapped",[1594]],[[126492,126492],"mapped",[1646]],[[126493,126493],"mapped",[1722]],[[126494,126494],"mapped",[1697]],[[126495,126495],"mapped",[1647]],[[126496,126496],"disallowed"],[[126497,126497],"mapped",[1576]],[[126498,126498],"mapped",[1580]],[[126499,126499],"disallowed"],[[126500,126500],"mapped",[1607]],[[126501,126502],"disallowed"],[[126503,126503],"mapped",[1581]],[[126504,126504],"disallowed"],[[126505,126505],"mapped",[1610]],[[126506,126506],"mapped",[1603]],[[126507,126507],"mapped",[1604]],[[126508,126508],"mapped",[1605]],[[126509,126509],"mapped",[1606]],[[126510,126510],"mapped",[1587]],[[126511,126511],"mapped",[1593]],[[126512,126512],"mapped",[1601]],[[126513,126513],"mapped",[1589]],[[126514,126514],"mapped",[1602]],[[126515,126515],"disallowed"],[[126516,126516],"mapped",[1588]],[[126517,126517],"mapped",[1578]],[[126518,126518],"mapped",[1579]],[[126519,126519],"mapped",[1582]],[[126520,126520],"disallowed"],[[126521,126521],"mapped",[1590]],[[126522,126522],"disallowed"],[[126523,126523],"mapped",[1594]],[[126524,126529],"disallowed"],[[126530,126530],"mapped",[1580]],[[126531,126534],"disallowed"],[[126535,126535],"mapped",[1581]],[[126536,126536],"disallowed"],[[126537,126537],"mapped",[1610]],[[126538,126538],"disallowed"],[[126539,126539],"mapped",[1604]],[[126540,126540],"disallowed"],[[126541,126541],"mapped",[1606]],[[126542,126542],"mapped",[1587]],[[126543,126543],"mapped",[1593]],[[126544,126544],"disallowed"],[[126545,126545],"mapped",[1589]],[[126546,126546],"mapped",[1602]],[[126547,126547],"disallowed"],[[126548,126548],"mapped",[1588]],[[126549,126550],"disallowed"],[[126551,126551],"mapped",[1582]],[[126552,126552],"disallowed"],[[126553,126553],"mapped",[1590]],[[126554,126554],"disallowed"],[[126555,126555],"mapped",[1594]],[[126556,126556],"disallowed"],[[126557,126557],"mapped",[1722]],[[126558,126558],"disallowed"],[[126559,126559],"mapped",[
1647]],[[126560,126560],"disallowed"],[[126561,126561],"mapped",[1576]],[[126562,126562],"mapped",[1580]],[[126563,126563],"disallowed"],[[126564,126564],"mapped",[1607]],[[126565,126566],"disallowed"],[[126567,126567],"mapped",[1581]],[[126568,126568],"mapped",[1591]],[[126569,126569],"mapped",[1610]],[[126570,126570],"mapped",[1603]],[[126571,126571],"disallowed"],[[126572,126572],"mapped",[1605]],[[126573,126573],"mapped",[1606]],[[126574,126574],"mapped",[1587]],[[126575,126575],"mapped",[1593]],[[126576,126576],"mapped",[1601]],[[126577,126577],"mapped",[1589]],[[126578,126578],"mapped",[1602]],[[126579,126579],"disallowed"],[[126580,126580],"mapped",[1588]],[[126581,126581],"mapped",[1578]],[[126582,126582],"mapped",[1579]],[[126583,126583],"mapped",[1582]],[[126584,126584],"disallowed"],[[126585,126585],"mapped",[1590]],[[126586,126586],"mapped",[1592]],[[126587,126587],"mapped",[1594]],[[126588,126588],"mapped",[1646]],[[126589,126589],"disallowed"],[[126590,126590],"mapped",[1697]],[[126591,126591],"disallowed"],[[126592,126592],"mapped",[1575]],[[126593,126593],"mapped",[1576]],[[126594,126594],"mapped",[1580]],[[126595,126595],"mapped",[1583]],[[126596,126596],"mapped",[1607]],[[126597,126597],"mapped",[1608]],[[126598,126598],"mapped",[1586]],[[126599,126599],"mapped",[1581]],[[126600,126600],"mapped",[1591]],[[126601,126601],"mapped",[1610]],[[126602,126602],"disallowed"],[[126603,126603],"mapped",[1604]],[[126604,126604],"mapped",[1605]],[[126605,126605],"mapped",[1606]],[[126606,126606],"mapped",[1587]],[[126607,126607],"mapped",[1593]],[[126608,126608],"mapped",[1601]],[[126609,126609],"mapped",[1589]],[[126610,126610],"mapped",[1602]],[[126611,126611],"mapped",[1585]],[[126612,126612],"mapped",[1588]],[[126613,126613],"mapped",[1578]],[[126614,126614],"mapped",[1579]],[[126615,126615],"mapped",[1582]],[[126616,126616],"mapped",[1584]],[[126617,126617],"mapped",[1590]],[[126618,126618],"mapped",[1592]],[[126619,126619],"mapped",[1594]],[[126620,126624],"disallowed"],[[126625,126625],"mapped",[1576]],[[126626,126626],"mapped",[1580]],[[126627,126627],"mapped",[1583]],[[126628,126628],"disallowed"],[[126629,126629],"mapped",[1608]],[[126630,126630],"mapped",[1586]],[[126631,126631],"mapped",[1581]],[[126632,126632],"mapped",[1591]],[[126633,126633],"mapped",[1610]],[[126634,126634],"disallowed"],[[126635,126635],"mapped",[1604]],[[126636,126636],"mapped",[1605]],[[126637,126637],"mapped",[1606]],[[126638,126638],"mapped",[1587]],[[126639,126639],"mapped",[1593]],[[126640,126640],"mapped",[1601]],[[126641,126641],"mapped",[1589]],[[126642,126642],"mapped",[1602]],[[126643,126643],"mapped",[1585]],[[126644,126644],"mapped",[1588]],[[126645,126645],"mapped",[1578]],[[126646,126646],"mapped",[1579]],[[126647,126647],"mapped",[1582]],[[126648,126648],"mapped",[1584]],[[126649,126649],"mapped",[1590]],[[126650,126650],"mapped",[1592]],[[126651,126651],"mapped",[1594]],[[126652,126703],"disallowed"],[[126704,126705],"valid",[],"NV8"],[[126706,126975],"disallowed"],[[126976,127019],"valid",[],"NV8"],[[127020,127023],"disallowed"],[[127024,127123],"valid",[],"NV8"],[[127124,127135],"disallowed"],[[127136,127150],"valid",[],"NV8"],[[127151,127152],"disallowed"],[[127153,127166],"valid",[],"NV8"],[[127167,127167],"valid",[],"NV8"],[[127168,127168],"disallowed"],[[127169,127183],"valid",[],"NV8"],[[127184,127184],"disallowed"],[[127185,127199],"valid",[],"NV8"],[[127200,127221],"valid",[],"NV8"],[[127222,127231],"disallowed"],[[127232,127232],"disallowed"],[[127233,127233],"disallowed_STD
3_mapped",[48,44]],[[127234,127234],"disallowed_STD3_mapped",[49,44]],[[127235,127235],"disallowed_STD3_mapped",[50,44]],[[127236,127236],"disallowed_STD3_mapped",[51,44]],[[127237,127237],"disallowed_STD3_mapped",[52,44]],[[127238,127238],"disallowed_STD3_mapped",[53,44]],[[127239,127239],"disallowed_STD3_mapped",[54,44]],[[127240,127240],"disallowed_STD3_mapped",[55,44]],[[127241,127241],"disallowed_STD3_mapped",[56,44]],[[127242,127242],"disallowed_STD3_mapped",[57,44]],[[127243,127244],"valid",[],"NV8"],[[127245,127247],"disallowed"],[[127248,127248],"disallowed_STD3_mapped",[40,97,41]],[[127249,127249],"disallowed_STD3_mapped",[40,98,41]],[[127250,127250],"disallowed_STD3_mapped",[40,99,41]],[[127251,127251],"disallowed_STD3_mapped",[40,100,41]],[[127252,127252],"disallowed_STD3_mapped",[40,101,41]],[[127253,127253],"disallowed_STD3_mapped",[40,102,41]],[[127254,127254],"disallowed_STD3_mapped",[40,103,41]],[[127255,127255],"disallowed_STD3_mapped",[40,104,41]],[[127256,127256],"disallowed_STD3_mapped",[40,105,41]],[[127257,127257],"disallowed_STD3_mapped",[40,106,41]],[[127258,127258],"disallowed_STD3_mapped",[40,107,41]],[[127259,127259],"disallowed_STD3_mapped",[40,108,41]],[[127260,127260],"disallowed_STD3_mapped",[40,109,41]],[[127261,127261],"disallowed_STD3_mapped",[40,110,41]],[[127262,127262],"disallowed_STD3_mapped",[40,111,41]],[[127263,127263],"disallowed_STD3_mapped",[40,112,41]],[[127264,127264],"disallowed_STD3_mapped",[40,113,41]],[[127265,127265],"disallowed_STD3_mapped",[40,114,41]],[[127266,127266],"disallowed_STD3_mapped",[40,115,41]],[[127267,127267],"disallowed_STD3_mapped",[40,116,41]],[[127268,127268],"disallowed_STD3_mapped",[40,117,41]],[[127269,127269],"disallowed_STD3_mapped",[40,118,41]],[[127270,127270],"disallowed_STD3_mapped",[40,119,41]],[[127271,127271],"disallowed_STD3_mapped",[40,120,41]],[[127272,127272],"disallowed_STD3_mapped",[40,121,41]],[[127273,127273],"disallowed_STD3_mapped",[40,122,41]],[[127274,127274],"mapped",[12308,115,12309]],[[127275,127275],"mapped",[99]],[[127276,127276],"mapped",[114]],[[127277,127277],"mapped",[99,100]],[[127278,127278],"mapped",[119,122]],[[127279,127279],"disallowed"],[[127280,127280],"mapped",[97]],[[127281,127281],"mapped",[98]],[[127282,127282],"mapped",[99]],[[127283,127283],"mapped",[100]],[[127284,127284],"mapped",[101]],[[127285,127285],"mapped",[102]],[[127286,127286],"mapped",[103]],[[127287,127287],"mapped",[104]],[[127288,127288],"mapped",[105]],[[127289,127289],"mapped",[106]],[[127290,127290],"mapped",[107]],[[127291,127291],"mapped",[108]],[[127292,127292],"mapped",[109]],[[127293,127293],"mapped",[110]],[[127294,127294],"mapped",[111]],[[127295,127295],"mapped",[112]],[[127296,127296],"mapped",[113]],[[127297,127297],"mapped",[114]],[[127298,127298],"mapped",[115]],[[127299,127299],"mapped",[116]],[[127300,127300],"mapped",[117]],[[127301,127301],"mapped",[118]],[[127302,127302],"mapped",[119]],[[127303,127303],"mapped",[120]],[[127304,127304],"mapped",[121]],[[127305,127305],"mapped",[122]],[[127306,127306],"mapped",[104,118]],[[127307,127307],"mapped",[109,118]],[[127308,127308],"mapped",[115,100]],[[127309,127309],"mapped",[115,115]],[[127310,127310],"mapped",[112,112,118]],[[127311,127311],"mapped",[119,99]],[[127312,127318],"valid",[],"NV8"],[[127319,127319],"valid",[],"NV8"],[[127320,127326],"valid",[],"NV8"],[[127327,127327],"valid",[],"NV8"],[[127328,127337],"valid",[],"NV8"],[[127338,127338],"mapped",[109,99]],[[127339,127339],"mapped",[109,100]],[[127340,127343],"disallowed"],[[127344,12
7352],"valid",[],"NV8"],[[127353,127353],"valid",[],"NV8"],[[127354,127354],"valid",[],"NV8"],[[127355,127356],"valid",[],"NV8"],[[127357,127358],"valid",[],"NV8"],[[127359,127359],"valid",[],"NV8"],[[127360,127369],"valid",[],"NV8"],[[127370,127373],"valid",[],"NV8"],[[127374,127375],"valid",[],"NV8"],[[127376,127376],"mapped",[100,106]],[[127377,127386],"valid",[],"NV8"],[[127387,127461],"disallowed"],[[127462,127487],"valid",[],"NV8"],[[127488,127488],"mapped",[12411,12363]],[[127489,127489],"mapped",[12467,12467]],[[127490,127490],"mapped",[12469]],[[127491,127503],"disallowed"],[[127504,127504],"mapped",[25163]],[[127505,127505],"mapped",[23383]],[[127506,127506],"mapped",[21452]],[[127507,127507],"mapped",[12487]],[[127508,127508],"mapped",[20108]],[[127509,127509],"mapped",[22810]],[[127510,127510],"mapped",[35299]],[[127511,127511],"mapped",[22825]],[[127512,127512],"mapped",[20132]],[[127513,127513],"mapped",[26144]],[[127514,127514],"mapped",[28961]],[[127515,127515],"mapped",[26009]],[[127516,127516],"mapped",[21069]],[[127517,127517],"mapped",[24460]],[[127518,127518],"mapped",[20877]],[[127519,127519],"mapped",[26032]],[[127520,127520],"mapped",[21021]],[[127521,127521],"mapped",[32066]],[[127522,127522],"mapped",[29983]],[[127523,127523],"mapped",[36009]],[[127524,127524],"mapped",[22768]],[[127525,127525],"mapped",[21561]],[[127526,127526],"mapped",[28436]],[[127527,127527],"mapped",[25237]],[[127528,127528],"mapped",[25429]],[[127529,127529],"mapped",[19968]],[[127530,127530],"mapped",[19977]],[[127531,127531],"mapped",[36938]],[[127532,127532],"mapped",[24038]],[[127533,127533],"mapped",[20013]],[[127534,127534],"mapped",[21491]],[[127535,127535],"mapped",[25351]],[[127536,127536],"mapped",[36208]],[[127537,127537],"mapped",[25171]],[[127538,127538],"mapped",[31105]],[[127539,127539],"mapped",[31354]],[[127540,127540],"mapped",[21512]],[[127541,127541],"mapped",[28288]],[[127542,127542],"mapped",[26377]],[[127543,127543],"mapped",[26376]],[[127544,127544],"mapped",[30003]],[[127545,127545],"mapped",[21106]],[[127546,127546],"mapped",[21942]],[[127547,127551],"disallowed"],[[127552,127552],"mapped",[12308,26412,12309]],[[127553,127553],"mapped",[12308,19977,12309]],[[127554,127554],"mapped",[12308,20108,12309]],[[127555,127555],"mapped",[12308,23433,12309]],[[127556,127556],"mapped",[12308,28857,12309]],[[127557,127557],"mapped",[12308,25171,12309]],[[127558,127558],"mapped",[12308,30423,12309]],[[127559,127559],"mapped",[12308,21213,12309]],[[127560,127560],"mapped",[12308,25943,12309]],[[127561,127567],"disallowed"],[[127568,127568],"mapped",[24471]],[[127569,127569],"mapped",[21487]],[[127570,127743],"disallowed"],[[127744,127776],"valid",[],"NV8"],[[127777,127788],"valid",[],"NV8"],[[127789,127791],"valid",[],"NV8"],[[127792,127797],"valid",[],"NV8"],[[127798,127798],"valid",[],"NV8"],[[127799,127868],"valid",[],"NV8"],[[127869,127869],"valid",[],"NV8"],[[127870,127871],"valid",[],"NV8"],[[127872,127891],"valid",[],"NV8"],[[127892,127903],"valid",[],"NV8"],[[127904,127940],"valid",[],"NV8"],[[127941,127941],"valid",[],"NV8"],[[127942,127946],"valid",[],"NV8"],[[127947,127950],"valid",[],"NV8"],[[127951,127955],"valid",[],"NV8"],[[127956,127967],"valid",[],"NV8"],[[127968,127984],"valid",[],"NV8"],[[127985,127991],"valid",[],"NV8"],[[127992,127999],"valid",[],"NV8"],[[128000,128062],"valid",[],"NV8"],[[128063,128063],"valid",[],"NV8"],[[128064,128064],"valid",[],"NV8"],[[128065,128065],"valid",[],"NV8"],[[128066,128247],"valid",[],"NV8"],[[128248,128248],"valid",[],"NV8"]
,[[128249,128252],"valid",[],"NV8"],[[128253,128254],"valid",[],"NV8"],[[128255,128255],"valid",[],"NV8"],[[128256,128317],"valid",[],"NV8"],[[128318,128319],"valid",[],"NV8"],[[128320,128323],"valid",[],"NV8"],[[128324,128330],"valid",[],"NV8"],[[128331,128335],"valid",[],"NV8"],[[128336,128359],"valid",[],"NV8"],[[128360,128377],"valid",[],"NV8"],[[128378,128378],"disallowed"],[[128379,128419],"valid",[],"NV8"],[[128420,128420],"disallowed"],[[128421,128506],"valid",[],"NV8"],[[128507,128511],"valid",[],"NV8"],[[128512,128512],"valid",[],"NV8"],[[128513,128528],"valid",[],"NV8"],[[128529,128529],"valid",[],"NV8"],[[128530,128532],"valid",[],"NV8"],[[128533,128533],"valid",[],"NV8"],[[128534,128534],"valid",[],"NV8"],[[128535,128535],"valid",[],"NV8"],[[128536,128536],"valid",[],"NV8"],[[128537,128537],"valid",[],"NV8"],[[128538,128538],"valid",[],"NV8"],[[128539,128539],"valid",[],"NV8"],[[128540,128542],"valid",[],"NV8"],[[128543,128543],"valid",[],"NV8"],[[128544,128549],"valid",[],"NV8"],[[128550,128551],"valid",[],"NV8"],[[128552,128555],"valid",[],"NV8"],[[128556,128556],"valid",[],"NV8"],[[128557,128557],"valid",[],"NV8"],[[128558,128559],"valid",[],"NV8"],[[128560,128563],"valid",[],"NV8"],[[128564,128564],"valid",[],"NV8"],[[128565,128576],"valid",[],"NV8"],[[128577,128578],"valid",[],"NV8"],[[128579,128580],"valid",[],"NV8"],[[128581,128591],"valid",[],"NV8"],[[128592,128639],"valid",[],"NV8"],[[128640,128709],"valid",[],"NV8"],[[128710,128719],"valid",[],"NV8"],[[128720,128720],"valid",[],"NV8"],[[128721,128735],"disallowed"],[[128736,128748],"valid",[],"NV8"],[[128749,128751],"disallowed"],[[128752,128755],"valid",[],"NV8"],[[128756,128767],"disallowed"],[[128768,128883],"valid",[],"NV8"],[[128884,128895],"disallowed"],[[128896,128980],"valid",[],"NV8"],[[128981,129023],"disallowed"],[[129024,129035],"valid",[],"NV8"],[[129036,129039],"disallowed"],[[129040,129095],"valid",[],"NV8"],[[129096,129103],"disallowed"],[[129104,129113],"valid",[],"NV8"],[[129114,129119],"disallowed"],[[129120,129159],"valid",[],"NV8"],[[129160,129167],"disallowed"],[[129168,129197],"valid",[],"NV8"],[[129198,129295],"disallowed"],[[129296,129304],"valid",[],"NV8"],[[129305,129407],"disallowed"],[[129408,129412],"valid",[],"NV8"],[[129413,129471],"disallowed"],[[129472,129472],"valid",[],"NV8"],[[129473,131069],"disallowed"],[[131070,131071],"disallowed"],[[131072,173782],"valid"],[[173783,173823],"disallowed"],[[173824,177972],"valid"],[[177973,177983],"disallowed"],[[177984,178205],"valid"],[[178206,178207],"disallowed"],[[178208,183969],"valid"],[[183970,194559],"disallowed"],[[194560,194560],"mapped",[20029]],[[194561,194561],"mapped",[20024]],[[194562,194562],"mapped",[20033]],[[194563,194563],"mapped",[131362]],[[194564,194564],"mapped",[20320]],[[194565,194565],"mapped",[20398]],[[194566,194566],"mapped",[20411]],[[194567,194567],"mapped",[20482]],[[194568,194568],"mapped",[20602]],[[194569,194569],"mapped",[20633]],[[194570,194570],"mapped",[20711]],[[194571,194571],"mapped",[20687]],[[194572,194572],"mapped",[13470]],[[194573,194573],"mapped",[132666]],[[194574,194574],"mapped",[20813]],[[194575,194575],"mapped",[20820]],[[194576,194576],"mapped",[20836]],[[194577,194577],"mapped",[20855]],[[194578,194578],"mapped",[132380]],[[194579,194579],"mapped",[13497]],[[194580,194580],"mapped",[20839]],[[194581,194581],"mapped",[20877]],[[194582,194582],"mapped",[132427]],[[194583,194583],"mapped",[20887]],[[194584,194584],"mapped",[20900]],[[194585,194585],"mapped",[20172]],[[194586,194586],"mapped",
[20908]],[[194587,194587],"mapped",[20917]],[[194588,194588],"mapped",[168415]],[[194589,194589],"mapped",[20981]],[[194590,194590],"mapped",[20995]],[[194591,194591],"mapped",[13535]],[[194592,194592],"mapped",[21051]],[[194593,194593],"mapped",[21062]],[[194594,194594],"mapped",[21106]],[[194595,194595],"mapped",[21111]],[[194596,194596],"mapped",[13589]],[[194597,194597],"mapped",[21191]],[[194598,194598],"mapped",[21193]],[[194599,194599],"mapped",[21220]],[[194600,194600],"mapped",[21242]],[[194601,194601],"mapped",[21253]],[[194602,194602],"mapped",[21254]],[[194603,194603],"mapped",[21271]],[[194604,194604],"mapped",[21321]],[[194605,194605],"mapped",[21329]],[[194606,194606],"mapped",[21338]],[[194607,194607],"mapped",[21363]],[[194608,194608],"mapped",[21373]],[[194609,194611],"mapped",[21375]],[[194612,194612],"mapped",[133676]],[[194613,194613],"mapped",[28784]],[[194614,194614],"mapped",[21450]],[[194615,194615],"mapped",[21471]],[[194616,194616],"mapped",[133987]],[[194617,194617],"mapped",[21483]],[[194618,194618],"mapped",[21489]],[[194619,194619],"mapped",[21510]],[[194620,194620],"mapped",[21662]],[[194621,194621],"mapped",[21560]],[[194622,194622],"mapped",[21576]],[[194623,194623],"mapped",[21608]],[[194624,194624],"mapped",[21666]],[[194625,194625],"mapped",[21750]],[[194626,194626],"mapped",[21776]],[[194627,194627],"mapped",[21843]],[[194628,194628],"mapped",[21859]],[[194629,194630],"mapped",[21892]],[[194631,194631],"mapped",[21913]],[[194632,194632],"mapped",[21931]],[[194633,194633],"mapped",[21939]],[[194634,194634],"mapped",[21954]],[[194635,194635],"mapped",[22294]],[[194636,194636],"mapped",[22022]],[[194637,194637],"mapped",[22295]],[[194638,194638],"mapped",[22097]],[[194639,194639],"mapped",[22132]],[[194640,194640],"mapped",[20999]],[[194641,194641],"mapped",[22766]],[[194642,194642],"mapped",[22478]],[[194643,194643],"mapped",[22516]],[[194644,194644],"mapped",[22541]],[[194645,194645],"mapped",[22411]],[[194646,194646],"mapped",[22578]],[[194647,194647],"mapped",[22577]],[[194648,194648],"mapped",[22700]],[[194649,194649],"mapped",[136420]],[[194650,194650],"mapped",[22770]],[[194651,194651],"mapped",[22775]],[[194652,194652],"mapped",[22790]],[[194653,194653],"mapped",[22810]],[[194654,194654],"mapped",[22818]],[[194655,194655],"mapped",[22882]],[[194656,194656],"mapped",[136872]],[[194657,194657],"mapped",[136938]],[[194658,194658],"mapped",[23020]],[[194659,194659],"mapped",[23067]],[[194660,194660],"mapped",[23079]],[[194661,194661],"mapped",[23000]],[[194662,194662],"mapped",[23142]],[[194663,194663],"mapped",[14062]],[[194664,194664],"disallowed"],[[194665,194665],"mapped",[23304]],[[194666,194667],"mapped",[23358]],[[194668,194668],"mapped",[137672]],[[194669,194669],"mapped",[23491]],[[194670,194670],"mapped",[23512]],[[194671,194671],"mapped",[23527]],[[194672,194672],"mapped",[23539]],[[194673,194673],"mapped",[138008]],[[194674,194674],"mapped",[23551]],[[194675,194675],"mapped",[23558]],[[194676,194676],"disallowed"],[[194677,194677],"mapped",[23586]],[[194678,194678],"mapped",[14209]],[[194679,194679],"mapped",[23648]],[[194680,194680],"mapped",[23662]],[[194681,194681],"mapped",[23744]],[[194682,194682],"mapped",[23693]],[[194683,194683],"mapped",[138724]],[[194684,194684],"mapped",[23875]],[[194685,194685],"mapped",[138726]],[[194686,194686],"mapped",[23918]],[[194687,194687],"mapped",[23915]],[[194688,194688],"mapped",[23932]],[[194689,194689],"mapped",[24033]],[[194690,194690],"mapped",[24034]],[[194691,194691],"mapped",[14383]],[[194692,
194692],"mapped",[24061]],[[194693,194693],"mapped",[24104]],[[194694,194694],"mapped",[24125]],[[194695,194695],"mapped",[24169]],[[194696,194696],"mapped",[14434]],[[194697,194697],"mapped",[139651]],[[194698,194698],"mapped",[14460]],[[194699,194699],"mapped",[24240]],[[194700,194700],"mapped",[24243]],[[194701,194701],"mapped",[24246]],[[194702,194702],"mapped",[24266]],[[194703,194703],"mapped",[172946]],[[194704,194704],"mapped",[24318]],[[194705,194706],"mapped",[140081]],[[194707,194707],"mapped",[33281]],[[194708,194709],"mapped",[24354]],[[194710,194710],"mapped",[14535]],[[194711,194711],"mapped",[144056]],[[194712,194712],"mapped",[156122]],[[194713,194713],"mapped",[24418]],[[194714,194714],"mapped",[24427]],[[194715,194715],"mapped",[14563]],[[194716,194716],"mapped",[24474]],[[194717,194717],"mapped",[24525]],[[194718,194718],"mapped",[24535]],[[194719,194719],"mapped",[24569]],[[194720,194720],"mapped",[24705]],[[194721,194721],"mapped",[14650]],[[194722,194722],"mapped",[14620]],[[194723,194723],"mapped",[24724]],[[194724,194724],"mapped",[141012]],[[194725,194725],"mapped",[24775]],[[194726,194726],"mapped",[24904]],[[194727,194727],"mapped",[24908]],[[194728,194728],"mapped",[24910]],[[194729,194729],"mapped",[24908]],[[194730,194730],"mapped",[24954]],[[194731,194731],"mapped",[24974]],[[194732,194732],"mapped",[25010]],[[194733,194733],"mapped",[24996]],[[194734,194734],"mapped",[25007]],[[194735,194735],"mapped",[25054]],[[194736,194736],"mapped",[25074]],[[194737,194737],"mapped",[25078]],[[194738,194738],"mapped",[25104]],[[194739,194739],"mapped",[25115]],[[194740,194740],"mapped",[25181]],[[194741,194741],"mapped",[25265]],[[194742,194742],"mapped",[25300]],[[194743,194743],"mapped",[25424]],[[194744,194744],"mapped",[142092]],[[194745,194745],"mapped",[25405]],[[194746,194746],"mapped",[25340]],[[194747,194747],"mapped",[25448]],[[194748,194748],"mapped",[25475]],[[194749,194749],"mapped",[25572]],[[194750,194750],"mapped",[142321]],[[194751,194751],"mapped",[25634]],[[194752,194752],"mapped",[25541]],[[194753,194753],"mapped",[25513]],[[194754,194754],"mapped",[14894]],[[194755,194755],"mapped",[25705]],[[194756,194756],"mapped",[25726]],[[194757,194757],"mapped",[25757]],[[194758,194758],"mapped",[25719]],[[194759,194759],"mapped",[14956]],[[194760,194760],"mapped",[25935]],[[194761,194761],"mapped",[25964]],[[194762,194762],"mapped",[143370]],[[194763,194763],"mapped",[26083]],[[194764,194764],"mapped",[26360]],[[194765,194765],"mapped",[26185]],[[194766,194766],"mapped",[15129]],[[194767,194767],"mapped",[26257]],[[194768,194768],"mapped",[15112]],[[194769,194769],"mapped",[15076]],[[194770,194770],"mapped",[20882]],[[194771,194771],"mapped",[20885]],[[194772,194772],"mapped",[26368]],[[194773,194773],"mapped",[26268]],[[194774,194774],"mapped",[32941]],[[194775,194775],"mapped",[17369]],[[194776,194776],"mapped",[26391]],[[194777,194777],"mapped",[26395]],[[194778,194778],"mapped",[26401]],[[194779,194779],"mapped",[26462]],[[194780,194780],"mapped",[26451]],[[194781,194781],"mapped",[144323]],[[194782,194782],"mapped",[15177]],[[194783,194783],"mapped",[26618]],[[194784,194784],"mapped",[26501]],[[194785,194785],"mapped",[26706]],[[194786,194786],"mapped",[26757]],[[194787,194787],"mapped",[144493]],[[194788,194788],"mapped",[26766]],[[194789,194789],"mapped",[26655]],[[194790,194790],"mapped",[26900]],[[194791,194791],"mapped",[15261]],[[194792,194792],"mapped",[26946]],[[194793,194793],"mapped",[27043]],[[194794,194794],"mapped",[27114]],[[194795,194795],"
mapped",[27304]],[[194796,194796],"mapped",[145059]],[[194797,194797],"mapped",[27355]],[[194798,194798],"mapped",[15384]],[[194799,194799],"mapped",[27425]],[[194800,194800],"mapped",[145575]],[[194801,194801],"mapped",[27476]],[[194802,194802],"mapped",[15438]],[[194803,194803],"mapped",[27506]],[[194804,194804],"mapped",[27551]],[[194805,194805],"mapped",[27578]],[[194806,194806],"mapped",[27579]],[[194807,194807],"mapped",[146061]],[[194808,194808],"mapped",[138507]],[[194809,194809],"mapped",[146170]],[[194810,194810],"mapped",[27726]],[[194811,194811],"mapped",[146620]],[[194812,194812],"mapped",[27839]],[[194813,194813],"mapped",[27853]],[[194814,194814],"mapped",[27751]],[[194815,194815],"mapped",[27926]],[[194816,194816],"mapped",[27966]],[[194817,194817],"mapped",[28023]],[[194818,194818],"mapped",[27969]],[[194819,194819],"mapped",[28009]],[[194820,194820],"mapped",[28024]],[[194821,194821],"mapped",[28037]],[[194822,194822],"mapped",[146718]],[[194823,194823],"mapped",[27956]],[[194824,194824],"mapped",[28207]],[[194825,194825],"mapped",[28270]],[[194826,194826],"mapped",[15667]],[[194827,194827],"mapped",[28363]],[[194828,194828],"mapped",[28359]],[[194829,194829],"mapped",[147153]],[[194830,194830],"mapped",[28153]],[[194831,194831],"mapped",[28526]],[[194832,194832],"mapped",[147294]],[[194833,194833],"mapped",[147342]],[[194834,194834],"mapped",[28614]],[[194835,194835],"mapped",[28729]],[[194836,194836],"mapped",[28702]],[[194837,194837],"mapped",[28699]],[[194838,194838],"mapped",[15766]],[[194839,194839],"mapped",[28746]],[[194840,194840],"mapped",[28797]],[[194841,194841],"mapped",[28791]],[[194842,194842],"mapped",[28845]],[[194843,194843],"mapped",[132389]],[[194844,194844],"mapped",[28997]],[[194845,194845],"mapped",[148067]],[[194846,194846],"mapped",[29084]],[[194847,194847],"disallowed"],[[194848,194848],"mapped",[29224]],[[194849,194849],"mapped",[29237]],[[194850,194850],"mapped",[29264]],[[194851,194851],"mapped",[149000]],[[194852,194852],"mapped",[29312]],[[194853,194853],"mapped",[29333]],[[194854,194854],"mapped",[149301]],[[194855,194855],"mapped",[149524]],[[194856,194856],"mapped",[29562]],[[194857,194857],"mapped",[29579]],[[194858,194858],"mapped",[16044]],[[194859,194859],"mapped",[29605]],[[194860,194861],"mapped",[16056]],[[194862,194862],"mapped",[29767]],[[194863,194863],"mapped",[29788]],[[194864,194864],"mapped",[29809]],[[194865,194865],"mapped",[29829]],[[194866,194866],"mapped",[29898]],[[194867,194867],"mapped",[16155]],[[194868,194868],"mapped",[29988]],[[194869,194869],"mapped",[150582]],[[194870,194870],"mapped",[30014]],[[194871,194871],"mapped",[150674]],[[194872,194872],"mapped",[30064]],[[194873,194873],"mapped",[139679]],[[194874,194874],"mapped",[30224]],[[194875,194875],"mapped",[151457]],[[194876,194876],"mapped",[151480]],[[194877,194877],"mapped",[151620]],[[194878,194878],"mapped",[16380]],[[194879,194879],"mapped",[16392]],[[194880,194880],"mapped",[30452]],[[194881,194881],"mapped",[151795]],[[194882,194882],"mapped",[151794]],[[194883,194883],"mapped",[151833]],[[194884,194884],"mapped",[151859]],[[194885,194885],"mapped",[30494]],[[194886,194887],"mapped",[30495]],[[194888,194888],"mapped",[30538]],[[194889,194889],"mapped",[16441]],[[194890,194890],"mapped",[30603]],[[194891,194891],"mapped",[16454]],[[194892,194892],"mapped",[16534]],[[194893,194893],"mapped",[152605]],[[194894,194894],"mapped",[30798]],[[194895,194895],"mapped",[30860]],[[194896,194896],"mapped",[30924]],[[194897,194897],"mapped",[16611]],[[194898,194898]
,"mapped",[153126]],[[194899,194899],"mapped",[31062]],[[194900,194900],"mapped",[153242]],[[194901,194901],"mapped",[153285]],[[194902,194902],"mapped",[31119]],[[194903,194903],"mapped",[31211]],[[194904,194904],"mapped",[16687]],[[194905,194905],"mapped",[31296]],[[194906,194906],"mapped",[31306]],[[194907,194907],"mapped",[31311]],[[194908,194908],"mapped",[153980]],[[194909,194910],"mapped",[154279]],[[194911,194911],"disallowed"],[[194912,194912],"mapped",[16898]],[[194913,194913],"mapped",[154539]],[[194914,194914],"mapped",[31686]],[[194915,194915],"mapped",[31689]],[[194916,194916],"mapped",[16935]],[[194917,194917],"mapped",[154752]],[[194918,194918],"mapped",[31954]],[[194919,194919],"mapped",[17056]],[[194920,194920],"mapped",[31976]],[[194921,194921],"mapped",[31971]],[[194922,194922],"mapped",[32000]],[[194923,194923],"mapped",[155526]],[[194924,194924],"mapped",[32099]],[[194925,194925],"mapped",[17153]],[[194926,194926],"mapped",[32199]],[[194927,194927],"mapped",[32258]],[[194928,194928],"mapped",[32325]],[[194929,194929],"mapped",[17204]],[[194930,194930],"mapped",[156200]],[[194931,194931],"mapped",[156231]],[[194932,194932],"mapped",[17241]],[[194933,194933],"mapped",[156377]],[[194934,194934],"mapped",[32634]],[[194935,194935],"mapped",[156478]],[[194936,194936],"mapped",[32661]],[[194937,194937],"mapped",[32762]],[[194938,194938],"mapped",[32773]],[[194939,194939],"mapped",[156890]],[[194940,194940],"mapped",[156963]],[[194941,194941],"mapped",[32864]],[[194942,194942],"mapped",[157096]],[[194943,194943],"mapped",[32880]],[[194944,194944],"mapped",[144223]],[[194945,194945],"mapped",[17365]],[[194946,194946],"mapped",[32946]],[[194947,194947],"mapped",[33027]],[[194948,194948],"mapped",[17419]],[[194949,194949],"mapped",[33086]],[[194950,194950],"mapped",[23221]],[[194951,194951],"mapped",[157607]],[[194952,194952],"mapped",[157621]],[[194953,194953],"mapped",[144275]],[[194954,194954],"mapped",[144284]],[[194955,194955],"mapped",[33281]],[[194956,194956],"mapped",[33284]],[[194957,194957],"mapped",[36766]],[[194958,194958],"mapped",[17515]],[[194959,194959],"mapped",[33425]],[[194960,194960],"mapped",[33419]],[[194961,194961],"mapped",[33437]],[[194962,194962],"mapped",[21171]],[[194963,194963],"mapped",[33457]],[[194964,194964],"mapped",[33459]],[[194965,194965],"mapped",[33469]],[[194966,194966],"mapped",[33510]],[[194967,194967],"mapped",[158524]],[[194968,194968],"mapped",[33509]],[[194969,194969],"mapped",[33565]],[[194970,194970],"mapped",[33635]],[[194971,194971],"mapped",[33709]],[[194972,194972],"mapped",[33571]],[[194973,194973],"mapped",[33725]],[[194974,194974],"mapped",[33767]],[[194975,194975],"mapped",[33879]],[[194976,194976],"mapped",[33619]],[[194977,194977],"mapped",[33738]],[[194978,194978],"mapped",[33740]],[[194979,194979],"mapped",[33756]],[[194980,194980],"mapped",[158774]],[[194981,194981],"mapped",[159083]],[[194982,194982],"mapped",[158933]],[[194983,194983],"mapped",[17707]],[[194984,194984],"mapped",[34033]],[[194985,194985],"mapped",[34035]],[[194986,194986],"mapped",[34070]],[[194987,194987],"mapped",[160714]],[[194988,194988],"mapped",[34148]],[[194989,194989],"mapped",[159532]],[[194990,194990],"mapped",[17757]],[[194991,194991],"mapped",[17761]],[[194992,194992],"mapped",[159665]],[[194993,194993],"mapped",[159954]],[[194994,194994],"mapped",[17771]],[[194995,194995],"mapped",[34384]],[[194996,194996],"mapped",[34396]],[[194997,194997],"mapped",[34407]],[[194998,194998],"mapped",[34409]],[[194999,194999],"mapped",[34473]],[[195000,195
000],"mapped",[34440]],[[195001,195001],"mapped",[34574]],[[195002,195002],"mapped",[34530]],[[195003,195003],"mapped",[34681]],[[195004,195004],"mapped",[34600]],[[195005,195005],"mapped",[34667]],[[195006,195006],"mapped",[34694]],[[195007,195007],"disallowed"],[[195008,195008],"mapped",[34785]],[[195009,195009],"mapped",[34817]],[[195010,195010],"mapped",[17913]],[[195011,195011],"mapped",[34912]],[[195012,195012],"mapped",[34915]],[[195013,195013],"mapped",[161383]],[[195014,195014],"mapped",[35031]],[[195015,195015],"mapped",[35038]],[[195016,195016],"mapped",[17973]],[[195017,195017],"mapped",[35066]],[[195018,195018],"mapped",[13499]],[[195019,195019],"mapped",[161966]],[[195020,195020],"mapped",[162150]],[[195021,195021],"mapped",[18110]],[[195022,195022],"mapped",[18119]],[[195023,195023],"mapped",[35488]],[[195024,195024],"mapped",[35565]],[[195025,195025],"mapped",[35722]],[[195026,195026],"mapped",[35925]],[[195027,195027],"mapped",[162984]],[[195028,195028],"mapped",[36011]],[[195029,195029],"mapped",[36033]],[[195030,195030],"mapped",[36123]],[[195031,195031],"mapped",[36215]],[[195032,195032],"mapped",[163631]],[[195033,195033],"mapped",[133124]],[[195034,195034],"mapped",[36299]],[[195035,195035],"mapped",[36284]],[[195036,195036],"mapped",[36336]],[[195037,195037],"mapped",[133342]],[[195038,195038],"mapped",[36564]],[[195039,195039],"mapped",[36664]],[[195040,195040],"mapped",[165330]],[[195041,195041],"mapped",[165357]],[[195042,195042],"mapped",[37012]],[[195043,195043],"mapped",[37105]],[[195044,195044],"mapped",[37137]],[[195045,195045],"mapped",[165678]],[[195046,195046],"mapped",[37147]],[[195047,195047],"mapped",[37432]],[[195048,195048],"mapped",[37591]],[[195049,195049],"mapped",[37592]],[[195050,195050],"mapped",[37500]],[[195051,195051],"mapped",[37881]],[[195052,195052],"mapped",[37909]],[[195053,195053],"mapped",[166906]],[[195054,195054],"mapped",[38283]],[[195055,195055],"mapped",[18837]],[[195056,195056],"mapped",[38327]],[[195057,195057],"mapped",[167287]],[[195058,195058],"mapped",[18918]],[[195059,195059],"mapped",[38595]],[[195060,195060],"mapped",[23986]],[[195061,195061],"mapped",[38691]],[[195062,195062],"mapped",[168261]],[[195063,195063],"mapped",[168474]],[[195064,195064],"mapped",[19054]],[[195065,195065],"mapped",[19062]],[[195066,195066],"mapped",[38880]],[[195067,195067],"mapped",[168970]],[[195068,195068],"mapped",[19122]],[[195069,195069],"mapped",[169110]],[[195070,195071],"mapped",[38923]],[[195072,195072],"mapped",[38953]],[[195073,195073],"mapped",[169398]],[[195074,195074],"mapped",[39138]],[[195075,195075],"mapped",[19251]],[[195076,195076],"mapped",[39209]],[[195077,195077],"mapped",[39335]],[[195078,195078],"mapped",[39362]],[[195079,195079],"mapped",[39422]],[[195080,195080],"mapped",[19406]],[[195081,195081],"mapped",[170800]],[[195082,195082],"mapped",[39698]],[[195083,195083],"mapped",[40000]],[[195084,195084],"mapped",[40189]],[[195085,195085],"mapped",[19662]],[[195086,195086],"mapped",[19693]],[[195087,195087],"mapped",[40295]],[[195088,195088],"mapped",[172238]],[[195089,195089],"mapped",[19704]],[[195090,195090],"mapped",[172293]],[[195091,195091],"mapped",[172558]],[[195092,195092],"mapped",[172689]],[[195093,195093],"mapped",[40635]],[[195094,195094],"mapped",[19798]],[[195095,195095],"mapped",[40697]],[[195096,195096],"mapped",[40702]],[[195097,195097],"mapped",[40709]],[[195098,195098],"mapped",[40719]],[[195099,195099],"mapped",[40726]],[[195100,195100],"mapped",[40763]],[[195101,195101],"mapped",[173568]],[[195102,1966
05],"disallowed"],[[196606,196607],"disallowed"],[[196608,262141],"disallowed"],[[262142,262143],"disallowed"],[[262144,327677],"disallowed"],[[327678,327679],"disallowed"],[[327680,393213],"disallowed"],[[393214,393215],"disallowed"],[[393216,458749],"disallowed"],[[458750,458751],"disallowed"],[[458752,524285],"disallowed"],[[524286,524287],"disallowed"],[[524288,589821],"disallowed"],[[589822,589823],"disallowed"],[[589824,655357],"disallowed"],[[655358,655359],"disallowed"],[[655360,720893],"disallowed"],[[720894,720895],"disallowed"],[[720896,786429],"disallowed"],[[786430,786431],"disallowed"],[[786432,851965],"disallowed"],[[851966,851967],"disallowed"],[[851968,917501],"disallowed"],[[917502,917503],"disallowed"],[[917504,917504],"disallowed"],[[917505,917505],"disallowed"],[[917506,917535],"disallowed"],[[917536,917631],"disallowed"],[[917632,917759],"disallowed"],[[917760,917999],"ignored"],[[918000,983037],"disallowed"],[[983038,983039],"disallowed"],[[983040,1048573],"disallowed"],[[1048574,1048575],"disallowed"],[[1048576,1114109],"disallowed"],[[1114110,1114111],"disallowed"]]'); /***/ }) /******/ }); /************************************************************************/ /******/ // The module cache /******/ var __webpack_module_cache__ = {}; /******/ /******/ // The require function /******/ function __nccwpck_require__(moduleId) { /******/ // Check if module is in cache /******/ var cachedModule = __webpack_module_cache__[moduleId]; /******/ if (cachedModule !== undefined) { /******/ return cachedModule.exports; /******/ } /******/ // Create a new module (and put it into the cache) /******/ var module = __webpack_module_cache__[moduleId] = { /******/ // no module.id needed /******/ // no module.loaded needed /******/ exports: {} /******/ }; /******/ /******/ // Execute the module function /******/ var threw = true; /******/ try { /******/ __webpack_modules__[moduleId].call(module.exports, module, module.exports, __nccwpck_require__); /******/ threw = false; /******/ } finally { /******/ if(threw) delete __webpack_module_cache__[moduleId]; /******/ } /******/ /******/ // Return the exports of the module /******/ return module.exports; /******/ } /******/ /************************************************************************/ /******/ /* webpack/runtime/compat get default export */ /******/ (() => { /******/ // getDefaultExport function for compatibility with non-harmony modules /******/ __nccwpck_require__.n = (module) => { /******/ var getter = module && module.__esModule ? 
/******/ () => (module['default']) : /******/ () => (module); /******/ __nccwpck_require__.d(getter, { a: getter }); /******/ return getter; /******/ }; /******/ })(); /******/ /******/ /* webpack/runtime/define property getters */ /******/ (() => { /******/ // define getter functions for harmony exports /******/ __nccwpck_require__.d = (exports, definition) => { /******/ for(var key in definition) { /******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) { /******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] }); /******/ } /******/ } /******/ }; /******/ })(); /******/ /******/ /* webpack/runtime/hasOwnProperty shorthand */ /******/ (() => { /******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop)) /******/ })(); /******/ /******/ /* webpack/runtime/compat */ /******/ /******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; /******/ /************************************************************************/ var __webpack_exports__ = {}; // This entry need to be wrapped in an IIFE because it need to be in strict mode. (() => { "use strict"; // EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js var lib_core = __nccwpck_require__(37484); // EXTERNAL MODULE: ./node_modules/@actions/io/lib/io.js var lib_io = __nccwpck_require__(94994); // EXTERNAL MODULE: external "fs" var external_fs_ = __nccwpck_require__(79896); var external_fs_default = /*#__PURE__*/__nccwpck_require__.n(external_fs_); // EXTERNAL MODULE: external "path" var external_path_ = __nccwpck_require__(16928); var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_); // EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js var glob = __nccwpck_require__(47206); // EXTERNAL MODULE: external "crypto" var external_crypto_ = __nccwpck_require__(76982); var external_crypto_default = /*#__PURE__*/__nccwpck_require__.n(external_crypto_); // EXTERNAL MODULE: external "fs/promises" var promises_ = __nccwpck_require__(91943); var promises_default = /*#__PURE__*/__nccwpck_require__.n(promises_); // EXTERNAL MODULE: external "os" var external_os_ = __nccwpck_require__(70857); var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_); ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/error.js /*! * Copyright (c) Squirrel Chat et al., All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ function getLineColFromPtr(string, ptr) { let lines = string.slice(0, ptr).split(/\r\n|\n|\r/g); return [lines.length, lines.pop().length + 1]; } function makeCodeBlock(string, line, column) { let lines = string.split(/\r\n|\n|\r/g); let codeblock = ''; let numberLen = (Math.log10(line + 1) | 0) + 1; for (let i = line - 1; i <= line + 1; i++) { let l = lines[i - 1]; if (!l) continue; codeblock += i.toString().padEnd(numberLen, ' '); codeblock += ': '; codeblock += l; codeblock += '\n'; if (i === line) { codeblock += ' '.repeat(numberLen + column + 2); codeblock += '^\n'; } } return codeblock; } class TomlError extends Error { line; column; codeblock; constructor(message, options) { const [line, column] = getLineColFromPtr(options.toml, options.ptr); const codeblock = makeCodeBlock(options.toml, line, column); super(`Invalid TOML document: ${message}\n\n${codeblock}`, options); this.line = line; this.column = column; this.codeblock = codeblock; } } ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/util.js /*! * Copyright (c) Squirrel Chat et al., All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ function indexOfNewline(str, start = 0, end = str.length) { let idx = str.indexOf('\n', start); if (str[idx - 1] === '\r') idx--; return idx <= end ? 
idx : -1; } function skipComment(str, ptr) { for (let i = ptr; i < str.length; i++) { let c = str[i]; if (c === '\n') return i; if (c === '\r' && str[i + 1] === '\n') return i + 1; if ((c < '\x20' && c !== '\t') || c === '\x7f') { throw new TomlError('control characters are not allowed in comments', { toml: str, ptr: ptr, }); } } return str.length; } function skipVoid(str, ptr, banNewLines, banComments) { let c; while ((c = str[ptr]) === ' ' || c === '\t' || (!banNewLines && (c === '\n' || c === '\r' && str[ptr + 1] === '\n'))) ptr++; return banComments || c !== '#' ? ptr : skipVoid(str, skipComment(str, ptr), banNewLines); } function skipUntil(str, ptr, sep, end, banNewLines = false) { if (!end) { ptr = indexOfNewline(str, ptr); return ptr < 0 ? str.length : ptr; } for (let i = ptr; i < str.length; i++) { let c = str[i]; if (c === '#') { i = indexOfNewline(str, i); } else if (c === sep) { return i + 1; } else if (c === end) { return i; } else if (banNewLines && (c === '\n' || c === '\r' && str[i + 1] === '\n')) { return i; } } throw new TomlError('cannot find end of structure', { toml: str, ptr: ptr }); } function getStringEnd(str, seek) { let first = str[seek]; let target = first === str[seek + 1] && str[seek + 1] === str[seek + 2] ? str.slice(seek, seek + 3) : first; seek += target.length - 1; do seek = str.indexOf(target, ++seek); while (seek > -1 && first !== "'" && str[seek - 1] === '\\' && str[seek - 2] !== '\\'); if (seek > -1) { seek += target.length; if (target.length > 1) { if (str[seek] === first) seek++; if (str[seek] === first) seek++; } } return seek; } ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/date.js /*! * Copyright (c) Squirrel Chat et al., All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ let DATE_TIME_RE = /^(\d{4}-\d{2}-\d{2})?[T ]?(?:(\d{2}):\d{2}:\d{2}(?:\.\d+)?)?(Z|[-+]\d{2}:\d{2})?$/i; class TomlDate extends Date { #hasDate = false; #hasTime = false; #offset = null; constructor(date) { let hasDate = true; let hasTime = true; let offset = 'Z'; if (typeof date === 'string') { let match = date.match(DATE_TIME_RE); if (match) { if (!match[1]) { hasDate = false; date = `0000-01-01T${date}`; } hasTime = !!match[2]; // Do not allow rollover hours if (match[2] && +match[2] > 23) { date = ''; } else { offset = match[3] || null; date = date.toUpperCase(); if (!offset && hasTime) date += 'Z'; } } else { date = ''; } } super(date); if (!isNaN(this.getTime())) { this.#hasDate = hasDate; this.#hasTime = hasTime; this.#offset = offset; } } isDateTime() { return this.#hasDate && this.#hasTime; } isLocal() { return !this.#hasDate || !this.#hasTime || !this.#offset; } isDate() { return this.#hasDate && !this.#hasTime; } isTime() { return this.#hasTime && !this.#hasDate; } isValid() { return this.#hasDate || this.#hasTime; } toISOString() { let iso = super.toISOString(); // Local Date if (this.isDate()) return iso.slice(0, 10); // Local Time if (this.isTime()) return iso.slice(11, 23); // Local DateTime if (this.#offset === null) return iso.slice(0, -1); // Offset DateTime if (this.#offset === 'Z') return iso; // This part is quite annoying: JS strips the original timezone from the ISO string representation // Instead of using a "modified" date and "Z", we restore the representation "as authored" let offset = (+(this.#offset.slice(1, 3)) * 60) + +(this.#offset.slice(4, 6)); offset = this.#offset[0] === '-' ? offset : -offset; let offsetDate = new Date(this.getTime() - (offset * 60e3)); return offsetDate.toISOString().slice(0, -1) + this.#offset; } static wrapAsOffsetDateTime(jsDate, offset = 'Z') { let date = new TomlDate(jsDate); date.#offset = offset; return date; } static wrapAsLocalDateTime(jsDate) { let date = new TomlDate(jsDate); date.#offset = null; return date; } static wrapAsLocalDate(jsDate) { let date = new TomlDate(jsDate); date.#hasTime = false; date.#offset = null; return date; } static wrapAsLocalTime(jsDate) { let date = new TomlDate(jsDate); date.#hasDate = false; date.#offset = null; return date; } } ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/primitive.js /*! * Copyright (c) Squirrel Chat et al., All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ let INT_REGEX = /^((0x[0-9a-fA-F](_?[0-9a-fA-F])*)|(([+-]|0[ob])?\d(_?\d)*))$/; let FLOAT_REGEX = /^[+-]?\d(_?\d)*(\.\d(_?\d)*)?([eE][+-]?\d(_?\d)*)?$/; let LEADING_ZERO = /^[+-]?0[0-9_]/; let ESCAPE_REGEX = /^[0-9a-f]{4,8}$/i; let ESC_MAP = { b: '\b', t: '\t', n: '\n', f: '\f', r: '\r', '"': '"', '\\': '\\', }; function parseString(str, ptr = 0, endPtr = str.length) { let isLiteral = str[ptr] === "'"; let isMultiline = str[ptr++] === str[ptr] && str[ptr] === str[ptr + 1]; if (isMultiline) { endPtr -= 2; if (str[ptr += 2] === '\r') ptr++; if (str[ptr] === '\n') ptr++; } let tmp = 0; let isEscape; let parsed = ''; let sliceStart = ptr; while (ptr < endPtr - 1) { let c = str[ptr++]; if (c === '\n' || (c === '\r' && str[ptr] === '\n')) { if (!isMultiline) { throw new TomlError('newlines are not allowed in strings', { toml: str, ptr: ptr - 1 }); } } else if ((c < '\x20' && c !== '\t') || c === '\x7f') { throw new TomlError('control characters are not allowed in strings', { toml: str, ptr: ptr - 1 }); } if (isEscape) { isEscape = false; if (c === 'u' || c === 'U') { // Unicode escape let code = str.slice(ptr, (ptr += (c === 'u' ? 4 : 8))); if (!ESCAPE_REGEX.test(code)) { throw new TomlError('invalid unicode escape', { toml: str, ptr: tmp }); } try { parsed += String.fromCodePoint(parseInt(code, 16)); } catch { throw new TomlError('invalid unicode escape', { toml: str, ptr: tmp }); } } else if (isMultiline && (c === '\n' || c === ' ' || c === '\t' || c === '\r')) { // Multiline escape ptr = skipVoid(str, ptr - 1, true); if (str[ptr] !== '\n' && str[ptr] !== '\r') { throw new TomlError('invalid escape: only line-ending whitespace may be escaped', { toml: str, ptr: tmp }); } ptr = skipVoid(str, ptr); } else if (c in ESC_MAP) { // Classic escape parsed += ESC_MAP[c]; } else { throw new TomlError('unrecognized escape sequence', { toml: str, ptr: tmp }); } sliceStart = ptr; } else if (!isLiteral && c === '\\') { tmp = ptr - 1; isEscape = true; parsed += str.slice(sliceStart, tmp); } } return parsed + str.slice(sliceStart, endPtr - 1); } function parseValue(value, toml, ptr) { // Constant values if (value === 'true') return true; if (value === 'false') return false; if (value === '-inf') return -Infinity; if (value === 'inf' || value === '+inf') return Infinity; if (value === 'nan' || value === '+nan' || value === '-nan') return NaN; if (value === '-0') return 0; // Avoid FP representation of -0 // Numbers let isInt; if ((isInt = INT_REGEX.test(value)) || FLOAT_REGEX.test(value)) { if (LEADING_ZERO.test(value)) { throw new TomlError('leading zeroes are not allowed', { toml: toml, ptr: ptr }); } let numeric = +(value.replace(/_/g, '')); if (isNaN(numeric)) { throw new TomlError('invalid number', { toml: toml, ptr: ptr }); } if (isInt && !Number.isSafeInteger(numeric)) { throw new TomlError('integer value cannot be represented losslessly', { toml: toml, ptr: ptr }); } return numeric; } let date = new TomlDate(value); if (!date.isValid()) { throw new TomlError('invalid value', { toml: 
toml, ptr: ptr }); } return date; } ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/extract.js /*! * Copyright (c) Squirrel Chat et al., All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ function sliceAndTrimEndOf(str, startPtr, endPtr, allowNewLines) { let value = str.slice(startPtr, endPtr); let commentIdx = value.indexOf('#'); if (commentIdx > -1) { // The call to skipComment allows to "validate" the comment // (absence of control characters) skipComment(str, commentIdx); value = value.slice(0, commentIdx); } let trimmed = value.trimEnd(); if (!allowNewLines) { let newlineIdx = value.indexOf('\n', trimmed.length); if (newlineIdx > -1) { throw new TomlError('newlines are not allowed in inline tables', { toml: str, ptr: startPtr + newlineIdx }); } } return [trimmed, commentIdx]; } function extractValue(str, ptr, end, depth) { if (depth === 0) { throw new TomlError('document contains excessively nested structures. aborting.', { toml: str, ptr: ptr }); } let c = str[ptr]; if (c === '[' || c === '{') { let [value, endPtr] = c === '[' ? 
parseArray(str, ptr, depth) : parseInlineTable(str, ptr, depth); let newPtr = skipUntil(str, endPtr, ',', end); if (end === '}') { let nextNewLine = indexOfNewline(str, endPtr, newPtr); if (nextNewLine > -1) { throw new TomlError('newlines are not allowed in inline tables', { toml: str, ptr: nextNewLine }); } } return [value, newPtr]; } let endPtr; if (c === '"' || c === "'") { endPtr = getStringEnd(str, ptr); let parsed = parseString(str, ptr, endPtr); if (end) { endPtr = skipVoid(str, endPtr, end !== ']'); if (str[endPtr] && str[endPtr] !== ',' && str[endPtr] !== end && str[endPtr] !== '\n' && str[endPtr] !== '\r') { throw new TomlError('unexpected character encountered', { toml: str, ptr: endPtr, }); } endPtr += (+(str[endPtr] === ',')); } return [parsed, endPtr]; } endPtr = skipUntil(str, ptr, ',', end); let slice = sliceAndTrimEndOf(str, ptr, endPtr - (+(str[endPtr - 1] === ',')), end === ']'); if (!slice[0]) { throw new TomlError('incomplete key-value declaration: no value specified', { toml: str, ptr: ptr }); } if (end && slice[1] > -1) { endPtr = skipVoid(str, ptr + slice[1]); endPtr += +(str[endPtr] === ','); } return [ parseValue(slice[0], str, ptr), endPtr, ]; } ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/struct.js /*! * Copyright (c) Squirrel Chat et al., All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ let KEY_PART_RE = /^[a-zA-Z0-9-_]+[ \t]*$/; function parseKey(str, ptr, end = '=') { let dot = ptr - 1; let parsed = []; let endPtr = str.indexOf(end, ptr); if (endPtr < 0) { throw new TomlError('incomplete key-value: cannot find end of key', { toml: str, ptr: ptr }); } do { let c = str[ptr = ++dot]; // If it's whitespace, ignore if (c !== ' ' && c !== '\t') { // If it's a string if (c === '"' || c === "'") { if (c === str[ptr + 1] && c === str[ptr + 2]) { throw new TomlError('multiline strings are not allowed in keys', { toml: str, ptr: ptr, }); } let eos = getStringEnd(str, ptr); if (eos < 0) { throw new TomlError('unfinished string encountered', { toml: str, ptr: ptr, }); } dot = str.indexOf('.', eos); let strEnd = str.slice(eos, dot < 0 || dot > endPtr ? endPtr : dot); let newLine = indexOfNewline(strEnd); if (newLine > -1) { throw new TomlError('newlines are not allowed in keys', { toml: str, ptr: ptr + dot + newLine, }); } if (strEnd.trimStart()) { throw new TomlError('found extra tokens after the string part', { toml: str, ptr: eos, }); } if (endPtr < eos) { endPtr = str.indexOf(end, eos); if (endPtr < 0) { throw new TomlError('incomplete key-value: cannot find end of key', { toml: str, ptr: ptr, }); } } parsed.push(parseString(str, ptr, eos)); } else { // Normal raw key part consumption and validation dot = str.indexOf('.', ptr); let part = str.slice(ptr, dot < 0 || dot > endPtr ? endPtr : dot); if (!KEY_PART_RE.test(part)) { throw new TomlError('only letter, numbers, dashes and underscores are allowed in keys', { toml: str, ptr: ptr, }); } parsed.push(part.trimEnd()); } } // Until there's no more dot } while (dot + 1 && dot < endPtr); return [parsed, skipVoid(str, endPtr + 1, true, true)]; } function parseInlineTable(str, ptr, depth) { let res = {}; let seen = new Set(); let c; let comma = 0; ptr++; while ((c = str[ptr++]) !== '}' && c) { if (c === '\n') { throw new TomlError('newlines are not allowed in inline tables', { toml: str, ptr: ptr - 1 }); } else if (c === '#') { throw new TomlError('inline tables cannot contain comments', { toml: str, ptr: ptr - 1 }); } else if (c === ',') { throw new TomlError('expected key-value, found comma', { toml: str, ptr: ptr - 1 }); } else if (c !== ' ' && c !== '\t') { let k; let t = res; let hasOwn = false; let [key, keyEndPtr] = parseKey(str, ptr - 1); for (let i = 0; i < key.length; i++) { if (i) t = hasOwn ? t[k] : (t[k] = {}); k = key[i]; if ((hasOwn = Object.hasOwn(t, k)) && (typeof t[k] !== 'object' || seen.has(t[k]))) { throw new TomlError('trying to redefine an already defined value', { toml: str, ptr: ptr }); } if (!hasOwn && k === '__proto__') { Object.defineProperty(t, k, { enumerable: true, configurable: true, writable: true }); } } if (hasOwn) { throw new TomlError('trying to redefine an already defined value', { toml: str, ptr: ptr }); } let [value, valueEndPtr] = extractValue(str, keyEndPtr, '}', depth - 1); seen.add(value); t[k] = value; ptr = valueEndPtr; comma = str[ptr - 1] === ',' ? 
ptr - 1 : 0; } } if (comma) { throw new TomlError('trailing commas are not allowed in inline tables', { toml: str, ptr: comma }); } if (!c) { throw new TomlError('unfinished table encountered', { toml: str, ptr: ptr }); } return [res, ptr]; } function parseArray(str, ptr, depth) { let res = []; let c; ptr++; while ((c = str[ptr++]) !== ']' && c) { if (c === ',') { throw new TomlError('expected value, found comma', { toml: str, ptr: ptr - 1 }); } else if (c === '#') ptr = skipComment(str, ptr); else if (c !== ' ' && c !== '\t' && c !== '\n' && c !== '\r') { let e = extractValue(str, ptr - 1, ']', depth - 1); res.push(e[0]); ptr = e[1]; } } if (!c) { throw new TomlError('unfinished array encountered', { toml: str, ptr: ptr }); } return [res, ptr]; } ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/parse.js /*! * Copyright (c) Squirrel Chat et al., All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ function peekTable(key, table, meta, type) { let t = table; let m = meta; let k; let hasOwn = false; let state; for (let i = 0; i < key.length; i++) { if (i) { t = hasOwn ? t[k] : (t[k] = {}); m = (state = m[k]).c; if (type === 0 /* Type.DOTTED */ && (state.t === 1 /* Type.EXPLICIT */ || state.t === 2 /* Type.ARRAY */)) { return null; } if (state.t === 2 /* Type.ARRAY */) { let l = t.length - 1; t = t[l]; m = m[l].c; } } k = key[i]; if ((hasOwn = Object.hasOwn(t, k)) && m[k]?.t === 0 /* Type.DOTTED */ && m[k]?.d) { return null; } if (!hasOwn) { if (k === '__proto__') { Object.defineProperty(t, k, { enumerable: true, configurable: true, writable: true }); Object.defineProperty(m, k, { enumerable: true, configurable: true, writable: true }); } m[k] = { t: i < key.length - 1 && type === 2 /* Type.ARRAY */ ? 3 /* Type.ARRAY_DOTTED */ : type, d: false, i: 0, c: {}, }; } } state = m[k]; if (state.t !== type && !(type === 1 /* Type.EXPLICIT */ && state.t === 3 /* Type.ARRAY_DOTTED */)) { // Bad key type! 
return null; } if (type === 2 /* Type.ARRAY */) { if (!state.d) { state.d = true; t[k] = []; } t[k].push(t = {}); state.c[state.i++] = (state = { t: 1 /* Type.EXPLICIT */, d: false, i: 0, c: {} }); } if (state.d) { // Redefining a table! return null; } state.d = true; if (type === 1 /* Type.EXPLICIT */) { t = hasOwn ? t[k] : (t[k] = {}); } else if (type === 0 /* Type.DOTTED */ && hasOwn) { return null; } return [k, t, state.c]; } function parse(toml, opts) { let maxDepth = opts?.maxDepth ?? 1000; let res = {}; let meta = {}; let tbl = res; let m = meta; for (let ptr = skipVoid(toml, 0); ptr < toml.length;) { if (toml[ptr] === '[') { let isTableArray = toml[++ptr] === '['; let k = parseKey(toml, ptr += +isTableArray, ']'); if (isTableArray) { if (toml[k[1] - 1] !== ']') { throw new TomlError('expected end of table declaration', { toml: toml, ptr: k[1] - 1, }); } k[1]++; } let p = peekTable(k[0], res, meta, isTableArray ? 2 /* Type.ARRAY */ : 1 /* Type.EXPLICIT */); if (!p) { throw new TomlError('trying to redefine an already defined table or value', { toml: toml, ptr: ptr, }); } m = p[2]; tbl = p[1]; ptr = k[1]; } else { let k = parseKey(toml, ptr); let p = peekTable(k[0], tbl, m, 0 /* Type.DOTTED */); if (!p) { throw new TomlError('trying to redefine an already defined table or value', { toml: toml, ptr: ptr, }); } let v = extractValue(toml, k[1], void 0, maxDepth); p[1][p[0]] = v[0]; ptr = v[1]; } ptr = skipVoid(toml, ptr, true); if (toml[ptr] && toml[ptr] !== '\n' && toml[ptr] !== '\r') { throw new TomlError('each key-value declaration must be followed by an end-of-line', { toml: toml, ptr: ptr }); } ptr = skipVoid(toml, ptr); } return res; } ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/stringify.js /*! * Copyright (c) Squirrel Chat et al., All rights reserved. * SPDX-License-Identifier: BSD-3-Clause * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ let BARE_KEY = /^[a-z0-9-_]+$/i; function extendedTypeOf(obj) { let type = typeof obj; if (type === 'object') { if (Array.isArray(obj)) return 'array'; if (obj instanceof Date) return 'date'; } return type; } function isArrayOfTables(obj) { for (let i = 0; i < obj.length; i++) { if (extendedTypeOf(obj[i]) !== 'object') return false; } return obj.length != 0; } function formatString(s) { return JSON.stringify(s).replace(/\x7f/g, '\\u007f'); } function stringifyValue(val, type, depth) { if (depth === 0) { throw new Error("Could not stringify the object: maximum object depth exceeded"); } if (type === 'number') { if (isNaN(val)) return 'nan'; if (val === Infinity) return 'inf'; if (val === -Infinity) return '-inf'; return val.toString(); } if (type === 'bigint' || type === 'boolean') { return val.toString(); } if (type === 'string') { return formatString(val); } if (type === 'date') { if (isNaN(val.getTime())) { throw new TypeError('cannot serialize invalid date'); } return val.toISOString(); } if (type === 'object') { return stringifyInlineTable(val, depth); } if (type === 'array') { return stringifyArray(val, depth); } } function stringifyInlineTable(obj, depth) { let keys = Object.keys(obj); if (keys.length === 0) return '{}'; let res = '{ '; for (let i = 0; i < keys.length; i++) { let k = keys[i]; if (i) res += ', '; res += BARE_KEY.test(k) ? k : formatString(k); res += ' = '; res += stringifyValue(obj[k], extendedTypeOf(obj[k]), depth - 1); } return res + ' }'; } function stringifyArray(array, depth) { if (array.length === 0) return '[]'; let res = '[ '; for (let i = 0; i < array.length; i++) { if (i) res += ', '; if (array[i] === null || array[i] === void 0) { throw new TypeError('arrays cannot contain null or undefined values'); } res += stringifyValue(array[i], extendedTypeOf(array[i]), depth - 1); } return res + ' ]'; } function stringifyArrayTable(array, key, depth) { if (depth === 0) { throw new Error("Could not stringify the object: maximum object depth exceeded"); } let res = ''; for (let i = 0; i < array.length; i++) { res += `[[${key}]]\n`; res += stringifyTable(array[i], key, depth); res += '\n\n'; } return res; } function stringifyTable(obj, prefix, depth) { if (depth === 0) { throw new Error("Could not stringify the object: maximum object depth exceeded"); } let preamble = ''; let tables = ''; let keys = Object.keys(obj); for (let i = 0; i < keys.length; i++) { let k = keys[i]; if (obj[k] !== null && obj[k] !== void 0) { let type = extendedTypeOf(obj[k]); if (type === 'symbol' || type === 'function') { throw new TypeError(`cannot serialize values of type '${type}'`); } let key = BARE_KEY.test(k) ? k : formatString(k); if (type === 'array' && isArrayOfTables(obj[k])) { tables += stringifyArrayTable(obj[k], prefix ? `${prefix}.${key}` : key, depth - 1); } else if (type === 'object') { let tblKey = prefix ? `${prefix}.${key}` : key; tables += `[${tblKey}]\n`; tables += stringifyTable(obj[k], tblKey, depth - 1); tables += '\n\n'; } else { preamble += key; preamble += ' = '; preamble += stringifyValue(obj[k], type, depth); preamble += '\n'; } } } return `${preamble}\n${tables}`.trim(); } function stringify(obj, opts) { if (extendedTypeOf(obj) !== 'object') { throw new TypeError('stringify can only be called with an object'); } let maxDepth = opts?.maxDepth ?? 1000; return stringifyTable(obj, '', maxDepth); } ;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/index.js /*! * Copyright (c) Squirrel Chat et al., All rights reserved. 
* SPDX-License-Identifier: BSD-3-Clause * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the names of its contributors * may be used to endorse or promote products derived from this software without * specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. */ /* harmony default export */ const dist = ({ parse: parse, stringify: stringify, TomlDate: TomlDate, TomlError: TomlError }); // EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js var exec = __nccwpck_require__(95236); // EXTERNAL MODULE: ./node_modules/@actions/buildjet-cache/lib/cache.js var lib_cache = __nccwpck_require__(24318); // EXTERNAL MODULE: ./node_modules/@actions/warpbuild-cache/lib/cache.js var warpbuild_cache_lib_cache = __nccwpck_require__(22343); // EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js var cache_lib_cache = __nccwpck_require__(5116); ;// CONCATENATED MODULE: ./src/utils.ts function reportError(e) { const { commandFailed } = e; if (commandFailed) { lib_core.error(`Command failed: ${commandFailed.command}`); lib_core.error(commandFailed.stderr); } else { lib_core.error(`${e.stack}`); } } async function getCmdOutput(cmd, args = [], options = {}) { let stdout = ""; let stderr = ""; try { await exec.exec(cmd, args, { silent: true, listeners: { stdout(data) { stdout += data.toString(); }, stderr(data) { stderr += data.toString(); }, }, ...options, }); } catch (e) { e.commandFailed = { command: `${cmd} ${args.join(" ")}`, stderr, }; throw e; } return stdout; } function getCacheProvider() { const cacheProvider = lib_core.getInput("cache-provider"); let cache; switch (cacheProvider) { case "github": cache = cache_lib_cache; break; case "buildjet": cache = lib_cache; break; case "warpbuild": cache = warpbuild_cache_lib_cache; break; default: throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`); } return { name: cacheProvider, cache: cache, }; } async function utils_exists(path) { try { await external_fs_default().promises.access(path); return true; } catch { return false; } } ;// CONCATENATED MODULE: ./src/workspace.ts const SAVE_TARGETS = new Set(["lib", "proc-macro"]); class Workspace { constructor(root, target) { this.root = root; this.target = target; } async getPackages(filter, ...extraArgs) { let packages = []; try { 
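// Runs `cargo metadata` for this workspace, keeps only the packages accepted by `filter`,
// and records each package's lib/proc-macro target names (used later when pruning the target dir).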
lib_core.debug(`collecting metadata for "${this.root}"`); const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], { cwd: this.root, })); lib_core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`); for (const pkg of meta.packages.filter(filter)) { const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); } } catch (err) { console.error(err); } return packages; } async getPackagesOutsideWorkspaceRoot() { return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root)); } async getWorkspaceMembers() { return await this.getPackages((_) => true, "--no-deps"); } } ;// CONCATENATED MODULE: ./src/config.ts const HOME = external_os_default().homedir(); const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); const STATE_CONFIG = "RUST_CACHE_CONFIG"; const HASH_LENGTH = 8; class CacheConfig { constructor() { /** All the paths we want to cache */ this.cachePaths = []; /** The primary cache key */ this.cacheKey = ""; /** The secondary (restore) key that only contains the prefix and environment */ this.restoreKey = ""; /** Whether to cache CARGO_HOME/.bin */ this.cacheBin = true; /** The workspace configurations */ this.workspaces = []; /** The cargo binaries present during main step */ this.cargoBins = []; /** The prefix portion of the cache key */ this.keyPrefix = ""; /** The rust version considered for the cache key */ this.keyRust = ""; /** The environment variables considered for the cache key */ this.keyEnvs = []; /** The files considered for the cache key */ this.keyFiles = []; } /** * Constructs a [`CacheConfig`] with all the paths and keys. * * This will read the action `input`s, and read and persist `state` as necessary. */ static async new() { const self = new CacheConfig(); // Construct key prefix: // This uses either the `shared-key` input, // or the `key` input combined with the `job` key. let key = lib_core.getInput("prefix-key") || "v0-rust"; const sharedKey = lib_core.getInput("shared-key"); if (sharedKey) { key += `-${sharedKey}`; } else { const inputKey = lib_core.getInput("key"); if (inputKey) { key += `-${inputKey}`; } const job = process.env.GITHUB_JOB; if (job) { key += `-${job}`; } } // Add runner OS and CPU architecture to the key to avoid cross-contamination of cache const runnerOS = external_os_default().type(); const runnerArch = external_os_default().arch(); key += `-${runnerOS}-${runnerArch}`; self.keyPrefix = key; // Construct environment portion of the key: // This consists of a hash that considers the rust version // as well as all the environment variables as given by a default list // and the `env-vars` input. // The env vars are sorted, matched by prefix and hashed into the // resulting environment hash. 
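// Illustrative key shape only (segment names below are placeholders, not values produced by this code):
//   restoreKey: <prefix-key>-<shared-key | key+job>-<os>-<arch>-<env hash>
//   cacheKey:   <restoreKey>-<lockfile hash>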
let hasher = external_crypto_default().createHash("sha1"); const rustVersion = await getRustVersion(); let keyRust = `${rustVersion.release} ${rustVersion.host}`; hasher.update(keyRust); hasher.update(rustVersion["commit-hash"]); keyRust += ` (${rustVersion["commit-hash"]})`; self.keyRust = keyRust; // these prefixes should cover most of the compiler / rust / cargo keys const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; envPrefixes.push(...lib_core.getInput("env-vars").split(/\s+/).filter(Boolean)); // sort the available env vars so we have a more stable hash const keyEnvs = []; const envKeys = Object.keys(process.env); envKeys.sort((a, b) => a.localeCompare(b)); for (const key of envKeys) { const value = process.env[key]; if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { hasher.update(`${key}=${value}`); keyEnvs.push(key); } } self.keyEnvs = keyEnvs; key += `-${digest(hasher)}`; self.restoreKey = key; // Construct the lockfiles portion of the key: // This considers all the files found via globbing for various manifests // and lockfiles. self.cacheBin = lib_core.getInput("cache-bin").toLowerCase() == "true"; // Constructs the workspace config and paths to restore: // The workspaces are given using a `$workspace -> $target` syntax. const workspaces = []; const workspacesInput = lib_core.getInput("workspaces") || "."; for (const workspace of workspacesInput.trim().split("\n")) { let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); root = external_path_default().resolve(root); target = external_path_default().join(root, target); workspaces.push(new Workspace(root, target)); } self.workspaces = workspaces; let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml"); const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed hasher = external_crypto_default().createHash("sha1"); for (const workspace of workspaces) { const root = workspace.root; keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`))); const workspaceMembers = await workspace.getWorkspaceMembers(); const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => external_path_default().join(member.path, "Cargo.toml"))); for (const cargo_manifest of cargo_manifests) { try { const content = await promises_default().readFile(cargo_manifest, { encoding: "utf8" }); // Use any since TomlPrimitive is not exposed const parsed = parse(content); if ("package" in parsed) { const pack = parsed.package; if ("version" in pack) { pack["version"] = "0.0.0"; } } for (const prefix of ["", "build-", "dev-"]) { const section_name = `${prefix}dependencies`; if (!(section_name in parsed)) { continue; } const deps = parsed[section_name]; for (const key of Object.keys(deps)) { const dep = deps[key]; try { if ("path" in dep) { dep.version = "0.0.0"; dep.path = ""; } } catch (_e) { // Not an object, probably a string (version), // continue. 
continue; } } } hasher.update(JSON.stringify(parsed)); parsedKeyFiles.push(cargo_manifest); } catch (e) { // Fallback to caching them as regular file lib_core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`); keyFiles.push(cargo_manifest); } } const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock"); if (await utils_exists(cargo_lock)) { try { const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" }); const parsed = parse(content); if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) { // Fallback to caching them as regular file since this action // can only handle Cargo.lock format version 3 lib_core.warning("Unsupported Cargo.lock format, fallback to caching entire file"); keyFiles.push(cargo_lock); continue; } // Package without `[[package]].source` and `[[package]].checksum` // are the one with `path = "..."` to crates within the workspace. const packages = parsed.package.filter((p) => "source" in p || "checksum" in p); hasher.update(JSON.stringify(packages)); parsedKeyFiles.push(cargo_lock); } catch (e) { // Fallback to caching them as regular file lib_core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`); keyFiles.push(cargo_lock); } } } keyFiles = sort_and_uniq(keyFiles); for (const file of keyFiles) { for await (const chunk of external_fs_default().createReadStream(file)) { hasher.update(chunk); } } let lockHash = digest(hasher); keyFiles.push(...parsedKeyFiles); self.keyFiles = sort_and_uniq(keyFiles); key += `-${lockHash}`; self.cacheKey = key; self.cachePaths = [external_path_default().join(config_CARGO_HOME, "registry"), external_path_default().join(config_CARGO_HOME, "git")]; if (self.cacheBin) { self.cachePaths = [ external_path_default().join(config_CARGO_HOME, "bin"), external_path_default().join(config_CARGO_HOME, ".crates.toml"), external_path_default().join(config_CARGO_HOME, ".crates2.json"), ...self.cachePaths, ]; } const cacheTargets = lib_core.getInput("cache-targets").toLowerCase() || "true"; if (cacheTargets === "true") { self.cachePaths.push(...workspaces.map((ws) => ws.target)); } const cacheDirectories = lib_core.getInput("cache-directories"); for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { self.cachePaths.push(dir); } const bins = await getCargoBins(); self.cargoBins = Array.from(bins.values()); return self; } /** * Reads and returns the cache config from the action `state`. * * @throws {Error} if the state is not present. * @returns {CacheConfig} the configuration. * @see {@link CacheConfig#saveState} * @see {@link CacheConfig#new} */ static fromState() { const source = lib_core.getState(STATE_CONFIG); if (!source) { throw new Error("Cache configuration not found in state"); } const self = new CacheConfig(); Object.assign(self, JSON.parse(source)); self.workspaces = self.workspaces.map((w) => new Workspace(w.root, w.target)); return self; } /** * Prints the configuration to the action log. */ printInfo(cacheProvider) { lib_core.startGroup("Cache Configuration"); lib_core.info(`Cache Provider:`); lib_core.info(` ${cacheProvider.name}`); lib_core.info(`Workspaces:`); for (const workspace of this.workspaces) { lib_core.info(` ${workspace.root}`); } lib_core.info(`Cache Paths:`); for (const path of this.cachePaths) { lib_core.info(` ${path}`); } lib_core.info(`Restore Key:`); lib_core.info(` ${this.restoreKey}`); lib_core.info(`Cache Key:`); lib_core.info(` ${this.cacheKey}`); lib_core.info(`.. 
Prefix:`); lib_core.info(` - ${this.keyPrefix}`); lib_core.info(`.. Environment considered:`); lib_core.info(` - Rust Version: ${this.keyRust}`); for (const env of this.keyEnvs) { lib_core.info(` - ${env}`); } lib_core.info(`.. Lockfiles considered:`); for (const file of this.keyFiles) { lib_core.info(` - ${file}`); } lib_core.endGroup(); } /** * Saves the configuration to the state store. * This is used to restore the configuration in the post action. */ saveState() { lib_core.saveState(STATE_CONFIG, this); } } /** * Checks if the cache is up to date. * * @returns `true` if the cache is up to date, `false` otherwise. */ function isCacheUpToDate() { return lib_core.getState(STATE_CONFIG) === ""; } /** * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`. * * @param hasher The hasher to digest. * @returns The hex digest. */ function digest(hasher) { return hasher.digest("hex").substring(0, HASH_LENGTH); } async function getRustVersion() { const stdout = await getCmdOutput("rustc", ["-vV"]); let splits = stdout .split(/[\n\r]+/) .filter(Boolean) .map((s) => s.split(":").map((s) => s.trim())) .filter((s) => s.length === 2); return Object.fromEntries(splits); } async function globFiles(pattern) { const globber = await glob.create(pattern, { followSymbolicLinks: false, }); // fs.statSync resolves the symbolic link and returns stat for the // file it pointed to, so isFile would make sure the resolved // file is actually a regular file. return (await globber.glob()).filter((file) => external_fs_default().statSync(file).isFile()); } function sort_and_uniq(a) { return a .sort((a, b) => a.localeCompare(b)) .reduce((accumulator, currentValue) => { const len = accumulator.length; // If accumulator is empty or its last element != currentValue // Since array is already sorted, elements with the same value // are grouped together to be contiguous in space. // // If currentValue != last element, then it must be unique. if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) { accumulator.push(currentValue); } return accumulator; }, []); } ;// CONCATENATED MODULE: ./src/cleanup.ts async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { lib_core.debug(`cleaning target directory "${targetDir}"`); // remove all *files* from the profile directory let dir = await external_fs_default().promises.opendir(targetDir); for await (const dirent of dir) { if (dirent.isDirectory()) { let dirName = external_path_default().join(dir.path, dirent.name); // is it a profile dir, or a nested target dir? let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json"))); try { if (isNestedTarget) { await cleanTargetDir(dirName, packages, checkTimestamp); } else { await cleanProfileTarget(dirName, packages, checkTimestamp); } } catch { } } else if (dirent.name !== "CACHEDIR.TAG") { await rm(dir.path, dirent); } } } async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { lib_core.debug(`cleaning profile directory "${profileDir}"`); // Quite a few testing utility crates store compilation artifacts as nested // workspaces under `target/tests`. Notably, `target/tests/target` and // `target/tests/trybuild`.
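// Those nested dirs are cleaned recursively below; everything else inside a `tests`
// profile dir except `target` and `trybuild` is then removed.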
if (external_path_default().basename(profileDir) === "tests") { try { // https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25 // https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27 await cleanTargetDir(external_path_default().join(profileDir, "target"), packages, checkTimestamp); } catch { } try { // https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50 await cleanTargetDir(external_path_default().join(profileDir, "trybuild"), packages, checkTimestamp); } catch { } // Delete everything else. await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp); return; } let keepProfile = new Set(["build", ".fingerprint", "deps"]); await rmExcept(profileDir, keepProfile); const keepPkg = new Set(packages.map((p) => p.name)); await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp); await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); const keepDeps = new Set(packages.flatMap((p) => { const names = []; for (const n of [p.name, ...p.targets]) { const name = n.replace(/-/g, "_"); names.push(name, `lib${name}`); } return names; })); await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp); } async function getCargoBins() { const bins = new Set(); try { const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(config_CARGO_HOME, ".crates2.json"), "utf8")); for (const pkg of Object.values(installs)) { for (const bin of pkg.bins) { bins.add(bin); } } } catch { } return bins; } /** * Clean the cargo bin directory, removing the binaries that existed * when the action started, as they were not created by the build. * * @param oldBins The binaries that existed when the action started. */ async function cleanBin(oldBins) { const bins = await getCargoBins(); for (const bin of oldBins) { bins.delete(bin); } const dir = await external_fs_default().promises.opendir(external_path_default().join(config_CARGO_HOME, "bin")); for await (const dirent of dir) { if (dirent.isFile() && !bins.has(dirent.name)) { await rm(dir.path, dirent); } } } async function cleanRegistry(packages, crates = true) { // remove `.cargo/credentials.toml` try { const credentials = external_path_default().join(config_CARGO_HOME, ".cargo", "credentials.toml"); lib_core.debug(`deleting "${credentials}"`); await external_fs_default().promises.unlink(credentials); } catch { } // `.cargo/registry/index` let pkgSet = new Set(packages.map((p) => p.name)); const indexDir = await external_fs_default().promises.opendir(external_path_default().join(config_CARGO_HOME, "registry", "index")); for await (const dirent of indexDir) { if (dirent.isDirectory()) { // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` const dirPath = external_path_default().join(indexDir.path, dirent.name); // for a git registry, we can remove `.cache`, as cargo will recreate it from git if (await utils_exists(external_path_default().join(dirPath, ".git"))) { await rmRF(external_path_default().join(dirPath, ".cache")); } else { await cleanRegistryIndexCache(dirPath, pkgSet); } } } if (!crates) { lib_core.debug("skipping registry cache and src cleanup"); return; } // `.cargo/registry/src` // Cargo usually re-creates these from the `.crate` cache below, // but for some reason that does not work for `-sys` crates that check timestamps // to decide if rebuilds are necessary.
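// So the extracted sources kept under `registry/src` are those of `-sys` dependencies
// (matched by their `<name>-<version>` directory name below); all other source dirs are removed.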
pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`)); const srcDir = await external_fs_default().promises.opendir(external_path_default().join(config_CARGO_HOME, "registry", "src")); for await (const dirent of srcDir) { if (dirent.isDirectory()) { // eg `.cargo/registry/src/github.com-1ecc6299db9ec823` // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772` const dir = await external_fs_default().promises.opendir(external_path_default().join(srcDir.path, dirent.name)); for await (const dirent of dir) { if (dirent.isDirectory() && !pkgSet.has(dirent.name)) { await rmRF(external_path_default().join(dir.path, dirent.name)); } } } } // `.cargo/registry/cache` pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); const cacheDir = await external_fs_default().promises.opendir(external_path_default().join(config_CARGO_HOME, "registry", "cache")); for await (const dirent of cacheDir) { if (dirent.isDirectory()) { // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` const dir = await external_fs_default().promises.opendir(external_path_default().join(cacheDir.path, dirent.name)); for await (const dirent of dir) { // here we check that the downloaded `.crate` matches one from our dependencies if (dirent.isFile() && !pkgSet.has(dirent.name)) { await rm(dir.path, dirent); } } } } } /// Recursively walks and cleans the index `.cache` async function cleanRegistryIndexCache(dirName, keepPkg) { let dirIsEmpty = true; const cacheDir = await external_fs_default().promises.opendir(dirName); for await (const dirent of cacheDir) { if (dirent.isDirectory()) { if (await cleanRegistryIndexCache(external_path_default().join(dirName, dirent.name), keepPkg)) { await rm(dirName, dirent); } else { dirIsEmpty && (dirIsEmpty = false); } } else { if (keepPkg.has(dirent.name)) { dirIsEmpty && (dirIsEmpty = false); } else { await rm(dirName, dirent); } } } return dirIsEmpty; } async function cleanGit(packages) { const coPath = external_path_default().join(config_CARGO_HOME, "git", "checkouts"); const dbPath = external_path_default().join(config_CARGO_HOME, "git", "db"); const repos = new Map(); for (const p of packages) { if (!p.path.startsWith(coPath)) { continue; } const [repo, ref] = p.path.slice(coPath.length + 1).split(external_path_default().sep); const refs = repos.get(repo); if (refs) { refs.add(ref); } else { repos.set(repo, new Set([ref])); } } // we have to keep both the clone, and the checkout, removing either will // trigger a rebuild // clean the db try { let dir = await external_fs_default().promises.opendir(dbPath); for await (const dirent of dir) { if (!repos.has(dirent.name)) { await rm(dir.path, dirent); } } } catch { } // clean the checkouts try { let dir = await external_fs_default().promises.opendir(coPath); for await (const dirent of dir) { const refs = repos.get(dirent.name); if (!refs) { await rm(dir.path, dirent); continue; } if (!dirent.isDirectory()) { continue; } const refsDir = await external_fs_default().promises.opendir(external_path_default().join(dir.path, dirent.name)); for await (const dirent of refsDir) { if (!refs.has(dirent.name)) { await rm(refsDir.path, dirent); } } } } catch { } } const ONE_WEEK = 7 * 24 * 3600 * 1000; /** * Removes all files or directories in `dirName` matching some criteria. * * When the `checkTimestamp` flag is set, this will also remove anything older * than one week. * * Otherwise, it will remove everything that does not match any string in the * `keepPrefix` set. * The matching strips any trailing `-$hash` suffix.
*/ async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { const dir = await external_fs_default().promises.opendir(dirName); for await (const dirent of dir) { if (checkTimestamp) { const fileName = external_path_default().join(dir.path, dirent.name); const { mtime } = await external_fs_default().promises.stat(fileName); const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; if (isOutdated) { await rm(dir.path, dirent); } return; } let name = dirent.name; // strip the trailing hash const idx = name.lastIndexOf("-"); if (idx !== -1) { name = name.slice(0, idx); } if (!keepPrefix.has(name)) { await rm(dir.path, dirent); } } } async function rm(parent, dirent) { try { const fileName = external_path_default().join(parent, dirent.name); lib_core.debug(`deleting "${fileName}"`); if (dirent.isFile()) { await external_fs_default().promises.unlink(fileName); } else if (dirent.isDirectory()) { await lib_io.rmRF(fileName); } } catch { } } async function rmRF(dirName) { lib_core.debug(`deleting "${dirName}"`); await lib_io.rmRF(dirName); } ;// CONCATENATED MODULE: ./src/restore.ts process.on("uncaughtException", (e) => { lib_core.error(e.message); if (e.stack) { lib_core.error(e.stack); } }); async function run() { const cacheProvider = getCacheProvider(); if (!cacheProvider.cache.isFeatureAvailable()) { setCacheHitOutput(false); return; } try { var cacheOnFailure = lib_core.getInput("cache-on-failure").toLowerCase(); if (cacheOnFailure !== "true") { cacheOnFailure = "false"; } var lookupOnly = lib_core.getInput("lookup-only").toLowerCase() === "true"; lib_core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure); lib_core.exportVariable("CARGO_INCREMENTAL", 0); const config = await CacheConfig.new(); config.printInfo(cacheProvider); lib_core.info(""); lib_core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`); const key = config.cacheKey; // Pass a copy of cachePaths to avoid mutating the original array as reported by: // https://github.com/actions/toolkit/pull/1378 // TODO: remove this once the underlying bug is fixed. const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], { lookupOnly, }); if (restoreKey) { const match = restoreKey === key; lib_core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`); if (!match) { // pre-clean the target directory on cache mismatch for (const workspace of config.workspaces) { try { await cleanTargetDir(workspace.target, [], true); } catch { } } // We restored the cache but it is not a full match. config.saveState(); } setCacheHitOutput(match); } else { lib_core.info("No cache found."); config.saveState(); setCacheHitOutput(false); } } catch (e) { setCacheHitOutput(false); reportError(e); } process.exit(); } function setCacheHitOutput(cacheHit) { lib_core.setOutput("cache-hit", cacheHit.toString()); } run(); })(); module.exports = __webpack_exports__; /******/ })() ;