diff --git a/README.md b/README.md index aca2d01..76ce5b2 100644 --- a/README.md +++ b/README.md @@ -32,6 +32,10 @@ sensible defaults. # default: "true" use-job-key: "" + # If the automatic `job`-based cache key should include a hash of the job's contents. + # default: "true" + add-job-hash: "" + # A whitespace separated list of env-var *prefixes* who's value contributes + # to the environment cache key. + # The env-vars are matched by *prefix*, so the default `RUST` var will diff --git a/action.yml b/action.yml index fac7cd1..5960a0e 100644 --- a/action.yml +++ b/action.yml @@ -16,6 +16,10 @@ inputs: description: "If the automatic `job`-based cache key should be used for the cache name. Defaults to true." required: false default: "true" + add-job-hash: + description: "If the automatic `job`-based cache key should include a hash of the job's contents. Defaults to true." + required: false + default: "true" env-vars: description: "Additional environment variables to include in the cache key, separated by spaces." required: false diff --git a/dist/restore/index.js b/dist/restore/index.js index 3ce4e9f..60f123c 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -85015,6 +85015,1039 @@ class Queue { module.exports = Queue; +/***/ }), + +/***/ 55541: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.cleanTargetDir = cleanTargetDir; +exports.getCargoBins = getCargoBins; +exports.cleanBin = cleanBin; +exports.cleanRegistry = cleanRegistry; +exports.cleanGit = cleanGit; +const core = __importStar(__nccwpck_require__(37484)); +const io = __importStar(__nccwpck_require__(94994)); +const fs_1 = __importDefault(__nccwpck_require__(79896)); +const path_1 = __importDefault(__nccwpck_require__(16928)); +const config_1 = __nccwpck_require__(1283); +const utils_1 = __nccwpck_require__(95804); +async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { + core.debug(`cleaning target directory "${targetDir}"`); + // remove all *files* from the profile directory + let dir = await fs_1.default.promises.opendir(targetDir); + for await (const dirent of dir) { + if (dirent.isDirectory()) { + let dirName = path_1.default.join(dir.path, dirent.name); + // is it a profile dir, or a nested target dir? 
+ let isNestedTarget = (await (0, utils_1.exists)(path_1.default.join(dirName, "CACHEDIR.TAG"))) || (await (0, utils_1.exists)(path_1.default.join(dirName, ".rustc_info.json"))); + try { + if (isNestedTarget) { + await cleanTargetDir(dirName, packages, checkTimestamp); + } + else { + await cleanProfileTarget(dirName, packages, checkTimestamp); + } + } + catch { } + } + else if (dirent.name !== "CACHEDIR.TAG") { + await rm(dir.path, dirent); + } + } +} +async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { + core.debug(`cleaning profile directory "${profileDir}"`); + // Quite a few testing utility crates store compilation artifacts as nested + // workspaces under `target/tests`. Notably, `target/tests/target` and + // `target/tests/trybuild`. + if (path_1.default.basename(profileDir) === "tests") { + try { + // https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25 + // https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27 + cleanTargetDir(path_1.default.join(profileDir, "target"), packages, checkTimestamp); + } + catch { } + try { + // https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50 + cleanTargetDir(path_1.default.join(profileDir, "trybuild"), packages, checkTimestamp); + } + catch { } + // Delete everything else. 
+ await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp); + return; + } + let keepProfile = new Set(["build", ".fingerprint", "deps"]); + await rmExcept(profileDir, keepProfile); + const keepPkg = new Set(packages.map((p) => p.name)); + await rmExcept(path_1.default.join(profileDir, "build"), keepPkg, checkTimestamp); + await rmExcept(path_1.default.join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); + const keepDeps = new Set(packages.flatMap((p) => { + const names = []; + for (const n of [p.name, ...p.targets]) { + const name = n.replace(/-/g, "_"); + names.push(name, `lib${name}`); + } + return names; + })); + await rmExcept(path_1.default.join(profileDir, "deps"), keepDeps, checkTimestamp); +} +async function getCargoBins() { + const bins = new Set(); + try { + const { installs } = JSON.parse(await fs_1.default.promises.readFile(path_1.default.join(config_1.CARGO_HOME, ".crates2.json"), "utf8")); + for (const pkg of Object.values(installs)) { + for (const bin of pkg.bins) { + bins.add(bin); + } + } + } + catch { } + return bins; +} +/** + * Clean the cargo bin directory, removing the binaries that existed + * when the action started, as they were not created by the build. + * + * @param oldBins The binaries that existed when the action started. 
+ */ +async function cleanBin(oldBins) { + const bins = await getCargoBins(); + for (const bin of oldBins) { + bins.delete(bin); + } + const dir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "bin")); + for await (const dirent of dir) { + if (dirent.isFile() && !bins.has(dirent.name)) { + await rm(dir.path, dirent); + } + } +} +async function cleanRegistry(packages, crates = true) { + // remove `.cargo/credentials.toml` + try { + const credentials = path_1.default.join(config_1.CARGO_HOME, ".cargo", "credentials.toml"); + core.debug(`deleting "${credentials}"`); + await fs_1.default.promises.unlink(credentials); + } + catch { } + // `.cargo/registry/index` + let pkgSet = new Set(packages.map((p) => p.name)); + const indexDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "index")); + for await (const dirent of indexDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` + // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` + const dirPath = path_1.default.join(indexDir.path, dirent.name); + // for a git registry, we can remove `.cache`, as cargo will recreate it from git + if (await (0, utils_1.exists)(path_1.default.join(dirPath, ".git"))) { + await rmRF(path_1.default.join(dirPath, ".cache")); + } + else { + await cleanRegistryIndexCache(dirPath, pkgSet); + } + } + } + if (!crates) { + core.debug("skipping registry cache and src cleanup"); + return; + } + // `.cargo/registry/src` + // Cargo usually re-creates these from the `.crate` cache below, + // but for some reason that does not work for `-sys` crates that check timestamps + // to decide if rebuilds are necessary. 
+ pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`)); + const srcDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "src")); + for await (const dirent of srcDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/src/github.com-1ecc6299db9ec823` + // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772` + const dir = await fs_1.default.promises.opendir(path_1.default.join(srcDir.path, dirent.name)); + for await (const dirent of dir) { + if (dirent.isDirectory() && !pkgSet.has(dirent.name)) { + await rmRF(path_1.default.join(dir.path, dirent.name)); + } + } + } + } + // `.cargo/registry/cache` + pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); + const cacheDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "cache")); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` + // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` + const dir = await fs_1.default.promises.opendir(path_1.default.join(cacheDir.path, dirent.name)); + for await (const dirent of dir) { + // here we check that the downloaded `.crate` matches one from our dependencies + if (dirent.isFile() && !pkgSet.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + } +} +/// Recursively walks and cleans the index `.cache` +async function cleanRegistryIndexCache(dirName, keepPkg) { + let dirIsEmpty = true; + const cacheDir = await fs_1.default.promises.opendir(dirName); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + if (await cleanRegistryIndexCache(path_1.default.join(dirName, dirent.name), keepPkg)) { + await rm(dirName, dirent); + } + else { + dirIsEmpty && (dirIsEmpty = false); + } + } + else { + if (keepPkg.has(dirent.name)) { + dirIsEmpty && (dirIsEmpty = false); + } + else { + await rm(dirName, dirent); + } + } + 
} + return dirIsEmpty; +} +async function cleanGit(packages) { + const coPath = path_1.default.join(config_1.CARGO_HOME, "git", "checkouts"); + const dbPath = path_1.default.join(config_1.CARGO_HOME, "git", "db"); + const repos = new Map(); + for (const p of packages) { + if (!p.path.startsWith(coPath)) { + continue; + } + const [repo, ref] = p.path.slice(coPath.length + 1).split(path_1.default.sep); + const refs = repos.get(repo); + if (refs) { + refs.add(ref); + } + else { + repos.set(repo, new Set([ref])); + } + } + // we have to keep both the clone, and the checkout, removing either will + // trigger a rebuild + // clean the db + try { + let dir = await fs_1.default.promises.opendir(dbPath); + for await (const dirent of dir) { + if (!repos.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + catch { } + // clean the checkouts + try { + let dir = await fs_1.default.promises.opendir(coPath); + for await (const dirent of dir) { + const refs = repos.get(dirent.name); + if (!refs) { + await rm(dir.path, dirent); + continue; + } + if (!dirent.isDirectory()) { + continue; + } + const refsDir = await fs_1.default.promises.opendir(path_1.default.join(dir.path, dirent.name)); + for await (const dirent of refsDir) { + if (!refs.has(dirent.name)) { + await rm(refsDir.path, dirent); + } + } + } + } + catch { } +} +const ONE_WEEK = 7 * 24 * 3600 * 1000; +/** + * Removes all files or directories in `dirName` matching some criteria. + * + * When the `checkTimestamp` flag is set, this will also remove anything older + * than one week. + * + * Otherwise, it will remove everything that does not match any string in the + * `keepPrefix` set. + * The matching strips and trailing `-$hash` suffix. 
+ */ +async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { + const dir = await fs_1.default.promises.opendir(dirName); + for await (const dirent of dir) { + if (checkTimestamp) { + const fileName = path_1.default.join(dir.path, dirent.name); + const { mtime } = await fs_1.default.promises.stat(fileName); + const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; + if (isOutdated) { + await rm(dir.path, dirent); + } + return; + } + let name = dirent.name; + // strip the trailing hash + const idx = name.lastIndexOf("-"); + if (idx !== -1) { + name = name.slice(0, idx); + } + if (!keepPrefix.has(name)) { + await rm(dir.path, dirent); + } + } +} +async function rm(parent, dirent) { + try { + const fileName = path_1.default.join(parent, dirent.name); + core.debug(`deleting "${fileName}"`); + if (dirent.isFile()) { + await fs_1.default.promises.unlink(fileName); + } + else if (dirent.isDirectory()) { + await io.rmRF(fileName); + } + } + catch { } +} +async function rmRF(dirName) { + core.debug(`deleting "${dirName}"`); + await io.rmRF(dirName); +} + + +/***/ }), + +/***/ 1283: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CacheConfig = exports.CARGO_HOME = void 0; +exports.isCacheUpToDate = isCacheUpToDate; +const core = __importStar(__nccwpck_require__(37484)); +const glob = __importStar(__nccwpck_require__(47206)); +const crypto_1 = __importDefault(__nccwpck_require__(76982)); +const fs_1 = __importDefault(__nccwpck_require__(79896)); +const promises_1 = __importDefault(__nccwpck_require__(91943)); +const os_1 = __importDefault(__nccwpck_require__(70857)); +const path_1 = __importDefault(__nccwpck_require__(16928)); +const toml = __importStar(__nccwpck_require__(27106)); +const cleanup_1 = __nccwpck_require__(55541); +const utils_1 = __nccwpck_require__(95804); +const workspace_1 = __nccwpck_require__(87896); +const HOME = os_1.default.homedir(); +exports.CARGO_HOME = process.env.CARGO_HOME || path_1.default.join(HOME, ".cargo"); +const STATE_CONFIG = "RUST_CACHE_CONFIG"; +const HASH_LENGTH = 8; +class CacheConfig { + constructor() { + /** All the paths we want to cache */ + this.cachePaths = []; + /** The primary cache key */ + this.cacheKey = ""; + /** The secondary 
(restore) key that only contains the prefix and environment */ + this.restoreKey = ""; + /** Whether to cache CARGO_HOME/.bin */ + this.cacheBin = true; + /** The workspace configurations */ + this.workspaces = []; + /** The cargo binaries present during main step */ + this.cargoBins = []; + /** The prefix portion of the cache key */ + this.keyPrefix = ""; + /** The rust version considered for the cache key */ + this.keyRust = ""; + /** The environment variables considered for the cache key */ + this.keyEnvs = []; + /** The files considered for the cache key */ + this.keyFiles = []; + } + /** + * Constructs a [`CacheConfig`] with all the paths and keys. + * + * This will read the action `input`s, and read and persist `state` as necessary. + */ + static async new() { + const self = new CacheConfig(); + // Construct key prefix: + // This uses either the `shared-key` input, + // or the `key` input combined with the `job` key. + let key = core.getInput("prefix-key") || "v0-rust"; + const sharedKey = core.getInput("shared-key"); + if (sharedKey) { + key += `-${sharedKey}`; + } + else { + const inputKey = core.getInput("key"); + if (inputKey) { + key += `-${inputKey}`; + } + const job = process.env.GITHUB_JOB; + if ((job) && core.getInput("use-job-key").toLowerCase() == "true") { + key += `-${job}`; + } + } + // Add runner OS and CPU architecture to the key to avoid cross-contamination of cache + const runnerOS = os_1.default.type(); + const runnerArch = os_1.default.arch(); + key += `-${runnerOS}-${runnerArch}`; + self.keyPrefix = key; + // Construct environment portion of the key: + // This consists of a hash that considers the rust version + // as well as all the environment variables as given by a default list + // and the `env-vars` input. + // The env vars are sorted, matched by prefix and hashed into the + // resulting environment hash. 
+ let hasher = crypto_1.default.createHash("sha1"); + const rustVersion = await getRustVersion(); + let keyRust = `${rustVersion.release} ${rustVersion.host}`; + hasher.update(keyRust); + hasher.update(rustVersion["commit-hash"]); + keyRust += ` (${rustVersion["commit-hash"]})`; + self.keyRust = keyRust; + // these prefixes should cover most of the compiler / rust / cargo keys + const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; + envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean)); + // sort the available env vars so we have a more stable hash + const keyEnvs = []; + const envKeys = Object.keys(process.env); + envKeys.sort((a, b) => a.localeCompare(b)); + for (const key of envKeys) { + const value = process.env[key]; + if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { + hasher.update(`${key}=${value}`); + keyEnvs.push(key); + } + } + self.keyEnvs = keyEnvs; + // Add job hash suffix if 'add-job-hash' is true + if (core.getInput("add-job-hash").toLowerCase() == "true") { + key += `-${digest(hasher)}`; + } + self.restoreKey = key; + // Construct the lockfiles portion of the key: + // This considers all the files found via globbing for various manifests + // and lockfiles. + self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true"; + // Constructs the workspace config and paths to restore: + // The workspaces are given using a `$workspace -> $target` syntax. 
+ const workspaces = []; + const workspacesInput = core.getInput("workspaces") || "."; + for (const workspace of workspacesInput.trim().split("\n")) { + let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); + root = path_1.default.resolve(root); + target = path_1.default.join(root, target); + workspaces.push(new workspace_1.Workspace(root, target)); + } + self.workspaces = workspaces; + let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml"); + const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed + hasher = crypto_1.default.createHash("sha1"); + for (const workspace of workspaces) { + const root = workspace.root; + keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`))); + const workspaceMembers = await workspace.getWorkspaceMembers(); + const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => path_1.default.join(member.path, "Cargo.toml"))); + for (const cargo_manifest of cargo_manifests) { + try { + const content = await promises_1.default.readFile(cargo_manifest, { encoding: "utf8" }); + // Use any since TomlPrimitive is not exposed + const parsed = toml.parse(content); + if ("package" in parsed) { + const pack = parsed.package; + if ("version" in pack) { + pack["version"] = "0.0.0"; + } + } + for (const prefix of ["", "build-", "dev-"]) { + const section_name = `${prefix}dependencies`; + if (!(section_name in parsed)) { + continue; + } + const deps = parsed[section_name]; + for (const key of Object.keys(deps)) { + const dep = deps[key]; + try { + if ("path" in dep) { + dep.version = "0.0.0"; + dep.path = ""; + } + } + catch (_e) { + // Not an object, probably a string (version), + // continue. 
+ continue; + } + } + } + hasher.update(JSON.stringify(parsed)); + parsedKeyFiles.push(cargo_manifest); + } + catch (e) { + // Fallback to caching them as regular file + core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`); + keyFiles.push(cargo_manifest); + } + } + const cargo_lock = path_1.default.join(workspace.root, "Cargo.lock"); + if (await (0, utils_1.exists)(cargo_lock)) { + try { + const content = await promises_1.default.readFile(cargo_lock, { encoding: "utf8" }); + const parsed = toml.parse(content); + if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) { + // Fallback to caching them as regular file since this action + // can only handle Cargo.lock format version 3 + core.warning("Unsupported Cargo.lock format, fallback to caching entire file"); + keyFiles.push(cargo_lock); + continue; + } + // Package without `[[package]].source` and `[[package]].checksum` + // are the one with `path = "..."` to crates within the workspace. 
+ const packages = parsed.package.filter((p) => "source" in p || "checksum" in p); + hasher.update(JSON.stringify(packages)); + parsedKeyFiles.push(cargo_lock); + } + catch (e) { + // Fallback to caching them as regular file + core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`); + keyFiles.push(cargo_lock); + } + } + } + keyFiles = sort_and_uniq(keyFiles); + for (const file of keyFiles) { + for await (const chunk of fs_1.default.createReadStream(file)) { + hasher.update(chunk); + } + } + let lockHash = digest(hasher); + keyFiles.push(...parsedKeyFiles); + self.keyFiles = sort_and_uniq(keyFiles); + key += `-${lockHash}`; + self.cacheKey = key; + self.cachePaths = [path_1.default.join(exports.CARGO_HOME, "registry"), path_1.default.join(exports.CARGO_HOME, "git")]; + if (self.cacheBin) { + self.cachePaths = [ + path_1.default.join(exports.CARGO_HOME, "bin"), + path_1.default.join(exports.CARGO_HOME, ".crates.toml"), + path_1.default.join(exports.CARGO_HOME, ".crates2.json"), + ...self.cachePaths, + ]; + } + const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true"; + if (cacheTargets === "true") { + self.cachePaths.push(...workspaces.map((ws) => ws.target)); + } + const cacheDirectories = core.getInput("cache-directories"); + for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { + self.cachePaths.push(dir); + } + const bins = await (0, cleanup_1.getCargoBins)(); + self.cargoBins = Array.from(bins.values()); + return self; + } + /** + * Reads and returns the cache config from the action `state`. + * + * @throws {Error} if the state is not present. + * @returns {CacheConfig} the configuration. 
+ * @see {@link CacheConfig#saveState} + * @see {@link CacheConfig#new} + */ + static fromState() { + const source = core.getState(STATE_CONFIG); + if (!source) { + throw new Error("Cache configuration not found in state"); + } + const self = new CacheConfig(); + Object.assign(self, JSON.parse(source)); + self.workspaces = self.workspaces.map((w) => new workspace_1.Workspace(w.root, w.target)); + return self; + } + /** + * Prints the configuration to the action log. + */ + printInfo(cacheProvider) { + core.startGroup("Cache Configuration"); + core.info(`Cache Provider:`); + core.info(` ${cacheProvider.name}`); + core.info(`Workspaces:`); + for (const workspace of this.workspaces) { + core.info(` ${workspace.root}`); + } + core.info(`Cache Paths:`); + for (const path of this.cachePaths) { + core.info(` ${path}`); + } + core.info(`Restore Key:`); + core.info(` ${this.restoreKey}`); + core.info(`Cache Key:`); + core.info(` ${this.cacheKey}`); + core.info(`.. Prefix:`); + core.info(` - ${this.keyPrefix}`); + core.info(`.. Environment considered:`); + core.info(` - Rust Version: ${this.keyRust}`); + for (const env of this.keyEnvs) { + core.info(` - ${env}`); + } + core.info(`.. Lockfiles considered:`); + for (const file of this.keyFiles) { + core.info(` - ${file}`); + } + core.endGroup(); + } + /** + * Saves the configuration to the state store. + * This is used to restore the configuration in the post action. + */ + saveState() { + core.saveState(STATE_CONFIG, this); + } +} +exports.CacheConfig = CacheConfig; +/** + * Checks if the cache is up to date. + * + * @returns `true` if the cache is up to date, `false` otherwise. + */ +function isCacheUpToDate() { + return core.getState(STATE_CONFIG) === ""; +} +/** + * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`. + * + * @param hasher The hasher to digest. + * @returns The hex digest. 
+ */ +function digest(hasher) { + return hasher.digest("hex").substring(0, HASH_LENGTH); +} +async function getRustVersion() { + const stdout = await (0, utils_1.getCmdOutput)("rustc", ["-vV"]); + let splits = stdout + .split(/[\n\r]+/) + .filter(Boolean) + .map((s) => s.split(":").map((s) => s.trim())) + .filter((s) => s.length === 2); + return Object.fromEntries(splits); +} +async function globFiles(pattern) { + const globber = await glob.create(pattern, { + followSymbolicLinks: false, + }); + // fs.statSync resolve the symbolic link and returns stat for the + // file it pointed to, so isFile would make sure the resolved + // file is actually a regular file. + return (await globber.glob()).filter((file) => fs_1.default.statSync(file).isFile()); +} +function sort_and_uniq(a) { + return a + .sort((a, b) => a.localeCompare(b)) + .reduce((accumulator, currentValue) => { + const len = accumulator.length; + // If accumulator is empty or its last element != currentValue + // Since array is already sorted, elements with the same value + // are grouped together to be continugous in space. + // + // If currentValue != last element, then it must be unique. + if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) { + accumulator.push(currentValue); + } + return accumulator; + }, []); +} + + +/***/ }), + +/***/ 95804: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.reportError = reportError; +exports.getCmdOutput = getCmdOutput; +exports.getCacheProvider = getCacheProvider; +exports.exists = exists; +const core = __importStar(__nccwpck_require__(37484)); +const exec = __importStar(__nccwpck_require__(95236)); +const buildjetCache = __importStar(__nccwpck_require__(24318)); +const warpbuildCache = __importStar(__nccwpck_require__(22343)); +const ghCache = __importStar(__nccwpck_require__(5116)); +const fs_1 = __importDefault(__nccwpck_require__(79896)); +function reportError(e) { + const { commandFailed } = e; + if (commandFailed) { + core.error(`Command failed: ${commandFailed.command}`); + core.error(commandFailed.stderr); + } + else { + core.error(`${e.stack}`); + } +} +async function getCmdOutput(cmd, args = [], options = {}) { + let stdout = ""; + let stderr = ""; + try { + await exec.exec(cmd, args, { + silent: true, + listeners: { + stdout(data) { + stdout += data.toString(); + }, + stderr(data) { + stderr += data.toString(); + }, + }, + ...options, + }); + } + catch (e) { + e.commandFailed = { + command: 
`${cmd} ${args.join(" ")}`, + stderr, + }; + throw e; + } + return stdout; +} +function getCacheProvider() { + const cacheProvider = core.getInput("cache-provider"); + let cache; + switch (cacheProvider) { + case "github": + cache = ghCache; + break; + case "buildjet": + cache = buildjetCache; + break; + case "warpbuild": + cache = warpbuildCache; + break; + default: + throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`); + } + return { + name: cacheProvider, + cache: cache, + }; +} +async function exists(path) { + try { + await fs_1.default.promises.access(path); + return true; + } + catch { + return false; + } +} + + +/***/ }), + +/***/ 87896: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Workspace = void 0; +const core = __importStar(__nccwpck_require__(37484)); +const path_1 = __importDefault(__nccwpck_require__(16928)); +const utils_1 = __nccwpck_require__(95804); +const SAVE_TARGETS = new Set(["lib", "proc-macro"]); +class Workspace { + constructor(root, target) { + this.root = root; + this.target = target; + } + async getPackages(filter, ...extraArgs) { + let packages = []; + try { + core.debug(`collecting metadata for "${this.root}"`); + const meta = JSON.parse(await (0, utils_1.getCmdOutput)("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], { + cwd: this.root, + env: { "CARGO_ENCODED_RUSTFLAGS": "" }, + })); + core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`); + for (const pkg of meta.packages.filter(filter)) { + const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); + packages.push({ name: pkg.name, version: pkg.version, targets, path: path_1.default.dirname(pkg.manifest_path) }); + } + } + catch (err) { + 
console.error(err); + } + return packages; + } + async getPackagesOutsideWorkspaceRoot() { + return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root)); + } + async getWorkspaceMembers() { + return await this.getPackages((_) => true, "--no-deps"); + } +} +exports.Workspace = Workspace; + + +/***/ }), + +/***/ 473: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(37484)); +const cleanup_1 = __nccwpck_require__(55541); +const config_1 = __nccwpck_require__(1283); +const utils_1 = __nccwpck_require__(95804); +process.on("uncaughtException", (e) => { + 
core.error(e.message); + if (e.stack) { + core.error(e.stack); + } +}); +async function run() { + const cacheProvider = (0, utils_1.getCacheProvider)(); + if (!cacheProvider.cache.isFeatureAvailable()) { + setCacheHitOutput(false); + return; + } + try { + var cacheOnFailure = core.getInput("cache-on-failure").toLowerCase(); + if (cacheOnFailure !== "true") { + cacheOnFailure = "false"; + } + var lookupOnly = core.getInput("lookup-only").toLowerCase() === "true"; + core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure); + core.exportVariable("CARGO_INCREMENTAL", 0); + const config = await config_1.CacheConfig.new(); + config.printInfo(cacheProvider); + core.info(""); + core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`); + const key = config.cacheKey; + // Pass a copy of cachePaths to avoid mutating the original array as reported by: + // https://github.com/actions/toolkit/pull/1378 + // TODO: remove this once the underlying bug is fixed. + const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], { + lookupOnly, + }); + if (restoreKey) { + const match = restoreKey === key; + core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`); + if (!match) { + // pre-clean the target directory on cache mismatch + for (const workspace of config.workspaces) { + try { + await (0, cleanup_1.cleanTargetDir)(workspace.target, [], true); + } + catch { } + } + // We restored the cache but it is not a full match. 
+ config.saveState(); + } + setCacheHitOutput(match); + } + else { + core.info("No cache found."); + config.saveState(); + setCacheHitOutput(false); + } + } + catch (e) { + setCacheHitOutput(false); + (0, utils_1.reportError)(e); + } + process.exit(); +} +function setCacheHitOutput(cacheHit) { + core.setOutput("cache-hit", cacheHit.toString()); +} +run(); + + /***/ }), /***/ 42078: @@ -147326,6 +148359,915 @@ module.exports = axios; (()=>{"use strict";var t={d:(e,n)=>{for(var i in n)t.o(n,i)&&!t.o(e,i)&&Object.defineProperty(e,i,{enumerable:!0,get:n[i]})},o:(t,e)=>Object.prototype.hasOwnProperty.call(t,e),r:t=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})}},e={};t.r(e),t.d(e,{XMLBuilder:()=>ft,XMLParser:()=>st,XMLValidator:()=>mt});const n=":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD",i=new RegExp("^["+n+"]["+n+"\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$");function s(t,e){const n=[];let i=e.exec(t);for(;i;){const s=[];s.startIndex=e.lastIndex-i[0].length;const r=i.length;for(let t=0;t"!==t[o]&&" "!==t[o]&&"\t"!==t[o]&&"\n"!==t[o]&&"\r"!==t[o];o++)f+=t[o];if(f=f.trim(),"/"===f[f.length-1]&&(f=f.substring(0,f.length-1),o--),!r(f)){let e;return e=0===f.trim().length?"Invalid space after '<'.":"Tag '"+f+"' is an invalid name.",x("InvalidTag",e,N(t,o))}const p=c(t,o);if(!1===p)return x("InvalidAttr","Attributes for '"+f+"' have open quote.",N(t,o));let b=p.value;if(o=p.index,"/"===b[b.length-1]){const n=o-b.length;b=b.substring(0,b.length-1);const s=g(b,e);if(!0!==s)return x(s.err.code,s.err.msg,N(t,n+s.err.line));i=!0}else if(d){if(!p.tagClosed)return x("InvalidTag","Closing tag '"+f+"' doesn't have proper closing.",N(t,o));if(b.trim().length>0)return x("InvalidTag","Closing tag '"+f+"' can't have attributes or invalid 
starting.",N(t,a));if(0===n.length)return x("InvalidTag","Closing tag '"+f+"' has not been opened.",N(t,a));{const e=n.pop();if(f!==e.tagName){let n=N(t,e.tagStartPos);return x("InvalidTag","Expected closing tag '"+e.tagName+"' (opened in line "+n.line+", col "+n.col+") instead of closing tag '"+f+"'.",N(t,a))}0==n.length&&(s=!0)}}else{const r=g(b,e);if(!0!==r)return x(r.err.code,r.err.msg,N(t,o-b.length+r.err.line));if(!0===s)return x("InvalidXml","Multiple possible root nodes found.",N(t,o));-1!==e.unpairedTags.indexOf(f)||n.push({tagName:f,tagStartPos:a}),i=!0}for(o++;o0)||x("InvalidXml","Invalid '"+JSON.stringify(n.map((t=>t.tagName)),null,4).replace(/\r?\n/g,"")+"' found.",{line:1,col:1}):x("InvalidXml","Start tag expected.",1)}function l(t){return" "===t||"\t"===t||"\n"===t||"\r"===t}function u(t,e){const n=e;for(;e5&&"xml"===i)return x("InvalidXml","XML declaration allowed only at the start of the document.",N(t,e));if("?"==t[e]&&">"==t[e+1]){e++;break}}return e}function h(t,e){if(t.length>e+5&&"-"===t[e+1]&&"-"===t[e+2]){for(e+=3;e"===t[e+2]){e+=2;break}}else if(t.length>e+8&&"D"===t[e+1]&&"O"===t[e+2]&&"C"===t[e+3]&&"T"===t[e+4]&&"Y"===t[e+5]&&"P"===t[e+6]&&"E"===t[e+7]){let n=1;for(e+=8;e"===t[e]&&(n--,0===n))break}else if(t.length>e+9&&"["===t[e+1]&&"C"===t[e+2]&&"D"===t[e+3]&&"A"===t[e+4]&&"T"===t[e+5]&&"A"===t[e+6]&&"["===t[e+7])for(e+=8;e"===t[e+2]){e+=2;break}return e}const d='"',f="'";function c(t,e){let n="",i="",s=!1;for(;e"===t[e]&&""===i){s=!0;break}n+=t[e]}return""===i&&{value:n,index:e,tagClosed:s}}const p=new RegExp("(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['\"])(([\\s\\S])*?)\\5)?","g");function g(t,e){const n=s(t,p),i={};for(let t=0;t!1,commentPropName:!1,unpairedTags:[],processEntities:!0,htmlEntities:!1,ignoreDeclaration:!1,ignorePiTags:!1,transformTagName:!1,transformAttributeName:!1,updateTag:function(t,e,n){return t},captureMetaData:!1};let y;y="function"!=typeof Symbol?"@@xmlMetadata":Symbol("XML Node Metadata");class 
T{constructor(t){this.tagname=t,this.child=[],this[":@"]={}}add(t,e){"__proto__"===t&&(t="#__proto__"),this.child.push({[t]:e})}addChild(t,e){"__proto__"===t.tagname&&(t.tagname="#__proto__"),t[":@"]&&Object.keys(t[":@"]).length>0?this.child.push({[t.tagname]:t.child,":@":t[":@"]}):this.child.push({[t.tagname]:t.child}),void 0!==e&&(this.child[this.child.length-1][y]={startIndex:e})}static getMetaDataSymbol(){return y}}function w(t,e){const n={};if("O"!==t[e+3]||"C"!==t[e+4]||"T"!==t[e+5]||"Y"!==t[e+6]||"P"!==t[e+7]||"E"!==t[e+8])throw new Error("Invalid Tag instead of DOCTYPE");{e+=9;let i=1,s=!1,r=!1,o="";for(;e"===t[e]){if(r?"-"===t[e-1]&&"-"===t[e-2]&&(r=!1,i--):i--,0===i)break}else"["===t[e]?s=!0:o+=t[e];else{if(s&&C(t,"!ENTITY",e)){let i,s;e+=7,[i,s,e]=O(t,e+1),-1===s.indexOf("&")&&(n[i]={regx:RegExp(`&${i};`,"g"),val:s})}else if(s&&C(t,"!ELEMENT",e)){e+=8;const{index:n}=S(t,e+1);e=n}else if(s&&C(t,"!ATTLIST",e))e+=8;else if(s&&C(t,"!NOTATION",e)){e+=9;const{index:n}=A(t,e+1);e=n}else{if(!C(t,"!--",e))throw new Error("Invalid DOCTYPE");r=!0}i++,o=""}if(0!==i)throw new Error("Unclosed DOCTYPE")}return{entities:n,i:e}}const P=(t,e)=>{for(;e{for(const n of t){if("string"==typeof n&&e===n)return!0;if(n instanceof RegExp&&n.test(e))return!0}}:()=>!1}class k{constructor(t){this.options=t,this.currentNode=null,this.tagsNodeStack=[],this.docTypeEntities={},this.lastEntities={apos:{regex:/&(apos|#39|#x27);/g,val:"'"},gt:{regex:/&(gt|#62|#x3E);/g,val:">"},lt:{regex:/&(lt|#60|#x3C);/g,val:"<"},quot:{regex:/&(quot|#34|#x22);/g,val:'"'}},this.ampEntity={regex:/&(amp|#38|#x26);/g,val:"&"},this.htmlEntities={space:{regex:/&(nbsp|#160);/g,val:" 
"},cent:{regex:/&(cent|#162);/g,val:"¢"},pound:{regex:/&(pound|#163);/g,val:"£"},yen:{regex:/&(yen|#165);/g,val:"¥"},euro:{regex:/&(euro|#8364);/g,val:"€"},copyright:{regex:/&(copy|#169);/g,val:"©"},reg:{regex:/&(reg|#174);/g,val:"®"},inr:{regex:/&(inr|#8377);/g,val:"₹"},num_dec:{regex:/&#([0-9]{1,7});/g,val:(t,e)=>String.fromCodePoint(Number.parseInt(e,10))},num_hex:{regex:/&#x([0-9a-fA-F]{1,6});/g,val:(t,e)=>String.fromCodePoint(Number.parseInt(e,16))}},this.addExternalEntities=F,this.parseXml=X,this.parseTextData=L,this.resolveNameSpace=B,this.buildAttributesMap=G,this.isItStopNode=Z,this.replaceEntitiesValue=R,this.readStopNodeData=J,this.saveTextToParentTag=q,this.addChild=Y,this.ignoreAttributesFn=_(this.options.ignoreAttributes)}}function F(t){const e=Object.keys(t);for(let n=0;n0)){o||(t=this.replaceEntitiesValue(t));const i=this.options.tagValueProcessor(e,t,n,s,r);return null==i?t:typeof i!=typeof t||i!==t?i:this.options.trimValues||t.trim()===t?H(t,this.options.parseTagValue,this.options.numberParseOptions):t}}function B(t){if(this.options.removeNSPrefix){const e=t.split(":"),n="/"===t.charAt(0)?"/":"";if("xmlns"===e[0])return"";2===e.length&&(t=n+e[1])}return t}const U=new RegExp("([^\\s=]+)\\s*(=\\s*(['\"])([\\s\\S]*?)\\3)?","gm");function G(t,e,n){if(!0!==this.options.ignoreAttributes&&"string"==typeof t){const n=s(t,U),i=n.length,r={};for(let t=0;t",r,"Closing Tag is not closed.");let o=t.substring(r+2,e).trim();if(this.options.removeNSPrefix){const t=o.indexOf(":");-1!==t&&(o=o.substr(t+1))}this.options.transformTagName&&(o=this.options.transformTagName(o)),n&&(i=this.saveTextToParentTag(i,n,s));const a=s.substring(s.lastIndexOf(".")+1);if(o&&-1!==this.options.unpairedTags.indexOf(o))throw new Error(`Unpaired tag can not be used as closing tag: `);let l=0;a&&-1!==this.options.unpairedTags.indexOf(a)?(l=s.lastIndexOf(".",s.lastIndexOf(".")-1),this.tagsNodeStack.pop()):l=s.lastIndexOf("."),s=s.substring(0,l),n=this.tagsNodeStack.pop(),i="",r=e}else 
if("?"===t[r+1]){let e=z(t,r,!1,"?>");if(!e)throw new Error("Pi Tag is not closed.");if(i=this.saveTextToParentTag(i,n,s),this.options.ignoreDeclaration&&"?xml"===e.tagName||this.options.ignorePiTags);else{const t=new T(e.tagName);t.add(this.options.textNodeName,""),e.tagName!==e.tagExp&&e.attrExpPresent&&(t[":@"]=this.buildAttributesMap(e.tagExp,s,e.tagName)),this.addChild(n,t,s,r)}r=e.closeIndex+1}else if("!--"===t.substr(r+1,3)){const e=W(t,"--\x3e",r+4,"Comment is not closed.");if(this.options.commentPropName){const o=t.substring(r+4,e-2);i=this.saveTextToParentTag(i,n,s),n.add(this.options.commentPropName,[{[this.options.textNodeName]:o}])}r=e}else if("!D"===t.substr(r+1,2)){const e=w(t,r);this.docTypeEntities=e.entities,r=e.i}else if("!["===t.substr(r+1,2)){const e=W(t,"]]>",r,"CDATA is not closed.")-2,o=t.substring(r+9,e);i=this.saveTextToParentTag(i,n,s);let a=this.parseTextData(o,n.tagname,s,!0,!1,!0,!0);null==a&&(a=""),this.options.cdataPropName?n.add(this.options.cdataPropName,[{[this.options.textNodeName]:o}]):n.add(this.options.textNodeName,a),r=e+2}else{let o=z(t,r,this.options.removeNSPrefix),a=o.tagName;const l=o.rawTagName;let u=o.tagExp,h=o.attrExpPresent,d=o.closeIndex;this.options.transformTagName&&(a=this.options.transformTagName(a)),n&&i&&"!xml"!==n.tagname&&(i=this.saveTextToParentTag(i,n,s,!1));const f=n;f&&-1!==this.options.unpairedTags.indexOf(f.tagname)&&(n=this.tagsNodeStack.pop(),s=s.substring(0,s.lastIndexOf("."))),a!==e.tagname&&(s+=s?"."+a:a);const c=r;if(this.isItStopNode(this.options.stopNodes,s,a)){let e="";if(u.length>0&&u.lastIndexOf("/")===u.length-1)"/"===a[a.length-1]?(a=a.substr(0,a.length-1),s=s.substr(0,s.length-1),u=a):u=u.substr(0,u.length-1),r=o.closeIndex;else if(-1!==this.options.unpairedTags.indexOf(a))r=o.closeIndex;else{const n=this.readStopNodeData(t,l,d+1);if(!n)throw new Error(`Unexpected end of ${l}`);r=n.i,e=n.tagContent}const i=new 
T(a);a!==u&&h&&(i[":@"]=this.buildAttributesMap(u,s,a)),e&&(e=this.parseTextData(e,a,s,!0,h,!0,!0)),s=s.substr(0,s.lastIndexOf(".")),i.add(this.options.textNodeName,e),this.addChild(n,i,s,c)}else{if(u.length>0&&u.lastIndexOf("/")===u.length-1){"/"===a[a.length-1]?(a=a.substr(0,a.length-1),s=s.substr(0,s.length-1),u=a):u=u.substr(0,u.length-1),this.options.transformTagName&&(a=this.options.transformTagName(a));const t=new T(a);a!==u&&h&&(t[":@"]=this.buildAttributesMap(u,s,a)),this.addChild(n,t,s,c),s=s.substr(0,s.lastIndexOf("."))}else{const t=new T(a);this.tagsNodeStack.push(n),a!==u&&h&&(t[":@"]=this.buildAttributesMap(u,s,a)),this.addChild(n,t,s,c),n=t}i="",r=d}}else i+=t[r];return e.child};function Y(t,e,n,i){this.options.captureMetaData||(i=void 0);const s=this.options.updateTag(e.tagname,n,e[":@"]);!1===s||("string"==typeof s?(e.tagname=s,t.addChild(e,i)):t.addChild(e,i))}const R=function(t){if(this.options.processEntities){for(let e in this.docTypeEntities){const n=this.docTypeEntities[e];t=t.replace(n.regx,n.val)}for(let e in this.lastEntities){const n=this.lastEntities[e];t=t.replace(n.regex,n.val)}if(this.options.htmlEntities)for(let e in this.htmlEntities){const n=this.htmlEntities[e];t=t.replace(n.regex,n.val)}t=t.replace(this.ampEntity.regex,this.ampEntity.val)}return t};function q(t,e,n,i){return t&&(void 0===i&&(i=0===e.child.length),void 0!==(t=this.parseTextData(t,e.tagname,n,!1,!!e[":@"]&&0!==Object.keys(e[":@"]).length,i))&&""!==t&&e.add(this.options.textNodeName,t),t=""),t}function Z(t,e,n){const i="*."+n;for(const n in t){const s=t[n];if(i===s||e===s)return!0}return!1}function W(t,e,n,i){const s=t.indexOf(e,n);if(-1===s)throw new Error(i);return s+e.length-1}function z(t,e,n,i=">"){const s=function(t,e,n=">"){let i,s="";for(let r=e;r",n,`${e} is not closed`);if(t.substring(n+2,r).trim()===e&&(s--,0===s))return{tagContent:t.substring(i,n),i:r};n=r}else if("?"===t[n+1])n=W(t,"?>",n+1,"StopNode is not closed.");else 
if("!--"===t.substr(n+1,3))n=W(t,"--\x3e",n+3,"StopNode is not closed.");else if("!["===t.substr(n+1,2))n=W(t,"]]>",n,"StopNode is not closed.")-2;else{const i=z(t,n,">");i&&((i&&i.tagName)===e&&"/"!==i.tagExp[i.tagExp.length-1]&&s++,n=i.closeIndex)}}function H(t,e,n){if(e&&"string"==typeof t){const e=t.trim();return"true"===e||"false"!==e&&function(t,e={}){if(e=Object.assign({},V,e),!t||"string"!=typeof t)return t;let n=t.trim();if(void 0!==e.skipLike&&e.skipLike.test(n))return t;if("0"===t)return 0;if(e.hex&&j.test(n))return function(t){if(parseInt)return parseInt(t,16);if(Number.parseInt)return Number.parseInt(t,16);if(window&&window.parseInt)return window.parseInt(t,16);throw new Error("parseInt, Number.parseInt, window.parseInt are not supported")}(n);if(-1!==n.search(/.+[eE].+/))return function(t,e,n){if(!n.eNotation)return t;const i=e.match(M);if(i){let s=i[1]||"";const r=-1===i[3].indexOf("e")?"E":"e",o=i[2],a=s?t[o.length+1]===r:t[o.length]===r;return o.length>1&&a?t:1!==o.length||!i[3].startsWith(`.${r}`)&&i[3][0]!==r?n.leadingZeros&&!a?(e=(i[1]||"")+i[3],Number(e)):t:Number(e)}return t}(t,n,e);{const s=D.exec(n);if(s){const r=s[1]||"",o=s[2];let a=(i=s[3])&&-1!==i.indexOf(".")?("."===(i=i.replace(/0+$/,""))?i="0":"."===i[0]?i="0"+i:"."===i[i.length-1]&&(i=i.substring(0,i.length-1)),i):i;const l=r?"."===t[o.length+1]:"."===t[o.length];if(!e.leadingZeros&&(o.length>1||1===o.length&&!l))return t;{const i=Number(n),s=String(i);if(0===i||-0===i)return i;if(-1!==s.search(/[eE]/))return e.eNotation?i:t;if(-1!==n.indexOf("."))return"0"===s||s===a||s===`${r}${a}`?i:t;let l=o?a:n;return o?l===s||r+l===s?i:t:l===s||l===r+s?i:t}}return t}var i}(t,n)}return void 0!==t?t:""}const K=T.getMetaDataSymbol();function Q(t,e){return tt(t,e)}function tt(t,e,n){let i;const s={};for(let r=0;r0&&(s[e.textNodeName]=i):void 0!==i&&(s[e.textNodeName]=i),s}function et(t){const e=Object.keys(t);for(let t=0;t0&&(n="\n"),ot(t,e,"",n)}function ot(t,e,n,i){let s="",r=!1;for(let 
o=0;o`,r=!1;continue}if(l===e.commentPropName){s+=i+`\x3c!--${a[l][0][e.textNodeName]}--\x3e`,r=!0;continue}if("?"===l[0]){const t=lt(a[":@"],e),n="?xml"===l?"":i;let o=a[l][0][e.textNodeName];o=0!==o.length?" "+o:"",s+=n+`<${l}${o}${t}?>`,r=!0;continue}let h=i;""!==h&&(h+=e.indentBy);const d=i+`<${l}${lt(a[":@"],e)}`,f=ot(a[l],e,u,h);-1!==e.unpairedTags.indexOf(l)?e.suppressUnpairedNode?s+=d+">":s+=d+"/>":f&&0!==f.length||!e.suppressEmptyNode?f&&f.endsWith(">")?s+=d+`>${f}${i}`:(s+=d+">",f&&""!==i&&(f.includes("/>")||f.includes("`):s+=d+"/>",r=!0}return s}function at(t){const e=Object.keys(t);for(let n=0;n0&&e.processEntities)for(let n=0;n","g"),val:">"},{regex:new RegExp("<","g"),val:"<"},{regex:new RegExp("'","g"),val:"'"},{regex:new RegExp('"',"g"),val:"""}],processEntities:!0,stopNodes:[],oneListGroup:!1};function ft(t){this.options=Object.assign({},dt,t),!0===this.options.ignoreAttributes||this.options.attributesGroupName?this.isAttribute=function(){return!1}:(this.ignoreAttributesFn=_(this.options.ignoreAttributes),this.attrPrefixLen=this.options.attributeNamePrefix.length,this.isAttribute=gt),this.processTextOrObjNode=ct,this.options.format?(this.indentate=pt,this.tagEndChar=">\n",this.newLine="\n"):(this.indentate=function(){return""},this.tagEndChar=">",this.newLine="")}function ct(t,e,n,i){const s=this.j2x(t,n+1,i.concat(e));return void 0!==t[this.options.textNodeName]&&1===Object.keys(t).length?this.buildTextValNode(t[this.options.textNodeName],e,s.attrStr,n):this.buildObjectNode(s.val,e,s.attrStr,n)}function pt(t){return this.options.indentBy.repeat(t)}function gt(t){return!(!t.startsWith(this.options.attributeNamePrefix)||t===this.options.textNodeName)&&t.substr(this.attrPrefixLen)}ft.prototype.build=function(t){return this.options.preserveOrder?rt(t,this.options):(Array.isArray(t)&&this.options.arrayNodeName&&this.options.arrayNodeName.length>1&&(t={[this.options.arrayNodeName]:t}),this.j2x(t,0,[]).val)},ft.prototype.j2x=function(t,e,n){let 
i="",s="";const r=n.join(".");for(let o in t)if(Object.prototype.hasOwnProperty.call(t,o))if(void 0===t[o])this.isAttribute(o)&&(s+="");else if(null===t[o])this.isAttribute(o)||o===this.options.cdataPropName?s+="":"?"===o[0]?s+=this.indentate(e)+"<"+o+"?"+this.tagEndChar:s+=this.indentate(e)+"<"+o+"/"+this.tagEndChar;else if(t[o]instanceof Date)s+=this.buildTextValNode(t[o],o,"",e);else if("object"!=typeof t[o]){const n=this.isAttribute(o);if(n&&!this.ignoreAttributesFn(n,r))i+=this.buildAttrPairStr(n,""+t[o]);else if(!n)if(o===this.options.textNodeName){let e=this.options.tagValueProcessor(o,""+t[o]);s+=this.replaceEntitiesValue(e)}else s+=this.buildTextValNode(t[o],o,"",e)}else if(Array.isArray(t[o])){const i=t[o].length;let r="",a="";for(let l=0;l"+t+s}},ft.prototype.closeTag=function(t){let e="";return-1!==this.options.unpairedTags.indexOf(t)?this.options.suppressUnpairedNode||(e="/"):e=this.options.suppressEmptyNode?"/":`>`+this.newLine;if(!1!==this.options.commentPropName&&e===this.options.commentPropName)return this.indentate(i)+`\x3c!--${t}--\x3e`+this.newLine;if("?"===e[0])return this.indentate(i)+"<"+e+n+"?"+this.tagEndChar;{let s=this.options.tagValueProcessor(e,t);return s=this.replaceEntitiesValue(s),""===s?this.indentate(i)+"<"+e+n+this.closeTag(e)+this.tagEndChar:this.indentate(i)+"<"+e+n+">"+s+"0&&this.options.processEntities)for(let e=0;e { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], 
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// dist/index.js +var index_exports = {}; +__export(index_exports, { + TomlDate: () => TomlDate, + TomlError: () => TomlError, + default: () => index_default, + parse: () => parse, + stringify: () => stringify +}); +module.exports = __toCommonJS(index_exports); + +// dist/error.js +function getLineColFromPtr(string, ptr) { + let lines = string.slice(0, ptr).split(/\r\n|\n|\r/g); + return [lines.length, lines.pop().length + 1]; +} +function makeCodeBlock(string, line, column) { + let lines = string.split(/\r\n|\n|\r/g); + let codeblock = ""; + let numberLen = (Math.log10(line + 1) | 0) + 1; + for (let i = line - 1; i <= line + 1; i++) { + let l = lines[i - 1]; + if (!l) + continue; + codeblock += i.toString().padEnd(numberLen, " "); + codeblock += ": "; + codeblock += l; + codeblock += "\n"; + if (i === line) { + codeblock += " ".repeat(numberLen + column + 2); + codeblock += "^\n"; + } + } + return codeblock; +} +var TomlError = class extends Error { + line; + column; + codeblock; + constructor(message, options) { + const [line, column] = getLineColFromPtr(options.toml, options.ptr); + const codeblock = makeCodeBlock(options.toml, line, column); + super(`Invalid TOML document: ${message} + +${codeblock}`, options); + this.line = line; + this.column = column; + this.codeblock = codeblock; + } +}; + +// dist/util.js +function isEscaped(str, ptr) { + let i = 0; + while (str[ptr - ++i] === "\\") + ; + return --i && i % 2; +} +function indexOfNewline(str, start = 0, end = str.length) { + let idx = str.indexOf("\n", start); + if (str[idx - 1] === "\r") + idx--; + return idx <= end ? 
idx : -1; +} +function skipComment(str, ptr) { + for (let i = ptr; i < str.length; i++) { + let c = str[i]; + if (c === "\n") + return i; + if (c === "\r" && str[i + 1] === "\n") + return i + 1; + if (c < " " && c !== " " || c === "\x7F") { + throw new TomlError("control characters are not allowed in comments", { + toml: str, + ptr + }); + } + } + return str.length; +} +function skipVoid(str, ptr, banNewLines, banComments) { + let c; + while ((c = str[ptr]) === " " || c === " " || !banNewLines && (c === "\n" || c === "\r" && str[ptr + 1] === "\n")) + ptr++; + return banComments || c !== "#" ? ptr : skipVoid(str, skipComment(str, ptr), banNewLines); +} +function skipUntil(str, ptr, sep, end, banNewLines = false) { + if (!end) { + ptr = indexOfNewline(str, ptr); + return ptr < 0 ? str.length : ptr; + } + for (let i = ptr; i < str.length; i++) { + let c = str[i]; + if (c === "#") { + i = indexOfNewline(str, i); + } else if (c === sep) { + return i + 1; + } else if (c === end || banNewLines && (c === "\n" || c === "\r" && str[i + 1] === "\n")) { + return i; + } + } + throw new TomlError("cannot find end of structure", { + toml: str, + ptr + }); +} +function getStringEnd(str, seek) { + let first = str[seek]; + let target = first === str[seek + 1] && str[seek + 1] === str[seek + 2] ? 
str.slice(seek, seek + 3) : first; + seek += target.length - 1; + do + seek = str.indexOf(target, ++seek); + while (seek > -1 && first !== "'" && isEscaped(str, seek)); + if (seek > -1) { + seek += target.length; + if (target.length > 1) { + if (str[seek] === first) + seek++; + if (str[seek] === first) + seek++; + } + } + return seek; +} + +// dist/date.js +var DATE_TIME_RE = /^(\d{4}-\d{2}-\d{2})?[T ]?(?:(\d{2}):\d{2}:\d{2}(?:\.\d+)?)?(Z|[-+]\d{2}:\d{2})?$/i; +var TomlDate = class _TomlDate extends Date { + #hasDate = false; + #hasTime = false; + #offset = null; + constructor(date) { + let hasDate = true; + let hasTime = true; + let offset = "Z"; + if (typeof date === "string") { + let match = date.match(DATE_TIME_RE); + if (match) { + if (!match[1]) { + hasDate = false; + date = `0000-01-01T${date}`; + } + hasTime = !!match[2]; + hasTime && date[10] === " " && (date = date.replace(" ", "T")); + if (match[2] && +match[2] > 23) { + date = ""; + } else { + offset = match[3] || null; + date = date.toUpperCase(); + if (!offset && hasTime) + date += "Z"; + } + } else { + date = ""; + } + } + super(date); + if (!isNaN(this.getTime())) { + this.#hasDate = hasDate; + this.#hasTime = hasTime; + this.#offset = offset; + } + } + isDateTime() { + return this.#hasDate && this.#hasTime; + } + isLocal() { + return !this.#hasDate || !this.#hasTime || !this.#offset; + } + isDate() { + return this.#hasDate && !this.#hasTime; + } + isTime() { + return this.#hasTime && !this.#hasDate; + } + isValid() { + return this.#hasDate || this.#hasTime; + } + toISOString() { + let iso = super.toISOString(); + if (this.isDate()) + return iso.slice(0, 10); + if (this.isTime()) + return iso.slice(11, 23); + if (this.#offset === null) + return iso.slice(0, -1); + if (this.#offset === "Z") + return iso; + let offset = +this.#offset.slice(1, 3) * 60 + +this.#offset.slice(4, 6); + offset = this.#offset[0] === "-" ? 
offset : -offset; + let offsetDate = new Date(this.getTime() - offset * 6e4); + return offsetDate.toISOString().slice(0, -1) + this.#offset; + } + static wrapAsOffsetDateTime(jsDate, offset = "Z") { + let date = new _TomlDate(jsDate); + date.#offset = offset; + return date; + } + static wrapAsLocalDateTime(jsDate) { + let date = new _TomlDate(jsDate); + date.#offset = null; + return date; + } + static wrapAsLocalDate(jsDate) { + let date = new _TomlDate(jsDate); + date.#hasTime = false; + date.#offset = null; + return date; + } + static wrapAsLocalTime(jsDate) { + let date = new _TomlDate(jsDate); + date.#hasDate = false; + date.#offset = null; + return date; + } +}; + +// dist/primitive.js +var INT_REGEX = /^((0x[0-9a-fA-F](_?[0-9a-fA-F])*)|(([+-]|0[ob])?\d(_?\d)*))$/; +var FLOAT_REGEX = /^[+-]?\d(_?\d)*(\.\d(_?\d)*)?([eE][+-]?\d(_?\d)*)?$/; +var LEADING_ZERO = /^[+-]?0[0-9_]/; +var ESCAPE_REGEX = /^[0-9a-f]{4,8}$/i; +var ESC_MAP = { + b: "\b", + t: " ", + n: "\n", + f: "\f", + r: "\r", + '"': '"', + "\\": "\\" +}; +function parseString(str, ptr = 0, endPtr = str.length) { + let isLiteral = str[ptr] === "'"; + let isMultiline = str[ptr++] === str[ptr] && str[ptr] === str[ptr + 1]; + if (isMultiline) { + endPtr -= 2; + if (str[ptr += 2] === "\r") + ptr++; + if (str[ptr] === "\n") + ptr++; + } + let tmp = 0; + let isEscape; + let parsed = ""; + let sliceStart = ptr; + while (ptr < endPtr - 1) { + let c = str[ptr++]; + if (c === "\n" || c === "\r" && str[ptr] === "\n") { + if (!isMultiline) { + throw new TomlError("newlines are not allowed in strings", { + toml: str, + ptr: ptr - 1 + }); + } + } else if (c < " " && c !== " " || c === "\x7F") { + throw new TomlError("control characters are not allowed in strings", { + toml: str, + ptr: ptr - 1 + }); + } + if (isEscape) { + isEscape = false; + if (c === "u" || c === "U") { + let code = str.slice(ptr, ptr += c === "u" ? 
4 : 8); + if (!ESCAPE_REGEX.test(code)) { + throw new TomlError("invalid unicode escape", { + toml: str, + ptr: tmp + }); + } + try { + parsed += String.fromCodePoint(parseInt(code, 16)); + } catch { + throw new TomlError("invalid unicode escape", { + toml: str, + ptr: tmp + }); + } + } else if (isMultiline && (c === "\n" || c === " " || c === " " || c === "\r")) { + ptr = skipVoid(str, ptr - 1, true); + if (str[ptr] !== "\n" && str[ptr] !== "\r") { + throw new TomlError("invalid escape: only line-ending whitespace may be escaped", { + toml: str, + ptr: tmp + }); + } + ptr = skipVoid(str, ptr); + } else if (c in ESC_MAP) { + parsed += ESC_MAP[c]; + } else { + throw new TomlError("unrecognized escape sequence", { + toml: str, + ptr: tmp + }); + } + sliceStart = ptr; + } else if (!isLiteral && c === "\\") { + tmp = ptr - 1; + isEscape = true; + parsed += str.slice(sliceStart, tmp); + } + } + return parsed + str.slice(sliceStart, endPtr - 1); +} +function parseValue(value, toml, ptr, integersAsBigInt) { + if (value === "true") + return true; + if (value === "false") + return false; + if (value === "-inf") + return -Infinity; + if (value === "inf" || value === "+inf") + return Infinity; + if (value === "nan" || value === "+nan" || value === "-nan") + return NaN; + if (value === "-0") + return integersAsBigInt ? 
0n : 0; + let isInt = INT_REGEX.test(value); + if (isInt || FLOAT_REGEX.test(value)) { + if (LEADING_ZERO.test(value)) { + throw new TomlError("leading zeroes are not allowed", { + toml, + ptr + }); + } + value = value.replace(/_/g, ""); + let numeric = +value; + if (isNaN(numeric)) { + throw new TomlError("invalid number", { + toml, + ptr + }); + } + if (isInt) { + if ((isInt = !Number.isSafeInteger(numeric)) && !integersAsBigInt) { + throw new TomlError("integer value cannot be represented losslessly", { + toml, + ptr + }); + } + if (isInt || integersAsBigInt === true) + numeric = BigInt(value); + } + return numeric; + } + const date = new TomlDate(value); + if (!date.isValid()) { + throw new TomlError("invalid value", { + toml, + ptr + }); + } + return date; +} + +// dist/extract.js +function sliceAndTrimEndOf(str, startPtr, endPtr, allowNewLines) { + let value = str.slice(startPtr, endPtr); + let commentIdx = value.indexOf("#"); + if (commentIdx > -1) { + skipComment(str, commentIdx); + value = value.slice(0, commentIdx); + } + let trimmed = value.trimEnd(); + if (!allowNewLines) { + let newlineIdx = value.indexOf("\n", trimmed.length); + if (newlineIdx > -1) { + throw new TomlError("newlines are not allowed in inline tables", { + toml: str, + ptr: startPtr + newlineIdx + }); + } + } + return [trimmed, commentIdx]; +} +function extractValue(str, ptr, end, depth, integersAsBigInt) { + if (depth === 0) { + throw new TomlError("document contains excessively nested structures. aborting.", { + toml: str, + ptr + }); + } + let c = str[ptr]; + if (c === "[" || c === "{") { + let [value, endPtr2] = c === "[" ? parseArray(str, ptr, depth, integersAsBigInt) : parseInlineTable(str, ptr, depth, integersAsBigInt); + let newPtr = end ? 
skipUntil(str, endPtr2, ",", end) : endPtr2; + if (endPtr2 - newPtr && end === "}") { + let nextNewLine = indexOfNewline(str, endPtr2, newPtr); + if (nextNewLine > -1) { + throw new TomlError("newlines are not allowed in inline tables", { + toml: str, + ptr: nextNewLine + }); + } + } + return [value, newPtr]; + } + let endPtr; + if (c === '"' || c === "'") { + endPtr = getStringEnd(str, ptr); + let parsed = parseString(str, ptr, endPtr); + if (end) { + endPtr = skipVoid(str, endPtr, end !== "]"); + if (str[endPtr] && str[endPtr] !== "," && str[endPtr] !== end && str[endPtr] !== "\n" && str[endPtr] !== "\r") { + throw new TomlError("unexpected character encountered", { + toml: str, + ptr: endPtr + }); + } + endPtr += +(str[endPtr] === ","); + } + return [parsed, endPtr]; + } + endPtr = skipUntil(str, ptr, ",", end); + let slice = sliceAndTrimEndOf(str, ptr, endPtr - +(str[endPtr - 1] === ","), end === "]"); + if (!slice[0]) { + throw new TomlError("incomplete key-value declaration: no value specified", { + toml: str, + ptr + }); + } + if (end && slice[1] > -1) { + endPtr = skipVoid(str, ptr + slice[1]); + endPtr += +(str[endPtr] === ","); + } + return [ + parseValue(slice[0], str, ptr, integersAsBigInt), + endPtr + ]; +} + +// dist/struct.js +var KEY_PART_RE = /^[a-zA-Z0-9-_]+[ \t]*$/; +function parseKey(str, ptr, end = "=") { + let dot = ptr - 1; + let parsed = []; + let endPtr = str.indexOf(end, ptr); + if (endPtr < 0) { + throw new TomlError("incomplete key-value: cannot find end of key", { + toml: str, + ptr + }); + } + do { + let c = str[ptr = ++dot]; + if (c !== " " && c !== " ") { + if (c === '"' || c === "'") { + if (c === str[ptr + 1] && c === str[ptr + 2]) { + throw new TomlError("multiline strings are not allowed in keys", { + toml: str, + ptr + }); + } + let eos = getStringEnd(str, ptr); + if (eos < 0) { + throw new TomlError("unfinished string encountered", { + toml: str, + ptr + }); + } + dot = str.indexOf(".", eos); + let strEnd = str.slice(eos, dot < 
0 || dot > endPtr ? endPtr : dot); + let newLine = indexOfNewline(strEnd); + if (newLine > -1) { + throw new TomlError("newlines are not allowed in keys", { + toml: str, + ptr: ptr + dot + newLine + }); + } + if (strEnd.trimStart()) { + throw new TomlError("found extra tokens after the string part", { + toml: str, + ptr: eos + }); + } + if (endPtr < eos) { + endPtr = str.indexOf(end, eos); + if (endPtr < 0) { + throw new TomlError("incomplete key-value: cannot find end of key", { + toml: str, + ptr + }); + } + } + parsed.push(parseString(str, ptr, eos)); + } else { + dot = str.indexOf(".", ptr); + let part = str.slice(ptr, dot < 0 || dot > endPtr ? endPtr : dot); + if (!KEY_PART_RE.test(part)) { + throw new TomlError("only letter, numbers, dashes and underscores are allowed in keys", { + toml: str, + ptr + }); + } + parsed.push(part.trimEnd()); + } + } + } while (dot + 1 && dot < endPtr); + return [parsed, skipVoid(str, endPtr + 1, true, true)]; +} +function parseInlineTable(str, ptr, depth, integersAsBigInt) { + let res = {}; + let seen = /* @__PURE__ */ new Set(); + let c; + let comma = 0; + ptr++; + while ((c = str[ptr++]) !== "}" && c) { + let err = { toml: str, ptr: ptr - 1 }; + if (c === "\n") { + throw new TomlError("newlines are not allowed in inline tables", err); + } else if (c === "#") { + throw new TomlError("inline tables cannot contain comments", err); + } else if (c === ",") { + throw new TomlError("expected key-value, found comma", err); + } else if (c !== " " && c !== " ") { + let k; + let t = res; + let hasOwn = false; + let [key, keyEndPtr] = parseKey(str, ptr - 1); + for (let i = 0; i < key.length; i++) { + if (i) + t = hasOwn ? 
t[k] : t[k] = {}; + k = key[i]; + if ((hasOwn = Object.hasOwn(t, k)) && (typeof t[k] !== "object" || seen.has(t[k]))) { + throw new TomlError("trying to redefine an already defined value", { + toml: str, + ptr + }); + } + if (!hasOwn && k === "__proto__") { + Object.defineProperty(t, k, { enumerable: true, configurable: true, writable: true }); + } + } + if (hasOwn) { + throw new TomlError("trying to redefine an already defined value", { + toml: str, + ptr + }); + } + let [value, valueEndPtr] = extractValue(str, keyEndPtr, "}", depth - 1, integersAsBigInt); + seen.add(value); + t[k] = value; + ptr = valueEndPtr; + comma = str[ptr - 1] === "," ? ptr - 1 : 0; + } + } + if (comma) { + throw new TomlError("trailing commas are not allowed in inline tables", { + toml: str, + ptr: comma + }); + } + if (!c) { + throw new TomlError("unfinished table encountered", { + toml: str, + ptr + }); + } + return [res, ptr]; +} +function parseArray(str, ptr, depth, integersAsBigInt) { + let res = []; + let c; + ptr++; + while ((c = str[ptr++]) !== "]" && c) { + if (c === ",") { + throw new TomlError("expected value, found comma", { + toml: str, + ptr: ptr - 1 + }); + } else if (c === "#") + ptr = skipComment(str, ptr); + else if (c !== " " && c !== " " && c !== "\n" && c !== "\r") { + let e = extractValue(str, ptr - 1, "]", depth - 1, integersAsBigInt); + res.push(e[0]); + ptr = e[1]; + } + } + if (!c) { + throw new TomlError("unfinished array encountered", { + toml: str, + ptr + }); + } + return [res, ptr]; +} + +// dist/parse.js +function peekTable(key, table, meta, type) { + let t = table; + let m = meta; + let k; + let hasOwn = false; + let state; + for (let i = 0; i < key.length; i++) { + if (i) { + t = hasOwn ? 
t[k] : t[k] = {}; + m = (state = m[k]).c; + if (type === 0 && (state.t === 1 || state.t === 2)) { + return null; + } + if (state.t === 2) { + let l = t.length - 1; + t = t[l]; + m = m[l].c; + } + } + k = key[i]; + if ((hasOwn = Object.hasOwn(t, k)) && m[k]?.t === 0 && m[k]?.d) { + return null; + } + if (!hasOwn) { + if (k === "__proto__") { + Object.defineProperty(t, k, { enumerable: true, configurable: true, writable: true }); + Object.defineProperty(m, k, { enumerable: true, configurable: true, writable: true }); + } + m[k] = { + t: i < key.length - 1 && type === 2 ? 3 : type, + d: false, + i: 0, + c: {} + }; + } + } + state = m[k]; + if (state.t !== type && !(type === 1 && state.t === 3)) { + return null; + } + if (type === 2) { + if (!state.d) { + state.d = true; + t[k] = []; + } + t[k].push(t = {}); + state.c[state.i++] = state = { t: 1, d: false, i: 0, c: {} }; + } + if (state.d) { + return null; + } + state.d = true; + if (type === 1) { + t = hasOwn ? t[k] : t[k] = {}; + } else if (type === 0 && hasOwn) { + return null; + } + return [k, t, state.c]; +} +function parse(toml, { maxDepth = 1e3, integersAsBigInt } = {}) { + let res = {}; + let meta = {}; + let tbl = res; + let m = meta; + for (let ptr = skipVoid(toml, 0); ptr < toml.length; ) { + if (toml[ptr] === "[") { + let isTableArray = toml[++ptr] === "["; + let k = parseKey(toml, ptr += +isTableArray, "]"); + if (isTableArray) { + if (toml[k[1] - 1] !== "]") { + throw new TomlError("expected end of table declaration", { + toml, + ptr: k[1] - 1 + }); + } + k[1]++; + } + let p = peekTable( + k[0], + res, + meta, + isTableArray ? 
2 : 1 + /* Type.EXPLICIT */ + ); + if (!p) { + throw new TomlError("trying to redefine an already defined table or value", { + toml, + ptr + }); + } + m = p[2]; + tbl = p[1]; + ptr = k[1]; + } else { + let k = parseKey(toml, ptr); + let p = peekTable( + k[0], + tbl, + m, + 0 + /* Type.DOTTED */ + ); + if (!p) { + throw new TomlError("trying to redefine an already defined table or value", { + toml, + ptr + }); + } + let v = extractValue(toml, k[1], void 0, maxDepth, integersAsBigInt); + p[1][p[0]] = v[0]; + ptr = v[1]; + } + ptr = skipVoid(toml, ptr, true); + if (toml[ptr] && toml[ptr] !== "\n" && toml[ptr] !== "\r") { + throw new TomlError("each key-value declaration must be followed by an end-of-line", { + toml, + ptr + }); + } + ptr = skipVoid(toml, ptr); + } + return res; +} + +// dist/stringify.js +var BARE_KEY = /^[a-z0-9-_]+$/i; +function extendedTypeOf(obj) { + let type = typeof obj; + if (type === "object") { + if (Array.isArray(obj)) + return "array"; + if (obj instanceof Date) + return "date"; + } + return type; +} +function isArrayOfTables(obj) { + for (let i = 0; i < obj.length; i++) { + if (extendedTypeOf(obj[i]) !== "object") + return false; + } + return obj.length != 0; +} +function formatString(s) { + return JSON.stringify(s).replace(/\x7f/g, "\\u007f"); +} +function stringifyValue(val, type, depth, numberAsFloat) { + if (depth === 0) { + throw new Error("Could not stringify the object: maximum object depth exceeded"); + } + if (type === "number") { + if (isNaN(val)) + return "nan"; + if (val === Infinity) + return "inf"; + if (val === -Infinity) + return "-inf"; + if (numberAsFloat && Number.isInteger(val)) + return val.toFixed(1); + return val.toString(); + } + if (type === "bigint" || type === "boolean") { + return val.toString(); + } + if (type === "string") { + return formatString(val); + } + if (type === "date") { + if (isNaN(val.getTime())) { + throw new TypeError("cannot serialize invalid date"); + } + return val.toISOString(); + } + if 
(type === "object") { + return stringifyInlineTable(val, depth, numberAsFloat); + } + if (type === "array") { + return stringifyArray(val, depth, numberAsFloat); + } +} +function stringifyInlineTable(obj, depth, numberAsFloat) { + let keys = Object.keys(obj); + if (keys.length === 0) + return "{}"; + let res = "{ "; + for (let i = 0; i < keys.length; i++) { + let k = keys[i]; + if (i) + res += ", "; + res += BARE_KEY.test(k) ? k : formatString(k); + res += " = "; + res += stringifyValue(obj[k], extendedTypeOf(obj[k]), depth - 1, numberAsFloat); + } + return res + " }"; +} +function stringifyArray(array, depth, numberAsFloat) { + if (array.length === 0) + return "[]"; + let res = "[ "; + for (let i = 0; i < array.length; i++) { + if (i) + res += ", "; + if (array[i] === null || array[i] === void 0) { + throw new TypeError("arrays cannot contain null or undefined values"); + } + res += stringifyValue(array[i], extendedTypeOf(array[i]), depth - 1, numberAsFloat); + } + return res + " ]"; +} +function stringifyArrayTable(array, key, depth, numberAsFloat) { + if (depth === 0) { + throw new Error("Could not stringify the object: maximum object depth exceeded"); + } + let res = ""; + for (let i = 0; i < array.length; i++) { + res += `[[${key}]] +`; + res += stringifyTable(array[i], key, depth, numberAsFloat); + res += "\n\n"; + } + return res; +} +function stringifyTable(obj, prefix, depth, numberAsFloat) { + if (depth === 0) { + throw new Error("Could not stringify the object: maximum object depth exceeded"); + } + let preamble = ""; + let tables = ""; + let keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + let k = keys[i]; + if (obj[k] !== null && obj[k] !== void 0) { + let type = extendedTypeOf(obj[k]); + if (type === "symbol" || type === "function") { + throw new TypeError(`cannot serialize values of type '${type}'`); + } + let key = BARE_KEY.test(k) ? 
k : formatString(k); + if (type === "array" && isArrayOfTables(obj[k])) { + tables += stringifyArrayTable(obj[k], prefix ? `${prefix}.${key}` : key, depth - 1, numberAsFloat); + } else if (type === "object") { + let tblKey = prefix ? `${prefix}.${key}` : key; + tables += `[${tblKey}] +`; + tables += stringifyTable(obj[k], tblKey, depth - 1, numberAsFloat); + tables += "\n\n"; + } else { + preamble += key; + preamble += " = "; + preamble += stringifyValue(obj[k], type, depth, numberAsFloat); + preamble += "\n"; + } + } + } + return `${preamble} +${tables}`.trim(); +} +function stringify(obj, { maxDepth = 1e3, numbersAsFloat = false } = {}) { + if (extendedTypeOf(obj) !== "object") { + throw new TypeError("stringify can only be called with an object"); + } + return stringifyTable(obj, "", maxDepth, numbersAsFloat); +} + +// dist/index.js +var index_default = { parse, stringify, TomlDate, TomlError }; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); +/*! + * Copyright (c) Squirrel Chat et al., All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. Neither the name of the copyright holder nor the names of its contributors + * may be used to endorse or promote products derived from this software without + * specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + + /***/ }), /***/ 64012: @@ -147409,2014 +149351,17 @@ module.exports = /*#__PURE__*/JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45 /******/ } /******/ /************************************************************************/ -/******/ /* webpack/runtime/compat get default export */ -/******/ (() => { -/******/ // getDefaultExport function for compatibility with non-harmony modules -/******/ __nccwpck_require__.n = (module) => { -/******/ var getter = module && module.__esModule ? 
-/******/ () => (module['default']) : -/******/ () => (module); -/******/ __nccwpck_require__.d(getter, { a: getter }); -/******/ return getter; -/******/ }; -/******/ })(); -/******/ -/******/ /* webpack/runtime/define property getters */ -/******/ (() => { -/******/ // define getter functions for harmony exports -/******/ __nccwpck_require__.d = (exports, definition) => { -/******/ for(var key in definition) { -/******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) { -/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] }); -/******/ } -/******/ } -/******/ }; -/******/ })(); -/******/ -/******/ /* webpack/runtime/hasOwnProperty shorthand */ -/******/ (() => { -/******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop)) -/******/ })(); -/******/ /******/ /* webpack/runtime/compat */ /******/ /******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; /******/ /************************************************************************/ -var __webpack_exports__ = {}; -// This entry need to be wrapped in an IIFE because it need to be in strict mode. 
-(() => { -"use strict"; - -// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js -var lib_core = __nccwpck_require__(37484); -// EXTERNAL MODULE: ./node_modules/@actions/io/lib/io.js -var lib_io = __nccwpck_require__(94994); -// EXTERNAL MODULE: external "fs" -var external_fs_ = __nccwpck_require__(79896); -var external_fs_default = /*#__PURE__*/__nccwpck_require__.n(external_fs_); -// EXTERNAL MODULE: external "path" -var external_path_ = __nccwpck_require__(16928); -var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_); -// EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js -var glob = __nccwpck_require__(47206); -// EXTERNAL MODULE: external "crypto" -var external_crypto_ = __nccwpck_require__(76982); -var external_crypto_default = /*#__PURE__*/__nccwpck_require__.n(external_crypto_); -// EXTERNAL MODULE: external "fs/promises" -var promises_ = __nccwpck_require__(91943); -var promises_default = /*#__PURE__*/__nccwpck_require__.n(promises_); -// EXTERNAL MODULE: external "os" -var external_os_ = __nccwpck_require__(70857); -var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_); -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/error.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. 
Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -function getLineColFromPtr(string, ptr) { - let lines = string.slice(0, ptr).split(/\r\n|\n|\r/g); - return [lines.length, lines.pop().length + 1]; -} -function makeCodeBlock(string, line, column) { - let lines = string.split(/\r\n|\n|\r/g); - let codeblock = ''; - let numberLen = (Math.log10(line + 1) | 0) + 1; - for (let i = line - 1; i <= line + 1; i++) { - let l = lines[i - 1]; - if (!l) - continue; - codeblock += i.toString().padEnd(numberLen, ' '); - codeblock += ': '; - codeblock += l; - codeblock += '\n'; - if (i === line) { - codeblock += ' '.repeat(numberLen + column + 2); - codeblock += '^\n'; - } - } - return codeblock; -} -class TomlError extends Error { - line; - column; - codeblock; - constructor(message, options) { - const [line, column] = getLineColFromPtr(options.toml, options.ptr); - const codeblock = makeCodeBlock(options.toml, line, column); - super(`Invalid TOML document: ${message}\n\n${codeblock}`, options); - this.line = line; - this.column = column; - 
this.codeblock = codeblock; - } -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/util.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ - -function isEscaped(str, ptr) { - let i = 0; - while (str[ptr - ++i] === '\\') - ; - return --i && (i % 2); -} -function indexOfNewline(str, start = 0, end = str.length) { - let idx = str.indexOf('\n', start); - if (str[idx - 1] === '\r') - idx--; - return idx <= end ? idx : -1; -} -function skipComment(str, ptr) { - for (let i = ptr; i < str.length; i++) { - let c = str[i]; - if (c === '\n') - return i; - if (c === '\r' && str[i + 1] === '\n') - return i + 1; - if ((c < '\x20' && c !== '\t') || c === '\x7f') { - throw new TomlError('control characters are not allowed in comments', { - toml: str, - ptr: ptr, - }); - } - } - return str.length; -} -function skipVoid(str, ptr, banNewLines, banComments) { - let c; - while ((c = str[ptr]) === ' ' || c === '\t' || (!banNewLines && (c === '\n' || c === '\r' && str[ptr + 1] === '\n'))) - ptr++; - return banComments || c !== '#' - ? ptr - : skipVoid(str, skipComment(str, ptr), banNewLines); -} -function skipUntil(str, ptr, sep, end, banNewLines = false) { - if (!end) { - ptr = indexOfNewline(str, ptr); - return ptr < 0 ? str.length : ptr; - } - for (let i = ptr; i < str.length; i++) { - let c = str[i]; - if (c === '#') { - i = indexOfNewline(str, i); - } - else if (c === sep) { - return i + 1; - } - else if (c === end || (banNewLines && (c === '\n' || (c === '\r' && str[i + 1] === '\n')))) { - return i; - } - } - throw new TomlError('cannot find end of structure', { - toml: str, - ptr: ptr - }); -} -function getStringEnd(str, seek) { - let first = str[seek]; - let target = first === str[seek + 1] && str[seek + 1] === str[seek + 2] - ? 
str.slice(seek, seek + 3) - : first; - seek += target.length - 1; - do - seek = str.indexOf(target, ++seek); - while (seek > -1 && first !== "'" && isEscaped(str, seek)); - if (seek > -1) { - seek += target.length; - if (target.length > 1) { - if (str[seek] === first) - seek++; - if (str[seek] === first) - seek++; - } - } - return seek; -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/date.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -let DATE_TIME_RE = /^(\d{4}-\d{2}-\d{2})?[T ]?(?:(\d{2}):\d{2}:\d{2}(?:\.\d+)?)?(Z|[-+]\d{2}:\d{2})?$/i; -class TomlDate extends Date { - #hasDate = false; - #hasTime = false; - #offset = null; - constructor(date) { - let hasDate = true; - let hasTime = true; - let offset = 'Z'; - if (typeof date === 'string') { - let match = date.match(DATE_TIME_RE); - if (match) { - if (!match[1]) { - hasDate = false; - date = `0000-01-01T${date}`; - } - hasTime = !!match[2]; - // Make sure to use T instead of a space. Breaks in case of extreme values otherwise. - hasTime && date[10] === ' ' && (date = date.replace(' ', 'T')); - // Do not allow rollover hours. 
- if (match[2] && +match[2] > 23) { - date = ''; - } - else { - offset = match[3] || null; - date = date.toUpperCase(); - if (!offset && hasTime) - date += 'Z'; - } - } - else { - date = ''; - } - } - super(date); - if (!isNaN(this.getTime())) { - this.#hasDate = hasDate; - this.#hasTime = hasTime; - this.#offset = offset; - } - } - isDateTime() { - return this.#hasDate && this.#hasTime; - } - isLocal() { - return !this.#hasDate || !this.#hasTime || !this.#offset; - } - isDate() { - return this.#hasDate && !this.#hasTime; - } - isTime() { - return this.#hasTime && !this.#hasDate; - } - isValid() { - return this.#hasDate || this.#hasTime; - } - toISOString() { - let iso = super.toISOString(); - // Local Date - if (this.isDate()) - return iso.slice(0, 10); - // Local Time - if (this.isTime()) - return iso.slice(11, 23); - // Local DateTime - if (this.#offset === null) - return iso.slice(0, -1); - // Offset DateTime - if (this.#offset === 'Z') - return iso; - // This part is quite annoying: JS strips the original timezone from the ISO string representation - // Instead of using a "modified" date and "Z", we restore the representation "as authored" - let offset = (+(this.#offset.slice(1, 3)) * 60) + +(this.#offset.slice(4, 6)); - offset = this.#offset[0] === '-' ? 
offset : -offset; - let offsetDate = new Date(this.getTime() - (offset * 60e3)); - return offsetDate.toISOString().slice(0, -1) + this.#offset; - } - static wrapAsOffsetDateTime(jsDate, offset = 'Z') { - let date = new TomlDate(jsDate); - date.#offset = offset; - return date; - } - static wrapAsLocalDateTime(jsDate) { - let date = new TomlDate(jsDate); - date.#offset = null; - return date; - } - static wrapAsLocalDate(jsDate) { - let date = new TomlDate(jsDate); - date.#hasTime = false; - date.#offset = null; - return date; - } - static wrapAsLocalTime(jsDate) { - let date = new TomlDate(jsDate); - date.#hasDate = false; - date.#offset = null; - return date; - } -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/primitive.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - - -let INT_REGEX = /^((0x[0-9a-fA-F](_?[0-9a-fA-F])*)|(([+-]|0[ob])?\d(_?\d)*))$/; -let FLOAT_REGEX = /^[+-]?\d(_?\d)*(\.\d(_?\d)*)?([eE][+-]?\d(_?\d)*)?$/; -let LEADING_ZERO = /^[+-]?0[0-9_]/; -let ESCAPE_REGEX = /^[0-9a-f]{4,8}$/i; -let ESC_MAP = { - b: '\b', - t: '\t', - n: '\n', - f: '\f', - r: '\r', - '"': '"', - '\\': '\\', -}; -function parseString(str, ptr = 0, endPtr = str.length) { - let isLiteral = str[ptr] === '\''; - let isMultiline = str[ptr++] === str[ptr] && str[ptr] === str[ptr + 1]; - if (isMultiline) { - endPtr -= 2; - if (str[ptr += 2] === '\r') - ptr++; - if (str[ptr] === '\n') - ptr++; - } - let tmp = 0; - let isEscape; - let parsed = ''; - let sliceStart = ptr; - while (ptr < endPtr - 1) { - let c = str[ptr++]; - if (c === '\n' || (c === '\r' && str[ptr] === '\n')) { - if (!isMultiline) { - throw new TomlError('newlines are not allowed in strings', { - toml: str, - ptr: ptr - 1, - }); - } - } - else if ((c < '\x20' && c !== '\t') || c === '\x7f') { - throw new TomlError('control characters are not allowed in strings', { - toml: str, - ptr: ptr - 1, - }); - } - if (isEscape) { - isEscape = false; - if (c === 'u' || c === 'U') { - // Unicode escape - let code = str.slice(ptr, (ptr += (c === 'u' ? 
4 : 8))); - if (!ESCAPE_REGEX.test(code)) { - throw new TomlError('invalid unicode escape', { - toml: str, - ptr: tmp, - }); - } - try { - parsed += String.fromCodePoint(parseInt(code, 16)); - } - catch { - throw new TomlError('invalid unicode escape', { - toml: str, - ptr: tmp, - }); - } - } - else if (isMultiline && (c === '\n' || c === ' ' || c === '\t' || c === '\r')) { - // Multiline escape - ptr = skipVoid(str, ptr - 1, true); - if (str[ptr] !== '\n' && str[ptr] !== '\r') { - throw new TomlError('invalid escape: only line-ending whitespace may be escaped', { - toml: str, - ptr: tmp, - }); - } - ptr = skipVoid(str, ptr); - } - else if (c in ESC_MAP) { - // Classic escape - parsed += ESC_MAP[c]; - } - else { - throw new TomlError('unrecognized escape sequence', { - toml: str, - ptr: tmp, - }); - } - sliceStart = ptr; - } - else if (!isLiteral && c === '\\') { - tmp = ptr - 1; - isEscape = true; - parsed += str.slice(sliceStart, tmp); - } - } - return parsed + str.slice(sliceStart, endPtr - 1); -} -function parseValue(value, toml, ptr, integersAsBigInt) { - // Constant values - if (value === 'true') - return true; - if (value === 'false') - return false; - if (value === '-inf') - return -Infinity; - if (value === 'inf' || value === '+inf') - return Infinity; - if (value === 'nan' || value === '+nan' || value === '-nan') - return NaN; - // Avoid FP representation of -0 - if (value === '-0') - return integersAsBigInt ? 
0n : 0; - // Numbers - let isInt = INT_REGEX.test(value); - if (isInt || FLOAT_REGEX.test(value)) { - if (LEADING_ZERO.test(value)) { - throw new TomlError('leading zeroes are not allowed', { - toml: toml, - ptr: ptr, - }); - } - value = value.replace(/_/g, ''); - let numeric = +value; - if (isNaN(numeric)) { - throw new TomlError('invalid number', { - toml: toml, - ptr: ptr, - }); - } - if (isInt) { - if ((isInt = !Number.isSafeInteger(numeric)) && !integersAsBigInt) { - throw new TomlError('integer value cannot be represented losslessly', { - toml: toml, - ptr: ptr, - }); - } - if (isInt || integersAsBigInt === true) - numeric = BigInt(value); - } - return numeric; - } - const date = new TomlDate(value); - if (!date.isValid()) { - throw new TomlError('invalid value', { - toml: toml, - ptr: ptr, - }); - } - return date; -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/extract.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - - - -function sliceAndTrimEndOf(str, startPtr, endPtr, allowNewLines) { - let value = str.slice(startPtr, endPtr); - let commentIdx = value.indexOf('#'); - if (commentIdx > -1) { - // The call to skipComment allows to "validate" the comment - // (absence of control characters) - skipComment(str, commentIdx); - value = value.slice(0, commentIdx); - } - let trimmed = value.trimEnd(); - if (!allowNewLines) { - let newlineIdx = value.indexOf('\n', trimmed.length); - if (newlineIdx > -1) { - throw new TomlError('newlines are not allowed in inline tables', { - toml: str, - ptr: startPtr + newlineIdx - }); - } - } - return [trimmed, commentIdx]; -} -function extractValue(str, ptr, end, depth, integersAsBigInt) { - if (depth === 0) { - throw new TomlError('document contains excessively nested structures. aborting.', { - toml: str, - ptr: ptr - }); - } - let c = str[ptr]; - if (c === '[' || c === '{') { - let [value, endPtr] = c === '[' - ? parseArray(str, ptr, depth, integersAsBigInt) - : parseInlineTable(str, ptr, depth, integersAsBigInt); - let newPtr = end ? 
skipUntil(str, endPtr, ',', end) : endPtr; - if (endPtr - newPtr && end === '}') { - let nextNewLine = indexOfNewline(str, endPtr, newPtr); - if (nextNewLine > -1) { - throw new TomlError('newlines are not allowed in inline tables', { - toml: str, - ptr: nextNewLine - }); - } - } - return [value, newPtr]; - } - let endPtr; - if (c === '"' || c === "'") { - endPtr = getStringEnd(str, ptr); - let parsed = parseString(str, ptr, endPtr); - if (end) { - endPtr = skipVoid(str, endPtr, end !== ']'); - if (str[endPtr] && str[endPtr] !== ',' && str[endPtr] !== end && str[endPtr] !== '\n' && str[endPtr] !== '\r') { - throw new TomlError('unexpected character encountered', { - toml: str, - ptr: endPtr, - }); - } - endPtr += (+(str[endPtr] === ',')); - } - return [parsed, endPtr]; - } - endPtr = skipUntil(str, ptr, ',', end); - let slice = sliceAndTrimEndOf(str, ptr, endPtr - (+(str[endPtr - 1] === ',')), end === ']'); - if (!slice[0]) { - throw new TomlError('incomplete key-value declaration: no value specified', { - toml: str, - ptr: ptr - }); - } - if (end && slice[1] > -1) { - endPtr = skipVoid(str, ptr + slice[1]); - endPtr += +(str[endPtr] === ','); - } - return [ - parseValue(slice[0], str, ptr, integersAsBigInt), - endPtr, - ]; -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/struct.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. 
Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - - - -let KEY_PART_RE = /^[a-zA-Z0-9-_]+[ \t]*$/; -function parseKey(str, ptr, end = '=') { - let dot = ptr - 1; - let parsed = []; - let endPtr = str.indexOf(end, ptr); - if (endPtr < 0) { - throw new TomlError('incomplete key-value: cannot find end of key', { - toml: str, - ptr: ptr, - }); - } - do { - let c = str[ptr = ++dot]; - // If it's whitespace, ignore - if (c !== ' ' && c !== '\t') { - // If it's a string - if (c === '"' || c === '\'') { - if (c === str[ptr + 1] && c === str[ptr + 2]) { - throw new TomlError('multiline strings are not allowed in keys', { - toml: str, - ptr: ptr, - }); - } - let eos = getStringEnd(str, ptr); - if (eos < 0) { - throw new TomlError('unfinished string encountered', { - toml: str, - ptr: ptr, - }); - } - dot = str.indexOf('.', eos); - let strEnd = str.slice(eos, dot < 0 || dot > endPtr ? 
endPtr : dot); - let newLine = indexOfNewline(strEnd); - if (newLine > -1) { - throw new TomlError('newlines are not allowed in keys', { - toml: str, - ptr: ptr + dot + newLine, - }); - } - if (strEnd.trimStart()) { - throw new TomlError('found extra tokens after the string part', { - toml: str, - ptr: eos, - }); - } - if (endPtr < eos) { - endPtr = str.indexOf(end, eos); - if (endPtr < 0) { - throw new TomlError('incomplete key-value: cannot find end of key', { - toml: str, - ptr: ptr, - }); - } - } - parsed.push(parseString(str, ptr, eos)); - } - else { - // Normal raw key part consumption and validation - dot = str.indexOf('.', ptr); - let part = str.slice(ptr, dot < 0 || dot > endPtr ? endPtr : dot); - if (!KEY_PART_RE.test(part)) { - throw new TomlError('only letter, numbers, dashes and underscores are allowed in keys', { - toml: str, - ptr: ptr, - }); - } - parsed.push(part.trimEnd()); - } - } - // Until there's no more dot - } while (dot + 1 && dot < endPtr); - return [parsed, skipVoid(str, endPtr + 1, true, true)]; -} -function parseInlineTable(str, ptr, depth, integersAsBigInt) { - let res = {}; - let seen = new Set(); - let c; - let comma = 0; - ptr++; - while ((c = str[ptr++]) !== '}' && c) { - let err = { toml: str, ptr: ptr - 1 }; - if (c === '\n') { - throw new TomlError('newlines are not allowed in inline tables', err); - } - else if (c === '#') { - throw new TomlError('inline tables cannot contain comments', err); - } - else if (c === ',') { - throw new TomlError('expected key-value, found comma', err); - } - else if (c !== ' ' && c !== '\t') { - let k; - let t = res; - let hasOwn = false; - let [key, keyEndPtr] = parseKey(str, ptr - 1); - for (let i = 0; i < key.length; i++) { - if (i) - t = hasOwn ? 
t[k] : (t[k] = {}); - k = key[i]; - if ((hasOwn = Object.hasOwn(t, k)) && (typeof t[k] !== 'object' || seen.has(t[k]))) { - throw new TomlError('trying to redefine an already defined value', { - toml: str, - ptr: ptr, - }); - } - if (!hasOwn && k === '__proto__') { - Object.defineProperty(t, k, { enumerable: true, configurable: true, writable: true }); - } - } - if (hasOwn) { - throw new TomlError('trying to redefine an already defined value', { - toml: str, - ptr: ptr, - }); - } - let [value, valueEndPtr] = extractValue(str, keyEndPtr, '}', depth - 1, integersAsBigInt); - seen.add(value); - t[k] = value; - ptr = valueEndPtr; - comma = str[ptr - 1] === ',' ? ptr - 1 : 0; - } - } - if (comma) { - throw new TomlError('trailing commas are not allowed in inline tables', { - toml: str, - ptr: comma, - }); - } - if (!c) { - throw new TomlError('unfinished table encountered', { - toml: str, - ptr: ptr, - }); - } - return [res, ptr]; -} -function parseArray(str, ptr, depth, integersAsBigInt) { - let res = []; - let c; - ptr++; - while ((c = str[ptr++]) !== ']' && c) { - if (c === ',') { - throw new TomlError('expected value, found comma', { - toml: str, - ptr: ptr - 1, - }); - } - else if (c === '#') - ptr = skipComment(str, ptr); - else if (c !== ' ' && c !== '\t' && c !== '\n' && c !== '\r') { - let e = extractValue(str, ptr - 1, ']', depth - 1, integersAsBigInt); - res.push(e[0]); - ptr = e[1]; - } - } - if (!c) { - throw new TomlError('unfinished array encountered', { - toml: str, - ptr: ptr, - }); - } - return [res, ptr]; -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/parse.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. 
Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - - - -function peekTable(key, table, meta, type) { - let t = table; - let m = meta; - let k; - let hasOwn = false; - let state; - for (let i = 0; i < key.length; i++) { - if (i) { - t = hasOwn ? 
t[k] : (t[k] = {}); - m = (state = m[k]).c; - if (type === 0 /* Type.DOTTED */ && (state.t === 1 /* Type.EXPLICIT */ || state.t === 2 /* Type.ARRAY */)) { - return null; - } - if (state.t === 2 /* Type.ARRAY */) { - let l = t.length - 1; - t = t[l]; - m = m[l].c; - } - } - k = key[i]; - if ((hasOwn = Object.hasOwn(t, k)) && m[k]?.t === 0 /* Type.DOTTED */ && m[k]?.d) { - return null; - } - if (!hasOwn) { - if (k === '__proto__') { - Object.defineProperty(t, k, { enumerable: true, configurable: true, writable: true }); - Object.defineProperty(m, k, { enumerable: true, configurable: true, writable: true }); - } - m[k] = { - t: i < key.length - 1 && type === 2 /* Type.ARRAY */ - ? 3 /* Type.ARRAY_DOTTED */ - : type, - d: false, - i: 0, - c: {}, - }; - } - } - state = m[k]; - if (state.t !== type && !(type === 1 /* Type.EXPLICIT */ && state.t === 3 /* Type.ARRAY_DOTTED */)) { - // Bad key type! - return null; - } - if (type === 2 /* Type.ARRAY */) { - if (!state.d) { - state.d = true; - t[k] = []; - } - t[k].push(t = {}); - state.c[state.i++] = (state = { t: 1 /* Type.EXPLICIT */, d: false, i: 0, c: {} }); - } - if (state.d) { - // Redefining a table! - return null; - } - state.d = true; - if (type === 1 /* Type.EXPLICIT */) { - t = hasOwn ? t[k] : (t[k] = {}); - } - else if (type === 0 /* Type.DOTTED */ && hasOwn) { - return null; - } - return [k, t, state.c]; -} -function parse(toml, { maxDepth = 1000, integersAsBigInt } = {}) { - let res = {}; - let meta = {}; - let tbl = res; - let m = meta; - for (let ptr = skipVoid(toml, 0); ptr < toml.length;) { - if (toml[ptr] === '[') { - let isTableArray = toml[++ptr] === '['; - let k = parseKey(toml, ptr += +isTableArray, ']'); - if (isTableArray) { - if (toml[k[1] - 1] !== ']') { - throw new TomlError('expected end of table declaration', { - toml: toml, - ptr: k[1] - 1, - }); - } - k[1]++; - } - let p = peekTable(k[0], res, meta, isTableArray ? 
2 /* Type.ARRAY */ : 1 /* Type.EXPLICIT */); - if (!p) { - throw new TomlError('trying to redefine an already defined table or value', { - toml: toml, - ptr: ptr, - }); - } - m = p[2]; - tbl = p[1]; - ptr = k[1]; - } - else { - let k = parseKey(toml, ptr); - let p = peekTable(k[0], tbl, m, 0 /* Type.DOTTED */); - if (!p) { - throw new TomlError('trying to redefine an already defined table or value', { - toml: toml, - ptr: ptr, - }); - } - let v = extractValue(toml, k[1], void 0, maxDepth, integersAsBigInt); - p[1][p[0]] = v[0]; - ptr = v[1]; - } - ptr = skipVoid(toml, ptr, true); - if (toml[ptr] && toml[ptr] !== '\n' && toml[ptr] !== '\r') { - throw new TomlError('each key-value declaration must be followed by an end-of-line', { - toml: toml, - ptr: ptr - }); - } - ptr = skipVoid(toml, ptr); - } - return res; -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/stringify.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -let BARE_KEY = /^[a-z0-9-_]+$/i; -function extendedTypeOf(obj) { - let type = typeof obj; - if (type === 'object') { - if (Array.isArray(obj)) - return 'array'; - if (obj instanceof Date) - return 'date'; - } - return type; -} -function isArrayOfTables(obj) { - for (let i = 0; i < obj.length; i++) { - if (extendedTypeOf(obj[i]) !== 'object') - return false; - } - return obj.length != 0; -} -function formatString(s) { - return JSON.stringify(s).replace(/\x7f/g, '\\u007f'); -} -function stringifyValue(val, type, depth, numberAsFloat) { - if (depth === 0) { - throw new Error('Could not stringify the object: maximum object depth exceeded'); - } - if (type === 'number') { - if (isNaN(val)) - return 'nan'; - if (val === Infinity) - return 'inf'; - if (val === -Infinity) - return '-inf'; - if (numberAsFloat && Number.isInteger(val)) - return val.toFixed(1); - return val.toString(); - } - if (type === 'bigint' || type === 'boolean') { - return val.toString(); - } - if (type === 'string') { - return formatString(val); - } - if (type === 'date') { - if (isNaN(val.getTime())) { - throw new TypeError('cannot serialize invalid date'); - } - return val.toISOString(); - } - if (type === 'object') { - return stringifyInlineTable(val, depth, numberAsFloat); - } - if (type === 'array') { - return stringifyArray(val, depth, numberAsFloat); - } -} -function stringifyInlineTable(obj, depth, numberAsFloat) { - let keys = Object.keys(obj); - if 
(keys.length === 0) - return '{}'; - let res = '{ '; - for (let i = 0; i < keys.length; i++) { - let k = keys[i]; - if (i) - res += ', '; - res += BARE_KEY.test(k) ? k : formatString(k); - res += ' = '; - res += stringifyValue(obj[k], extendedTypeOf(obj[k]), depth - 1, numberAsFloat); - } - return res + ' }'; -} -function stringifyArray(array, depth, numberAsFloat) { - if (array.length === 0) - return '[]'; - let res = '[ '; - for (let i = 0; i < array.length; i++) { - if (i) - res += ', '; - if (array[i] === null || array[i] === void 0) { - throw new TypeError('arrays cannot contain null or undefined values'); - } - res += stringifyValue(array[i], extendedTypeOf(array[i]), depth - 1, numberAsFloat); - } - return res + ' ]'; -} -function stringifyArrayTable(array, key, depth, numberAsFloat) { - if (depth === 0) { - throw new Error('Could not stringify the object: maximum object depth exceeded'); - } - let res = ''; - for (let i = 0; i < array.length; i++) { - res += `[[${key}]]\n`; - res += stringifyTable(array[i], key, depth, numberAsFloat); - res += '\n\n'; - } - return res; -} -function stringifyTable(obj, prefix, depth, numberAsFloat) { - if (depth === 0) { - throw new Error('Could not stringify the object: maximum object depth exceeded'); - } - let preamble = ''; - let tables = ''; - let keys = Object.keys(obj); - for (let i = 0; i < keys.length; i++) { - let k = keys[i]; - if (obj[k] !== null && obj[k] !== void 0) { - let type = extendedTypeOf(obj[k]); - if (type === 'symbol' || type === 'function') { - throw new TypeError(`cannot serialize values of type '${type}'`); - } - let key = BARE_KEY.test(k) ? k : formatString(k); - if (type === 'array' && isArrayOfTables(obj[k])) { - tables += stringifyArrayTable(obj[k], prefix ? `${prefix}.${key}` : key, depth - 1, numberAsFloat); - } - else if (type === 'object') { - let tblKey = prefix ? 
`${prefix}.${key}` : key; - tables += `[${tblKey}]\n`; - tables += stringifyTable(obj[k], tblKey, depth - 1, numberAsFloat); - tables += '\n\n'; - } - else { - preamble += key; - preamble += ' = '; - preamble += stringifyValue(obj[k], type, depth, numberAsFloat); - preamble += '\n'; - } - } - } - return `${preamble}\n${tables}`.trim(); -} -function stringify(obj, { maxDepth = 1000, numbersAsFloat = false } = {}) { - if (extendedTypeOf(obj) !== 'object') { - throw new TypeError('stringify can only be called with an object'); - } - return stringifyTable(obj, '', maxDepth, numbersAsFloat); -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/index.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - - - -/* harmony default export */ const dist = ({ parse: parse, stringify: stringify, TomlDate: TomlDate, TomlError: TomlError }); - - -// EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js -var exec = __nccwpck_require__(95236); -// EXTERNAL MODULE: ./node_modules/@actions/buildjet-cache/lib/cache.js -var lib_cache = __nccwpck_require__(24318); -// EXTERNAL MODULE: ./node_modules/@actions/warpbuild-cache/lib/cache.js -var warpbuild_cache_lib_cache = __nccwpck_require__(22343); -// EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js -var cache_lib_cache = __nccwpck_require__(5116); -;// CONCATENATED MODULE: ./src/utils.js - - - - - - -function reportError(e) { - const { commandFailed } = e; - if (commandFailed) { - lib_core.error(`Command failed: ${commandFailed.command}`); - lib_core.error(commandFailed.stderr); - } - else { - lib_core.error(`${e.stack}`); - } -} -async function getCmdOutput(cmd, args = [], options = {}) { - let stdout = ""; - let stderr = ""; - try { - await exec.exec(cmd, args, { - silent: true, - listeners: { - stdout(data) { - stdout += data.toString(); - }, - stderr(data) { - stderr += data.toString(); - }, - }, - ...options, - }); - } - catch (e) { - e.commandFailed = { - command: `${cmd} ${args.join(" ")}`, - stderr, - }; - throw e; - } - return stdout; -} -function getCacheProvider() { - const cacheProvider = lib_core.getInput("cache-provider"); - let cache; - switch (cacheProvider) { - 
case "github": - cache = cache_lib_cache; - break; - case "buildjet": - cache = lib_cache; - break; - case "warpbuild": - cache = warpbuild_cache_lib_cache; - break; - default: - throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`); - } - return { - name: cacheProvider, - cache: cache, - }; -} -async function utils_exists(path) { - try { - await external_fs_default().promises.access(path); - return true; - } - catch { - return false; - } -} - -;// CONCATENATED MODULE: ./src/workspace.js - - - -const SAVE_TARGETS = new Set(["lib", "proc-macro"]); -class Workspace { - constructor(root, target) { - this.root = root; - this.target = target; - } - async getPackages(filter, ...extraArgs) { - let packages = []; - try { - lib_core.debug(`collecting metadata for "${this.root}"`); - const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], { - cwd: this.root, - env: { "CARGO_ENCODED_RUSTFLAGS": "" }, - })); - lib_core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`); - for (const pkg of meta.packages.filter(filter)) { - const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); - packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); - } - } - catch (err) { - console.error(err); - } - return packages; - } - async getPackagesOutsideWorkspaceRoot() { - return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root)); - } - async getWorkspaceMembers() { - return await this.getPackages((_) => true, "--no-deps"); - } -} - -;// CONCATENATED MODULE: ./src/config.js - - - - - - - - - - - -const HOME = external_os_default().homedir(); -const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); -const STATE_CONFIG = "RUST_CACHE_CONFIG"; -const HASH_LENGTH = 8; -class CacheConfig { - constructor() { - /** 
All the paths we want to cache */ - this.cachePaths = []; - /** The primary cache key */ - this.cacheKey = ""; - /** The secondary (restore) key that only contains the prefix and environment */ - this.restoreKey = ""; - /** Whether to cache CARGO_HOME/.bin */ - this.cacheBin = true; - /** The workspace configurations */ - this.workspaces = []; - /** The cargo binaries present during main step */ - this.cargoBins = []; - /** The prefix portion of the cache key */ - this.keyPrefix = ""; - /** The rust version considered for the cache key */ - this.keyRust = ""; - /** The environment variables considered for the cache key */ - this.keyEnvs = []; - /** The files considered for the cache key */ - this.keyFiles = []; - } - /** - * Constructs a [`CacheConfig`] with all the paths and keys. - * - * This will read the action `input`s, and read and persist `state` as necessary. - */ - static async new() { - const self = new CacheConfig(); - // Construct key prefix: - // This uses either the `shared-key` input, - // or the `key` input combined with the `job` key. - let key = lib_core.getInput("prefix-key") || "v0-rust"; - const sharedKey = lib_core.getInput("shared-key"); - if (sharedKey) { - key += `-${sharedKey}`; - } - else { - const inputKey = lib_core.getInput("key"); - if (inputKey) { - key += `-${inputKey}`; - } - const job = process.env.GITHUB_JOB; - if (job) { - key += `-${job}`; - } - } - // Add runner OS and CPU architecture to the key to avoid cross-contamination of cache - const runnerOS = external_os_default().type(); - const runnerArch = external_os_default().arch(); - key += `-${runnerOS}-${runnerArch}`; - self.keyPrefix = key; - // Construct environment portion of the key: - // This consists of a hash that considers the rust version - // as well as all the environment variables as given by a default list - // and the `env-vars` input. - // The env vars are sorted, matched by prefix and hashed into the - // resulting environment hash. 
- let hasher = external_crypto_default().createHash("sha1"); - const rustVersion = await getRustVersion(); - let keyRust = `${rustVersion.release} ${rustVersion.host}`; - hasher.update(keyRust); - hasher.update(rustVersion["commit-hash"]); - keyRust += ` (${rustVersion["commit-hash"]})`; - self.keyRust = keyRust; - // these prefixes should cover most of the compiler / rust / cargo keys - const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; - envPrefixes.push(...lib_core.getInput("env-vars").split(/\s+/).filter(Boolean)); - // sort the available env vars so we have a more stable hash - const keyEnvs = []; - const envKeys = Object.keys(process.env); - envKeys.sort((a, b) => a.localeCompare(b)); - for (const key of envKeys) { - const value = process.env[key]; - if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { - hasher.update(`${key}=${value}`); - keyEnvs.push(key); - } - } - self.keyEnvs = keyEnvs; - key += `-${digest(hasher)}`; - self.restoreKey = key; - // Construct the lockfiles portion of the key: - // This considers all the files found via globbing for various manifests - // and lockfiles. - self.cacheBin = lib_core.getInput("cache-bin").toLowerCase() == "true"; - // Constructs the workspace config and paths to restore: - // The workspaces are given using a `$workspace -> $target` syntax. 
- const workspaces = []; - const workspacesInput = lib_core.getInput("workspaces") || "."; - for (const workspace of workspacesInput.trim().split("\n")) { - let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); - root = external_path_default().resolve(root); - target = external_path_default().join(root, target); - workspaces.push(new Workspace(root, target)); - } - self.workspaces = workspaces; - let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml"); - const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed - hasher = external_crypto_default().createHash("sha1"); - for (const workspace of workspaces) { - const root = workspace.root; - keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`))); - const workspaceMembers = await workspace.getWorkspaceMembers(); - const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => external_path_default().join(member.path, "Cargo.toml"))); - for (const cargo_manifest of cargo_manifests) { - try { - const content = await promises_default().readFile(cargo_manifest, { encoding: "utf8" }); - // Use any since TomlPrimitive is not exposed - const parsed = parse(content); - if ("package" in parsed) { - const pack = parsed.package; - if ("version" in pack) { - pack["version"] = "0.0.0"; - } - } - for (const prefix of ["", "build-", "dev-"]) { - const section_name = `${prefix}dependencies`; - if (!(section_name in parsed)) { - continue; - } - const deps = parsed[section_name]; - for (const key of Object.keys(deps)) { - const dep = deps[key]; - try { - if ("path" in dep) { - dep.version = "0.0.0"; - dep.path = ""; - } - } - catch (_e) { - // Not an object, probably a string (version), - // continue. 
- continue; - } - } - } - hasher.update(JSON.stringify(parsed)); - parsedKeyFiles.push(cargo_manifest); - } - catch (e) { - // Fallback to caching them as regular file - lib_core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`); - keyFiles.push(cargo_manifest); - } - } - const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock"); - if (await utils_exists(cargo_lock)) { - try { - const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" }); - const parsed = parse(content); - if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) { - // Fallback to caching them as regular file since this action - // can only handle Cargo.lock format version 3 - lib_core.warning("Unsupported Cargo.lock format, fallback to caching entire file"); - keyFiles.push(cargo_lock); - continue; - } - // Package without `[[package]].source` and `[[package]].checksum` - // are the one with `path = "..."` to crates within the workspace. 
- const packages = parsed.package.filter((p) => "source" in p || "checksum" in p); - hasher.update(JSON.stringify(packages)); - parsedKeyFiles.push(cargo_lock); - } - catch (e) { - // Fallback to caching them as regular file - lib_core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`); - keyFiles.push(cargo_lock); - } - } - } - keyFiles = sort_and_uniq(keyFiles); - for (const file of keyFiles) { - for await (const chunk of external_fs_default().createReadStream(file)) { - hasher.update(chunk); - } - } - let lockHash = digest(hasher); - keyFiles.push(...parsedKeyFiles); - self.keyFiles = sort_and_uniq(keyFiles); - key += `-${lockHash}`; - self.cacheKey = key; - self.cachePaths = [external_path_default().join(config_CARGO_HOME, "registry"), external_path_default().join(config_CARGO_HOME, "git")]; - if (self.cacheBin) { - self.cachePaths = [ - external_path_default().join(config_CARGO_HOME, "bin"), - external_path_default().join(config_CARGO_HOME, ".crates.toml"), - external_path_default().join(config_CARGO_HOME, ".crates2.json"), - ...self.cachePaths, - ]; - } - const cacheTargets = lib_core.getInput("cache-targets").toLowerCase() || "true"; - if (cacheTargets === "true") { - self.cachePaths.push(...workspaces.map((ws) => ws.target)); - } - const cacheDirectories = lib_core.getInput("cache-directories"); - for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { - self.cachePaths.push(dir); - } - const bins = await getCargoBins(); - self.cargoBins = Array.from(bins.values()); - return self; - } - /** - * Reads and returns the cache config from the action `state`. - * - * @throws {Error} if the state is not present. - * @returns {CacheConfig} the configuration. 
- * @see {@link CacheConfig#saveState} - * @see {@link CacheConfig#new} - */ - static fromState() { - const source = lib_core.getState(STATE_CONFIG); - if (!source) { - throw new Error("Cache configuration not found in state"); - } - const self = new CacheConfig(); - Object.assign(self, JSON.parse(source)); - self.workspaces = self.workspaces.map((w) => new Workspace(w.root, w.target)); - return self; - } - /** - * Prints the configuration to the action log. - */ - printInfo(cacheProvider) { - lib_core.startGroup("Cache Configuration"); - lib_core.info(`Cache Provider:`); - lib_core.info(` ${cacheProvider.name}`); - lib_core.info(`Workspaces:`); - for (const workspace of this.workspaces) { - lib_core.info(` ${workspace.root}`); - } - lib_core.info(`Cache Paths:`); - for (const path of this.cachePaths) { - lib_core.info(` ${path}`); - } - lib_core.info(`Restore Key:`); - lib_core.info(` ${this.restoreKey}`); - lib_core.info(`Cache Key:`); - lib_core.info(` ${this.cacheKey}`); - lib_core.info(`.. Prefix:`); - lib_core.info(` - ${this.keyPrefix}`); - lib_core.info(`.. Environment considered:`); - lib_core.info(` - Rust Version: ${this.keyRust}`); - for (const env of this.keyEnvs) { - lib_core.info(` - ${env}`); - } - lib_core.info(`.. Lockfiles considered:`); - for (const file of this.keyFiles) { - lib_core.info(` - ${file}`); - } - lib_core.endGroup(); - } - /** - * Saves the configuration to the state store. - * This is used to restore the configuration in the post action. - */ - saveState() { - lib_core.saveState(STATE_CONFIG, this); - } -} -/** - * Checks if the cache is up to date. - * - * @returns `true` if the cache is up to date, `false` otherwise. - */ -function isCacheUpToDate() { - return core.getState(STATE_CONFIG) === ""; -} -/** - * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`. - * - * @param hasher The hasher to digest. - * @returns The hex digest. 
- */ -function digest(hasher) { - return hasher.digest("hex").substring(0, HASH_LENGTH); -} -async function getRustVersion() { - const stdout = await getCmdOutput("rustc", ["-vV"]); - let splits = stdout - .split(/[\n\r]+/) - .filter(Boolean) - .map((s) => s.split(":").map((s) => s.trim())) - .filter((s) => s.length === 2); - return Object.fromEntries(splits); -} -async function globFiles(pattern) { - const globber = await glob.create(pattern, { - followSymbolicLinks: false, - }); - // fs.statSync resolve the symbolic link and returns stat for the - // file it pointed to, so isFile would make sure the resolved - // file is actually a regular file. - return (await globber.glob()).filter((file) => external_fs_default().statSync(file).isFile()); -} -function sort_and_uniq(a) { - return a - .sort((a, b) => a.localeCompare(b)) - .reduce((accumulator, currentValue) => { - const len = accumulator.length; - // If accumulator is empty or its last element != currentValue - // Since array is already sorted, elements with the same value - // are grouped together to be continugous in space. - // - // If currentValue != last element, then it must be unique. - if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) { - accumulator.push(currentValue); - } - return accumulator; - }, []); -} - -;// CONCATENATED MODULE: ./src/cleanup.js - - - - - - -async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { - lib_core.debug(`cleaning target directory "${targetDir}"`); - // remove all *files* from the profile directory - let dir = await external_fs_default().promises.opendir(targetDir); - for await (const dirent of dir) { - if (dirent.isDirectory()) { - let dirName = external_path_default().join(dir.path, dirent.name); - // is it a profile dir, or a nested target dir? 
- let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json"))); - try { - if (isNestedTarget) { - await cleanTargetDir(dirName, packages, checkTimestamp); - } - else { - await cleanProfileTarget(dirName, packages, checkTimestamp); - } - } - catch { } - } - else if (dirent.name !== "CACHEDIR.TAG") { - await rm(dir.path, dirent); - } - } -} -async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { - lib_core.debug(`cleaning profile directory "${profileDir}"`); - // Quite a few testing utility crates store compilation artifacts as nested - // workspaces under `target/tests`. Notably, `target/tests/target` and - // `target/tests/trybuild`. - if (external_path_default().basename(profileDir) === "tests") { - try { - // https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25 - // https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27 - cleanTargetDir(external_path_default().join(profileDir, "target"), packages, checkTimestamp); - } - catch { } - try { - // https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50 - cleanTargetDir(external_path_default().join(profileDir, "trybuild"), packages, checkTimestamp); - } - catch { } - // Delete everything else. 
- await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp); - return; - } - let keepProfile = new Set(["build", ".fingerprint", "deps"]); - await rmExcept(profileDir, keepProfile); - const keepPkg = new Set(packages.map((p) => p.name)); - await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp); - await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); - const keepDeps = new Set(packages.flatMap((p) => { - const names = []; - for (const n of [p.name, ...p.targets]) { - const name = n.replace(/-/g, "_"); - names.push(name, `lib${name}`); - } - return names; - })); - await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp); -} -async function getCargoBins() { - const bins = new Set(); - try { - const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(config_CARGO_HOME, ".crates2.json"), "utf8")); - for (const pkg of Object.values(installs)) { - for (const bin of pkg.bins) { - bins.add(bin); - } - } - } - catch { } - return bins; -} -/** - * Clean the cargo bin directory, removing the binaries that existed - * when the action started, as they were not created by the build. - * - * @param oldBins The binaries that existed when the action started. 
- */ -async function cleanBin(oldBins) { - const bins = await getCargoBins(); - for (const bin of oldBins) { - bins.delete(bin); - } - const dir = await fs.promises.opendir(path.join(CARGO_HOME, "bin")); - for await (const dirent of dir) { - if (dirent.isFile() && !bins.has(dirent.name)) { - await rm(dir.path, dirent); - } - } -} -async function cleanRegistry(packages, crates = true) { - // remove `.cargo/credentials.toml` - try { - const credentials = path.join(CARGO_HOME, ".cargo", "credentials.toml"); - core.debug(`deleting "${credentials}"`); - await fs.promises.unlink(credentials); - } - catch { } - // `.cargo/registry/index` - let pkgSet = new Set(packages.map((p) => p.name)); - const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index")); - for await (const dirent of indexDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` - // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` - const dirPath = path.join(indexDir.path, dirent.name); - // for a git registry, we can remove `.cache`, as cargo will recreate it from git - if (await exists(path.join(dirPath, ".git"))) { - await rmRF(path.join(dirPath, ".cache")); - } - else { - await cleanRegistryIndexCache(dirPath, pkgSet); - } - } - } - if (!crates) { - core.debug("skipping registry cache and src cleanup"); - return; - } - // `.cargo/registry/src` - // Cargo usually re-creates these from the `.crate` cache below, - // but for some reason that does not work for `-sys` crates that check timestamps - // to decide if rebuilds are necessary. 
- pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`)); - const srcDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "src")); - for await (const dirent of srcDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/src/github.com-1ecc6299db9ec823` - // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772` - const dir = await fs.promises.opendir(path.join(srcDir.path, dirent.name)); - for await (const dirent of dir) { - if (dirent.isDirectory() && !pkgSet.has(dirent.name)) { - await rmRF(path.join(dir.path, dirent.name)); - } - } - } - } - // `.cargo/registry/cache` - pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); - const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache")); - for await (const dirent of cacheDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` - // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` - const dir = await fs.promises.opendir(path.join(cacheDir.path, dirent.name)); - for await (const dirent of dir) { - // here we check that the downloaded `.crate` matches one from our dependencies - if (dirent.isFile() && !pkgSet.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - } -} -/// Recursively walks and cleans the index `.cache` -async function cleanRegistryIndexCache(dirName, keepPkg) { - let dirIsEmpty = true; - const cacheDir = await fs.promises.opendir(dirName); - for await (const dirent of cacheDir) { - if (dirent.isDirectory()) { - if (await cleanRegistryIndexCache(path.join(dirName, dirent.name), keepPkg)) { - await rm(dirName, dirent); - } - else { - dirIsEmpty && (dirIsEmpty = false); - } - } - else { - if (keepPkg.has(dirent.name)) { - dirIsEmpty && (dirIsEmpty = false); - } - else { - await rm(dirName, dirent); - } - } - } - return dirIsEmpty; -} -async function cleanGit(packages) { - const coPath = path.join(CARGO_HOME, "git", "checkouts"); - 
const dbPath = path.join(CARGO_HOME, "git", "db"); - const repos = new Map(); - for (const p of packages) { - if (!p.path.startsWith(coPath)) { - continue; - } - const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep); - const refs = repos.get(repo); - if (refs) { - refs.add(ref); - } - else { - repos.set(repo, new Set([ref])); - } - } - // we have to keep both the clone, and the checkout, removing either will - // trigger a rebuild - // clean the db - try { - let dir = await fs.promises.opendir(dbPath); - for await (const dirent of dir) { - if (!repos.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - catch { } - // clean the checkouts - try { - let dir = await fs.promises.opendir(coPath); - for await (const dirent of dir) { - const refs = repos.get(dirent.name); - if (!refs) { - await rm(dir.path, dirent); - continue; - } - if (!dirent.isDirectory()) { - continue; - } - const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name)); - for await (const dirent of refsDir) { - if (!refs.has(dirent.name)) { - await rm(refsDir.path, dirent); - } - } - } - } - catch { } -} -const ONE_WEEK = 7 * 24 * 3600 * 1000; -/** - * Removes all files or directories in `dirName` matching some criteria. - * - * When the `checkTimestamp` flag is set, this will also remove anything older - * than one week. - * - * Otherwise, it will remove everything that does not match any string in the - * `keepPrefix` set. - * The matching strips and trailing `-$hash` suffix. 
- */ -async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { - const dir = await external_fs_default().promises.opendir(dirName); - for await (const dirent of dir) { - if (checkTimestamp) { - const fileName = external_path_default().join(dir.path, dirent.name); - const { mtime } = await external_fs_default().promises.stat(fileName); - const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; - if (isOutdated) { - await rm(dir.path, dirent); - } - return; - } - let name = dirent.name; - // strip the trailing hash - const idx = name.lastIndexOf("-"); - if (idx !== -1) { - name = name.slice(0, idx); - } - if (!keepPrefix.has(name)) { - await rm(dir.path, dirent); - } - } -} -async function rm(parent, dirent) { - try { - const fileName = external_path_default().join(parent, dirent.name); - lib_core.debug(`deleting "${fileName}"`); - if (dirent.isFile()) { - await external_fs_default().promises.unlink(fileName); - } - else if (dirent.isDirectory()) { - await lib_io.rmRF(fileName); - } - } - catch { } -} -async function rmRF(dirName) { - core.debug(`deleting "${dirName}"`); - await io.rmRF(dirName); -} - -;// CONCATENATED MODULE: ./src/restore.ts - - - - -process.on("uncaughtException", (e) => { - lib_core.error(e.message); - if (e.stack) { - lib_core.error(e.stack); - } -}); -async function run() { - const cacheProvider = getCacheProvider(); - if (!cacheProvider.cache.isFeatureAvailable()) { - setCacheHitOutput(false); - return; - } - try { - var cacheOnFailure = lib_core.getInput("cache-on-failure").toLowerCase(); - if (cacheOnFailure !== "true") { - cacheOnFailure = "false"; - } - var lookupOnly = lib_core.getInput("lookup-only").toLowerCase() === "true"; - lib_core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure); - lib_core.exportVariable("CARGO_INCREMENTAL", 0); - const config = await CacheConfig.new(); - config.printInfo(cacheProvider); - lib_core.info(""); - lib_core.info(`... ${lookupOnly ? 
"Checking" : "Restoring"} cache ...`); - const key = config.cacheKey; - // Pass a copy of cachePaths to avoid mutating the original array as reported by: - // https://github.com/actions/toolkit/pull/1378 - // TODO: remove this once the underlying bug is fixed. - const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], { - lookupOnly, - }); - if (restoreKey) { - const match = restoreKey === key; - lib_core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`); - if (!match) { - // pre-clean the target directory on cache mismatch - for (const workspace of config.workspaces) { - try { - await cleanTargetDir(workspace.target, [], true); - } - catch { } - } - // We restored the cache but it is not a full match. - config.saveState(); - } - setCacheHitOutput(match); - } - else { - lib_core.info("No cache found."); - config.saveState(); - setCacheHitOutput(false); - } - } - catch (e) { - setCacheHitOutput(false); - reportError(e); - } - process.exit(); -} -function setCacheHitOutput(cacheHit) { - lib_core.setOutput("cache-hit", cacheHit.toString()); -} -run(); - -})(); - -module.exports = __webpack_exports__; +/******/ +/******/ // startup +/******/ // Load entry module and return exports +/******/ // This entry module is referenced by other modules so it can't be inlined +/******/ var __webpack_exports__ = __nccwpck_require__(473); +/******/ module.exports = __webpack_exports__; +/******/ /******/ })() ; \ No newline at end of file diff --git a/dist/save/index.js b/dist/save/index.js index 326ee3f..b1af47a 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -85015,6 +85015,1062 @@ class Queue { module.exports = Queue; +/***/ }), + +/***/ 55541: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.cleanTargetDir = cleanTargetDir; +exports.getCargoBins = getCargoBins; +exports.cleanBin = cleanBin; +exports.cleanRegistry = cleanRegistry; +exports.cleanGit = cleanGit; +const core = __importStar(__nccwpck_require__(37484)); +const io = __importStar(__nccwpck_require__(94994)); +const fs_1 = __importDefault(__nccwpck_require__(79896)); +const path_1 = __importDefault(__nccwpck_require__(16928)); +const config_1 = __nccwpck_require__(1283); +const utils_1 = __nccwpck_require__(95804); +async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { + core.debug(`cleaning target directory "${targetDir}"`); + // remove all *files* from the profile directory + let dir = await fs_1.default.promises.opendir(targetDir); + for await (const dirent of dir) { + if (dirent.isDirectory()) { + let dirName = path_1.default.join(dir.path, dirent.name); + // is it a profile dir, or a nested target dir? + let isNestedTarget = (await (0, utils_1.exists)(path_1.default.join(dirName, "CACHEDIR.TAG"))) || (await (0, utils_1.exists)(path_1.default.join(dirName, ".rustc_info.json"))); + try { + if (isNestedTarget) { + await cleanTargetDir(dirName, packages, checkTimestamp); + } + else { + await cleanProfileTarget(dirName, packages, checkTimestamp); + } + } + catch { } + } + else if (dirent.name !== "CACHEDIR.TAG") { + await rm(dir.path, dirent); + } + } +} +async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { + core.debug(`cleaning profile directory "${profileDir}"`); + // Quite a few testing utility crates store compilation artifacts as nested + // workspaces under `target/tests`. Notably, `target/tests/target` and + // `target/tests/trybuild`. 
+ if (path_1.default.basename(profileDir) === "tests") { + try { + // https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25 + // https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27 + cleanTargetDir(path_1.default.join(profileDir, "target"), packages, checkTimestamp); + } + catch { } + try { + // https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50 + cleanTargetDir(path_1.default.join(profileDir, "trybuild"), packages, checkTimestamp); + } + catch { } + // Delete everything else. + await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp); + return; + } + let keepProfile = new Set(["build", ".fingerprint", "deps"]); + await rmExcept(profileDir, keepProfile); + const keepPkg = new Set(packages.map((p) => p.name)); + await rmExcept(path_1.default.join(profileDir, "build"), keepPkg, checkTimestamp); + await rmExcept(path_1.default.join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); + const keepDeps = new Set(packages.flatMap((p) => { + const names = []; + for (const n of [p.name, ...p.targets]) { + const name = n.replace(/-/g, "_"); + names.push(name, `lib${name}`); + } + return names; + })); + await rmExcept(path_1.default.join(profileDir, "deps"), keepDeps, checkTimestamp); +} +async function getCargoBins() { + const bins = new Set(); + try { + const { installs } = JSON.parse(await fs_1.default.promises.readFile(path_1.default.join(config_1.CARGO_HOME, ".crates2.json"), "utf8")); + for (const pkg of Object.values(installs)) { + for (const bin of pkg.bins) { + bins.add(bin); + } + } + } + catch { } + return bins; +} +/** + * Clean the cargo bin directory, removing the binaries that existed + * when the action started, as they were not created by the build. + * + * @param oldBins The binaries that existed when the action started. 
+ */ +async function cleanBin(oldBins) { + const bins = await getCargoBins(); + for (const bin of oldBins) { + bins.delete(bin); + } + const dir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "bin")); + for await (const dirent of dir) { + if (dirent.isFile() && !bins.has(dirent.name)) { + await rm(dir.path, dirent); + } + } +} +async function cleanRegistry(packages, crates = true) { + // remove `.cargo/credentials.toml` + try { + const credentials = path_1.default.join(config_1.CARGO_HOME, ".cargo", "credentials.toml"); + core.debug(`deleting "${credentials}"`); + await fs_1.default.promises.unlink(credentials); + } + catch { } + // `.cargo/registry/index` + let pkgSet = new Set(packages.map((p) => p.name)); + const indexDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "index")); + for await (const dirent of indexDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` + // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` + const dirPath = path_1.default.join(indexDir.path, dirent.name); + // for a git registry, we can remove `.cache`, as cargo will recreate it from git + if (await (0, utils_1.exists)(path_1.default.join(dirPath, ".git"))) { + await rmRF(path_1.default.join(dirPath, ".cache")); + } + else { + await cleanRegistryIndexCache(dirPath, pkgSet); + } + } + } + if (!crates) { + core.debug("skipping registry cache and src cleanup"); + return; + } + // `.cargo/registry/src` + // Cargo usually re-creates these from the `.crate` cache below, + // but for some reason that does not work for `-sys` crates that check timestamps + // to decide if rebuilds are necessary. 
+ pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`)); + const srcDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "src")); + for await (const dirent of srcDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/src/github.com-1ecc6299db9ec823` + // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772` + const dir = await fs_1.default.promises.opendir(path_1.default.join(srcDir.path, dirent.name)); + for await (const dirent of dir) { + if (dirent.isDirectory() && !pkgSet.has(dirent.name)) { + await rmRF(path_1.default.join(dir.path, dirent.name)); + } + } + } + } + // `.cargo/registry/cache` + pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); + const cacheDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "cache")); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` + // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` + const dir = await fs_1.default.promises.opendir(path_1.default.join(cacheDir.path, dirent.name)); + for await (const dirent of dir) { + // here we check that the downloaded `.crate` matches one from our dependencies + if (dirent.isFile() && !pkgSet.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + } +} +/// Recursively walks and cleans the index `.cache` +async function cleanRegistryIndexCache(dirName, keepPkg) { + let dirIsEmpty = true; + const cacheDir = await fs_1.default.promises.opendir(dirName); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + if (await cleanRegistryIndexCache(path_1.default.join(dirName, dirent.name), keepPkg)) { + await rm(dirName, dirent); + } + else { + dirIsEmpty && (dirIsEmpty = false); + } + } + else { + if (keepPkg.has(dirent.name)) { + dirIsEmpty && (dirIsEmpty = false); + } + else { + await rm(dirName, dirent); + } + } + 
} + return dirIsEmpty; +} +async function cleanGit(packages) { + const coPath = path_1.default.join(config_1.CARGO_HOME, "git", "checkouts"); + const dbPath = path_1.default.join(config_1.CARGO_HOME, "git", "db"); + const repos = new Map(); + for (const p of packages) { + if (!p.path.startsWith(coPath)) { + continue; + } + const [repo, ref] = p.path.slice(coPath.length + 1).split(path_1.default.sep); + const refs = repos.get(repo); + if (refs) { + refs.add(ref); + } + else { + repos.set(repo, new Set([ref])); + } + } + // we have to keep both the clone, and the checkout, removing either will + // trigger a rebuild + // clean the db + try { + let dir = await fs_1.default.promises.opendir(dbPath); + for await (const dirent of dir) { + if (!repos.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + catch { } + // clean the checkouts + try { + let dir = await fs_1.default.promises.opendir(coPath); + for await (const dirent of dir) { + const refs = repos.get(dirent.name); + if (!refs) { + await rm(dir.path, dirent); + continue; + } + if (!dirent.isDirectory()) { + continue; + } + const refsDir = await fs_1.default.promises.opendir(path_1.default.join(dir.path, dirent.name)); + for await (const dirent of refsDir) { + if (!refs.has(dirent.name)) { + await rm(refsDir.path, dirent); + } + } + } + } + catch { } +} +const ONE_WEEK = 7 * 24 * 3600 * 1000; +/** + * Removes all files or directories in `dirName` matching some criteria. + * + * When the `checkTimestamp` flag is set, this will also remove anything older + * than one week. + * + * Otherwise, it will remove everything that does not match any string in the + * `keepPrefix` set. + * The matching strips and trailing `-$hash` suffix. 
+ */ +async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { + const dir = await fs_1.default.promises.opendir(dirName); + for await (const dirent of dir) { + if (checkTimestamp) { + const fileName = path_1.default.join(dir.path, dirent.name); + const { mtime } = await fs_1.default.promises.stat(fileName); + const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; + if (isOutdated) { + await rm(dir.path, dirent); + } + return; + } + let name = dirent.name; + // strip the trailing hash + const idx = name.lastIndexOf("-"); + if (idx !== -1) { + name = name.slice(0, idx); + } + if (!keepPrefix.has(name)) { + await rm(dir.path, dirent); + } + } +} +async function rm(parent, dirent) { + try { + const fileName = path_1.default.join(parent, dirent.name); + core.debug(`deleting "${fileName}"`); + if (dirent.isFile()) { + await fs_1.default.promises.unlink(fileName); + } + else if (dirent.isDirectory()) { + await io.rmRF(fileName); + } + } + catch { } +} +async function rmRF(dirName) { + core.debug(`deleting "${dirName}"`); + await io.rmRF(dirName); +} + + +/***/ }), + +/***/ 1283: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.CacheConfig = exports.CARGO_HOME = void 0; +exports.isCacheUpToDate = isCacheUpToDate; +const core = __importStar(__nccwpck_require__(37484)); +const glob = __importStar(__nccwpck_require__(47206)); +const crypto_1 = __importDefault(__nccwpck_require__(76982)); +const fs_1 = __importDefault(__nccwpck_require__(79896)); +const promises_1 = __importDefault(__nccwpck_require__(91943)); +const os_1 = __importDefault(__nccwpck_require__(70857)); +const path_1 = __importDefault(__nccwpck_require__(16928)); +const toml = __importStar(__nccwpck_require__(27106)); +const cleanup_1 = __nccwpck_require__(55541); +const utils_1 = __nccwpck_require__(95804); +const workspace_1 = __nccwpck_require__(87896); +const HOME = os_1.default.homedir(); +exports.CARGO_HOME = process.env.CARGO_HOME || path_1.default.join(HOME, ".cargo"); +const STATE_CONFIG = "RUST_CACHE_CONFIG"; +const HASH_LENGTH = 8; +class CacheConfig { + constructor() { + /** All the paths we want to cache */ + this.cachePaths = []; + /** The primary cache key */ + this.cacheKey = ""; + /** The secondary 
(restore) key that only contains the prefix and environment */ + this.restoreKey = ""; + /** Whether to cache CARGO_HOME/.bin */ + this.cacheBin = true; + /** The workspace configurations */ + this.workspaces = []; + /** The cargo binaries present during main step */ + this.cargoBins = []; + /** The prefix portion of the cache key */ + this.keyPrefix = ""; + /** The rust version considered for the cache key */ + this.keyRust = ""; + /** The environment variables considered for the cache key */ + this.keyEnvs = []; + /** The files considered for the cache key */ + this.keyFiles = []; + } + /** + * Constructs a [`CacheConfig`] with all the paths and keys. + * + * This will read the action `input`s, and read and persist `state` as necessary. + */ + static async new() { + const self = new CacheConfig(); + // Construct key prefix: + // This uses either the `shared-key` input, + // or the `key` input combined with the `job` key. + let key = core.getInput("prefix-key") || "v0-rust"; + const sharedKey = core.getInput("shared-key"); + if (sharedKey) { + key += `-${sharedKey}`; + } + else { + const inputKey = core.getInput("key"); + if (inputKey) { + key += `-${inputKey}`; + } + const job = process.env.GITHUB_JOB; + if ((job) && core.getInput("use-job-key").toLowerCase() == "true") { + key += `-${job}`; + } + } + // Add runner OS and CPU architecture to the key to avoid cross-contamination of cache + const runnerOS = os_1.default.type(); + const runnerArch = os_1.default.arch(); + key += `-${runnerOS}-${runnerArch}`; + self.keyPrefix = key; + // Construct environment portion of the key: + // This consists of a hash that considers the rust version + // as well as all the environment variables as given by a default list + // and the `env-vars` input. + // The env vars are sorted, matched by prefix and hashed into the + // resulting environment hash. 
+ let hasher = crypto_1.default.createHash("sha1"); + const rustVersion = await getRustVersion(); + let keyRust = `${rustVersion.release} ${rustVersion.host}`; + hasher.update(keyRust); + hasher.update(rustVersion["commit-hash"]); + keyRust += ` (${rustVersion["commit-hash"]})`; + self.keyRust = keyRust; + // these prefixes should cover most of the compiler / rust / cargo keys + const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; + envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean)); + // sort the available env vars so we have a more stable hash + const keyEnvs = []; + const envKeys = Object.keys(process.env); + envKeys.sort((a, b) => a.localeCompare(b)); + for (const key of envKeys) { + const value = process.env[key]; + if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { + hasher.update(`${key}=${value}`); + keyEnvs.push(key); + } + } + self.keyEnvs = keyEnvs; + // Add job hash suffix if 'add-job-hash' is true + if (core.getInput("add-job-hash").toLowerCase() == "true") { + key += `-${digest(hasher)}`; + } + self.restoreKey = key; + // Construct the lockfiles portion of the key: + // This considers all the files found via globbing for various manifests + // and lockfiles. + self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true"; + // Constructs the workspace config and paths to restore: + // The workspaces are given using a `$workspace -> $target` syntax. 
+ const workspaces = []; + const workspacesInput = core.getInput("workspaces") || "."; + for (const workspace of workspacesInput.trim().split("\n")) { + let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); + root = path_1.default.resolve(root); + target = path_1.default.join(root, target); + workspaces.push(new workspace_1.Workspace(root, target)); + } + self.workspaces = workspaces; + let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml"); + const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed + hasher = crypto_1.default.createHash("sha1"); + for (const workspace of workspaces) { + const root = workspace.root; + keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`))); + const workspaceMembers = await workspace.getWorkspaceMembers(); + const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => path_1.default.join(member.path, "Cargo.toml"))); + for (const cargo_manifest of cargo_manifests) { + try { + const content = await promises_1.default.readFile(cargo_manifest, { encoding: "utf8" }); + // Use any since TomlPrimitive is not exposed + const parsed = toml.parse(content); + if ("package" in parsed) { + const pack = parsed.package; + if ("version" in pack) { + pack["version"] = "0.0.0"; + } + } + for (const prefix of ["", "build-", "dev-"]) { + const section_name = `${prefix}dependencies`; + if (!(section_name in parsed)) { + continue; + } + const deps = parsed[section_name]; + for (const key of Object.keys(deps)) { + const dep = deps[key]; + try { + if ("path" in dep) { + dep.version = "0.0.0"; + dep.path = ""; + } + } + catch (_e) { + // Not an object, probably a string (version), + // continue. 
+ continue; + } + } + } + hasher.update(JSON.stringify(parsed)); + parsedKeyFiles.push(cargo_manifest); + } + catch (e) { + // Fallback to caching them as regular file + core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`); + keyFiles.push(cargo_manifest); + } + } + const cargo_lock = path_1.default.join(workspace.root, "Cargo.lock"); + if (await (0, utils_1.exists)(cargo_lock)) { + try { + const content = await promises_1.default.readFile(cargo_lock, { encoding: "utf8" }); + const parsed = toml.parse(content); + if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) { + // Fallback to caching them as regular file since this action + // can only handle Cargo.lock format version 3 + core.warning("Unsupported Cargo.lock format, fallback to caching entire file"); + keyFiles.push(cargo_lock); + continue; + } + // Package without `[[package]].source` and `[[package]].checksum` + // are the one with `path = "..."` to crates within the workspace. 
+ const packages = parsed.package.filter((p) => "source" in p || "checksum" in p); + hasher.update(JSON.stringify(packages)); + parsedKeyFiles.push(cargo_lock); + } + catch (e) { + // Fallback to caching them as regular file + core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`); + keyFiles.push(cargo_lock); + } + } + } + keyFiles = sort_and_uniq(keyFiles); + for (const file of keyFiles) { + for await (const chunk of fs_1.default.createReadStream(file)) { + hasher.update(chunk); + } + } + let lockHash = digest(hasher); + keyFiles.push(...parsedKeyFiles); + self.keyFiles = sort_and_uniq(keyFiles); + key += `-${lockHash}`; + self.cacheKey = key; + self.cachePaths = [path_1.default.join(exports.CARGO_HOME, "registry"), path_1.default.join(exports.CARGO_HOME, "git")]; + if (self.cacheBin) { + self.cachePaths = [ + path_1.default.join(exports.CARGO_HOME, "bin"), + path_1.default.join(exports.CARGO_HOME, ".crates.toml"), + path_1.default.join(exports.CARGO_HOME, ".crates2.json"), + ...self.cachePaths, + ]; + } + const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true"; + if (cacheTargets === "true") { + self.cachePaths.push(...workspaces.map((ws) => ws.target)); + } + const cacheDirectories = core.getInput("cache-directories"); + for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { + self.cachePaths.push(dir); + } + const bins = await (0, cleanup_1.getCargoBins)(); + self.cargoBins = Array.from(bins.values()); + return self; + } + /** + * Reads and returns the cache config from the action `state`. + * + * @throws {Error} if the state is not present. + * @returns {CacheConfig} the configuration. 
+ * @see {@link CacheConfig#saveState} + * @see {@link CacheConfig#new} + */ + static fromState() { + const source = core.getState(STATE_CONFIG); + if (!source) { + throw new Error("Cache configuration not found in state"); + } + const self = new CacheConfig(); + Object.assign(self, JSON.parse(source)); + self.workspaces = self.workspaces.map((w) => new workspace_1.Workspace(w.root, w.target)); + return self; + } + /** + * Prints the configuration to the action log. + */ + printInfo(cacheProvider) { + core.startGroup("Cache Configuration"); + core.info(`Cache Provider:`); + core.info(` ${cacheProvider.name}`); + core.info(`Workspaces:`); + for (const workspace of this.workspaces) { + core.info(` ${workspace.root}`); + } + core.info(`Cache Paths:`); + for (const path of this.cachePaths) { + core.info(` ${path}`); + } + core.info(`Restore Key:`); + core.info(` ${this.restoreKey}`); + core.info(`Cache Key:`); + core.info(` ${this.cacheKey}`); + core.info(`.. Prefix:`); + core.info(` - ${this.keyPrefix}`); + core.info(`.. Environment considered:`); + core.info(` - Rust Version: ${this.keyRust}`); + for (const env of this.keyEnvs) { + core.info(` - ${env}`); + } + core.info(`.. Lockfiles considered:`); + for (const file of this.keyFiles) { + core.info(` - ${file}`); + } + core.endGroup(); + } + /** + * Saves the configuration to the state store. + * This is used to restore the configuration in the post action. + */ + saveState() { + core.saveState(STATE_CONFIG, this); + } +} +exports.CacheConfig = CacheConfig; +/** + * Checks if the cache is up to date. + * + * @returns `true` if the cache is up to date, `false` otherwise. + */ +function isCacheUpToDate() { + return core.getState(STATE_CONFIG) === ""; +} +/** + * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`. + * + * @param hasher The hasher to digest. + * @returns The hex digest. 
+ */ +function digest(hasher) { + return hasher.digest("hex").substring(0, HASH_LENGTH); +} +async function getRustVersion() { + const stdout = await (0, utils_1.getCmdOutput)("rustc", ["-vV"]); + let splits = stdout + .split(/[\n\r]+/) + .filter(Boolean) + .map((s) => s.split(":").map((s) => s.trim())) + .filter((s) => s.length === 2); + return Object.fromEntries(splits); +} +async function globFiles(pattern) { + const globber = await glob.create(pattern, { + followSymbolicLinks: false, + }); + // fs.statSync resolve the symbolic link and returns stat for the + // file it pointed to, so isFile would make sure the resolved + // file is actually a regular file. + return (await globber.glob()).filter((file) => fs_1.default.statSync(file).isFile()); +} +function sort_and_uniq(a) { + return a + .sort((a, b) => a.localeCompare(b)) + .reduce((accumulator, currentValue) => { + const len = accumulator.length; + // If accumulator is empty or its last element != currentValue + // Since array is already sorted, elements with the same value + // are grouped together to be continugous in space. + // + // If currentValue != last element, then it must be unique. + if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) { + accumulator.push(currentValue); + } + return accumulator; + }, []); +} + + +/***/ }), + +/***/ 95804: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.reportError = reportError; +exports.getCmdOutput = getCmdOutput; +exports.getCacheProvider = getCacheProvider; +exports.exists = exists; +const core = __importStar(__nccwpck_require__(37484)); +const exec = __importStar(__nccwpck_require__(95236)); +const buildjetCache = __importStar(__nccwpck_require__(24318)); +const warpbuildCache = __importStar(__nccwpck_require__(22343)); +const ghCache = __importStar(__nccwpck_require__(5116)); +const fs_1 = __importDefault(__nccwpck_require__(79896)); +function reportError(e) { + const { commandFailed } = e; + if (commandFailed) { + core.error(`Command failed: ${commandFailed.command}`); + core.error(commandFailed.stderr); + } + else { + core.error(`${e.stack}`); + } +} +async function getCmdOutput(cmd, args = [], options = {}) { + let stdout = ""; + let stderr = ""; + try { + await exec.exec(cmd, args, { + silent: true, + listeners: { + stdout(data) { + stdout += data.toString(); + }, + stderr(data) { + stderr += data.toString(); + }, + }, + ...options, + }); + } + catch (e) { + e.commandFailed = { + command: 
`${cmd} ${args.join(" ")}`, + stderr, + }; + throw e; + } + return stdout; +} +function getCacheProvider() { + const cacheProvider = core.getInput("cache-provider"); + let cache; + switch (cacheProvider) { + case "github": + cache = ghCache; + break; + case "buildjet": + cache = buildjetCache; + break; + case "warpbuild": + cache = warpbuildCache; + break; + default: + throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`); + } + return { + name: cacheProvider, + cache: cache, + }; +} +async function exists(path) { + try { + await fs_1.default.promises.access(path); + return true; + } + catch { + return false; + } +} + + +/***/ }), + +/***/ 87896: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.Workspace = void 0; +const core = __importStar(__nccwpck_require__(37484)); +const path_1 = __importDefault(__nccwpck_require__(16928)); +const utils_1 = __nccwpck_require__(95804); +const SAVE_TARGETS = new Set(["lib", "proc-macro"]); +class Workspace { + constructor(root, target) { + this.root = root; + this.target = target; + } + async getPackages(filter, ...extraArgs) { + let packages = []; + try { + core.debug(`collecting metadata for "${this.root}"`); + const meta = JSON.parse(await (0, utils_1.getCmdOutput)("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], { + cwd: this.root, + env: { "CARGO_ENCODED_RUSTFLAGS": "" }, + })); + core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`); + for (const pkg of meta.packages.filter(filter)) { + const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); + packages.push({ name: pkg.name, version: pkg.version, targets, path: path_1.default.dirname(pkg.manifest_path) }); + } + } + catch (err) { + 
console.error(err); + } + return packages; + } + async getPackagesOutsideWorkspaceRoot() { + return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root)); + } + async getWorkspaceMembers() { + return await this.getPackages((_) => true, "--no-deps"); + } +} +exports.Workspace = Workspace; + + +/***/ }), + +/***/ 90198: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", ({ value: true })); +const core = __importStar(__nccwpck_require__(37484)); +const exec = __importStar(__nccwpck_require__(95236)); +const cleanup_1 = __nccwpck_require__(55541); +const config_1 = __nccwpck_require__(1283); +const utils_1 = 
__nccwpck_require__(95804); +process.on("uncaughtException", (e) => { + core.error(e.message); + if (e.stack) { + core.error(e.stack); + } +}); +async function run() { + const cacheProvider = (0, utils_1.getCacheProvider)(); + const save = core.getInput("save-if").toLowerCase() || "true"; + if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) { + return; + } + try { + if ((0, config_1.isCacheUpToDate)()) { + core.info(`Cache up-to-date.`); + return; + } + const config = config_1.CacheConfig.fromState(); + config.printInfo(cacheProvider); + core.info(""); + // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands + if (process.env["RUNNER_OS"] == "macOS") { + await macOsWorkaround(); + } + const workspaceCrates = core.getInput("cache-workspace-crates").toLowerCase() || "false"; + const allPackages = []; + for (const workspace of config.workspaces) { + const packages = await workspace.getPackagesOutsideWorkspaceRoot(); + if (workspaceCrates === "true") { + const wsMembers = await workspace.getWorkspaceMembers(); + packages.push(...wsMembers); + } + allPackages.push(...packages); + try { + core.info(`... Cleaning ${workspace.target} ...`); + await (0, cleanup_1.cleanTargetDir)(workspace.target, packages); + } + catch (e) { + core.debug(`${e.stack}`); + } + } + try { + const crates = core.getInput("cache-all-crates").toLowerCase() || "false"; + core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`); + await (0, cleanup_1.cleanRegistry)(allPackages, crates !== "true"); + } + catch (e) { + core.debug(`${e.stack}`); + } + if (config.cacheBin) { + try { + core.info(`... Cleaning cargo/bin ...`); + await (0, cleanup_1.cleanBin)(config.cargoBins); + } + catch (e) { + core.debug(`${e.stack}`); + } + } + try { + core.info(`... Cleaning cargo git cache ...`); + await (0, cleanup_1.cleanGit)(allPackages); + } + catch (e) { + core.debug(`${e.stack}`); + } + core.info(`... 
Saving cache ...`); + // Pass a copy of cachePaths to avoid mutating the original array as reported by: + // https://github.com/actions/toolkit/pull/1378 + // TODO: remove this once the underlying bug is fixed. + await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey); + } + catch (e) { + (0, utils_1.reportError)(e); + } + process.exit(); +} +run(); +async function macOsWorkaround() { + try { + // Workaround for https://github.com/actions/cache/issues/403 + // Also see https://github.com/rust-lang/cargo/issues/8603 + await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true }); + } + catch { } +} + + /***/ }), /***/ 42078: @@ -147326,6 +148382,915 @@ module.exports = axios; (()=>{"use strict";var t={d:(e,n)=>{for(var i in n)t.o(n,i)&&!t.o(e,i)&&Object.defineProperty(e,i,{enumerable:!0,get:n[i]})},o:(t,e)=>Object.prototype.hasOwnProperty.call(t,e),r:t=>{"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(t,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(t,"__esModule",{value:!0})}},e={};t.r(e),t.d(e,{XMLBuilder:()=>ft,XMLParser:()=>st,XMLValidator:()=>mt});const n=":A-Za-z_\\u00C0-\\u00D6\\u00D8-\\u00F6\\u00F8-\\u02FF\\u0370-\\u037D\\u037F-\\u1FFF\\u200C-\\u200D\\u2070-\\u218F\\u2C00-\\u2FEF\\u3001-\\uD7FF\\uF900-\\uFDCF\\uFDF0-\\uFFFD",i=new RegExp("^["+n+"]["+n+"\\-.\\d\\u00B7\\u0300-\\u036F\\u203F-\\u2040]*$");function s(t,e){const n=[];let i=e.exec(t);for(;i;){const s=[];s.startIndex=e.lastIndex-i[0].length;const r=i.length;for(let t=0;t"!==t[o]&&" "!==t[o]&&"\t"!==t[o]&&"\n"!==t[o]&&"\r"!==t[o];o++)f+=t[o];if(f=f.trim(),"/"===f[f.length-1]&&(f=f.substring(0,f.length-1),o--),!r(f)){let e;return e=0===f.trim().length?"Invalid space after '<'.":"Tag '"+f+"' is an invalid name.",x("InvalidTag",e,N(t,o))}const p=c(t,o);if(!1===p)return x("InvalidAttr","Attributes for '"+f+"' have open quote.",N(t,o));let b=p.value;if(o=p.index,"/"===b[b.length-1]){const n=o-b.length;b=b.substring(0,b.length-1);const 
s=g(b,e);if(!0!==s)return x(s.err.code,s.err.msg,N(t,n+s.err.line));i=!0}else if(d){if(!p.tagClosed)return x("InvalidTag","Closing tag '"+f+"' doesn't have proper closing.",N(t,o));if(b.trim().length>0)return x("InvalidTag","Closing tag '"+f+"' can't have attributes or invalid starting.",N(t,a));if(0===n.length)return x("InvalidTag","Closing tag '"+f+"' has not been opened.",N(t,a));{const e=n.pop();if(f!==e.tagName){let n=N(t,e.tagStartPos);return x("InvalidTag","Expected closing tag '"+e.tagName+"' (opened in line "+n.line+", col "+n.col+") instead of closing tag '"+f+"'.",N(t,a))}0==n.length&&(s=!0)}}else{const r=g(b,e);if(!0!==r)return x(r.err.code,r.err.msg,N(t,o-b.length+r.err.line));if(!0===s)return x("InvalidXml","Multiple possible root nodes found.",N(t,o));-1!==e.unpairedTags.indexOf(f)||n.push({tagName:f,tagStartPos:a}),i=!0}for(o++;o0)||x("InvalidXml","Invalid '"+JSON.stringify(n.map((t=>t.tagName)),null,4).replace(/\r?\n/g,"")+"' found.",{line:1,col:1}):x("InvalidXml","Start tag expected.",1)}function l(t){return" "===t||"\t"===t||"\n"===t||"\r"===t}function u(t,e){const n=e;for(;e5&&"xml"===i)return x("InvalidXml","XML declaration allowed only at the start of the document.",N(t,e));if("?"==t[e]&&">"==t[e+1]){e++;break}}return e}function h(t,e){if(t.length>e+5&&"-"===t[e+1]&&"-"===t[e+2]){for(e+=3;e"===t[e+2]){e+=2;break}}else if(t.length>e+8&&"D"===t[e+1]&&"O"===t[e+2]&&"C"===t[e+3]&&"T"===t[e+4]&&"Y"===t[e+5]&&"P"===t[e+6]&&"E"===t[e+7]){let n=1;for(e+=8;e"===t[e]&&(n--,0===n))break}else if(t.length>e+9&&"["===t[e+1]&&"C"===t[e+2]&&"D"===t[e+3]&&"A"===t[e+4]&&"T"===t[e+5]&&"A"===t[e+6]&&"["===t[e+7])for(e+=8;e"===t[e+2]){e+=2;break}return e}const d='"',f="'";function c(t,e){let n="",i="",s=!1;for(;e"===t[e]&&""===i){s=!0;break}n+=t[e]}return""===i&&{value:n,index:e,tagClosed:s}}const p=new RegExp("(\\s*)([^\\s=]+)(\\s*=)?(\\s*(['\"])(([\\s\\S])*?)\\5)?","g");function g(t,e){const n=s(t,p),i={};for(let 
t=0;t!1,commentPropName:!1,unpairedTags:[],processEntities:!0,htmlEntities:!1,ignoreDeclaration:!1,ignorePiTags:!1,transformTagName:!1,transformAttributeName:!1,updateTag:function(t,e,n){return t},captureMetaData:!1};let y;y="function"!=typeof Symbol?"@@xmlMetadata":Symbol("XML Node Metadata");class T{constructor(t){this.tagname=t,this.child=[],this[":@"]={}}add(t,e){"__proto__"===t&&(t="#__proto__"),this.child.push({[t]:e})}addChild(t,e){"__proto__"===t.tagname&&(t.tagname="#__proto__"),t[":@"]&&Object.keys(t[":@"]).length>0?this.child.push({[t.tagname]:t.child,":@":t[":@"]}):this.child.push({[t.tagname]:t.child}),void 0!==e&&(this.child[this.child.length-1][y]={startIndex:e})}static getMetaDataSymbol(){return y}}function w(t,e){const n={};if("O"!==t[e+3]||"C"!==t[e+4]||"T"!==t[e+5]||"Y"!==t[e+6]||"P"!==t[e+7]||"E"!==t[e+8])throw new Error("Invalid Tag instead of DOCTYPE");{e+=9;let i=1,s=!1,r=!1,o="";for(;e"===t[e]){if(r?"-"===t[e-1]&&"-"===t[e-2]&&(r=!1,i--):i--,0===i)break}else"["===t[e]?s=!0:o+=t[e];else{if(s&&C(t,"!ENTITY",e)){let i,s;e+=7,[i,s,e]=O(t,e+1),-1===s.indexOf("&")&&(n[i]={regx:RegExp(`&${i};`,"g"),val:s})}else if(s&&C(t,"!ELEMENT",e)){e+=8;const{index:n}=S(t,e+1);e=n}else if(s&&C(t,"!ATTLIST",e))e+=8;else if(s&&C(t,"!NOTATION",e)){e+=9;const{index:n}=A(t,e+1);e=n}else{if(!C(t,"!--",e))throw new Error("Invalid DOCTYPE");r=!0}i++,o=""}if(0!==i)throw new Error("Unclosed DOCTYPE")}return{entities:n,i:e}}const P=(t,e)=>{for(;e{for(const n of t){if("string"==typeof n&&e===n)return!0;if(n instanceof RegExp&&n.test(e))return!0}}:()=>!1}class k{constructor(t){this.options=t,this.currentNode=null,this.tagsNodeStack=[],this.docTypeEntities={},this.lastEntities={apos:{regex:/&(apos|#39|#x27);/g,val:"'"},gt:{regex:/&(gt|#62|#x3E);/g,val:">"},lt:{regex:/&(lt|#60|#x3C);/g,val:"<"},quot:{regex:/&(quot|#34|#x22);/g,val:'"'}},this.ampEntity={regex:/&(amp|#38|#x26);/g,val:"&"},this.htmlEntities={space:{regex:/&(nbsp|#160);/g,val:" 
"},cent:{regex:/&(cent|#162);/g,val:"¢"},pound:{regex:/&(pound|#163);/g,val:"£"},yen:{regex:/&(yen|#165);/g,val:"¥"},euro:{regex:/&(euro|#8364);/g,val:"€"},copyright:{regex:/&(copy|#169);/g,val:"©"},reg:{regex:/&(reg|#174);/g,val:"®"},inr:{regex:/&(inr|#8377);/g,val:"₹"},num_dec:{regex:/&#([0-9]{1,7});/g,val:(t,e)=>String.fromCodePoint(Number.parseInt(e,10))},num_hex:{regex:/&#x([0-9a-fA-F]{1,6});/g,val:(t,e)=>String.fromCodePoint(Number.parseInt(e,16))}},this.addExternalEntities=F,this.parseXml=X,this.parseTextData=L,this.resolveNameSpace=B,this.buildAttributesMap=G,this.isItStopNode=Z,this.replaceEntitiesValue=R,this.readStopNodeData=J,this.saveTextToParentTag=q,this.addChild=Y,this.ignoreAttributesFn=_(this.options.ignoreAttributes)}}function F(t){const e=Object.keys(t);for(let n=0;n0)){o||(t=this.replaceEntitiesValue(t));const i=this.options.tagValueProcessor(e,t,n,s,r);return null==i?t:typeof i!=typeof t||i!==t?i:this.options.trimValues||t.trim()===t?H(t,this.options.parseTagValue,this.options.numberParseOptions):t}}function B(t){if(this.options.removeNSPrefix){const e=t.split(":"),n="/"===t.charAt(0)?"/":"";if("xmlns"===e[0])return"";2===e.length&&(t=n+e[1])}return t}const U=new RegExp("([^\\s=]+)\\s*(=\\s*(['\"])([\\s\\S]*?)\\3)?","gm");function G(t,e,n){if(!0!==this.options.ignoreAttributes&&"string"==typeof t){const n=s(t,U),i=n.length,r={};for(let t=0;t",r,"Closing Tag is not closed.");let o=t.substring(r+2,e).trim();if(this.options.removeNSPrefix){const t=o.indexOf(":");-1!==t&&(o=o.substr(t+1))}this.options.transformTagName&&(o=this.options.transformTagName(o)),n&&(i=this.saveTextToParentTag(i,n,s));const a=s.substring(s.lastIndexOf(".")+1);if(o&&-1!==this.options.unpairedTags.indexOf(o))throw new Error(`Unpaired tag can not be used as closing tag: `);let l=0;a&&-1!==this.options.unpairedTags.indexOf(a)?(l=s.lastIndexOf(".",s.lastIndexOf(".")-1),this.tagsNodeStack.pop()):l=s.lastIndexOf("."),s=s.substring(0,l),n=this.tagsNodeStack.pop(),i="",r=e}else 
if("?"===t[r+1]){let e=z(t,r,!1,"?>");if(!e)throw new Error("Pi Tag is not closed.");if(i=this.saveTextToParentTag(i,n,s),this.options.ignoreDeclaration&&"?xml"===e.tagName||this.options.ignorePiTags);else{const t=new T(e.tagName);t.add(this.options.textNodeName,""),e.tagName!==e.tagExp&&e.attrExpPresent&&(t[":@"]=this.buildAttributesMap(e.tagExp,s,e.tagName)),this.addChild(n,t,s,r)}r=e.closeIndex+1}else if("!--"===t.substr(r+1,3)){const e=W(t,"--\x3e",r+4,"Comment is not closed.");if(this.options.commentPropName){const o=t.substring(r+4,e-2);i=this.saveTextToParentTag(i,n,s),n.add(this.options.commentPropName,[{[this.options.textNodeName]:o}])}r=e}else if("!D"===t.substr(r+1,2)){const e=w(t,r);this.docTypeEntities=e.entities,r=e.i}else if("!["===t.substr(r+1,2)){const e=W(t,"]]>",r,"CDATA is not closed.")-2,o=t.substring(r+9,e);i=this.saveTextToParentTag(i,n,s);let a=this.parseTextData(o,n.tagname,s,!0,!1,!0,!0);null==a&&(a=""),this.options.cdataPropName?n.add(this.options.cdataPropName,[{[this.options.textNodeName]:o}]):n.add(this.options.textNodeName,a),r=e+2}else{let o=z(t,r,this.options.removeNSPrefix),a=o.tagName;const l=o.rawTagName;let u=o.tagExp,h=o.attrExpPresent,d=o.closeIndex;this.options.transformTagName&&(a=this.options.transformTagName(a)),n&&i&&"!xml"!==n.tagname&&(i=this.saveTextToParentTag(i,n,s,!1));const f=n;f&&-1!==this.options.unpairedTags.indexOf(f.tagname)&&(n=this.tagsNodeStack.pop(),s=s.substring(0,s.lastIndexOf("."))),a!==e.tagname&&(s+=s?"."+a:a);const c=r;if(this.isItStopNode(this.options.stopNodes,s,a)){let e="";if(u.length>0&&u.lastIndexOf("/")===u.length-1)"/"===a[a.length-1]?(a=a.substr(0,a.length-1),s=s.substr(0,s.length-1),u=a):u=u.substr(0,u.length-1),r=o.closeIndex;else if(-1!==this.options.unpairedTags.indexOf(a))r=o.closeIndex;else{const n=this.readStopNodeData(t,l,d+1);if(!n)throw new Error(`Unexpected end of ${l}`);r=n.i,e=n.tagContent}const i=new 
T(a);a!==u&&h&&(i[":@"]=this.buildAttributesMap(u,s,a)),e&&(e=this.parseTextData(e,a,s,!0,h,!0,!0)),s=s.substr(0,s.lastIndexOf(".")),i.add(this.options.textNodeName,e),this.addChild(n,i,s,c)}else{if(u.length>0&&u.lastIndexOf("/")===u.length-1){"/"===a[a.length-1]?(a=a.substr(0,a.length-1),s=s.substr(0,s.length-1),u=a):u=u.substr(0,u.length-1),this.options.transformTagName&&(a=this.options.transformTagName(a));const t=new T(a);a!==u&&h&&(t[":@"]=this.buildAttributesMap(u,s,a)),this.addChild(n,t,s,c),s=s.substr(0,s.lastIndexOf("."))}else{const t=new T(a);this.tagsNodeStack.push(n),a!==u&&h&&(t[":@"]=this.buildAttributesMap(u,s,a)),this.addChild(n,t,s,c),n=t}i="",r=d}}else i+=t[r];return e.child};function Y(t,e,n,i){this.options.captureMetaData||(i=void 0);const s=this.options.updateTag(e.tagname,n,e[":@"]);!1===s||("string"==typeof s?(e.tagname=s,t.addChild(e,i)):t.addChild(e,i))}const R=function(t){if(this.options.processEntities){for(let e in this.docTypeEntities){const n=this.docTypeEntities[e];t=t.replace(n.regx,n.val)}for(let e in this.lastEntities){const n=this.lastEntities[e];t=t.replace(n.regex,n.val)}if(this.options.htmlEntities)for(let e in this.htmlEntities){const n=this.htmlEntities[e];t=t.replace(n.regex,n.val)}t=t.replace(this.ampEntity.regex,this.ampEntity.val)}return t};function q(t,e,n,i){return t&&(void 0===i&&(i=0===e.child.length),void 0!==(t=this.parseTextData(t,e.tagname,n,!1,!!e[":@"]&&0!==Object.keys(e[":@"]).length,i))&&""!==t&&e.add(this.options.textNodeName,t),t=""),t}function Z(t,e,n){const i="*."+n;for(const n in t){const s=t[n];if(i===s||e===s)return!0}return!1}function W(t,e,n,i){const s=t.indexOf(e,n);if(-1===s)throw new Error(i);return s+e.length-1}function z(t,e,n,i=">"){const s=function(t,e,n=">"){let i,s="";for(let r=e;r",n,`${e} is not closed`);if(t.substring(n+2,r).trim()===e&&(s--,0===s))return{tagContent:t.substring(i,n),i:r};n=r}else if("?"===t[n+1])n=W(t,"?>",n+1,"StopNode is not closed.");else 
if("!--"===t.substr(n+1,3))n=W(t,"--\x3e",n+3,"StopNode is not closed.");else if("!["===t.substr(n+1,2))n=W(t,"]]>",n,"StopNode is not closed.")-2;else{const i=z(t,n,">");i&&((i&&i.tagName)===e&&"/"!==i.tagExp[i.tagExp.length-1]&&s++,n=i.closeIndex)}}function H(t,e,n){if(e&&"string"==typeof t){const e=t.trim();return"true"===e||"false"!==e&&function(t,e={}){if(e=Object.assign({},V,e),!t||"string"!=typeof t)return t;let n=t.trim();if(void 0!==e.skipLike&&e.skipLike.test(n))return t;if("0"===t)return 0;if(e.hex&&j.test(n))return function(t){if(parseInt)return parseInt(t,16);if(Number.parseInt)return Number.parseInt(t,16);if(window&&window.parseInt)return window.parseInt(t,16);throw new Error("parseInt, Number.parseInt, window.parseInt are not supported")}(n);if(-1!==n.search(/.+[eE].+/))return function(t,e,n){if(!n.eNotation)return t;const i=e.match(M);if(i){let s=i[1]||"";const r=-1===i[3].indexOf("e")?"E":"e",o=i[2],a=s?t[o.length+1]===r:t[o.length]===r;return o.length>1&&a?t:1!==o.length||!i[3].startsWith(`.${r}`)&&i[3][0]!==r?n.leadingZeros&&!a?(e=(i[1]||"")+i[3],Number(e)):t:Number(e)}return t}(t,n,e);{const s=D.exec(n);if(s){const r=s[1]||"",o=s[2];let a=(i=s[3])&&-1!==i.indexOf(".")?("."===(i=i.replace(/0+$/,""))?i="0":"."===i[0]?i="0"+i:"."===i[i.length-1]&&(i=i.substring(0,i.length-1)),i):i;const l=r?"."===t[o.length+1]:"."===t[o.length];if(!e.leadingZeros&&(o.length>1||1===o.length&&!l))return t;{const i=Number(n),s=String(i);if(0===i||-0===i)return i;if(-1!==s.search(/[eE]/))return e.eNotation?i:t;if(-1!==n.indexOf("."))return"0"===s||s===a||s===`${r}${a}`?i:t;let l=o?a:n;return o?l===s||r+l===s?i:t:l===s||l===r+s?i:t}}return t}var i}(t,n)}return void 0!==t?t:""}const K=T.getMetaDataSymbol();function Q(t,e){return tt(t,e)}function tt(t,e,n){let i;const s={};for(let r=0;r0&&(s[e.textNodeName]=i):void 0!==i&&(s[e.textNodeName]=i),s}function et(t){const e=Object.keys(t);for(let t=0;t0&&(n="\n"),ot(t,e,"",n)}function ot(t,e,n,i){let s="",r=!1;for(let 
o=0;o`,r=!1;continue}if(l===e.commentPropName){s+=i+`\x3c!--${a[l][0][e.textNodeName]}--\x3e`,r=!0;continue}if("?"===l[0]){const t=lt(a[":@"],e),n="?xml"===l?"":i;let o=a[l][0][e.textNodeName];o=0!==o.length?" "+o:"",s+=n+`<${l}${o}${t}?>`,r=!0;continue}let h=i;""!==h&&(h+=e.indentBy);const d=i+`<${l}${lt(a[":@"],e)}`,f=ot(a[l],e,u,h);-1!==e.unpairedTags.indexOf(l)?e.suppressUnpairedNode?s+=d+">":s+=d+"/>":f&&0!==f.length||!e.suppressEmptyNode?f&&f.endsWith(">")?s+=d+`>${f}${i}`:(s+=d+">",f&&""!==i&&(f.includes("/>")||f.includes("`):s+=d+"/>",r=!0}return s}function at(t){const e=Object.keys(t);for(let n=0;n0&&e.processEntities)for(let n=0;n","g"),val:">"},{regex:new RegExp("<","g"),val:"<"},{regex:new RegExp("'","g"),val:"'"},{regex:new RegExp('"',"g"),val:"""}],processEntities:!0,stopNodes:[],oneListGroup:!1};function ft(t){this.options=Object.assign({},dt,t),!0===this.options.ignoreAttributes||this.options.attributesGroupName?this.isAttribute=function(){return!1}:(this.ignoreAttributesFn=_(this.options.ignoreAttributes),this.attrPrefixLen=this.options.attributeNamePrefix.length,this.isAttribute=gt),this.processTextOrObjNode=ct,this.options.format?(this.indentate=pt,this.tagEndChar=">\n",this.newLine="\n"):(this.indentate=function(){return""},this.tagEndChar=">",this.newLine="")}function ct(t,e,n,i){const s=this.j2x(t,n+1,i.concat(e));return void 0!==t[this.options.textNodeName]&&1===Object.keys(t).length?this.buildTextValNode(t[this.options.textNodeName],e,s.attrStr,n):this.buildObjectNode(s.val,e,s.attrStr,n)}function pt(t){return this.options.indentBy.repeat(t)}function gt(t){return!(!t.startsWith(this.options.attributeNamePrefix)||t===this.options.textNodeName)&&t.substr(this.attrPrefixLen)}ft.prototype.build=function(t){return this.options.preserveOrder?rt(t,this.options):(Array.isArray(t)&&this.options.arrayNodeName&&this.options.arrayNodeName.length>1&&(t={[this.options.arrayNodeName]:t}),this.j2x(t,0,[]).val)},ft.prototype.j2x=function(t,e,n){let 
i="",s="";const r=n.join(".");for(let o in t)if(Object.prototype.hasOwnProperty.call(t,o))if(void 0===t[o])this.isAttribute(o)&&(s+="");else if(null===t[o])this.isAttribute(o)||o===this.options.cdataPropName?s+="":"?"===o[0]?s+=this.indentate(e)+"<"+o+"?"+this.tagEndChar:s+=this.indentate(e)+"<"+o+"/"+this.tagEndChar;else if(t[o]instanceof Date)s+=this.buildTextValNode(t[o],o,"",e);else if("object"!=typeof t[o]){const n=this.isAttribute(o);if(n&&!this.ignoreAttributesFn(n,r))i+=this.buildAttrPairStr(n,""+t[o]);else if(!n)if(o===this.options.textNodeName){let e=this.options.tagValueProcessor(o,""+t[o]);s+=this.replaceEntitiesValue(e)}else s+=this.buildTextValNode(t[o],o,"",e)}else if(Array.isArray(t[o])){const i=t[o].length;let r="",a="";for(let l=0;l"+t+s}},ft.prototype.closeTag=function(t){let e="";return-1!==this.options.unpairedTags.indexOf(t)?this.options.suppressUnpairedNode||(e="/"):e=this.options.suppressEmptyNode?"/":`>`+this.newLine;if(!1!==this.options.commentPropName&&e===this.options.commentPropName)return this.indentate(i)+`\x3c!--${t}--\x3e`+this.newLine;if("?"===e[0])return this.indentate(i)+"<"+e+n+"?"+this.tagEndChar;{let s=this.options.tagValueProcessor(e,t);return s=this.replaceEntitiesValue(s),""===s?this.indentate(i)+"<"+e+n+this.closeTag(e)+this.tagEndChar:this.indentate(i)+"<"+e+n+">"+s+"0&&this.options.processEntities)for(let e=0;e { + +"use strict"; + +var __defProp = Object.defineProperty; +var __getOwnPropDesc = Object.getOwnPropertyDescriptor; +var __getOwnPropNames = Object.getOwnPropertyNames; +var __hasOwnProp = Object.prototype.hasOwnProperty; +var __export = (target, all) => { + for (var name in all) + __defProp(target, name, { get: all[name], enumerable: true }); +}; +var __copyProps = (to, from, except, desc) => { + if (from && typeof from === "object" || typeof from === "function") { + for (let key of __getOwnPropNames(from)) + if (!__hasOwnProp.call(to, key) && key !== except) + __defProp(to, key, { get: () => from[key], 
enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); + } + return to; +}; +var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + +// dist/index.js +var index_exports = {}; +__export(index_exports, { + TomlDate: () => TomlDate, + TomlError: () => TomlError, + default: () => index_default, + parse: () => parse, + stringify: () => stringify +}); +module.exports = __toCommonJS(index_exports); + +// dist/error.js +function getLineColFromPtr(string, ptr) { + let lines = string.slice(0, ptr).split(/\r\n|\n|\r/g); + return [lines.length, lines.pop().length + 1]; +} +function makeCodeBlock(string, line, column) { + let lines = string.split(/\r\n|\n|\r/g); + let codeblock = ""; + let numberLen = (Math.log10(line + 1) | 0) + 1; + for (let i = line - 1; i <= line + 1; i++) { + let l = lines[i - 1]; + if (!l) + continue; + codeblock += i.toString().padEnd(numberLen, " "); + codeblock += ": "; + codeblock += l; + codeblock += "\n"; + if (i === line) { + codeblock += " ".repeat(numberLen + column + 2); + codeblock += "^\n"; + } + } + return codeblock; +} +var TomlError = class extends Error { + line; + column; + codeblock; + constructor(message, options) { + const [line, column] = getLineColFromPtr(options.toml, options.ptr); + const codeblock = makeCodeBlock(options.toml, line, column); + super(`Invalid TOML document: ${message} + +${codeblock}`, options); + this.line = line; + this.column = column; + this.codeblock = codeblock; + } +}; + +// dist/util.js +function isEscaped(str, ptr) { + let i = 0; + while (str[ptr - ++i] === "\\") + ; + return --i && i % 2; +} +function indexOfNewline(str, start = 0, end = str.length) { + let idx = str.indexOf("\n", start); + if (str[idx - 1] === "\r") + idx--; + return idx <= end ? 
idx : -1; +} +function skipComment(str, ptr) { + for (let i = ptr; i < str.length; i++) { + let c = str[i]; + if (c === "\n") + return i; + if (c === "\r" && str[i + 1] === "\n") + return i + 1; + if (c < " " && c !== " " || c === "\x7F") { + throw new TomlError("control characters are not allowed in comments", { + toml: str, + ptr + }); + } + } + return str.length; +} +function skipVoid(str, ptr, banNewLines, banComments) { + let c; + while ((c = str[ptr]) === " " || c === " " || !banNewLines && (c === "\n" || c === "\r" && str[ptr + 1] === "\n")) + ptr++; + return banComments || c !== "#" ? ptr : skipVoid(str, skipComment(str, ptr), banNewLines); +} +function skipUntil(str, ptr, sep, end, banNewLines = false) { + if (!end) { + ptr = indexOfNewline(str, ptr); + return ptr < 0 ? str.length : ptr; + } + for (let i = ptr; i < str.length; i++) { + let c = str[i]; + if (c === "#") { + i = indexOfNewline(str, i); + } else if (c === sep) { + return i + 1; + } else if (c === end || banNewLines && (c === "\n" || c === "\r" && str[i + 1] === "\n")) { + return i; + } + } + throw new TomlError("cannot find end of structure", { + toml: str, + ptr + }); +} +function getStringEnd(str, seek) { + let first = str[seek]; + let target = first === str[seek + 1] && str[seek + 1] === str[seek + 2] ? 
str.slice(seek, seek + 3) : first; + seek += target.length - 1; + do + seek = str.indexOf(target, ++seek); + while (seek > -1 && first !== "'" && isEscaped(str, seek)); + if (seek > -1) { + seek += target.length; + if (target.length > 1) { + if (str[seek] === first) + seek++; + if (str[seek] === first) + seek++; + } + } + return seek; +} + +// dist/date.js +var DATE_TIME_RE = /^(\d{4}-\d{2}-\d{2})?[T ]?(?:(\d{2}):\d{2}:\d{2}(?:\.\d+)?)?(Z|[-+]\d{2}:\d{2})?$/i; +var TomlDate = class _TomlDate extends Date { + #hasDate = false; + #hasTime = false; + #offset = null; + constructor(date) { + let hasDate = true; + let hasTime = true; + let offset = "Z"; + if (typeof date === "string") { + let match = date.match(DATE_TIME_RE); + if (match) { + if (!match[1]) { + hasDate = false; + date = `0000-01-01T${date}`; + } + hasTime = !!match[2]; + hasTime && date[10] === " " && (date = date.replace(" ", "T")); + if (match[2] && +match[2] > 23) { + date = ""; + } else { + offset = match[3] || null; + date = date.toUpperCase(); + if (!offset && hasTime) + date += "Z"; + } + } else { + date = ""; + } + } + super(date); + if (!isNaN(this.getTime())) { + this.#hasDate = hasDate; + this.#hasTime = hasTime; + this.#offset = offset; + } + } + isDateTime() { + return this.#hasDate && this.#hasTime; + } + isLocal() { + return !this.#hasDate || !this.#hasTime || !this.#offset; + } + isDate() { + return this.#hasDate && !this.#hasTime; + } + isTime() { + return this.#hasTime && !this.#hasDate; + } + isValid() { + return this.#hasDate || this.#hasTime; + } + toISOString() { + let iso = super.toISOString(); + if (this.isDate()) + return iso.slice(0, 10); + if (this.isTime()) + return iso.slice(11, 23); + if (this.#offset === null) + return iso.slice(0, -1); + if (this.#offset === "Z") + return iso; + let offset = +this.#offset.slice(1, 3) * 60 + +this.#offset.slice(4, 6); + offset = this.#offset[0] === "-" ? 
offset : -offset; + let offsetDate = new Date(this.getTime() - offset * 6e4); + return offsetDate.toISOString().slice(0, -1) + this.#offset; + } + static wrapAsOffsetDateTime(jsDate, offset = "Z") { + let date = new _TomlDate(jsDate); + date.#offset = offset; + return date; + } + static wrapAsLocalDateTime(jsDate) { + let date = new _TomlDate(jsDate); + date.#offset = null; + return date; + } + static wrapAsLocalDate(jsDate) { + let date = new _TomlDate(jsDate); + date.#hasTime = false; + date.#offset = null; + return date; + } + static wrapAsLocalTime(jsDate) { + let date = new _TomlDate(jsDate); + date.#hasDate = false; + date.#offset = null; + return date; + } +}; + +// dist/primitive.js +var INT_REGEX = /^((0x[0-9a-fA-F](_?[0-9a-fA-F])*)|(([+-]|0[ob])?\d(_?\d)*))$/; +var FLOAT_REGEX = /^[+-]?\d(_?\d)*(\.\d(_?\d)*)?([eE][+-]?\d(_?\d)*)?$/; +var LEADING_ZERO = /^[+-]?0[0-9_]/; +var ESCAPE_REGEX = /^[0-9a-f]{4,8}$/i; +var ESC_MAP = { + b: "\b", + t: " ", + n: "\n", + f: "\f", + r: "\r", + '"': '"', + "\\": "\\" +}; +function parseString(str, ptr = 0, endPtr = str.length) { + let isLiteral = str[ptr] === "'"; + let isMultiline = str[ptr++] === str[ptr] && str[ptr] === str[ptr + 1]; + if (isMultiline) { + endPtr -= 2; + if (str[ptr += 2] === "\r") + ptr++; + if (str[ptr] === "\n") + ptr++; + } + let tmp = 0; + let isEscape; + let parsed = ""; + let sliceStart = ptr; + while (ptr < endPtr - 1) { + let c = str[ptr++]; + if (c === "\n" || c === "\r" && str[ptr] === "\n") { + if (!isMultiline) { + throw new TomlError("newlines are not allowed in strings", { + toml: str, + ptr: ptr - 1 + }); + } + } else if (c < " " && c !== " " || c === "\x7F") { + throw new TomlError("control characters are not allowed in strings", { + toml: str, + ptr: ptr - 1 + }); + } + if (isEscape) { + isEscape = false; + if (c === "u" || c === "U") { + let code = str.slice(ptr, ptr += c === "u" ? 
4 : 8); + if (!ESCAPE_REGEX.test(code)) { + throw new TomlError("invalid unicode escape", { + toml: str, + ptr: tmp + }); + } + try { + parsed += String.fromCodePoint(parseInt(code, 16)); + } catch { + throw new TomlError("invalid unicode escape", { + toml: str, + ptr: tmp + }); + } + } else if (isMultiline && (c === "\n" || c === " " || c === " " || c === "\r")) { + ptr = skipVoid(str, ptr - 1, true); + if (str[ptr] !== "\n" && str[ptr] !== "\r") { + throw new TomlError("invalid escape: only line-ending whitespace may be escaped", { + toml: str, + ptr: tmp + }); + } + ptr = skipVoid(str, ptr); + } else if (c in ESC_MAP) { + parsed += ESC_MAP[c]; + } else { + throw new TomlError("unrecognized escape sequence", { + toml: str, + ptr: tmp + }); + } + sliceStart = ptr; + } else if (!isLiteral && c === "\\") { + tmp = ptr - 1; + isEscape = true; + parsed += str.slice(sliceStart, tmp); + } + } + return parsed + str.slice(sliceStart, endPtr - 1); +} +function parseValue(value, toml, ptr, integersAsBigInt) { + if (value === "true") + return true; + if (value === "false") + return false; + if (value === "-inf") + return -Infinity; + if (value === "inf" || value === "+inf") + return Infinity; + if (value === "nan" || value === "+nan" || value === "-nan") + return NaN; + if (value === "-0") + return integersAsBigInt ? 
0n : 0; + let isInt = INT_REGEX.test(value); + if (isInt || FLOAT_REGEX.test(value)) { + if (LEADING_ZERO.test(value)) { + throw new TomlError("leading zeroes are not allowed", { + toml, + ptr + }); + } + value = value.replace(/_/g, ""); + let numeric = +value; + if (isNaN(numeric)) { + throw new TomlError("invalid number", { + toml, + ptr + }); + } + if (isInt) { + if ((isInt = !Number.isSafeInteger(numeric)) && !integersAsBigInt) { + throw new TomlError("integer value cannot be represented losslessly", { + toml, + ptr + }); + } + if (isInt || integersAsBigInt === true) + numeric = BigInt(value); + } + return numeric; + } + const date = new TomlDate(value); + if (!date.isValid()) { + throw new TomlError("invalid value", { + toml, + ptr + }); + } + return date; +} + +// dist/extract.js +function sliceAndTrimEndOf(str, startPtr, endPtr, allowNewLines) { + let value = str.slice(startPtr, endPtr); + let commentIdx = value.indexOf("#"); + if (commentIdx > -1) { + skipComment(str, commentIdx); + value = value.slice(0, commentIdx); + } + let trimmed = value.trimEnd(); + if (!allowNewLines) { + let newlineIdx = value.indexOf("\n", trimmed.length); + if (newlineIdx > -1) { + throw new TomlError("newlines are not allowed in inline tables", { + toml: str, + ptr: startPtr + newlineIdx + }); + } + } + return [trimmed, commentIdx]; +} +function extractValue(str, ptr, end, depth, integersAsBigInt) { + if (depth === 0) { + throw new TomlError("document contains excessively nested structures. aborting.", { + toml: str, + ptr + }); + } + let c = str[ptr]; + if (c === "[" || c === "{") { + let [value, endPtr2] = c === "[" ? parseArray(str, ptr, depth, integersAsBigInt) : parseInlineTable(str, ptr, depth, integersAsBigInt); + let newPtr = end ? 
skipUntil(str, endPtr2, ",", end) : endPtr2; + if (endPtr2 - newPtr && end === "}") { + let nextNewLine = indexOfNewline(str, endPtr2, newPtr); + if (nextNewLine > -1) { + throw new TomlError("newlines are not allowed in inline tables", { + toml: str, + ptr: nextNewLine + }); + } + } + return [value, newPtr]; + } + let endPtr; + if (c === '"' || c === "'") { + endPtr = getStringEnd(str, ptr); + let parsed = parseString(str, ptr, endPtr); + if (end) { + endPtr = skipVoid(str, endPtr, end !== "]"); + if (str[endPtr] && str[endPtr] !== "," && str[endPtr] !== end && str[endPtr] !== "\n" && str[endPtr] !== "\r") { + throw new TomlError("unexpected character encountered", { + toml: str, + ptr: endPtr + }); + } + endPtr += +(str[endPtr] === ","); + } + return [parsed, endPtr]; + } + endPtr = skipUntil(str, ptr, ",", end); + let slice = sliceAndTrimEndOf(str, ptr, endPtr - +(str[endPtr - 1] === ","), end === "]"); + if (!slice[0]) { + throw new TomlError("incomplete key-value declaration: no value specified", { + toml: str, + ptr + }); + } + if (end && slice[1] > -1) { + endPtr = skipVoid(str, ptr + slice[1]); + endPtr += +(str[endPtr] === ","); + } + return [ + parseValue(slice[0], str, ptr, integersAsBigInt), + endPtr + ]; +} + +// dist/struct.js +var KEY_PART_RE = /^[a-zA-Z0-9-_]+[ \t]*$/; +function parseKey(str, ptr, end = "=") { + let dot = ptr - 1; + let parsed = []; + let endPtr = str.indexOf(end, ptr); + if (endPtr < 0) { + throw new TomlError("incomplete key-value: cannot find end of key", { + toml: str, + ptr + }); + } + do { + let c = str[ptr = ++dot]; + if (c !== " " && c !== " ") { + if (c === '"' || c === "'") { + if (c === str[ptr + 1] && c === str[ptr + 2]) { + throw new TomlError("multiline strings are not allowed in keys", { + toml: str, + ptr + }); + } + let eos = getStringEnd(str, ptr); + if (eos < 0) { + throw new TomlError("unfinished string encountered", { + toml: str, + ptr + }); + } + dot = str.indexOf(".", eos); + let strEnd = str.slice(eos, dot < 
0 || dot > endPtr ? endPtr : dot); + let newLine = indexOfNewline(strEnd); + if (newLine > -1) { + throw new TomlError("newlines are not allowed in keys", { + toml: str, + ptr: ptr + dot + newLine + }); + } + if (strEnd.trimStart()) { + throw new TomlError("found extra tokens after the string part", { + toml: str, + ptr: eos + }); + } + if (endPtr < eos) { + endPtr = str.indexOf(end, eos); + if (endPtr < 0) { + throw new TomlError("incomplete key-value: cannot find end of key", { + toml: str, + ptr + }); + } + } + parsed.push(parseString(str, ptr, eos)); + } else { + dot = str.indexOf(".", ptr); + let part = str.slice(ptr, dot < 0 || dot > endPtr ? endPtr : dot); + if (!KEY_PART_RE.test(part)) { + throw new TomlError("only letter, numbers, dashes and underscores are allowed in keys", { + toml: str, + ptr + }); + } + parsed.push(part.trimEnd()); + } + } + } while (dot + 1 && dot < endPtr); + return [parsed, skipVoid(str, endPtr + 1, true, true)]; +} +function parseInlineTable(str, ptr, depth, integersAsBigInt) { + let res = {}; + let seen = /* @__PURE__ */ new Set(); + let c; + let comma = 0; + ptr++; + while ((c = str[ptr++]) !== "}" && c) { + let err = { toml: str, ptr: ptr - 1 }; + if (c === "\n") { + throw new TomlError("newlines are not allowed in inline tables", err); + } else if (c === "#") { + throw new TomlError("inline tables cannot contain comments", err); + } else if (c === ",") { + throw new TomlError("expected key-value, found comma", err); + } else if (c !== " " && c !== " ") { + let k; + let t = res; + let hasOwn = false; + let [key, keyEndPtr] = parseKey(str, ptr - 1); + for (let i = 0; i < key.length; i++) { + if (i) + t = hasOwn ? 
t[k] : t[k] = {}; + k = key[i]; + if ((hasOwn = Object.hasOwn(t, k)) && (typeof t[k] !== "object" || seen.has(t[k]))) { + throw new TomlError("trying to redefine an already defined value", { + toml: str, + ptr + }); + } + if (!hasOwn && k === "__proto__") { + Object.defineProperty(t, k, { enumerable: true, configurable: true, writable: true }); + } + } + if (hasOwn) { + throw new TomlError("trying to redefine an already defined value", { + toml: str, + ptr + }); + } + let [value, valueEndPtr] = extractValue(str, keyEndPtr, "}", depth - 1, integersAsBigInt); + seen.add(value); + t[k] = value; + ptr = valueEndPtr; + comma = str[ptr - 1] === "," ? ptr - 1 : 0; + } + } + if (comma) { + throw new TomlError("trailing commas are not allowed in inline tables", { + toml: str, + ptr: comma + }); + } + if (!c) { + throw new TomlError("unfinished table encountered", { + toml: str, + ptr + }); + } + return [res, ptr]; +} +function parseArray(str, ptr, depth, integersAsBigInt) { + let res = []; + let c; + ptr++; + while ((c = str[ptr++]) !== "]" && c) { + if (c === ",") { + throw new TomlError("expected value, found comma", { + toml: str, + ptr: ptr - 1 + }); + } else if (c === "#") + ptr = skipComment(str, ptr); + else if (c !== " " && c !== " " && c !== "\n" && c !== "\r") { + let e = extractValue(str, ptr - 1, "]", depth - 1, integersAsBigInt); + res.push(e[0]); + ptr = e[1]; + } + } + if (!c) { + throw new TomlError("unfinished array encountered", { + toml: str, + ptr + }); + } + return [res, ptr]; +} + +// dist/parse.js +function peekTable(key, table, meta, type) { + let t = table; + let m = meta; + let k; + let hasOwn = false; + let state; + for (let i = 0; i < key.length; i++) { + if (i) { + t = hasOwn ? 
t[k] : t[k] = {}; + m = (state = m[k]).c; + if (type === 0 && (state.t === 1 || state.t === 2)) { + return null; + } + if (state.t === 2) { + let l = t.length - 1; + t = t[l]; + m = m[l].c; + } + } + k = key[i]; + if ((hasOwn = Object.hasOwn(t, k)) && m[k]?.t === 0 && m[k]?.d) { + return null; + } + if (!hasOwn) { + if (k === "__proto__") { + Object.defineProperty(t, k, { enumerable: true, configurable: true, writable: true }); + Object.defineProperty(m, k, { enumerable: true, configurable: true, writable: true }); + } + m[k] = { + t: i < key.length - 1 && type === 2 ? 3 : type, + d: false, + i: 0, + c: {} + }; + } + } + state = m[k]; + if (state.t !== type && !(type === 1 && state.t === 3)) { + return null; + } + if (type === 2) { + if (!state.d) { + state.d = true; + t[k] = []; + } + t[k].push(t = {}); + state.c[state.i++] = state = { t: 1, d: false, i: 0, c: {} }; + } + if (state.d) { + return null; + } + state.d = true; + if (type === 1) { + t = hasOwn ? t[k] : t[k] = {}; + } else if (type === 0 && hasOwn) { + return null; + } + return [k, t, state.c]; +} +function parse(toml, { maxDepth = 1e3, integersAsBigInt } = {}) { + let res = {}; + let meta = {}; + let tbl = res; + let m = meta; + for (let ptr = skipVoid(toml, 0); ptr < toml.length; ) { + if (toml[ptr] === "[") { + let isTableArray = toml[++ptr] === "["; + let k = parseKey(toml, ptr += +isTableArray, "]"); + if (isTableArray) { + if (toml[k[1] - 1] !== "]") { + throw new TomlError("expected end of table declaration", { + toml, + ptr: k[1] - 1 + }); + } + k[1]++; + } + let p = peekTable( + k[0], + res, + meta, + isTableArray ? 
2 : 1 + /* Type.EXPLICIT */ + ); + if (!p) { + throw new TomlError("trying to redefine an already defined table or value", { + toml, + ptr + }); + } + m = p[2]; + tbl = p[1]; + ptr = k[1]; + } else { + let k = parseKey(toml, ptr); + let p = peekTable( + k[0], + tbl, + m, + 0 + /* Type.DOTTED */ + ); + if (!p) { + throw new TomlError("trying to redefine an already defined table or value", { + toml, + ptr + }); + } + let v = extractValue(toml, k[1], void 0, maxDepth, integersAsBigInt); + p[1][p[0]] = v[0]; + ptr = v[1]; + } + ptr = skipVoid(toml, ptr, true); + if (toml[ptr] && toml[ptr] !== "\n" && toml[ptr] !== "\r") { + throw new TomlError("each key-value declaration must be followed by an end-of-line", { + toml, + ptr + }); + } + ptr = skipVoid(toml, ptr); + } + return res; +} + +// dist/stringify.js +var BARE_KEY = /^[a-z0-9-_]+$/i; +function extendedTypeOf(obj) { + let type = typeof obj; + if (type === "object") { + if (Array.isArray(obj)) + return "array"; + if (obj instanceof Date) + return "date"; + } + return type; +} +function isArrayOfTables(obj) { + for (let i = 0; i < obj.length; i++) { + if (extendedTypeOf(obj[i]) !== "object") + return false; + } + return obj.length != 0; +} +function formatString(s) { + return JSON.stringify(s).replace(/\x7f/g, "\\u007f"); +} +function stringifyValue(val, type, depth, numberAsFloat) { + if (depth === 0) { + throw new Error("Could not stringify the object: maximum object depth exceeded"); + } + if (type === "number") { + if (isNaN(val)) + return "nan"; + if (val === Infinity) + return "inf"; + if (val === -Infinity) + return "-inf"; + if (numberAsFloat && Number.isInteger(val)) + return val.toFixed(1); + return val.toString(); + } + if (type === "bigint" || type === "boolean") { + return val.toString(); + } + if (type === "string") { + return formatString(val); + } + if (type === "date") { + if (isNaN(val.getTime())) { + throw new TypeError("cannot serialize invalid date"); + } + return val.toISOString(); + } + if 
(type === "object") { + return stringifyInlineTable(val, depth, numberAsFloat); + } + if (type === "array") { + return stringifyArray(val, depth, numberAsFloat); + } +} +function stringifyInlineTable(obj, depth, numberAsFloat) { + let keys = Object.keys(obj); + if (keys.length === 0) + return "{}"; + let res = "{ "; + for (let i = 0; i < keys.length; i++) { + let k = keys[i]; + if (i) + res += ", "; + res += BARE_KEY.test(k) ? k : formatString(k); + res += " = "; + res += stringifyValue(obj[k], extendedTypeOf(obj[k]), depth - 1, numberAsFloat); + } + return res + " }"; +} +function stringifyArray(array, depth, numberAsFloat) { + if (array.length === 0) + return "[]"; + let res = "[ "; + for (let i = 0; i < array.length; i++) { + if (i) + res += ", "; + if (array[i] === null || array[i] === void 0) { + throw new TypeError("arrays cannot contain null or undefined values"); + } + res += stringifyValue(array[i], extendedTypeOf(array[i]), depth - 1, numberAsFloat); + } + return res + " ]"; +} +function stringifyArrayTable(array, key, depth, numberAsFloat) { + if (depth === 0) { + throw new Error("Could not stringify the object: maximum object depth exceeded"); + } + let res = ""; + for (let i = 0; i < array.length; i++) { + res += `[[${key}]] +`; + res += stringifyTable(array[i], key, depth, numberAsFloat); + res += "\n\n"; + } + return res; +} +function stringifyTable(obj, prefix, depth, numberAsFloat) { + if (depth === 0) { + throw new Error("Could not stringify the object: maximum object depth exceeded"); + } + let preamble = ""; + let tables = ""; + let keys = Object.keys(obj); + for (let i = 0; i < keys.length; i++) { + let k = keys[i]; + if (obj[k] !== null && obj[k] !== void 0) { + let type = extendedTypeOf(obj[k]); + if (type === "symbol" || type === "function") { + throw new TypeError(`cannot serialize values of type '${type}'`); + } + let key = BARE_KEY.test(k) ? 
k : formatString(k); + if (type === "array" && isArrayOfTables(obj[k])) { + tables += stringifyArrayTable(obj[k], prefix ? `${prefix}.${key}` : key, depth - 1, numberAsFloat); + } else if (type === "object") { + let tblKey = prefix ? `${prefix}.${key}` : key; + tables += `[${tblKey}] +`; + tables += stringifyTable(obj[k], tblKey, depth - 1, numberAsFloat); + tables += "\n\n"; + } else { + preamble += key; + preamble += " = "; + preamble += stringifyValue(obj[k], type, depth, numberAsFloat); + preamble += "\n"; + } + } + } + return `${preamble} +${tables}`.trim(); +} +function stringify(obj, { maxDepth = 1e3, numbersAsFloat = false } = {}) { + if (extendedTypeOf(obj) !== "object") { + throw new TypeError("stringify can only be called with an object"); + } + return stringifyTable(obj, "", maxDepth, numbersAsFloat); +} + +// dist/index.js +var index_default = { parse, stringify, TomlDate, TomlError }; +// Annotate the CommonJS export names for ESM import in node: +0 && (0); +/*! + * Copyright (c) Squirrel Chat et al., All rights reserved. + * SPDX-License-Identifier: BSD-3-Clause + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions are met: + * + * 1. Redistributions of source code must retain the above copyright notice, this + * list of conditions and the following disclaimer. + * 2. Redistributions in binary form must reproduce the above copyright notice, + * this list of conditions and the following disclaimer in the + * documentation and/or other materials provided with the distribution. + * 3. Neither the name of the copyright holder nor the names of its contributors + * may be used to endorse or promote products derived from this software without + * specific prior written permission. 
+ * + * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND + * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE + * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL + * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR + * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER + * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE + * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + */ + + /***/ }), /***/ 64012: @@ -147409,2037 +149374,17 @@ module.exports = /*#__PURE__*/JSON.parse('[[[0,44],"disallowed_STD3_valid"],[[45 /******/ } /******/ /************************************************************************/ -/******/ /* webpack/runtime/compat get default export */ -/******/ (() => { -/******/ // getDefaultExport function for compatibility with non-harmony modules -/******/ __nccwpck_require__.n = (module) => { -/******/ var getter = module && module.__esModule ? 
-/******/ () => (module['default']) : -/******/ () => (module); -/******/ __nccwpck_require__.d(getter, { a: getter }); -/******/ return getter; -/******/ }; -/******/ })(); -/******/ -/******/ /* webpack/runtime/define property getters */ -/******/ (() => { -/******/ // define getter functions for harmony exports -/******/ __nccwpck_require__.d = (exports, definition) => { -/******/ for(var key in definition) { -/******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) { -/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] }); -/******/ } -/******/ } -/******/ }; -/******/ })(); -/******/ -/******/ /* webpack/runtime/hasOwnProperty shorthand */ -/******/ (() => { -/******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop)) -/******/ })(); -/******/ /******/ /* webpack/runtime/compat */ /******/ /******/ if (typeof __nccwpck_require__ !== 'undefined') __nccwpck_require__.ab = __dirname + "/"; /******/ /************************************************************************/ -var __webpack_exports__ = {}; -// This entry need to be wrapped in an IIFE because it need to be in strict mode. 
-(() => { -"use strict"; - -// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js -var core = __nccwpck_require__(37484); -// EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js -var exec = __nccwpck_require__(95236); -// EXTERNAL MODULE: ./node_modules/@actions/io/lib/io.js -var io = __nccwpck_require__(94994); -// EXTERNAL MODULE: external "fs" -var external_fs_ = __nccwpck_require__(79896); -var external_fs_default = /*#__PURE__*/__nccwpck_require__.n(external_fs_); -// EXTERNAL MODULE: external "path" -var external_path_ = __nccwpck_require__(16928); -var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_); -// EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js -var glob = __nccwpck_require__(47206); -// EXTERNAL MODULE: external "crypto" -var external_crypto_ = __nccwpck_require__(76982); -var external_crypto_default = /*#__PURE__*/__nccwpck_require__.n(external_crypto_); -// EXTERNAL MODULE: external "fs/promises" -var promises_ = __nccwpck_require__(91943); -var promises_default = /*#__PURE__*/__nccwpck_require__.n(promises_); -// EXTERNAL MODULE: external "os" -var external_os_ = __nccwpck_require__(70857); -var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_); -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/error.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. 
Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -function getLineColFromPtr(string, ptr) { - let lines = string.slice(0, ptr).split(/\r\n|\n|\r/g); - return [lines.length, lines.pop().length + 1]; -} -function makeCodeBlock(string, line, column) { - let lines = string.split(/\r\n|\n|\r/g); - let codeblock = ''; - let numberLen = (Math.log10(line + 1) | 0) + 1; - for (let i = line - 1; i <= line + 1; i++) { - let l = lines[i - 1]; - if (!l) - continue; - codeblock += i.toString().padEnd(numberLen, ' '); - codeblock += ': '; - codeblock += l; - codeblock += '\n'; - if (i === line) { - codeblock += ' '.repeat(numberLen + column + 2); - codeblock += '^\n'; - } - } - return codeblock; -} -class TomlError extends Error { - line; - column; - codeblock; - constructor(message, options) { - const [line, column] = getLineColFromPtr(options.toml, options.ptr); - const codeblock = makeCodeBlock(options.toml, line, column); - super(`Invalid TOML document: ${message}\n\n${codeblock}`, options); - this.line = line; - this.column = column; - 
this.codeblock = codeblock; - } -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/util.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
- */ - -function isEscaped(str, ptr) { - let i = 0; - while (str[ptr - ++i] === '\\') - ; - return --i && (i % 2); -} -function indexOfNewline(str, start = 0, end = str.length) { - let idx = str.indexOf('\n', start); - if (str[idx - 1] === '\r') - idx--; - return idx <= end ? idx : -1; -} -function skipComment(str, ptr) { - for (let i = ptr; i < str.length; i++) { - let c = str[i]; - if (c === '\n') - return i; - if (c === '\r' && str[i + 1] === '\n') - return i + 1; - if ((c < '\x20' && c !== '\t') || c === '\x7f') { - throw new TomlError('control characters are not allowed in comments', { - toml: str, - ptr: ptr, - }); - } - } - return str.length; -} -function skipVoid(str, ptr, banNewLines, banComments) { - let c; - while ((c = str[ptr]) === ' ' || c === '\t' || (!banNewLines && (c === '\n' || c === '\r' && str[ptr + 1] === '\n'))) - ptr++; - return banComments || c !== '#' - ? ptr - : skipVoid(str, skipComment(str, ptr), banNewLines); -} -function skipUntil(str, ptr, sep, end, banNewLines = false) { - if (!end) { - ptr = indexOfNewline(str, ptr); - return ptr < 0 ? str.length : ptr; - } - for (let i = ptr; i < str.length; i++) { - let c = str[i]; - if (c === '#') { - i = indexOfNewline(str, i); - } - else if (c === sep) { - return i + 1; - } - else if (c === end || (banNewLines && (c === '\n' || (c === '\r' && str[i + 1] === '\n')))) { - return i; - } - } - throw new TomlError('cannot find end of structure', { - toml: str, - ptr: ptr - }); -} -function getStringEnd(str, seek) { - let first = str[seek]; - let target = first === str[seek + 1] && str[seek + 1] === str[seek + 2] - ? 
str.slice(seek, seek + 3) - : first; - seek += target.length - 1; - do - seek = str.indexOf(target, ++seek); - while (seek > -1 && first !== "'" && isEscaped(str, seek)); - if (seek > -1) { - seek += target.length; - if (target.length > 1) { - if (str[seek] === first) - seek++; - if (str[seek] === first) - seek++; - } - } - return seek; -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/date.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -let DATE_TIME_RE = /^(\d{4}-\d{2}-\d{2})?[T ]?(?:(\d{2}):\d{2}:\d{2}(?:\.\d+)?)?(Z|[-+]\d{2}:\d{2})?$/i; -class TomlDate extends Date { - #hasDate = false; - #hasTime = false; - #offset = null; - constructor(date) { - let hasDate = true; - let hasTime = true; - let offset = 'Z'; - if (typeof date === 'string') { - let match = date.match(DATE_TIME_RE); - if (match) { - if (!match[1]) { - hasDate = false; - date = `0000-01-01T${date}`; - } - hasTime = !!match[2]; - // Make sure to use T instead of a space. Breaks in case of extreme values otherwise. - hasTime && date[10] === ' ' && (date = date.replace(' ', 'T')); - // Do not allow rollover hours. 
- if (match[2] && +match[2] > 23) { - date = ''; - } - else { - offset = match[3] || null; - date = date.toUpperCase(); - if (!offset && hasTime) - date += 'Z'; - } - } - else { - date = ''; - } - } - super(date); - if (!isNaN(this.getTime())) { - this.#hasDate = hasDate; - this.#hasTime = hasTime; - this.#offset = offset; - } - } - isDateTime() { - return this.#hasDate && this.#hasTime; - } - isLocal() { - return !this.#hasDate || !this.#hasTime || !this.#offset; - } - isDate() { - return this.#hasDate && !this.#hasTime; - } - isTime() { - return this.#hasTime && !this.#hasDate; - } - isValid() { - return this.#hasDate || this.#hasTime; - } - toISOString() { - let iso = super.toISOString(); - // Local Date - if (this.isDate()) - return iso.slice(0, 10); - // Local Time - if (this.isTime()) - return iso.slice(11, 23); - // Local DateTime - if (this.#offset === null) - return iso.slice(0, -1); - // Offset DateTime - if (this.#offset === 'Z') - return iso; - // This part is quite annoying: JS strips the original timezone from the ISO string representation - // Instead of using a "modified" date and "Z", we restore the representation "as authored" - let offset = (+(this.#offset.slice(1, 3)) * 60) + +(this.#offset.slice(4, 6)); - offset = this.#offset[0] === '-' ? 
offset : -offset; - let offsetDate = new Date(this.getTime() - (offset * 60e3)); - return offsetDate.toISOString().slice(0, -1) + this.#offset; - } - static wrapAsOffsetDateTime(jsDate, offset = 'Z') { - let date = new TomlDate(jsDate); - date.#offset = offset; - return date; - } - static wrapAsLocalDateTime(jsDate) { - let date = new TomlDate(jsDate); - date.#offset = null; - return date; - } - static wrapAsLocalDate(jsDate) { - let date = new TomlDate(jsDate); - date.#hasTime = false; - date.#offset = null; - return date; - } - static wrapAsLocalTime(jsDate) { - let date = new TomlDate(jsDate); - date.#hasDate = false; - date.#offset = null; - return date; - } -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/primitive.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - - -let INT_REGEX = /^((0x[0-9a-fA-F](_?[0-9a-fA-F])*)|(([+-]|0[ob])?\d(_?\d)*))$/; -let FLOAT_REGEX = /^[+-]?\d(_?\d)*(\.\d(_?\d)*)?([eE][+-]?\d(_?\d)*)?$/; -let LEADING_ZERO = /^[+-]?0[0-9_]/; -let ESCAPE_REGEX = /^[0-9a-f]{4,8}$/i; -let ESC_MAP = { - b: '\b', - t: '\t', - n: '\n', - f: '\f', - r: '\r', - '"': '"', - '\\': '\\', -}; -function parseString(str, ptr = 0, endPtr = str.length) { - let isLiteral = str[ptr] === '\''; - let isMultiline = str[ptr++] === str[ptr] && str[ptr] === str[ptr + 1]; - if (isMultiline) { - endPtr -= 2; - if (str[ptr += 2] === '\r') - ptr++; - if (str[ptr] === '\n') - ptr++; - } - let tmp = 0; - let isEscape; - let parsed = ''; - let sliceStart = ptr; - while (ptr < endPtr - 1) { - let c = str[ptr++]; - if (c === '\n' || (c === '\r' && str[ptr] === '\n')) { - if (!isMultiline) { - throw new TomlError('newlines are not allowed in strings', { - toml: str, - ptr: ptr - 1, - }); - } - } - else if ((c < '\x20' && c !== '\t') || c === '\x7f') { - throw new TomlError('control characters are not allowed in strings', { - toml: str, - ptr: ptr - 1, - }); - } - if (isEscape) { - isEscape = false; - if (c === 'u' || c === 'U') { - // Unicode escape - let code = str.slice(ptr, (ptr += (c === 'u' ? 
4 : 8))); - if (!ESCAPE_REGEX.test(code)) { - throw new TomlError('invalid unicode escape', { - toml: str, - ptr: tmp, - }); - } - try { - parsed += String.fromCodePoint(parseInt(code, 16)); - } - catch { - throw new TomlError('invalid unicode escape', { - toml: str, - ptr: tmp, - }); - } - } - else if (isMultiline && (c === '\n' || c === ' ' || c === '\t' || c === '\r')) { - // Multiline escape - ptr = skipVoid(str, ptr - 1, true); - if (str[ptr] !== '\n' && str[ptr] !== '\r') { - throw new TomlError('invalid escape: only line-ending whitespace may be escaped', { - toml: str, - ptr: tmp, - }); - } - ptr = skipVoid(str, ptr); - } - else if (c in ESC_MAP) { - // Classic escape - parsed += ESC_MAP[c]; - } - else { - throw new TomlError('unrecognized escape sequence', { - toml: str, - ptr: tmp, - }); - } - sliceStart = ptr; - } - else if (!isLiteral && c === '\\') { - tmp = ptr - 1; - isEscape = true; - parsed += str.slice(sliceStart, tmp); - } - } - return parsed + str.slice(sliceStart, endPtr - 1); -} -function parseValue(value, toml, ptr, integersAsBigInt) { - // Constant values - if (value === 'true') - return true; - if (value === 'false') - return false; - if (value === '-inf') - return -Infinity; - if (value === 'inf' || value === '+inf') - return Infinity; - if (value === 'nan' || value === '+nan' || value === '-nan') - return NaN; - // Avoid FP representation of -0 - if (value === '-0') - return integersAsBigInt ? 
0n : 0; - // Numbers - let isInt = INT_REGEX.test(value); - if (isInt || FLOAT_REGEX.test(value)) { - if (LEADING_ZERO.test(value)) { - throw new TomlError('leading zeroes are not allowed', { - toml: toml, - ptr: ptr, - }); - } - value = value.replace(/_/g, ''); - let numeric = +value; - if (isNaN(numeric)) { - throw new TomlError('invalid number', { - toml: toml, - ptr: ptr, - }); - } - if (isInt) { - if ((isInt = !Number.isSafeInteger(numeric)) && !integersAsBigInt) { - throw new TomlError('integer value cannot be represented losslessly', { - toml: toml, - ptr: ptr, - }); - } - if (isInt || integersAsBigInt === true) - numeric = BigInt(value); - } - return numeric; - } - const date = new TomlDate(value); - if (!date.isValid()) { - throw new TomlError('invalid value', { - toml: toml, - ptr: ptr, - }); - } - return date; -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/extract.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - - - -function sliceAndTrimEndOf(str, startPtr, endPtr, allowNewLines) { - let value = str.slice(startPtr, endPtr); - let commentIdx = value.indexOf('#'); - if (commentIdx > -1) { - // The call to skipComment allows to "validate" the comment - // (absence of control characters) - skipComment(str, commentIdx); - value = value.slice(0, commentIdx); - } - let trimmed = value.trimEnd(); - if (!allowNewLines) { - let newlineIdx = value.indexOf('\n', trimmed.length); - if (newlineIdx > -1) { - throw new TomlError('newlines are not allowed in inline tables', { - toml: str, - ptr: startPtr + newlineIdx - }); - } - } - return [trimmed, commentIdx]; -} -function extractValue(str, ptr, end, depth, integersAsBigInt) { - if (depth === 0) { - throw new TomlError('document contains excessively nested structures. aborting.', { - toml: str, - ptr: ptr - }); - } - let c = str[ptr]; - if (c === '[' || c === '{') { - let [value, endPtr] = c === '[' - ? parseArray(str, ptr, depth, integersAsBigInt) - : parseInlineTable(str, ptr, depth, integersAsBigInt); - let newPtr = end ? 
skipUntil(str, endPtr, ',', end) : endPtr; - if (endPtr - newPtr && end === '}') { - let nextNewLine = indexOfNewline(str, endPtr, newPtr); - if (nextNewLine > -1) { - throw new TomlError('newlines are not allowed in inline tables', { - toml: str, - ptr: nextNewLine - }); - } - } - return [value, newPtr]; - } - let endPtr; - if (c === '"' || c === "'") { - endPtr = getStringEnd(str, ptr); - let parsed = parseString(str, ptr, endPtr); - if (end) { - endPtr = skipVoid(str, endPtr, end !== ']'); - if (str[endPtr] && str[endPtr] !== ',' && str[endPtr] !== end && str[endPtr] !== '\n' && str[endPtr] !== '\r') { - throw new TomlError('unexpected character encountered', { - toml: str, - ptr: endPtr, - }); - } - endPtr += (+(str[endPtr] === ',')); - } - return [parsed, endPtr]; - } - endPtr = skipUntil(str, ptr, ',', end); - let slice = sliceAndTrimEndOf(str, ptr, endPtr - (+(str[endPtr - 1] === ',')), end === ']'); - if (!slice[0]) { - throw new TomlError('incomplete key-value declaration: no value specified', { - toml: str, - ptr: ptr - }); - } - if (end && slice[1] > -1) { - endPtr = skipVoid(str, ptr + slice[1]); - endPtr += +(str[endPtr] === ','); - } - return [ - parseValue(slice[0], str, ptr, integersAsBigInt), - endPtr, - ]; -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/struct.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. 
Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - - - -let KEY_PART_RE = /^[a-zA-Z0-9-_]+[ \t]*$/; -function parseKey(str, ptr, end = '=') { - let dot = ptr - 1; - let parsed = []; - let endPtr = str.indexOf(end, ptr); - if (endPtr < 0) { - throw new TomlError('incomplete key-value: cannot find end of key', { - toml: str, - ptr: ptr, - }); - } - do { - let c = str[ptr = ++dot]; - // If it's whitespace, ignore - if (c !== ' ' && c !== '\t') { - // If it's a string - if (c === '"' || c === '\'') { - if (c === str[ptr + 1] && c === str[ptr + 2]) { - throw new TomlError('multiline strings are not allowed in keys', { - toml: str, - ptr: ptr, - }); - } - let eos = getStringEnd(str, ptr); - if (eos < 0) { - throw new TomlError('unfinished string encountered', { - toml: str, - ptr: ptr, - }); - } - dot = str.indexOf('.', eos); - let strEnd = str.slice(eos, dot < 0 || dot > endPtr ? 
endPtr : dot); - let newLine = indexOfNewline(strEnd); - if (newLine > -1) { - throw new TomlError('newlines are not allowed in keys', { - toml: str, - ptr: ptr + dot + newLine, - }); - } - if (strEnd.trimStart()) { - throw new TomlError('found extra tokens after the string part', { - toml: str, - ptr: eos, - }); - } - if (endPtr < eos) { - endPtr = str.indexOf(end, eos); - if (endPtr < 0) { - throw new TomlError('incomplete key-value: cannot find end of key', { - toml: str, - ptr: ptr, - }); - } - } - parsed.push(parseString(str, ptr, eos)); - } - else { - // Normal raw key part consumption and validation - dot = str.indexOf('.', ptr); - let part = str.slice(ptr, dot < 0 || dot > endPtr ? endPtr : dot); - if (!KEY_PART_RE.test(part)) { - throw new TomlError('only letter, numbers, dashes and underscores are allowed in keys', { - toml: str, - ptr: ptr, - }); - } - parsed.push(part.trimEnd()); - } - } - // Until there's no more dot - } while (dot + 1 && dot < endPtr); - return [parsed, skipVoid(str, endPtr + 1, true, true)]; -} -function parseInlineTable(str, ptr, depth, integersAsBigInt) { - let res = {}; - let seen = new Set(); - let c; - let comma = 0; - ptr++; - while ((c = str[ptr++]) !== '}' && c) { - let err = { toml: str, ptr: ptr - 1 }; - if (c === '\n') { - throw new TomlError('newlines are not allowed in inline tables', err); - } - else if (c === '#') { - throw new TomlError('inline tables cannot contain comments', err); - } - else if (c === ',') { - throw new TomlError('expected key-value, found comma', err); - } - else if (c !== ' ' && c !== '\t') { - let k; - let t = res; - let hasOwn = false; - let [key, keyEndPtr] = parseKey(str, ptr - 1); - for (let i = 0; i < key.length; i++) { - if (i) - t = hasOwn ? 
t[k] : (t[k] = {}); - k = key[i]; - if ((hasOwn = Object.hasOwn(t, k)) && (typeof t[k] !== 'object' || seen.has(t[k]))) { - throw new TomlError('trying to redefine an already defined value', { - toml: str, - ptr: ptr, - }); - } - if (!hasOwn && k === '__proto__') { - Object.defineProperty(t, k, { enumerable: true, configurable: true, writable: true }); - } - } - if (hasOwn) { - throw new TomlError('trying to redefine an already defined value', { - toml: str, - ptr: ptr, - }); - } - let [value, valueEndPtr] = extractValue(str, keyEndPtr, '}', depth - 1, integersAsBigInt); - seen.add(value); - t[k] = value; - ptr = valueEndPtr; - comma = str[ptr - 1] === ',' ? ptr - 1 : 0; - } - } - if (comma) { - throw new TomlError('trailing commas are not allowed in inline tables', { - toml: str, - ptr: comma, - }); - } - if (!c) { - throw new TomlError('unfinished table encountered', { - toml: str, - ptr: ptr, - }); - } - return [res, ptr]; -} -function parseArray(str, ptr, depth, integersAsBigInt) { - let res = []; - let c; - ptr++; - while ((c = str[ptr++]) !== ']' && c) { - if (c === ',') { - throw new TomlError('expected value, found comma', { - toml: str, - ptr: ptr - 1, - }); - } - else if (c === '#') - ptr = skipComment(str, ptr); - else if (c !== ' ' && c !== '\t' && c !== '\n' && c !== '\r') { - let e = extractValue(str, ptr - 1, ']', depth - 1, integersAsBigInt); - res.push(e[0]); - ptr = e[1]; - } - } - if (!c) { - throw new TomlError('unfinished array encountered', { - toml: str, - ptr: ptr, - }); - } - return [res, ptr]; -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/parse.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. 
Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - - - -function peekTable(key, table, meta, type) { - let t = table; - let m = meta; - let k; - let hasOwn = false; - let state; - for (let i = 0; i < key.length; i++) { - if (i) { - t = hasOwn ? 
t[k] : (t[k] = {}); - m = (state = m[k]).c; - if (type === 0 /* Type.DOTTED */ && (state.t === 1 /* Type.EXPLICIT */ || state.t === 2 /* Type.ARRAY */)) { - return null; - } - if (state.t === 2 /* Type.ARRAY */) { - let l = t.length - 1; - t = t[l]; - m = m[l].c; - } - } - k = key[i]; - if ((hasOwn = Object.hasOwn(t, k)) && m[k]?.t === 0 /* Type.DOTTED */ && m[k]?.d) { - return null; - } - if (!hasOwn) { - if (k === '__proto__') { - Object.defineProperty(t, k, { enumerable: true, configurable: true, writable: true }); - Object.defineProperty(m, k, { enumerable: true, configurable: true, writable: true }); - } - m[k] = { - t: i < key.length - 1 && type === 2 /* Type.ARRAY */ - ? 3 /* Type.ARRAY_DOTTED */ - : type, - d: false, - i: 0, - c: {}, - }; - } - } - state = m[k]; - if (state.t !== type && !(type === 1 /* Type.EXPLICIT */ && state.t === 3 /* Type.ARRAY_DOTTED */)) { - // Bad key type! - return null; - } - if (type === 2 /* Type.ARRAY */) { - if (!state.d) { - state.d = true; - t[k] = []; - } - t[k].push(t = {}); - state.c[state.i++] = (state = { t: 1 /* Type.EXPLICIT */, d: false, i: 0, c: {} }); - } - if (state.d) { - // Redefining a table! - return null; - } - state.d = true; - if (type === 1 /* Type.EXPLICIT */) { - t = hasOwn ? t[k] : (t[k] = {}); - } - else if (type === 0 /* Type.DOTTED */ && hasOwn) { - return null; - } - return [k, t, state.c]; -} -function parse(toml, { maxDepth = 1000, integersAsBigInt } = {}) { - let res = {}; - let meta = {}; - let tbl = res; - let m = meta; - for (let ptr = skipVoid(toml, 0); ptr < toml.length;) { - if (toml[ptr] === '[') { - let isTableArray = toml[++ptr] === '['; - let k = parseKey(toml, ptr += +isTableArray, ']'); - if (isTableArray) { - if (toml[k[1] - 1] !== ']') { - throw new TomlError('expected end of table declaration', { - toml: toml, - ptr: k[1] - 1, - }); - } - k[1]++; - } - let p = peekTable(k[0], res, meta, isTableArray ? 
2 /* Type.ARRAY */ : 1 /* Type.EXPLICIT */); - if (!p) { - throw new TomlError('trying to redefine an already defined table or value', { - toml: toml, - ptr: ptr, - }); - } - m = p[2]; - tbl = p[1]; - ptr = k[1]; - } - else { - let k = parseKey(toml, ptr); - let p = peekTable(k[0], tbl, m, 0 /* Type.DOTTED */); - if (!p) { - throw new TomlError('trying to redefine an already defined table or value', { - toml: toml, - ptr: ptr, - }); - } - let v = extractValue(toml, k[1], void 0, maxDepth, integersAsBigInt); - p[1][p[0]] = v[0]; - ptr = v[1]; - } - ptr = skipVoid(toml, ptr, true); - if (toml[ptr] && toml[ptr] !== '\n' && toml[ptr] !== '\r') { - throw new TomlError('each key-value declaration must be followed by an end-of-line', { - toml: toml, - ptr: ptr - }); - } - ptr = skipVoid(toml, ptr); - } - return res; -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/stringify.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ -let BARE_KEY = /^[a-z0-9-_]+$/i; -function extendedTypeOf(obj) { - let type = typeof obj; - if (type === 'object') { - if (Array.isArray(obj)) - return 'array'; - if (obj instanceof Date) - return 'date'; - } - return type; -} -function isArrayOfTables(obj) { - for (let i = 0; i < obj.length; i++) { - if (extendedTypeOf(obj[i]) !== 'object') - return false; - } - return obj.length != 0; -} -function formatString(s) { - return JSON.stringify(s).replace(/\x7f/g, '\\u007f'); -} -function stringifyValue(val, type, depth, numberAsFloat) { - if (depth === 0) { - throw new Error('Could not stringify the object: maximum object depth exceeded'); - } - if (type === 'number') { - if (isNaN(val)) - return 'nan'; - if (val === Infinity) - return 'inf'; - if (val === -Infinity) - return '-inf'; - if (numberAsFloat && Number.isInteger(val)) - return val.toFixed(1); - return val.toString(); - } - if (type === 'bigint' || type === 'boolean') { - return val.toString(); - } - if (type === 'string') { - return formatString(val); - } - if (type === 'date') { - if (isNaN(val.getTime())) { - throw new TypeError('cannot serialize invalid date'); - } - return val.toISOString(); - } - if (type === 'object') { - return stringifyInlineTable(val, depth, numberAsFloat); - } - if (type === 'array') { - return stringifyArray(val, depth, numberAsFloat); - } -} -function stringifyInlineTable(obj, depth, numberAsFloat) { - let keys = Object.keys(obj); - if 
(keys.length === 0) - return '{}'; - let res = '{ '; - for (let i = 0; i < keys.length; i++) { - let k = keys[i]; - if (i) - res += ', '; - res += BARE_KEY.test(k) ? k : formatString(k); - res += ' = '; - res += stringifyValue(obj[k], extendedTypeOf(obj[k]), depth - 1, numberAsFloat); - } - return res + ' }'; -} -function stringifyArray(array, depth, numberAsFloat) { - if (array.length === 0) - return '[]'; - let res = '[ '; - for (let i = 0; i < array.length; i++) { - if (i) - res += ', '; - if (array[i] === null || array[i] === void 0) { - throw new TypeError('arrays cannot contain null or undefined values'); - } - res += stringifyValue(array[i], extendedTypeOf(array[i]), depth - 1, numberAsFloat); - } - return res + ' ]'; -} -function stringifyArrayTable(array, key, depth, numberAsFloat) { - if (depth === 0) { - throw new Error('Could not stringify the object: maximum object depth exceeded'); - } - let res = ''; - for (let i = 0; i < array.length; i++) { - res += `[[${key}]]\n`; - res += stringifyTable(array[i], key, depth, numberAsFloat); - res += '\n\n'; - } - return res; -} -function stringifyTable(obj, prefix, depth, numberAsFloat) { - if (depth === 0) { - throw new Error('Could not stringify the object: maximum object depth exceeded'); - } - let preamble = ''; - let tables = ''; - let keys = Object.keys(obj); - for (let i = 0; i < keys.length; i++) { - let k = keys[i]; - if (obj[k] !== null && obj[k] !== void 0) { - let type = extendedTypeOf(obj[k]); - if (type === 'symbol' || type === 'function') { - throw new TypeError(`cannot serialize values of type '${type}'`); - } - let key = BARE_KEY.test(k) ? k : formatString(k); - if (type === 'array' && isArrayOfTables(obj[k])) { - tables += stringifyArrayTable(obj[k], prefix ? `${prefix}.${key}` : key, depth - 1, numberAsFloat); - } - else if (type === 'object') { - let tblKey = prefix ? 
`${prefix}.${key}` : key; - tables += `[${tblKey}]\n`; - tables += stringifyTable(obj[k], tblKey, depth - 1, numberAsFloat); - tables += '\n\n'; - } - else { - preamble += key; - preamble += ' = '; - preamble += stringifyValue(obj[k], type, depth, numberAsFloat); - preamble += '\n'; - } - } - } - return `${preamble}\n${tables}`.trim(); -} -function stringify(obj, { maxDepth = 1000, numbersAsFloat = false } = {}) { - if (extendedTypeOf(obj) !== 'object') { - throw new TypeError('stringify can only be called with an object'); - } - return stringifyTable(obj, '', maxDepth, numbersAsFloat); -} - -;// CONCATENATED MODULE: ./node_modules/smol-toml/dist/index.js -/*! - * Copyright (c) Squirrel Chat et al., All rights reserved. - * SPDX-License-Identifier: BSD-3-Clause - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are met: - * - * 1. Redistributions of source code must retain the above copyright notice, this - * list of conditions and the following disclaimer. - * 2. Redistributions in binary form must reproduce the above copyright notice, - * this list of conditions and the following disclaimer in the - * documentation and/or other materials provided with the distribution. - * 3. Neither the name of the copyright holder nor the names of its contributors - * may be used to endorse or promote products derived from this software without - * specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND - * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED - * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE - * DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE - * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL - * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR - * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER - * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, - * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - */ - - - - -/* harmony default export */ const dist = ({ parse: parse, stringify: stringify, TomlDate: TomlDate, TomlError: TomlError }); - - -// EXTERNAL MODULE: ./node_modules/@actions/buildjet-cache/lib/cache.js -var lib_cache = __nccwpck_require__(24318); -// EXTERNAL MODULE: ./node_modules/@actions/warpbuild-cache/lib/cache.js -var warpbuild_cache_lib_cache = __nccwpck_require__(22343); -// EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js -var cache_lib_cache = __nccwpck_require__(5116); -;// CONCATENATED MODULE: ./src/utils.js - - - - - - -function reportError(e) { - const { commandFailed } = e; - if (commandFailed) { - core.error(`Command failed: ${commandFailed.command}`); - core.error(commandFailed.stderr); - } - else { - core.error(`${e.stack}`); - } -} -async function getCmdOutput(cmd, args = [], options = {}) { - let stdout = ""; - let stderr = ""; - try { - await exec.exec(cmd, args, { - silent: true, - listeners: { - stdout(data) { - stdout += data.toString(); - }, - stderr(data) { - stderr += data.toString(); - }, - }, - ...options, - }); - } - catch (e) { - e.commandFailed = { - command: `${cmd} ${args.join(" ")}`, - stderr, - }; - throw e; - } - return stdout; -} -function getCacheProvider() { - const cacheProvider = core.getInput("cache-provider"); - let cache; - switch (cacheProvider) { - case "github": - cache = cache_lib_cache; - break; - case "buildjet": - cache = lib_cache; - break; - case 
"warpbuild": - cache = warpbuild_cache_lib_cache; - break; - default: - throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`); - } - return { - name: cacheProvider, - cache: cache, - }; -} -async function exists(path) { - try { - await external_fs_default().promises.access(path); - return true; - } - catch { - return false; - } -} - -;// CONCATENATED MODULE: ./src/workspace.js - - - -const SAVE_TARGETS = new Set(["lib", "proc-macro"]); -class Workspace { - constructor(root, target) { - this.root = root; - this.target = target; - } - async getPackages(filter, ...extraArgs) { - let packages = []; - try { - core.debug(`collecting metadata for "${this.root}"`); - const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], { - cwd: this.root, - env: { "CARGO_ENCODED_RUSTFLAGS": "" }, - })); - core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`); - for (const pkg of meta.packages.filter(filter)) { - const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); - packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); - } - } - catch (err) { - console.error(err); - } - return packages; - } - async getPackagesOutsideWorkspaceRoot() { - return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root)); - } - async getWorkspaceMembers() { - return await this.getPackages((_) => true, "--no-deps"); - } -} - -;// CONCATENATED MODULE: ./src/config.js - - - - - - - - - - - -const HOME = external_os_default().homedir(); -const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); -const STATE_CONFIG = "RUST_CACHE_CONFIG"; -const HASH_LENGTH = 8; -class CacheConfig { - constructor() { - /** All the paths we want to cache */ - this.cachePaths = []; - /** The primary cache key */ - this.cacheKey = ""; - /** The 
secondary (restore) key that only contains the prefix and environment */ - this.restoreKey = ""; - /** Whether to cache CARGO_HOME/.bin */ - this.cacheBin = true; - /** The workspace configurations */ - this.workspaces = []; - /** The cargo binaries present during main step */ - this.cargoBins = []; - /** The prefix portion of the cache key */ - this.keyPrefix = ""; - /** The rust version considered for the cache key */ - this.keyRust = ""; - /** The environment variables considered for the cache key */ - this.keyEnvs = []; - /** The files considered for the cache key */ - this.keyFiles = []; - } - /** - * Constructs a [`CacheConfig`] with all the paths and keys. - * - * This will read the action `input`s, and read and persist `state` as necessary. - */ - static async new() { - const self = new CacheConfig(); - // Construct key prefix: - // This uses either the `shared-key` input, - // or the `key` input combined with the `job` key. - let key = core.getInput("prefix-key") || "v0-rust"; - const sharedKey = core.getInput("shared-key"); - if (sharedKey) { - key += `-${sharedKey}`; - } - else { - const inputKey = core.getInput("key"); - if (inputKey) { - key += `-${inputKey}`; - } - const job = process.env.GITHUB_JOB; - if (job) { - key += `-${job}`; - } - } - // Add runner OS and CPU architecture to the key to avoid cross-contamination of cache - const runnerOS = external_os_default().type(); - const runnerArch = external_os_default().arch(); - key += `-${runnerOS}-${runnerArch}`; - self.keyPrefix = key; - // Construct environment portion of the key: - // This consists of a hash that considers the rust version - // as well as all the environment variables as given by a default list - // and the `env-vars` input. - // The env vars are sorted, matched by prefix and hashed into the - // resulting environment hash. 
- let hasher = external_crypto_default().createHash("sha1"); - const rustVersion = await getRustVersion(); - let keyRust = `${rustVersion.release} ${rustVersion.host}`; - hasher.update(keyRust); - hasher.update(rustVersion["commit-hash"]); - keyRust += ` (${rustVersion["commit-hash"]})`; - self.keyRust = keyRust; - // these prefixes should cover most of the compiler / rust / cargo keys - const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; - envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean)); - // sort the available env vars so we have a more stable hash - const keyEnvs = []; - const envKeys = Object.keys(process.env); - envKeys.sort((a, b) => a.localeCompare(b)); - for (const key of envKeys) { - const value = process.env[key]; - if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { - hasher.update(`${key}=${value}`); - keyEnvs.push(key); - } - } - self.keyEnvs = keyEnvs; - key += `-${digest(hasher)}`; - self.restoreKey = key; - // Construct the lockfiles portion of the key: - // This considers all the files found via globbing for various manifests - // and lockfiles. - self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true"; - // Constructs the workspace config and paths to restore: - // The workspaces are given using a `$workspace -> $target` syntax. 
- const workspaces = []; - const workspacesInput = core.getInput("workspaces") || "."; - for (const workspace of workspacesInput.trim().split("\n")) { - let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); - root = external_path_default().resolve(root); - target = external_path_default().join(root, target); - workspaces.push(new Workspace(root, target)); - } - self.workspaces = workspaces; - let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml"); - const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed - hasher = external_crypto_default().createHash("sha1"); - for (const workspace of workspaces) { - const root = workspace.root; - keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`))); - const workspaceMembers = await workspace.getWorkspaceMembers(); - const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => external_path_default().join(member.path, "Cargo.toml"))); - for (const cargo_manifest of cargo_manifests) { - try { - const content = await promises_default().readFile(cargo_manifest, { encoding: "utf8" }); - // Use any since TomlPrimitive is not exposed - const parsed = parse(content); - if ("package" in parsed) { - const pack = parsed.package; - if ("version" in pack) { - pack["version"] = "0.0.0"; - } - } - for (const prefix of ["", "build-", "dev-"]) { - const section_name = `${prefix}dependencies`; - if (!(section_name in parsed)) { - continue; - } - const deps = parsed[section_name]; - for (const key of Object.keys(deps)) { - const dep = deps[key]; - try { - if ("path" in dep) { - dep.version = "0.0.0"; - dep.path = ""; - } - } - catch (_e) { - // Not an object, probably a string (version), - // continue. 
- continue; - } - } - } - hasher.update(JSON.stringify(parsed)); - parsedKeyFiles.push(cargo_manifest); - } - catch (e) { - // Fallback to caching them as regular file - core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`); - keyFiles.push(cargo_manifest); - } - } - const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock"); - if (await exists(cargo_lock)) { - try { - const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" }); - const parsed = parse(content); - if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) { - // Fallback to caching them as regular file since this action - // can only handle Cargo.lock format version 3 - core.warning("Unsupported Cargo.lock format, fallback to caching entire file"); - keyFiles.push(cargo_lock); - continue; - } - // Package without `[[package]].source` and `[[package]].checksum` - // are the one with `path = "..."` to crates within the workspace. 
- const packages = parsed.package.filter((p) => "source" in p || "checksum" in p); - hasher.update(JSON.stringify(packages)); - parsedKeyFiles.push(cargo_lock); - } - catch (e) { - // Fallback to caching them as regular file - core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`); - keyFiles.push(cargo_lock); - } - } - } - keyFiles = sort_and_uniq(keyFiles); - for (const file of keyFiles) { - for await (const chunk of external_fs_default().createReadStream(file)) { - hasher.update(chunk); - } - } - let lockHash = digest(hasher); - keyFiles.push(...parsedKeyFiles); - self.keyFiles = sort_and_uniq(keyFiles); - key += `-${lockHash}`; - self.cacheKey = key; - self.cachePaths = [external_path_default().join(CARGO_HOME, "registry"), external_path_default().join(CARGO_HOME, "git")]; - if (self.cacheBin) { - self.cachePaths = [ - external_path_default().join(CARGO_HOME, "bin"), - external_path_default().join(CARGO_HOME, ".crates.toml"), - external_path_default().join(CARGO_HOME, ".crates2.json"), - ...self.cachePaths, - ]; - } - const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true"; - if (cacheTargets === "true") { - self.cachePaths.push(...workspaces.map((ws) => ws.target)); - } - const cacheDirectories = core.getInput("cache-directories"); - for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { - self.cachePaths.push(dir); - } - const bins = await getCargoBins(); - self.cargoBins = Array.from(bins.values()); - return self; - } - /** - * Reads and returns the cache config from the action `state`. - * - * @throws {Error} if the state is not present. - * @returns {CacheConfig} the configuration. 
- * @see {@link CacheConfig#saveState} - * @see {@link CacheConfig#new} - */ - static fromState() { - const source = core.getState(STATE_CONFIG); - if (!source) { - throw new Error("Cache configuration not found in state"); - } - const self = new CacheConfig(); - Object.assign(self, JSON.parse(source)); - self.workspaces = self.workspaces.map((w) => new Workspace(w.root, w.target)); - return self; - } - /** - * Prints the configuration to the action log. - */ - printInfo(cacheProvider) { - core.startGroup("Cache Configuration"); - core.info(`Cache Provider:`); - core.info(` ${cacheProvider.name}`); - core.info(`Workspaces:`); - for (const workspace of this.workspaces) { - core.info(` ${workspace.root}`); - } - core.info(`Cache Paths:`); - for (const path of this.cachePaths) { - core.info(` ${path}`); - } - core.info(`Restore Key:`); - core.info(` ${this.restoreKey}`); - core.info(`Cache Key:`); - core.info(` ${this.cacheKey}`); - core.info(`.. Prefix:`); - core.info(` - ${this.keyPrefix}`); - core.info(`.. Environment considered:`); - core.info(` - Rust Version: ${this.keyRust}`); - for (const env of this.keyEnvs) { - core.info(` - ${env}`); - } - core.info(`.. Lockfiles considered:`); - for (const file of this.keyFiles) { - core.info(` - ${file}`); - } - core.endGroup(); - } - /** - * Saves the configuration to the state store. - * This is used to restore the configuration in the post action. - */ - saveState() { - core.saveState(STATE_CONFIG, this); - } -} -/** - * Checks if the cache is up to date. - * - * @returns `true` if the cache is up to date, `false` otherwise. - */ -function isCacheUpToDate() { - return core.getState(STATE_CONFIG) === ""; -} -/** - * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`. - * - * @param hasher The hasher to digest. - * @returns The hex digest. 
- */ -function digest(hasher) { - return hasher.digest("hex").substring(0, HASH_LENGTH); -} -async function getRustVersion() { - const stdout = await getCmdOutput("rustc", ["-vV"]); - let splits = stdout - .split(/[\n\r]+/) - .filter(Boolean) - .map((s) => s.split(":").map((s) => s.trim())) - .filter((s) => s.length === 2); - return Object.fromEntries(splits); -} -async function globFiles(pattern) { - const globber = await glob.create(pattern, { - followSymbolicLinks: false, - }); - // fs.statSync resolve the symbolic link and returns stat for the - // file it pointed to, so isFile would make sure the resolved - // file is actually a regular file. - return (await globber.glob()).filter((file) => external_fs_default().statSync(file).isFile()); -} -function sort_and_uniq(a) { - return a - .sort((a, b) => a.localeCompare(b)) - .reduce((accumulator, currentValue) => { - const len = accumulator.length; - // If accumulator is empty or its last element != currentValue - // Since array is already sorted, elements with the same value - // are grouped together to be continugous in space. - // - // If currentValue != last element, then it must be unique. - if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) { - accumulator.push(currentValue); - } - return accumulator; - }, []); -} - -;// CONCATENATED MODULE: ./src/cleanup.js - - - - - - -async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { - core.debug(`cleaning target directory "${targetDir}"`); - // remove all *files* from the profile directory - let dir = await external_fs_default().promises.opendir(targetDir); - for await (const dirent of dir) { - if (dirent.isDirectory()) { - let dirName = external_path_default().join(dir.path, dirent.name); - // is it a profile dir, or a nested target dir? 
- let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json"))); - try { - if (isNestedTarget) { - await cleanTargetDir(dirName, packages, checkTimestamp); - } - else { - await cleanProfileTarget(dirName, packages, checkTimestamp); - } - } - catch { } - } - else if (dirent.name !== "CACHEDIR.TAG") { - await rm(dir.path, dirent); - } - } -} -async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { - core.debug(`cleaning profile directory "${profileDir}"`); - // Quite a few testing utility crates store compilation artifacts as nested - // workspaces under `target/tests`. Notably, `target/tests/target` and - // `target/tests/trybuild`. - if (external_path_default().basename(profileDir) === "tests") { - try { - // https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25 - // https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27 - cleanTargetDir(external_path_default().join(profileDir, "target"), packages, checkTimestamp); - } - catch { } - try { - // https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50 - cleanTargetDir(external_path_default().join(profileDir, "trybuild"), packages, checkTimestamp); - } - catch { } - // Delete everything else. 
- await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp); - return; - } - let keepProfile = new Set(["build", ".fingerprint", "deps"]); - await rmExcept(profileDir, keepProfile); - const keepPkg = new Set(packages.map((p) => p.name)); - await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp); - await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); - const keepDeps = new Set(packages.flatMap((p) => { - const names = []; - for (const n of [p.name, ...p.targets]) { - const name = n.replace(/-/g, "_"); - names.push(name, `lib${name}`); - } - return names; - })); - await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp); -} -async function getCargoBins() { - const bins = new Set(); - try { - const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(CARGO_HOME, ".crates2.json"), "utf8")); - for (const pkg of Object.values(installs)) { - for (const bin of pkg.bins) { - bins.add(bin); - } - } - } - catch { } - return bins; -} -/** - * Clean the cargo bin directory, removing the binaries that existed - * when the action started, as they were not created by the build. - * - * @param oldBins The binaries that existed when the action started. 
- */ -async function cleanBin(oldBins) { - const bins = await getCargoBins(); - for (const bin of oldBins) { - bins.delete(bin); - } - const dir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "bin")); - for await (const dirent of dir) { - if (dirent.isFile() && !bins.has(dirent.name)) { - await rm(dir.path, dirent); - } - } -} -async function cleanRegistry(packages, crates = true) { - // remove `.cargo/credentials.toml` - try { - const credentials = external_path_default().join(CARGO_HOME, ".cargo", "credentials.toml"); - core.debug(`deleting "${credentials}"`); - await external_fs_default().promises.unlink(credentials); - } - catch { } - // `.cargo/registry/index` - let pkgSet = new Set(packages.map((p) => p.name)); - const indexDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "index")); - for await (const dirent of indexDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` - // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` - const dirPath = external_path_default().join(indexDir.path, dirent.name); - // for a git registry, we can remove `.cache`, as cargo will recreate it from git - if (await exists(external_path_default().join(dirPath, ".git"))) { - await rmRF(external_path_default().join(dirPath, ".cache")); - } - else { - await cleanRegistryIndexCache(dirPath, pkgSet); - } - } - } - if (!crates) { - core.debug("skipping registry cache and src cleanup"); - return; - } - // `.cargo/registry/src` - // Cargo usually re-creates these from the `.crate` cache below, - // but for some reason that does not work for `-sys` crates that check timestamps - // to decide if rebuilds are necessary. 
- pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`)); - const srcDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "src")); - for await (const dirent of srcDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/src/github.com-1ecc6299db9ec823` - // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772` - const dir = await external_fs_default().promises.opendir(external_path_default().join(srcDir.path, dirent.name)); - for await (const dirent of dir) { - if (dirent.isDirectory() && !pkgSet.has(dirent.name)) { - await rmRF(external_path_default().join(dir.path, dirent.name)); - } - } - } - } - // `.cargo/registry/cache` - pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); - const cacheDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "cache")); - for await (const dirent of cacheDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` - // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` - const dir = await external_fs_default().promises.opendir(external_path_default().join(cacheDir.path, dirent.name)); - for await (const dirent of dir) { - // here we check that the downloaded `.crate` matches one from our dependencies - if (dirent.isFile() && !pkgSet.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - } -} -/// Recursively walks and cleans the index `.cache` -async function cleanRegistryIndexCache(dirName, keepPkg) { - let dirIsEmpty = true; - const cacheDir = await external_fs_default().promises.opendir(dirName); - for await (const dirent of cacheDir) { - if (dirent.isDirectory()) { - if (await cleanRegistryIndexCache(external_path_default().join(dirName, dirent.name), keepPkg)) { - await rm(dirName, dirent); - } - else { - dirIsEmpty && (dirIsEmpty = false); - } - } - else { - if (keepPkg.has(dirent.name)) { - 
dirIsEmpty && (dirIsEmpty = false); - } - else { - await rm(dirName, dirent); - } - } - } - return dirIsEmpty; -} -async function cleanGit(packages) { - const coPath = external_path_default().join(CARGO_HOME, "git", "checkouts"); - const dbPath = external_path_default().join(CARGO_HOME, "git", "db"); - const repos = new Map(); - for (const p of packages) { - if (!p.path.startsWith(coPath)) { - continue; - } - const [repo, ref] = p.path.slice(coPath.length + 1).split((external_path_default()).sep); - const refs = repos.get(repo); - if (refs) { - refs.add(ref); - } - else { - repos.set(repo, new Set([ref])); - } - } - // we have to keep both the clone, and the checkout, removing either will - // trigger a rebuild - // clean the db - try { - let dir = await external_fs_default().promises.opendir(dbPath); - for await (const dirent of dir) { - if (!repos.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - catch { } - // clean the checkouts - try { - let dir = await external_fs_default().promises.opendir(coPath); - for await (const dirent of dir) { - const refs = repos.get(dirent.name); - if (!refs) { - await rm(dir.path, dirent); - continue; - } - if (!dirent.isDirectory()) { - continue; - } - const refsDir = await external_fs_default().promises.opendir(external_path_default().join(dir.path, dirent.name)); - for await (const dirent of refsDir) { - if (!refs.has(dirent.name)) { - await rm(refsDir.path, dirent); - } - } - } - } - catch { } -} -const ONE_WEEK = 7 * 24 * 3600 * 1000; -/** - * Removes all files or directories in `dirName` matching some criteria. - * - * When the `checkTimestamp` flag is set, this will also remove anything older - * than one week. - * - * Otherwise, it will remove everything that does not match any string in the - * `keepPrefix` set. - * The matching strips and trailing `-$hash` suffix. 
- */ -async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { - const dir = await external_fs_default().promises.opendir(dirName); - for await (const dirent of dir) { - if (checkTimestamp) { - const fileName = external_path_default().join(dir.path, dirent.name); - const { mtime } = await external_fs_default().promises.stat(fileName); - const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; - if (isOutdated) { - await rm(dir.path, dirent); - } - return; - } - let name = dirent.name; - // strip the trailing hash - const idx = name.lastIndexOf("-"); - if (idx !== -1) { - name = name.slice(0, idx); - } - if (!keepPrefix.has(name)) { - await rm(dir.path, dirent); - } - } -} -async function rm(parent, dirent) { - try { - const fileName = external_path_default().join(parent, dirent.name); - core.debug(`deleting "${fileName}"`); - if (dirent.isFile()) { - await external_fs_default().promises.unlink(fileName); - } - else if (dirent.isDirectory()) { - await io.rmRF(fileName); - } - } - catch { } -} -async function rmRF(dirName) { - core.debug(`deleting "${dirName}"`); - await io.rmRF(dirName); -} - -;// CONCATENATED MODULE: ./src/save.ts - - - - - -process.on("uncaughtException", (e) => { - core.error(e.message); - if (e.stack) { - core.error(e.stack); - } -}); -async function run() { - const cacheProvider = getCacheProvider(); - const save = core.getInput("save-if").toLowerCase() || "true"; - if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) { - return; - } - try { - if (isCacheUpToDate()) { - core.info(`Cache up-to-date.`); - return; - } - const config = CacheConfig.fromState(); - config.printInfo(cacheProvider); - core.info(""); - // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands - if (process.env["RUNNER_OS"] == "macOS") { - await macOsWorkaround(); - } - const workspaceCrates = core.getInput("cache-workspace-crates").toLowerCase() || "false"; - const allPackages = []; - for (const workspace of 
config.workspaces) { - const packages = await workspace.getPackagesOutsideWorkspaceRoot(); - if (workspaceCrates === "true") { - const wsMembers = await workspace.getWorkspaceMembers(); - packages.push(...wsMembers); - } - allPackages.push(...packages); - try { - core.info(`... Cleaning ${workspace.target} ...`); - await cleanTargetDir(workspace.target, packages); - } - catch (e) { - core.debug(`${e.stack}`); - } - } - try { - const crates = core.getInput("cache-all-crates").toLowerCase() || "false"; - core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`); - await cleanRegistry(allPackages, crates !== "true"); - } - catch (e) { - core.debug(`${e.stack}`); - } - if (config.cacheBin) { - try { - core.info(`... Cleaning cargo/bin ...`); - await cleanBin(config.cargoBins); - } - catch (e) { - core.debug(`${e.stack}`); - } - } - try { - core.info(`... Cleaning cargo git cache ...`); - await cleanGit(allPackages); - } - catch (e) { - core.debug(`${e.stack}`); - } - core.info(`... Saving cache ...`); - // Pass a copy of cachePaths to avoid mutating the original array as reported by: - // https://github.com/actions/toolkit/pull/1378 - // TODO: remove this once the underlying bug is fixed. 
- await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey); - } - catch (e) { - reportError(e); - } - process.exit(); -} -run(); -async function macOsWorkaround() { - try { - // Workaround for https://github.com/actions/cache/issues/403 - // Also see https://github.com/rust-lang/cargo/issues/8603 - await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true }); - } - catch { } -} - -})(); - -module.exports = __webpack_exports__; +/******/ +/******/ // startup +/******/ // Load entry module and return exports +/******/ // This entry module is referenced by other modules so it can't be inlined +/******/ var __webpack_exports__ = __nccwpck_require__(90198); +/******/ module.exports = __webpack_exports__; +/******/ /******/ })() ; \ No newline at end of file diff --git a/src/cleanup.js b/src/cleanup.js new file mode 100644 index 0000000..812a8e3 --- /dev/null +++ b/src/cleanup.js @@ -0,0 +1,330 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.cleanTargetDir = cleanTargetDir; +exports.getCargoBins = getCargoBins; +exports.cleanBin = cleanBin; +exports.cleanRegistry = cleanRegistry; +exports.cleanGit = cleanGit; +const core = __importStar(require("@actions/core")); +const io = __importStar(require("@actions/io")); +const fs_1 = __importDefault(require("fs")); +const path_1 = __importDefault(require("path")); +const config_1 = require("./config"); +const utils_1 = require("./utils"); +async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { + core.debug(`cleaning target directory "${targetDir}"`); + // remove all *files* from the profile directory + let dir = await fs_1.default.promises.opendir(targetDir); + for await (const dirent of dir) { + if (dirent.isDirectory()) { + let dirName = path_1.default.join(dir.path, dirent.name); + // is it a profile dir, or a nested target dir? 
+ let isNestedTarget = (await (0, utils_1.exists)(path_1.default.join(dirName, "CACHEDIR.TAG"))) || (await (0, utils_1.exists)(path_1.default.join(dirName, ".rustc_info.json"))); + try { + if (isNestedTarget) { + await cleanTargetDir(dirName, packages, checkTimestamp); + } + else { + await cleanProfileTarget(dirName, packages, checkTimestamp); + } + } + catch { } + } + else if (dirent.name !== "CACHEDIR.TAG") { + await rm(dir.path, dirent); + } + } +} +async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { + core.debug(`cleaning profile directory "${profileDir}"`); + // Quite a few testing utility crates store compilation artifacts as nested + // workspaces under `target/tests`. Notably, `target/tests/target` and + // `target/tests/trybuild`. + if (path_1.default.basename(profileDir) === "tests") { + try { + // https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25 + // https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27 + cleanTargetDir(path_1.default.join(profileDir, "target"), packages, checkTimestamp); + } + catch { } + try { + // https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50 + cleanTargetDir(path_1.default.join(profileDir, "trybuild"), packages, checkTimestamp); + } + catch { } + // Delete everything else. 
+ await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp); + return; + } + let keepProfile = new Set(["build", ".fingerprint", "deps"]); + await rmExcept(profileDir, keepProfile); + const keepPkg = new Set(packages.map((p) => p.name)); + await rmExcept(path_1.default.join(profileDir, "build"), keepPkg, checkTimestamp); + await rmExcept(path_1.default.join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); + const keepDeps = new Set(packages.flatMap((p) => { + const names = []; + for (const n of [p.name, ...p.targets]) { + const name = n.replace(/-/g, "_"); + names.push(name, `lib${name}`); + } + return names; + })); + await rmExcept(path_1.default.join(profileDir, "deps"), keepDeps, checkTimestamp); +} +async function getCargoBins() { + const bins = new Set(); + try { + const { installs } = JSON.parse(await fs_1.default.promises.readFile(path_1.default.join(config_1.CARGO_HOME, ".crates2.json"), "utf8")); + for (const pkg of Object.values(installs)) { + for (const bin of pkg.bins) { + bins.add(bin); + } + } + } + catch { } + return bins; +} +/** + * Clean the cargo bin directory, removing the binaries that existed + * when the action started, as they were not created by the build. + * + * @param oldBins The binaries that existed when the action started. 
+ */ +async function cleanBin(oldBins) { + const bins = await getCargoBins(); + for (const bin of oldBins) { + bins.delete(bin); + } + const dir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "bin")); + for await (const dirent of dir) { + if (dirent.isFile() && !bins.has(dirent.name)) { + await rm(dir.path, dirent); + } + } +} +async function cleanRegistry(packages, crates = true) { + // remove `.cargo/credentials.toml` + try { + const credentials = path_1.default.join(config_1.CARGO_HOME, ".cargo", "credentials.toml"); + core.debug(`deleting "${credentials}"`); + await fs_1.default.promises.unlink(credentials); + } + catch { } + // `.cargo/registry/index` + let pkgSet = new Set(packages.map((p) => p.name)); + const indexDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "index")); + for await (const dirent of indexDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` + // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` + const dirPath = path_1.default.join(indexDir.path, dirent.name); + // for a git registry, we can remove `.cache`, as cargo will recreate it from git + if (await (0, utils_1.exists)(path_1.default.join(dirPath, ".git"))) { + await rmRF(path_1.default.join(dirPath, ".cache")); + } + else { + await cleanRegistryIndexCache(dirPath, pkgSet); + } + } + } + if (!crates) { + core.debug("skipping registry cache and src cleanup"); + return; + } + // `.cargo/registry/src` + // Cargo usually re-creates these from the `.crate` cache below, + // but for some reason that does not work for `-sys` crates that check timestamps + // to decide if rebuilds are necessary. 
+ pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`)); + const srcDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "src")); + for await (const dirent of srcDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/src/github.com-1ecc6299db9ec823` + // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772` + const dir = await fs_1.default.promises.opendir(path_1.default.join(srcDir.path, dirent.name)); + for await (const dirent of dir) { + if (dirent.isDirectory() && !pkgSet.has(dirent.name)) { + await rmRF(path_1.default.join(dir.path, dirent.name)); + } + } + } + } + // `.cargo/registry/cache` + pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); + const cacheDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "cache")); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` + // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` + const dir = await fs_1.default.promises.opendir(path_1.default.join(cacheDir.path, dirent.name)); + for await (const dirent of dir) { + // here we check that the downloaded `.crate` matches one from our dependencies + if (dirent.isFile() && !pkgSet.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + } +} +/// Recursively walks and cleans the index `.cache` +async function cleanRegistryIndexCache(dirName, keepPkg) { + let dirIsEmpty = true; + const cacheDir = await fs_1.default.promises.opendir(dirName); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + if (await cleanRegistryIndexCache(path_1.default.join(dirName, dirent.name), keepPkg)) { + await rm(dirName, dirent); + } + else { + dirIsEmpty && (dirIsEmpty = false); + } + } + else { + if (keepPkg.has(dirent.name)) { + dirIsEmpty && (dirIsEmpty = false); + } + else { + await rm(dirName, dirent); + } + } + 
} + return dirIsEmpty; +} +async function cleanGit(packages) { + const coPath = path_1.default.join(config_1.CARGO_HOME, "git", "checkouts"); + const dbPath = path_1.default.join(config_1.CARGO_HOME, "git", "db"); + const repos = new Map(); + for (const p of packages) { + if (!p.path.startsWith(coPath)) { + continue; + } + const [repo, ref] = p.path.slice(coPath.length + 1).split(path_1.default.sep); + const refs = repos.get(repo); + if (refs) { + refs.add(ref); + } + else { + repos.set(repo, new Set([ref])); + } + } + // we have to keep both the clone, and the checkout, removing either will + // trigger a rebuild + // clean the db + try { + let dir = await fs_1.default.promises.opendir(dbPath); + for await (const dirent of dir) { + if (!repos.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + catch { } + // clean the checkouts + try { + let dir = await fs_1.default.promises.opendir(coPath); + for await (const dirent of dir) { + const refs = repos.get(dirent.name); + if (!refs) { + await rm(dir.path, dirent); + continue; + } + if (!dirent.isDirectory()) { + continue; + } + const refsDir = await fs_1.default.promises.opendir(path_1.default.join(dir.path, dirent.name)); + for await (const dirent of refsDir) { + if (!refs.has(dirent.name)) { + await rm(refsDir.path, dirent); + } + } + } + } + catch { } +} +const ONE_WEEK = 7 * 24 * 3600 * 1000; +/** + * Removes all files or directories in `dirName` matching some criteria. + * + * When the `checkTimestamp` flag is set, this will also remove anything older + * than one week. + * + * Otherwise, it will remove everything that does not match any string in the + * `keepPrefix` set. + * The matching strips any trailing `-$hash` suffix. 
+ */ +async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { + const dir = await fs_1.default.promises.opendir(dirName); + for await (const dirent of dir) { + if (checkTimestamp) { + const fileName = path_1.default.join(dir.path, dirent.name); + const { mtime } = await fs_1.default.promises.stat(fileName); + const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; + if (isOutdated) { + await rm(dir.path, dirent); + } + return; + } + let name = dirent.name; + // strip the trailing hash + const idx = name.lastIndexOf("-"); + if (idx !== -1) { + name = name.slice(0, idx); + } + if (!keepPrefix.has(name)) { + await rm(dir.path, dirent); + } + } +} +async function rm(parent, dirent) { + try { + const fileName = path_1.default.join(parent, dirent.name); + core.debug(`deleting "${fileName}"`); + if (dirent.isFile()) { + await fs_1.default.promises.unlink(fileName); + } + else if (dirent.isDirectory()) { + await io.rmRF(fileName); + } + } + catch { } +} +async function rmRF(dirName) { + core.debug(`deleting "${dirName}"`); + await io.rmRF(dirName); +} diff --git a/src/config.js b/src/config.js new file mode 100644 index 0000000..0996e17 --- /dev/null +++ b/src/config.js @@ -0,0 +1,372 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CacheConfig = exports.CARGO_HOME = void 0; +exports.isCacheUpToDate = isCacheUpToDate; +const core = __importStar(require("@actions/core")); +const glob = __importStar(require("@actions/glob")); +const crypto_1 = __importDefault(require("crypto")); +const fs_1 = __importDefault(require("fs")); +const promises_1 = __importDefault(require("fs/promises")); +const os_1 = __importDefault(require("os")); +const path_1 = __importDefault(require("path")); +const toml = __importStar(require("smol-toml")); +const cleanup_1 = require("./cleanup"); +const utils_1 = require("./utils"); +const workspace_1 = require("./workspace"); +const HOME = os_1.default.homedir(); +exports.CARGO_HOME = process.env.CARGO_HOME || path_1.default.join(HOME, ".cargo"); +const STATE_CONFIG = "RUST_CACHE_CONFIG"; +const HASH_LENGTH = 8; +class CacheConfig { + constructor() { + /** All the paths we want to cache */ + this.cachePaths = []; + /** The primary cache key */ + this.cacheKey = ""; + /** The secondary (restore) key that only contains the prefix and environment */ + 
this.restoreKey = ""; + /** Whether to cache CARGO_HOME/.bin */ + this.cacheBin = true; + /** The workspace configurations */ + this.workspaces = []; + /** The cargo binaries present during main step */ + this.cargoBins = []; + /** The prefix portion of the cache key */ + this.keyPrefix = ""; + /** The rust version considered for the cache key */ + this.keyRust = ""; + /** The environment variables considered for the cache key */ + this.keyEnvs = []; + /** The files considered for the cache key */ + this.keyFiles = []; + } + /** + * Constructs a [`CacheConfig`] with all the paths and keys. + * + * This will read the action `input`s, and read and persist `state` as necessary. + */ + static async new() { + const self = new CacheConfig(); + // Construct key prefix: + // This uses either the `shared-key` input, + // or the `key` input combined with the `job` key. + let key = core.getInput("prefix-key") || "v0-rust"; + const sharedKey = core.getInput("shared-key"); + if (sharedKey) { + key += `-${sharedKey}`; + } + else { + const inputKey = core.getInput("key"); + if (inputKey) { + key += `-${inputKey}`; + } + const job = process.env.GITHUB_JOB; + if ((job) && core.getInput("use-job-key").toLowerCase() == "true") { + key += `-${job}`; + } + } + // Add runner OS and CPU architecture to the key to avoid cross-contamination of cache + const runnerOS = os_1.default.type(); + const runnerArch = os_1.default.arch(); + key += `-${runnerOS}-${runnerArch}`; + self.keyPrefix = key; + // Construct environment portion of the key: + // This consists of a hash that considers the rust version + // as well as all the environment variables as given by a default list + // and the `env-vars` input. + // The env vars are sorted, matched by prefix and hashed into the + // resulting environment hash. 
+ let hasher = crypto_1.default.createHash("sha1"); + const rustVersion = await getRustVersion(); + let keyRust = `${rustVersion.release} ${rustVersion.host}`; + hasher.update(keyRust); + hasher.update(rustVersion["commit-hash"]); + keyRust += ` (${rustVersion["commit-hash"]})`; + self.keyRust = keyRust; + // these prefixes should cover most of the compiler / rust / cargo keys + const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; + envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean)); + // sort the available env vars so we have a more stable hash + const keyEnvs = []; + const envKeys = Object.keys(process.env); + envKeys.sort((a, b) => a.localeCompare(b)); + for (const key of envKeys) { + const value = process.env[key]; + if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { + hasher.update(`${key}=${value}`); + keyEnvs.push(key); + } + } + self.keyEnvs = keyEnvs; + // Append the environment hash (rustc version + matched env vars) only when `add-job-hash` is enabled; it is not a hash of the job's contents + if (core.getInput("add-job-hash").toLowerCase() == "true") { + key += `-${digest(hasher)}`; + } + self.restoreKey = key; + // Construct the lockfiles portion of the key: + // This considers all the files found via globbing for various manifests + // and lockfiles. + self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true"; + // Constructs the workspace config and paths to restore: + // The workspaces are given using a `$workspace -> $target` syntax. 
+ const workspaces = []; + const workspacesInput = core.getInput("workspaces") || "."; + for (const workspace of workspacesInput.trim().split("\n")) { + let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); + root = path_1.default.resolve(root); + target = path_1.default.join(root, target); + workspaces.push(new workspace_1.Workspace(root, target)); + } + self.workspaces = workspaces; + let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml"); + const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed + hasher = crypto_1.default.createHash("sha1"); + for (const workspace of workspaces) { + const root = workspace.root; + keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`))); + const workspaceMembers = await workspace.getWorkspaceMembers(); + const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => path_1.default.join(member.path, "Cargo.toml"))); + for (const cargo_manifest of cargo_manifests) { + try { + const content = await promises_1.default.readFile(cargo_manifest, { encoding: "utf8" }); + // Use any since TomlPrimitive is not exposed + const parsed = toml.parse(content); + if ("package" in parsed) { + const pack = parsed.package; + if ("version" in pack) { + pack["version"] = "0.0.0"; + } + } + for (const prefix of ["", "build-", "dev-"]) { + const section_name = `${prefix}dependencies`; + if (!(section_name in parsed)) { + continue; + } + const deps = parsed[section_name]; + for (const key of Object.keys(deps)) { + const dep = deps[key]; + try { + if ("path" in dep) { + dep.version = "0.0.0"; + dep.path = ""; + } + } + catch (_e) { + // Not an object, probably a string (version), + // continue. 
+ continue; + } + } + } + hasher.update(JSON.stringify(parsed)); + parsedKeyFiles.push(cargo_manifest); + } + catch (e) { + // Fallback to caching them as regular file + core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`); + keyFiles.push(cargo_manifest); + } + } + const cargo_lock = path_1.default.join(workspace.root, "Cargo.lock"); + if (await (0, utils_1.exists)(cargo_lock)) { + try { + const content = await promises_1.default.readFile(cargo_lock, { encoding: "utf8" }); + const parsed = toml.parse(content); + if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) { + // Fallback to caching them as regular file since this action + // can only handle Cargo.lock format version 3 + core.warning("Unsupported Cargo.lock format, fallback to caching entire file"); + keyFiles.push(cargo_lock); + continue; + } + // Package without `[[package]].source` and `[[package]].checksum` + // are the one with `path = "..."` to crates within the workspace. 
+ const packages = parsed.package.filter((p) => "source" in p || "checksum" in p); + hasher.update(JSON.stringify(packages)); + parsedKeyFiles.push(cargo_lock); + } + catch (e) { + // Fallback to caching them as regular file + core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`); + keyFiles.push(cargo_lock); + } + } + } + keyFiles = sort_and_uniq(keyFiles); + for (const file of keyFiles) { + for await (const chunk of fs_1.default.createReadStream(file)) { + hasher.update(chunk); + } + } + let lockHash = digest(hasher); + keyFiles.push(...parsedKeyFiles); + self.keyFiles = sort_and_uniq(keyFiles); + key += `-${lockHash}`; + self.cacheKey = key; + self.cachePaths = [path_1.default.join(exports.CARGO_HOME, "registry"), path_1.default.join(exports.CARGO_HOME, "git")]; + if (self.cacheBin) { + self.cachePaths = [ + path_1.default.join(exports.CARGO_HOME, "bin"), + path_1.default.join(exports.CARGO_HOME, ".crates.toml"), + path_1.default.join(exports.CARGO_HOME, ".crates2.json"), + ...self.cachePaths, + ]; + } + const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true"; + if (cacheTargets === "true") { + self.cachePaths.push(...workspaces.map((ws) => ws.target)); + } + const cacheDirectories = core.getInput("cache-directories"); + for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { + self.cachePaths.push(dir); + } + const bins = await (0, cleanup_1.getCargoBins)(); + self.cargoBins = Array.from(bins.values()); + return self; + } + /** + * Reads and returns the cache config from the action `state`. + * + * @throws {Error} if the state is not present. + * @returns {CacheConfig} the configuration. 
+ * @see {@link CacheConfig#saveState} + * @see {@link CacheConfig#new} + */ + static fromState() { + const source = core.getState(STATE_CONFIG); + if (!source) { + throw new Error("Cache configuration not found in state"); + } + const self = new CacheConfig(); + Object.assign(self, JSON.parse(source)); + self.workspaces = self.workspaces.map((w) => new workspace_1.Workspace(w.root, w.target)); + return self; + } + /** + * Prints the configuration to the action log. + */ + printInfo(cacheProvider) { + core.startGroup("Cache Configuration"); + core.info(`Cache Provider:`); + core.info(` ${cacheProvider.name}`); + core.info(`Workspaces:`); + for (const workspace of this.workspaces) { + core.info(` ${workspace.root}`); + } + core.info(`Cache Paths:`); + for (const path of this.cachePaths) { + core.info(` ${path}`); + } + core.info(`Restore Key:`); + core.info(` ${this.restoreKey}`); + core.info(`Cache Key:`); + core.info(` ${this.cacheKey}`); + core.info(`.. Prefix:`); + core.info(` - ${this.keyPrefix}`); + core.info(`.. Environment considered:`); + core.info(` - Rust Version: ${this.keyRust}`); + for (const env of this.keyEnvs) { + core.info(` - ${env}`); + } + core.info(`.. Lockfiles considered:`); + for (const file of this.keyFiles) { + core.info(` - ${file}`); + } + core.endGroup(); + } + /** + * Saves the configuration to the state store. + * This is used to restore the configuration in the post action. + */ + saveState() { + core.saveState(STATE_CONFIG, this); + } +} +exports.CacheConfig = CacheConfig; +/** + * Checks if the cache is up to date. + * + * @returns `true` if the cache is up to date, `false` otherwise. + */ +function isCacheUpToDate() { + return core.getState(STATE_CONFIG) === ""; +} +/** + * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`. + * + * @param hasher The hasher to digest. + * @returns The hex digest. 
+ */ +function digest(hasher) { + return hasher.digest("hex").substring(0, HASH_LENGTH); +} +async function getRustVersion() { + const stdout = await (0, utils_1.getCmdOutput)("rustc", ["-vV"]); + let splits = stdout + .split(/[\n\r]+/) + .filter(Boolean) + .map((s) => s.split(":").map((s) => s.trim())) + .filter((s) => s.length === 2); + return Object.fromEntries(splits); +} +async function globFiles(pattern) { + const globber = await glob.create(pattern, { + followSymbolicLinks: false, + }); + // fs.statSync resolves the symbolic link and returns stat for the + // file it pointed to, so isFile would make sure the resolved + // file is actually a regular file. + return (await globber.glob()).filter((file) => fs_1.default.statSync(file).isFile()); +} +function sort_and_uniq(a) { + return a + .sort((a, b) => a.localeCompare(b)) + .reduce((accumulator, currentValue) => { + const len = accumulator.length; + // If accumulator is empty or its last element != currentValue + // Since array is already sorted, elements with the same value + // are grouped together to be contiguous in space. + // + // If currentValue != last element, then it must be unique. + if (len == 0 || accumulator[len - 1].localeCompare(currentValue) != 0) { + accumulator.push(currentValue); + } + return accumulator; + }, []); +} diff --git a/src/config.ts b/src/config.ts index d3ed31b..af5acd0 100644 --- a/src/config.ts +++ b/src/config.ts @@ -116,7 +116,10 @@ export class CacheConfig { self.keyEnvs = keyEnvs; - key += `-${digest(hasher)}`; + // Append the environment hash (rustc version + matched env vars) only when `add-job-hash` is enabled; it is not a hash of the job's contents + if (core.getInput("add-job-hash").toLowerCase() == "true") { + key += `-${digest(hasher)}`; + } self.restoreKey = key; diff --git a/src/restore.js b/src/restore.js new file mode 100644 index 0000000..d0153c9 --- /dev/null +++ b/src/restore.js @@ -0,0 +1,102 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const cleanup_1 = require("./cleanup"); +const config_1 = require("./config"); +const utils_1 = require("./utils"); +process.on("uncaughtException", (e) => { + core.error(e.message); + if (e.stack) { + core.error(e.stack); + } +}); +async function run() { + const cacheProvider = (0, utils_1.getCacheProvider)(); + if (!cacheProvider.cache.isFeatureAvailable()) { + setCacheHitOutput(false); + return; + } + try { + var cacheOnFailure = core.getInput("cache-on-failure").toLowerCase(); + if (cacheOnFailure !== "true") { + cacheOnFailure = "false"; + } + var lookupOnly = core.getInput("lookup-only").toLowerCase() === "true"; + core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure); + 
core.exportVariable("CARGO_INCREMENTAL", 0); + const config = await config_1.CacheConfig.new(); + config.printInfo(cacheProvider); + core.info(""); + core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`); + const key = config.cacheKey; + // Pass a copy of cachePaths to avoid mutating the original array as reported by: + // https://github.com/actions/toolkit/pull/1378 + // TODO: remove this once the underlying bug is fixed. + const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], { + lookupOnly, + }); + if (restoreKey) { + const match = restoreKey === key; + core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`); + if (!match) { + // pre-clean the target directory on cache mismatch + for (const workspace of config.workspaces) { + try { + await (0, cleanup_1.cleanTargetDir)(workspace.target, [], true); + } + catch { } + } + // We restored the cache but it is not a full match. + config.saveState(); + } + setCacheHitOutput(match); + } + else { + core.info("No cache found."); + config.saveState(); + setCacheHitOutput(false); + } + } + catch (e) { + setCacheHitOutput(false); + (0, utils_1.reportError)(e); + } + process.exit(); +} +function setCacheHitOutput(cacheHit) { + core.setOutput("cache-hit", cacheHit.toString()); +} +run(); diff --git a/src/save.js b/src/save.js new file mode 100644 index 0000000..440f8ef --- /dev/null +++ b/src/save.js @@ -0,0 +1,125 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(require("@actions/core")); +const exec = __importStar(require("@actions/exec")); +const cleanup_1 = require("./cleanup"); +const config_1 = require("./config"); +const utils_1 = require("./utils"); +process.on("uncaughtException", (e) => { + core.error(e.message); + if (e.stack) { + core.error(e.stack); + } +}); +async function run() { + const cacheProvider = (0, utils_1.getCacheProvider)(); + const save = core.getInput("save-if").toLowerCase() || "true"; + if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) { + return; + } + try { + if ((0, config_1.isCacheUpToDate)()) { + core.info(`Cache up-to-date.`); + return; + } + const config = config_1.CacheConfig.fromState(); + config.printInfo(cacheProvider); + core.info(""); + // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands + if (process.env["RUNNER_OS"] == 
"macOS") { + await macOsWorkaround(); + } + const workspaceCrates = core.getInput("cache-workspace-crates").toLowerCase() || "false"; + const allPackages = []; + for (const workspace of config.workspaces) { + const packages = await workspace.getPackagesOutsideWorkspaceRoot(); + if (workspaceCrates === "true") { + const wsMembers = await workspace.getWorkspaceMembers(); + packages.push(...wsMembers); + } + allPackages.push(...packages); + try { + core.info(`... Cleaning ${workspace.target} ...`); + await (0, cleanup_1.cleanTargetDir)(workspace.target, packages); + } + catch (e) { + core.debug(`${e.stack}`); + } + } + try { + const crates = core.getInput("cache-all-crates").toLowerCase() || "false"; + core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`); + await (0, cleanup_1.cleanRegistry)(allPackages, crates !== "true"); + } + catch (e) { + core.debug(`${e.stack}`); + } + if (config.cacheBin) { + try { + core.info(`... Cleaning cargo/bin ...`); + await (0, cleanup_1.cleanBin)(config.cargoBins); + } + catch (e) { + core.debug(`${e.stack}`); + } + } + try { + core.info(`... Cleaning cargo git cache ...`); + await (0, cleanup_1.cleanGit)(allPackages); + } + catch (e) { + core.debug(`${e.stack}`); + } + core.info(`... Saving cache ...`); + // Pass a copy of cachePaths to avoid mutating the original array as reported by: + // https://github.com/actions/toolkit/pull/1378 + // TODO: remove this once the underlying bug is fixed. 
+ await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey); + } + catch (e) { + (0, utils_1.reportError)(e); + } + process.exit(); +} +run(); +async function macOsWorkaround() { + try { + // Workaround for https://github.com/actions/cache/issues/403 + // Also see https://github.com/rust-lang/cargo/issues/8603 + await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true }); + } + catch { } +} diff --git a/src/utils.js b/src/utils.js new file mode 100644 index 0000000..78ef5b6 --- /dev/null +++ b/src/utils.js @@ -0,0 +1,114 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.reportError = reportError; +exports.getCmdOutput = getCmdOutput; +exports.getCacheProvider = getCacheProvider; +exports.exists = exists; +const core = __importStar(require("@actions/core")); +const exec = __importStar(require("@actions/exec")); +const buildjetCache = __importStar(require("@actions/buildjet-cache")); +const warpbuildCache = __importStar(require("@actions/warpbuild-cache")); +const ghCache = __importStar(require("@actions/cache")); +const fs_1 = __importDefault(require("fs")); +function reportError(e) { + const { commandFailed } = e; + if (commandFailed) { + core.error(`Command failed: ${commandFailed.command}`); + core.error(commandFailed.stderr); + } + else { + core.error(`${e.stack}`); + } +} +async function getCmdOutput(cmd, args = [], options = {}) { + let stdout = ""; + let stderr = ""; + try { + await exec.exec(cmd, args, { + silent: true, + listeners: { + stdout(data) { + stdout += data.toString(); + }, + stderr(data) { + stderr += data.toString(); + }, + }, + ...options, + }); + } + catch (e) { + e.commandFailed = { + command: `${cmd} ${args.join(" ")}`, + stderr, + }; + throw e; + } + return stdout; +} +function getCacheProvider() { + const cacheProvider = core.getInput("cache-provider"); + let cache; + switch (cacheProvider) { + case "github": + cache = ghCache; + break; + case "buildjet": + cache = buildjetCache; + break; + case "warpbuild": + cache = warpbuildCache; + break; + default: + throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`); + } + return { + name: cacheProvider, + cache: cache, + }; +} +async function exists(path) { + try { + await fs_1.default.promises.access(path); + return true; + } + catch { + return false; + } +} diff --git a/src/workspace.js b/src/workspace.js new file mode 100644 index 0000000..70f984f --- /dev/null +++ b/src/workspace.js @@ -0,0 +1,75 @@ +"use strict"; +var __createBinding 
= (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || (function () { + var ownKeys = function(o) { + ownKeys = Object.getOwnPropertyNames || function (o) { + var ar = []; + for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k; + return ar; + }; + return ownKeys(o); + }; + return function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]); + __setModuleDefault(result, mod); + return result; + }; +})(); +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Workspace = void 0; +const core = __importStar(require("@actions/core")); +const path_1 = __importDefault(require("path")); +const utils_1 = require("./utils"); +const SAVE_TARGETS = new Set(["lib", "proc-macro"]); +class Workspace { + constructor(root, target) { + this.root = root; + this.target = target; + } + async getPackages(filter, ...extraArgs) { + let packages = []; + try { + core.debug(`collecting metadata for "${this.root}"`); + const meta = JSON.parse(await (0, utils_1.getCmdOutput)("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], { + cwd: this.root, + env: { "CARGO_ENCODED_RUSTFLAGS": "" }, + })); + core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`); + for (const pkg of meta.packages.filter(filter)) { + const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); + packages.push({ name: pkg.name, version: pkg.version, targets, path: path_1.default.dirname(pkg.manifest_path) }); + } + } + catch (err) { + console.error(err); + } + return packages; + } + async getPackagesOutsideWorkspaceRoot() { + return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root)); + } + async getWorkspaceMembers() { + return await this.getPackages((_) => true, "--no-deps"); + } +} +exports.Workspace = Workspace; diff --git a/tsconfig.json b/tsconfig.json index 5a5b361..65b5cbc 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -7,8 +7,8 @@ "target": "es2020", "resolveJsonModule": true, - "moduleResolution": "node", - "module": "esnext", + "moduleResolution": "nodenext", + "module": "NodeNext", "esModuleInterop": true, "strict": true,