From 6978315cd74e633e079eb32fabd0250d022cff66 Mon Sep 17 00:00:00 2001 From: MOZGIII Date: Sun, 4 Sep 2022 18:51:30 +0400 Subject: [PATCH] Update build artifacts --- dist/restore/index.js | 1033 ++++++++++++++++++++------------------- dist/save/index.js | 1063 +++++++++++++++++++++-------------------- 2 files changed, 1095 insertions(+), 1001 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index fb6f17e..475dba9 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -64364,507 +64364,554 @@ var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_); // EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js var exec = __nccwpck_require__(1514); ;// CONCATENATED MODULE: ./src/utils.ts - - -async function getCmdOutput(cmd, args = [], options = {}) { - let stdout = ""; - let stderr = ""; - try { - await exec.exec(cmd, args, { - silent: true, - listeners: { - stdout(data) { - stdout += data.toString(); - }, - stderr(data) { - stderr += data.toString(); - }, - }, - ...options, - }); - } - catch (e) { - lib_core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); - lib_core.info(`[warning] ${stderr}`); - throw e; - } - return stdout; -} + + +async function getCmdOutput(cmd, args = [], options = {}) { + let stdout = ""; + let stderr = ""; + try { + await exec.exec(cmd, args, { + silent: true, + listeners: { + stdout(data) { + stdout += data.toString(); + }, + stderr(data) { + stderr += data.toString(); + }, + }, + ...options, + }); + } + catch (e) { + lib_core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); + lib_core.info(`[warning] ${stderr}`); + throw e; + } + return stdout; +} +async function withRetries(operation, maxRetryAttempts, isRetriable) { + let attemptsLeft = maxRetryAttempts; + while (true) { + try { + return await operation(); + } + catch (e) { + attemptsLeft -= 1; + if (attemptsLeft <= 0) { + throw e; + } + if (!isRetriable(e)) { + throw e; + } + lib_core.info(`[warning] Retrying after an error, ${attemptsLeft} attempts left, error: ${e}`); + } + } +} +class TimeoutError extends Error { +} +async function withTimeout(operation, timeoutMs) { + const timeout = timeoutMs + ? 
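+ // `timeoutMs` is a duration in milliseconds; when it is unset, the fallback
+ // arm below yields a promise that never settles, so only `operation` can win
+ // the `Promise.race` further down. A timed-out operation is not cancelled,
+ // only abandoned.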
new Promise((resolve) => { + setTimeout(resolve, timeoutMs); + }) + : new Promise(() => { }); + const timeoutSym = Symbol("timeout"); + const racingTimeout = timeout.then(() => timeoutSym); + const result = await Promise.race([racingTimeout, operation(timeout)]); + if (result === timeoutSym) { + throw new TimeoutError("operation timeout"); + } + return result; +} ;// CONCATENATED MODULE: ./src/workspace.ts - - -const SAVE_TARGETS = new Set(["lib", "proc-macro"]); -class Workspace { - constructor(root, target) { - this.root = root; - this.target = target; - } - async getPackages() { - let packages = []; - try { - const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], { - cwd: this.root, - })); - for (const pkg of meta.packages) { - if (pkg.manifest_path.startsWith(this.root)) { - continue; - } - const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); - packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); - } - } - catch { } - return packages; - } -} + + +const SAVE_TARGETS = new Set(["lib", "proc-macro"]); +class Workspace { + constructor(root, target) { + this.root = root; + this.target = target; + } + async getPackages() { + let packages = []; + try { + const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], { + cwd: this.root, + })); + for (const pkg of meta.packages) { + if (pkg.manifest_path.startsWith(this.root)) { + continue; + } + const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); + packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); + } + } + catch { } + return packages; + } +} ;// CONCATENATED MODULE: ./src/config.ts - - - - - - - - -const HOME = external_os_default().homedir(); -const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); -const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; -const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES"; -const config_STATE_BINS = "RUST_CACHE_BINS"; -const STATE_KEY = "RUST_CACHE_KEY"; -class CacheConfig { - constructor() { - /** All the paths we want to cache */ - this.cachePaths = []; - /** The primary cache key */ - this.cacheKey = ""; - /** The secondary (restore) key that only contains the prefix and environment */ - this.restoreKey = ""; - /** The workspace configurations */ - this.workspaces = []; - /** The prefix portion of the cache key */ - this.keyPrefix = ""; - /** The rust version considered for the cache key */ - this.keyRust = ""; - /** The environment variables considered for the cache key */ - this.keyEnvs = []; - /** The files considered for the cache key */ - this.keyFiles = []; - } - /** - * Constructs a [`CacheConfig`] with all the paths and keys. - * - * This will read the action `input`s, and read and persist `state` as necessary. - */ - static async new() { - const self = new CacheConfig(); - // Construct key prefix: - // This uses either the `shared-key` input, - // or the `key` input combined with the `job` key. 
- let key = `v0-rust`; - const sharedKey = lib_core.getInput("shared-key"); - if (sharedKey) { - key += `-${sharedKey}`; - } - else { - const inputKey = lib_core.getInput("key"); - if (inputKey) { - key += `-${inputKey}`; - } - const job = process.env.GITHUB_JOB; - if (job) { - key += `-${job}`; - } - } - self.keyPrefix = key; - // Construct environment portion of the key: - // This consists of a hash that considers the rust version - // as well as all the environment variables as given by a default list - // and the `env-vars` input. - // The env vars are sorted, matched by prefix and hashed into the - // resulting environment hash. - let hasher = external_crypto_default().createHash("sha1"); - const rustVersion = await getRustVersion(); - let keyRust = `${rustVersion.release} ${rustVersion.host}`; - hasher.update(keyRust); - hasher.update(rustVersion["commit-hash"]); - keyRust += ` (${rustVersion["commit-hash"]})`; - self.keyRust = keyRust; - // these prefixes should cover most of the compiler / rust / cargo keys - const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; - envPrefixes.push(...lib_core.getInput("env-vars").split(/\s+/).filter(Boolean)); - // sort the available env vars so we have a more stable hash - const keyEnvs = []; - const envKeys = Object.keys(process.env); - envKeys.sort((a, b) => a.localeCompare(b)); - for (const key of envKeys) { - const value = process.env[key]; - if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { - hasher.update(`${key}=${value}`); - keyEnvs.push(key); - } - } - self.keyEnvs = keyEnvs; - key += `-${hasher.digest("hex")}`; - self.restoreKey = key; - // Construct the lockfiles portion of the key: - // This considers all the files found via globbing for various manifests - // and lockfiles. - // This part is computed in the "pre"/"restore" part of the job and persisted - // into the `state`. That state is loaded in the "post"/"save" part of the - // job so we have consistent values even though the "main" actions run - // might create/overwrite lockfiles. - let lockHash = lib_core.getState(STATE_LOCKFILE_HASH); - let keyFiles = JSON.parse(lib_core.getState(STATE_LOCKFILES) || "[]"); - if (!lockHash) { - const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", { - followSymbolicLinks: false, - }); - keyFiles = await globber.glob(); - keyFiles.sort((a, b) => a.localeCompare(b)); - hasher = external_crypto_default().createHash("sha1"); - for (const file of keyFiles) { - for await (const chunk of external_fs_default().createReadStream(file)) { - hasher.update(chunk); - } - } - lockHash = hasher.digest("hex"); - lib_core.saveState(STATE_LOCKFILE_HASH, lockHash); - lib_core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles)); - } - self.keyFiles = keyFiles; - key += `-${lockHash}`; - self.cacheKey = key; - // Constructs the workspace config and paths to restore: - // The workspaces are given using a `$workspace -> $target` syntax. 
- const workspaces = [];
- const workspacesInput = lib_core.getInput("workspaces") || ".";
- for (const workspace of workspacesInput.trim().split("\n")) {
- let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
- root = external_path_default().resolve(root);
- target = external_path_default().join(root, target);
- workspaces.push(new Workspace(root, target));
- }
- self.workspaces = workspaces;
- self.cachePaths = [config_CARGO_HOME, ...workspaces.map((ws) => ws.target)];
- return self;
- }
- printInfo() {
- lib_core.startGroup("Cache Configuration");
- lib_core.info(`Workspaces:`);
- for (const workspace of this.workspaces) {
- lib_core.info(` ${workspace.root}`);
- }
- lib_core.info(`Cache Paths:`);
- for (const path of this.cachePaths) {
- lib_core.info(` ${path}`);
- }
- lib_core.info(`Restore Key:`);
- lib_core.info(` ${this.restoreKey}`);
- lib_core.info(`Cache Key:`);
- lib_core.info(` ${this.cacheKey}`);
- lib_core.info(`.. Prefix:`);
- lib_core.info(` - ${this.keyPrefix}`);
- lib_core.info(`.. Environment considered:`);
- lib_core.info(` - Rust Version: ${this.keyRust}`);
- for (const env of this.keyEnvs) {
- lib_core.info(` - ${env}`);
- }
- lib_core.info(`.. Lockfiles considered:`);
- for (const file of this.keyFiles) {
- lib_core.info(` - ${file}`);
- }
- lib_core.endGroup();
- }
-}
-async function getRustVersion() {
- const stdout = await getCmdOutput("rustc", ["-vV"]);
- let splits = stdout
- .split(/[\n\r]+/)
- .filter(Boolean)
- .map((s) => s.split(":").map((s) => s.trim()))
- .filter((s) => s.length === 2);
- return Object.fromEntries(splits);
-}
+
+
+
+
+
+
+
+
+const HOME = external_os_default().homedir();
+const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
+const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH";
+const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
+const config_STATE_BINS = "RUST_CACHE_BINS";
+const STATE_KEY = "RUST_CACHE_KEY";
+class CacheConfig {
+ constructor() {
+ /** All the paths we want to cache */
+ this.cachePaths = [];
+ /** The primary cache key */
+ this.cacheKey = "";
+ /** The secondary (restore) key that only contains the prefix and environment */
+ this.restoreKey = "";
+ /** The workspace configurations */
+ this.workspaces = [];
+ /** The max timeout in milliseconds for the networking operations */
+ this.timeout = null;
+ /** The max retry attempts for the networking operations */
+ this.maxRetryAttempts = 0;
+ /** The prefix portion of the cache key */
+ this.keyPrefix = "";
+ /** The rust version considered for the cache key */
+ this.keyRust = "";
+ /** The environment variables considered for the cache key */
+ this.keyEnvs = [];
+ /** The files considered for the cache key */
+ this.keyFiles = [];
+ }
+ /**
+ * Constructs a [`CacheConfig`] with all the paths and keys.
+ *
+ * This will read the action `input`s, and read and persist `state` as necessary.
+ */
+ static async new() {
+ const self = new CacheConfig();
+ // Construct key prefix:
+ // This uses either the `shared-key` input,
+ // or the `key` input combined with the `job` key.
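+ // The assembled key ends up shaped like
+ // `v0-rust[-<shared-key>|-<key>-<job>]-<env hash>-<lockfiles hash>`.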
+ let key = `v0-rust`; + const sharedKey = lib_core.getInput("shared-key"); + if (sharedKey) { + key += `-${sharedKey}`; + } + else { + const inputKey = lib_core.getInput("key"); + if (inputKey) { + key += `-${inputKey}`; + } + const job = process.env.GITHUB_JOB; + if (job) { + key += `-${job}`; + } + } + self.keyPrefix = key; + // Construct environment portion of the key: + // This consists of a hash that considers the rust version + // as well as all the environment variables as given by a default list + // and the `env-vars` input. + // The env vars are sorted, matched by prefix and hashed into the + // resulting environment hash. + let hasher = external_crypto_default().createHash("sha1"); + const rustVersion = await getRustVersion(); + let keyRust = `${rustVersion.release} ${rustVersion.host}`; + hasher.update(keyRust); + hasher.update(rustVersion["commit-hash"]); + keyRust += ` (${rustVersion["commit-hash"]})`; + self.keyRust = keyRust; + // these prefixes should cover most of the compiler / rust / cargo keys + const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; + envPrefixes.push(...lib_core.getInput("env-vars").split(/\s+/).filter(Boolean)); + // sort the available env vars so we have a more stable hash + const keyEnvs = []; + const envKeys = Object.keys(process.env); + envKeys.sort((a, b) => a.localeCompare(b)); + for (const key of envKeys) { + const value = process.env[key]; + if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { + hasher.update(`${key}=${value}`); + keyEnvs.push(key); + } + } + self.keyEnvs = keyEnvs; + key += `-${hasher.digest("hex")}`; + self.restoreKey = key; + // Construct the lockfiles portion of the key: + // This considers all the files found via globbing for various manifests + // and lockfiles. + // This part is computed in the "pre"/"restore" part of the job and persisted + // into the `state`. That state is loaded in the "post"/"save" part of the + // job so we have consistent values even though the "main" actions run + // might create/overwrite lockfiles. + let lockHash = lib_core.getState(STATE_LOCKFILE_HASH); + let keyFiles = JSON.parse(lib_core.getState(STATE_LOCKFILES) || "[]"); + if (!lockHash) { + const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", { + followSymbolicLinks: false, + }); + keyFiles = await globber.glob(); + keyFiles.sort((a, b) => a.localeCompare(b)); + hasher = external_crypto_default().createHash("sha1"); + for (const file of keyFiles) { + for await (const chunk of external_fs_default().createReadStream(file)) { + hasher.update(chunk); + } + } + lockHash = hasher.digest("hex"); + lib_core.saveState(STATE_LOCKFILE_HASH, lockHash); + lib_core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles)); + } + self.keyFiles = keyFiles; + key += `-${lockHash}`; + self.cacheKey = key; + // Constructs the workspace config and paths to restore: + // The workspaces are given using a `$workspace -> $target` syntax. 
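+ // e.g. `crates/backend -> custom-target` caches `crates/backend/custom-target`;
+ // the default entry is equivalent to `. -> target`.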
+ const workspaces = []; + const workspacesInput = lib_core.getInput("workspaces") || "."; + for (const workspace of workspacesInput.trim().split("\n")) { + let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); + root = external_path_default().resolve(root); + target = external_path_default().join(root, target); + workspaces.push(new Workspace(root, target)); + } + self.workspaces = workspaces; + self.cachePaths = [config_CARGO_HOME, ...workspaces.map((ws) => ws.target)]; + const timeoutInput = lib_core.getInput("timeout"); + self.timeout = timeoutInput ? parseFloat(timeoutInput) : null; + const maxRetryAttemptsInput = lib_core.getInput("maxRetryAttempts"); + self.maxRetryAttempts = maxRetryAttemptsInput ? parseFloat(maxRetryAttemptsInput) : 0; + return self; + } + printInfo() { + lib_core.startGroup("Cache Configuration"); + lib_core.info(`Workspaces:`); + for (const workspace of this.workspaces) { + lib_core.info(` ${workspace.root}`); + } + lib_core.info(`Cache Paths:`); + for (const path of this.cachePaths) { + lib_core.info(` ${path}`); + } + lib_core.info(`Restore Key:`); + lib_core.info(` ${this.restoreKey}`); + lib_core.info(`Cache Key:`); + lib_core.info(` ${this.cacheKey}`); + lib_core.info(`.. Prefix:`); + lib_core.info(` - ${this.keyPrefix}`); + lib_core.info(`.. Environment considered:`); + lib_core.info(` - Rust Version: ${this.keyRust}`); + for (const env of this.keyEnvs) { + lib_core.info(` - ${env}`); + } + lib_core.info(`.. Lockfiles considered:`); + for (const file of this.keyFiles) { + lib_core.info(` - ${file}`); + } + lib_core.info(`Network operations timeout:`); + lib_core.info(` ${this.timeout}`); + lib_core.info(`Max retry attempts for the network operations:`); + lib_core.info(` ${this.maxRetryAttempts}`); + lib_core.endGroup(); + } +} +async function getRustVersion() { + const stdout = await getCmdOutput("rustc", ["-vV"]); + let splits = stdout + .split(/[\n\r]+/) + .filter(Boolean) + .map((s) => s.split(":").map((s) => s.trim())) + .filter((s) => s.length === 2); + return Object.fromEntries(splits); +} ;// CONCATENATED MODULE: ./src/cleanup.ts - - - - - -async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { - lib_core.debug(`cleaning target directory "${targetDir}"`); - // remove all *files* from the profile directory - let dir = await external_fs_default().promises.opendir(targetDir); - for await (const dirent of dir) { - if (dirent.isDirectory()) { - let dirName = external_path_default().join(dir.path, dirent.name); - // is it a profile dir, or a nested target dir? 
- let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json"))); - try { - if (isNestedTarget) { - await cleanTargetDir(dirName, packages, checkTimestamp); - } - else { - await cleanProfileTarget(dirName, packages, checkTimestamp); - } - } - catch { } - } - else if (dirent.name !== "CACHEDIR.TAG") { - await rm(dir.path, dirent); - } - } -} -async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { - lib_core.debug(`cleaning profile directory "${profileDir}"`); - let keepProfile = new Set(["build", ".fingerprint", "deps"]); - await rmExcept(profileDir, keepProfile); - const keepPkg = new Set(packages.map((p) => p.name)); - await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp); - await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); - const keepDeps = new Set(packages.flatMap((p) => { - const names = []; - for (const n of [p.name, ...p.targets]) { - const name = n.replace(/-/g, "_"); - names.push(name, `lib${name}`); - } - return names; - })); - await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp); -} -async function getCargoBins() { - const bins = new Set(); - try { - const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(config_CARGO_HOME, ".crates2.json"), "utf8")); - for (const pkg of Object.values(installs)) { - for (const bin of pkg.bins) { - bins.add(bin); - } - } - } - catch { } - return bins; -} -async function cleanBin() { - const bins = await getCargoBins(); - const oldBins = JSON.parse(core.getState(STATE_BINS)); - for (const bin of oldBins) { - bins.delete(bin); - } - const dir = await fs.promises.opendir(path.join(CARGO_HOME, "bin")); - for await (const dirent of dir) { - if (dirent.isFile() && !bins.has(dirent.name)) { - await rm(dir.path, dirent); - } - } -} -async function cleanRegistry(packages) { - // `.cargo/registry/src` - // we can remove this completely, as cargo will recreate this from `cache` - await rmRF(path.join(CARGO_HOME, "registry", "src")); - // `.cargo/registry/index` - const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index")); - for await (const dirent of indexDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` - // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` - const dirPath = path.join(indexDir.path, dirent.name); - // for a git registry, we can remove `.cache`, as cargo will recreate it from git - if (await exists(path.join(dirPath, ".git"))) { - await rmRF(path.join(dirPath, ".cache")); - } - // TODO: else, clean `.cache` based on the `packages` - } - } - const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); - // `.cargo/registry/cache` - const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache")); - for await (const dirent of cacheDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` - // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` - const dir = await fs.promises.opendir(path.join(cacheDir.path, dirent.name)); - for await (const dirent of dir) { - // here we check that the downloaded `.crate` matches one from our dependencies - if (dirent.isFile() && !pkgSet.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - } -} -async function cleanGit(packages) { 
- const coPath = path.join(CARGO_HOME, "git", "checkouts"); - const dbPath = path.join(CARGO_HOME, "git", "db"); - const repos = new Map(); - for (const p of packages) { - if (!p.path.startsWith(coPath)) { - continue; - } - const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep); - const refs = repos.get(repo); - if (refs) { - refs.add(ref); - } - else { - repos.set(repo, new Set([ref])); - } - } - // we have to keep both the clone, and the checkout, removing either will - // trigger a rebuild - // clean the db - try { - let dir = await fs.promises.opendir(dbPath); - for await (const dirent of dir) { - if (!repos.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - catch { } - // clean the checkouts - try { - let dir = await fs.promises.opendir(coPath); - for await (const dirent of dir) { - const refs = repos.get(dirent.name); - if (!refs) { - await rm(dir.path, dirent); - continue; - } - if (!dirent.isDirectory()) { - continue; - } - const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name)); - for await (const dirent of refsDir) { - if (!refs.has(dirent.name)) { - await rm(refsDir.path, dirent); - } - } - } - } - catch { } -} -const ONE_WEEK = 7 * 24 * 3600 * 1000; -/** - * Removes all files or directories in `dirName`, except the ones matching - * any string in the `keepPrefix` set. - * - * The matching strips and trailing `-$hash` suffix. - * - * When the `checkTimestamp` flag is set, this will also remove anything older - * than one week. - */ -async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { - const dir = await external_fs_default().promises.opendir(dirName); - for await (const dirent of dir) { - let name = dirent.name; - // strip the trailing hash - const idx = name.lastIndexOf("-"); - if (idx !== -1) { - name = name.slice(0, idx); - } - let isOutdated = false; - if (checkTimestamp) { - const fileName = external_path_default().join(dir.path, dirent.name); - const { mtime } = await external_fs_default().promises.stat(fileName); - isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; - } - if (!keepPrefix.has(name) || isOutdated) { - await rm(dir.path, dirent); - } - } -} -async function rm(parent, dirent) { - try { - const fileName = external_path_default().join(parent, dirent.name); - lib_core.debug(`deleting "${fileName}"`); - if (dirent.isFile()) { - await external_fs_default().promises.unlink(fileName); - } - else if (dirent.isDirectory()) { - await lib_io.rmRF(fileName); - } - } - catch { } -} -async function rmRF(dirName) { - core.debug(`deleting "${dirName}"`); - await io.rmRF(dirName); -} -async function exists(path) { - try { - await external_fs_default().promises.access(path); - return true; - } - catch { - return false; - } -} + + + + + +async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { + lib_core.debug(`cleaning target directory "${targetDir}"`); + // remove all *files* from the profile directory + let dir = await external_fs_default().promises.opendir(targetDir); + for await (const dirent of dir) { + if (dirent.isDirectory()) { + let dirName = external_path_default().join(dir.path, dirent.name); + // is it a profile dir, or a nested target dir? 
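+ // Cargo writes a `CACHEDIR.TAG` file (and rustc a `.rustc_info.json`) at the
+ // top of a target directory, so either marker identifies a nested target dir.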
+ let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json"))); + try { + if (isNestedTarget) { + await cleanTargetDir(dirName, packages, checkTimestamp); + } + else { + await cleanProfileTarget(dirName, packages, checkTimestamp); + } + } + catch { } + } + else if (dirent.name !== "CACHEDIR.TAG") { + await rm(dir.path, dirent); + } + } +} +async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { + lib_core.debug(`cleaning profile directory "${profileDir}"`); + let keepProfile = new Set(["build", ".fingerprint", "deps"]); + await rmExcept(profileDir, keepProfile); + const keepPkg = new Set(packages.map((p) => p.name)); + await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp); + await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); + const keepDeps = new Set(packages.flatMap((p) => { + const names = []; + for (const n of [p.name, ...p.targets]) { + const name = n.replace(/-/g, "_"); + names.push(name, `lib${name}`); + } + return names; + })); + await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp); +} +async function getCargoBins() { + const bins = new Set(); + try { + const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(config_CARGO_HOME, ".crates2.json"), "utf8")); + for (const pkg of Object.values(installs)) { + for (const bin of pkg.bins) { + bins.add(bin); + } + } + } + catch { } + return bins; +} +async function cleanBin() { + const bins = await getCargoBins(); + const oldBins = JSON.parse(core.getState(STATE_BINS)); + for (const bin of oldBins) { + bins.delete(bin); + } + const dir = await fs.promises.opendir(path.join(CARGO_HOME, "bin")); + for await (const dirent of dir) { + if (dirent.isFile() && !bins.has(dirent.name)) { + await rm(dir.path, dirent); + } + } +} +async function cleanRegistry(packages) { + // `.cargo/registry/src` + // we can remove this completely, as cargo will recreate this from `cache` + await rmRF(path.join(CARGO_HOME, "registry", "src")); + // `.cargo/registry/index` + const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index")); + for await (const dirent of indexDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` + // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` + const dirPath = path.join(indexDir.path, dirent.name); + // for a git registry, we can remove `.cache`, as cargo will recreate it from git + if (await exists(path.join(dirPath, ".git"))) { + await rmRF(path.join(dirPath, ".cache")); + } + // TODO: else, clean `.cache` based on the `packages` + } + } + const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); + // `.cargo/registry/cache` + const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache")); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` + // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` + const dir = await fs.promises.opendir(path.join(cacheDir.path, dirent.name)); + for await (const dirent of dir) { + // here we check that the downloaded `.crate` matches one from our dependencies + if (dirent.isFile() && !pkgSet.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + } +} +async function cleanGit(packages) { 
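+ // Map each git dependency back to its `$CARGO_HOME/git/checkouts/<repo>/<ref>`
+ // location, then delete every db clone and checkout ref that is no longer
+ // referenced by the current dependency set.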
+ const coPath = path.join(CARGO_HOME, "git", "checkouts"); + const dbPath = path.join(CARGO_HOME, "git", "db"); + const repos = new Map(); + for (const p of packages) { + if (!p.path.startsWith(coPath)) { + continue; + } + const [repo, ref] = p.path.slice(coPath.length + 1).split(path.sep); + const refs = repos.get(repo); + if (refs) { + refs.add(ref); + } + else { + repos.set(repo, new Set([ref])); + } + } + // we have to keep both the clone, and the checkout, removing either will + // trigger a rebuild + // clean the db + try { + let dir = await fs.promises.opendir(dbPath); + for await (const dirent of dir) { + if (!repos.has(dirent.name)) { + await rm(dir.path, dirent); + } + } + } + catch { } + // clean the checkouts + try { + let dir = await fs.promises.opendir(coPath); + for await (const dirent of dir) { + const refs = repos.get(dirent.name); + if (!refs) { + await rm(dir.path, dirent); + continue; + } + if (!dirent.isDirectory()) { + continue; + } + const refsDir = await fs.promises.opendir(path.join(dir.path, dirent.name)); + for await (const dirent of refsDir) { + if (!refs.has(dirent.name)) { + await rm(refsDir.path, dirent); + } + } + } + } + catch { } +} +const ONE_WEEK = 7 * 24 * 3600 * 1000; +/** + * Removes all files or directories in `dirName`, except the ones matching + * any string in the `keepPrefix` set. + * + * The matching strips and trailing `-$hash` suffix. + * + * When the `checkTimestamp` flag is set, this will also remove anything older + * than one week. + */ +async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { + const dir = await external_fs_default().promises.opendir(dirName); + for await (const dirent of dir) { + let name = dirent.name; + // strip the trailing hash + const idx = name.lastIndexOf("-"); + if (idx !== -1) { + name = name.slice(0, idx); + } + let isOutdated = false; + if (checkTimestamp) { + const fileName = external_path_default().join(dir.path, dirent.name); + const { mtime } = await external_fs_default().promises.stat(fileName); + isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; + } + if (!keepPrefix.has(name) || isOutdated) { + await rm(dir.path, dirent); + } + } +} +async function rm(parent, dirent) { + try { + const fileName = external_path_default().join(parent, dirent.name); + lib_core.debug(`deleting "${fileName}"`); + if (dirent.isFile()) { + await external_fs_default().promises.unlink(fileName); + } + else if (dirent.isDirectory()) { + await lib_io.rmRF(fileName); + } + } + catch { } +} +async function rmRF(dirName) { + core.debug(`deleting "${dirName}"`); + await io.rmRF(dirName); +} +async function exists(path) { + try { + await external_fs_default().promises.access(path); + return true; + } + catch { + return false; + } +} ;// CONCATENATED MODULE: ./src/restore.ts - - - - -process.on("uncaughtException", (e) => { - lib_core.info(`[warning] ${e.message}`); - if (e.stack) { - lib_core.info(e.stack); - } -}); -async function run() { - if (!cache.isFeatureAvailable()) { - setCacheHitOutput(false); - return; - } - try { - var cacheOnFailure = lib_core.getInput("cache-on-failure").toLowerCase(); - if (cacheOnFailure !== "true") { - cacheOnFailure = "false"; - } - lib_core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure); - lib_core.exportVariable("CARGO_INCREMENTAL", 0); - const config = await CacheConfig["new"](); - config.printInfo(); - lib_core.info(""); - const bins = await getCargoBins(); - lib_core.saveState(config_STATE_BINS, JSON.stringify([...bins])); - lib_core.info(`... 
Restoring cache ...`); - const key = config.cacheKey; - const restoreKey = await cache.restoreCache(config.cachePaths, key, [config.restoreKey]); - if (restoreKey) { - lib_core.info(`Restored from cache key "${restoreKey}".`); - lib_core.saveState(STATE_KEY, restoreKey); - if (restoreKey !== key) { - // pre-clean the target directory on cache mismatch - for (const workspace of config.workspaces) { - try { - const packages = await workspace.getPackages(); - await cleanTargetDir(workspace.target, packages, true); - } - catch { } - } - } - setCacheHitOutput(restoreKey === key); - } - else { - lib_core.info("No cache found."); - setCacheHitOutput(false); - } - } - catch (e) { - setCacheHitOutput(false); - lib_core.info(`[warning] ${e.stack}`); - } -} -function setCacheHitOutput(cacheHit) { - lib_core.setOutput("cache-hit", cacheHit.toString()); -} -run(); + + + + + +process.on("uncaughtException", (e) => { + lib_core.info(`[warning] ${e.message}`); + if (e.stack) { + lib_core.info(e.stack); + } +}); +async function run() { + if (!cache.isFeatureAvailable()) { + setCacheHitOutput(false); + return; + } + try { + var cacheOnFailure = lib_core.getInput("cache-on-failure").toLowerCase(); + if (cacheOnFailure !== "true") { + cacheOnFailure = "false"; + } + lib_core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure); + lib_core.exportVariable("CARGO_INCREMENTAL", 0); + const config = await CacheConfig["new"](); + config.printInfo(); + lib_core.info(""); + const bins = await getCargoBins(); + lib_core.saveState(config_STATE_BINS, JSON.stringify([...bins])); + lib_core.info(`... Restoring cache ...`); + const key = config.cacheKey; + const restoreKey = await withRetries(() => withTimeout(() => cache.restoreCache(config.cachePaths, key, [config.restoreKey]), config.timeout), config.maxRetryAttempts, () => true); + if (restoreKey) { + lib_core.info(`Restored from cache key "${restoreKey}".`); + lib_core.saveState(STATE_KEY, restoreKey); + if (restoreKey !== key) { + // pre-clean the target directory on cache mismatch + for (const workspace of config.workspaces) { + try { + const packages = await workspace.getPackages(); + await cleanTargetDir(workspace.target, packages, true); + } + catch { } + } + } + setCacheHitOutput(restoreKey === key); + } + else { + lib_core.info("No cache found."); + setCacheHitOutput(false); + } + } + catch (e) { + setCacheHitOutput(false); + lib_core.info(`[warning] ${e.stack}`); + } +} +function setCacheHitOutput(cacheHit) { + lib_core.setOutput("cache-hit", cacheHit.toString()); +} +run(); })(); diff --git a/dist/save/index.js b/dist/save/index.js index 3cc65b7..dc7a769 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -64364,522 +64364,569 @@ var external_crypto_default = /*#__PURE__*/__nccwpck_require__.n(external_crypto var external_os_ = __nccwpck_require__(2037); var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_); ;// CONCATENATED MODULE: ./src/utils.ts - - -async function getCmdOutput(cmd, args = [], options = {}) { - let stdout = ""; - let stderr = ""; - try { - await exec.exec(cmd, args, { - silent: true, - listeners: { - stdout(data) { - stdout += data.toString(); - }, - stderr(data) { - stderr += data.toString(); - }, - }, - ...options, - }); - } - catch (e) { - core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); - core.info(`[warning] ${stderr}`); - throw e; - } - return stdout; -} + + +async function getCmdOutput(cmd, args = [], options = {}) { + let stdout = ""; + let stderr = ""; + try { + await exec.exec(cmd, args, 
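+ // the options below run the command silently and capture both streams so
+ // stderr can be included in the warning if the command fails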
{ + silent: true, + listeners: { + stdout(data) { + stdout += data.toString(); + }, + stderr(data) { + stderr += data.toString(); + }, + }, + ...options, + }); + } + catch (e) { + core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); + core.info(`[warning] ${stderr}`); + throw e; + } + return stdout; +} +async function withRetries(operation, maxRetryAttempts, isRetriable) { + let attemptsLeft = maxRetryAttempts; + while (true) { + try { + return await operation(); + } + catch (e) { + attemptsLeft -= 1; + if (attemptsLeft <= 0) { + throw e; + } + if (!isRetriable(e)) { + throw e; + } + core.info(`[warning] Retrying after an error, ${attemptsLeft} attempts left, error: ${e}`); + } + } +} +class TimeoutError extends Error { +} +async function withTimeout(operation, timeoutMs) { + const timeout = timeoutMs + ? new Promise((resolve) => { + setTimeout(resolve, timeoutMs); + }) + : new Promise(() => { }); + const timeoutSym = Symbol("timeout"); + const racingTimeout = timeout.then(() => timeoutSym); + const result = await Promise.race([racingTimeout, operation(timeout)]); + if (result === timeoutSym) { + throw new TimeoutError("operation timeout"); + } + return result; +} ;// CONCATENATED MODULE: ./src/workspace.ts - - -const SAVE_TARGETS = new Set(["lib", "proc-macro"]); -class Workspace { - constructor(root, target) { - this.root = root; - this.target = target; - } - async getPackages() { - let packages = []; - try { - const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], { - cwd: this.root, - })); - for (const pkg of meta.packages) { - if (pkg.manifest_path.startsWith(this.root)) { - continue; - } - const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); - packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); - } - } - catch { } - return packages; - } -} + + +const SAVE_TARGETS = new Set(["lib", "proc-macro"]); +class Workspace { + constructor(root, target) { + this.root = root; + this.target = target; + } + async getPackages() { + let packages = []; + try { + const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"], { + cwd: this.root, + })); + for (const pkg of meta.packages) { + if (pkg.manifest_path.startsWith(this.root)) { + continue; + } + const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name); + packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) }); + } + } + catch { } + return packages; + } +} ;// CONCATENATED MODULE: ./src/config.ts - - - - - - - - -const HOME = external_os_default().homedir(); -const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); -const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; -const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES"; -const STATE_BINS = "RUST_CACHE_BINS"; -const STATE_KEY = "RUST_CACHE_KEY"; -class CacheConfig { - constructor() { - /** All the paths we want to cache */ - this.cachePaths = []; - /** The primary cache key */ - this.cacheKey = ""; - /** The secondary (restore) key that only contains the prefix and environment */ - this.restoreKey = ""; - /** The workspace configurations */ - this.workspaces = []; - /** The prefix portion of the cache key */ - this.keyPrefix = ""; - /** The rust version considered for the cache key */ - this.keyRust = ""; - /** 
The environment variables considered for the cache key */ - this.keyEnvs = []; - /** The files considered for the cache key */ - this.keyFiles = []; - } - /** - * Constructs a [`CacheConfig`] with all the paths and keys. - * - * This will read the action `input`s, and read and persist `state` as necessary. - */ - static async new() { - const self = new CacheConfig(); - // Construct key prefix: - // This uses either the `shared-key` input, - // or the `key` input combined with the `job` key. - let key = `v0-rust`; - const sharedKey = core.getInput("shared-key"); - if (sharedKey) { - key += `-${sharedKey}`; - } - else { - const inputKey = core.getInput("key"); - if (inputKey) { - key += `-${inputKey}`; - } - const job = process.env.GITHUB_JOB; - if (job) { - key += `-${job}`; - } - } - self.keyPrefix = key; - // Construct environment portion of the key: - // This consists of a hash that considers the rust version - // as well as all the environment variables as given by a default list - // and the `env-vars` input. - // The env vars are sorted, matched by prefix and hashed into the - // resulting environment hash. - let hasher = external_crypto_default().createHash("sha1"); - const rustVersion = await getRustVersion(); - let keyRust = `${rustVersion.release} ${rustVersion.host}`; - hasher.update(keyRust); - hasher.update(rustVersion["commit-hash"]); - keyRust += ` (${rustVersion["commit-hash"]})`; - self.keyRust = keyRust; - // these prefixes should cover most of the compiler / rust / cargo keys - const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; - envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean)); - // sort the available env vars so we have a more stable hash - const keyEnvs = []; - const envKeys = Object.keys(process.env); - envKeys.sort((a, b) => a.localeCompare(b)); - for (const key of envKeys) { - const value = process.env[key]; - if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { - hasher.update(`${key}=${value}`); - keyEnvs.push(key); - } - } - self.keyEnvs = keyEnvs; - key += `-${hasher.digest("hex")}`; - self.restoreKey = key; - // Construct the lockfiles portion of the key: - // This considers all the files found via globbing for various manifests - // and lockfiles. - // This part is computed in the "pre"/"restore" part of the job and persisted - // into the `state`. That state is loaded in the "post"/"save" part of the - // job so we have consistent values even though the "main" actions run - // might create/overwrite lockfiles. - let lockHash = core.getState(STATE_LOCKFILE_HASH); - let keyFiles = JSON.parse(core.getState(STATE_LOCKFILES) || "[]"); - if (!lockHash) { - const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", { - followSymbolicLinks: false, - }); - keyFiles = await globber.glob(); - keyFiles.sort((a, b) => a.localeCompare(b)); - hasher = external_crypto_default().createHash("sha1"); - for (const file of keyFiles) { - for await (const chunk of external_fs_default().createReadStream(file)) { - hasher.update(chunk); - } - } - lockHash = hasher.digest("hex"); - core.saveState(STATE_LOCKFILE_HASH, lockHash); - core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles)); - } - self.keyFiles = keyFiles; - key += `-${lockHash}`; - self.cacheKey = key; - // Constructs the workspace config and paths to restore: - // The workspaces are given using a `$workspace -> $target` syntax. 
- const workspaces = [];
- const workspacesInput = core.getInput("workspaces") || ".";
- for (const workspace of workspacesInput.trim().split("\n")) {
- let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
- root = external_path_default().resolve(root);
- target = external_path_default().join(root, target);
- workspaces.push(new Workspace(root, target));
- }
- self.workspaces = workspaces;
- self.cachePaths = [CARGO_HOME, ...workspaces.map((ws) => ws.target)];
- return self;
- }
- printInfo() {
- core.startGroup("Cache Configuration");
- core.info(`Workspaces:`);
- for (const workspace of this.workspaces) {
- core.info(` ${workspace.root}`);
- }
- core.info(`Cache Paths:`);
- for (const path of this.cachePaths) {
- core.info(` ${path}`);
- }
- core.info(`Restore Key:`);
- core.info(` ${this.restoreKey}`);
- core.info(`Cache Key:`);
- core.info(` ${this.cacheKey}`);
- core.info(`.. Prefix:`);
- core.info(` - ${this.keyPrefix}`);
- core.info(`.. Environment considered:`);
- core.info(` - Rust Version: ${this.keyRust}`);
- for (const env of this.keyEnvs) {
- core.info(` - ${env}`);
- }
- core.info(`.. Lockfiles considered:`);
- for (const file of this.keyFiles) {
- core.info(` - ${file}`);
- }
- core.endGroup();
- }
-}
-async function getRustVersion() {
- const stdout = await getCmdOutput("rustc", ["-vV"]);
- let splits = stdout
- .split(/[\n\r]+/)
- .filter(Boolean)
- .map((s) => s.split(":").map((s) => s.trim()))
- .filter((s) => s.length === 2);
- return Object.fromEntries(splits);
-}
+
+
+
+
+
+
+
+
+const HOME = external_os_default().homedir();
+const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
+const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH";
+const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
+const STATE_BINS = "RUST_CACHE_BINS";
+const STATE_KEY = "RUST_CACHE_KEY";
+class CacheConfig {
+ constructor() {
+ /** All the paths we want to cache */
+ this.cachePaths = [];
+ /** The primary cache key */
+ this.cacheKey = "";
+ /** The secondary (restore) key that only contains the prefix and environment */
+ this.restoreKey = "";
+ /** The workspace configurations */
+ this.workspaces = [];
+ /** The max timeout in milliseconds for the networking operations */
+ this.timeout = null;
+ /** The max retry attempts for the networking operations */
+ this.maxRetryAttempts = 0;
+ /** The prefix portion of the cache key */
+ this.keyPrefix = "";
+ /** The rust version considered for the cache key */
+ this.keyRust = "";
+ /** The environment variables considered for the cache key */
+ this.keyEnvs = [];
+ /** The files considered for the cache key */
+ this.keyFiles = [];
+ }
+ /**
+ * Constructs a [`CacheConfig`] with all the paths and keys.
+ *
+ * This will read the action `input`s, and read and persist `state` as necessary.
+ */
+ static async new() {
+ const self = new CacheConfig();
+ // Construct key prefix:
+ // This uses either the `shared-key` input,
+ // or the `key` input combined with the `job` key.
+ let key = `v0-rust`;
+ const sharedKey = core.getInput("shared-key");
+ if (sharedKey) {
+ key += `-${sharedKey}`;
+ }
+ else {
+ const inputKey = core.getInput("key");
+ if (inputKey) {
+ key += `-${inputKey}`;
+ }
+ const job = process.env.GITHUB_JOB;
+ if (job) {
+ key += `-${job}`;
+ }
+ }
+ self.keyPrefix = key;
+ // Construct environment portion of the key:
+ // This consists of a hash that considers the rust version
+ // as well as all the environment variables as given by a default list
+ // and the `env-vars` input.
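+ // For example, `RUSTFLAGS` and `CARGO_PROFILE_RELEASE_LTO` match the default
+ // prefixes below and would be folded into the hash.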
+ // The env vars are sorted, matched by prefix and hashed into the + // resulting environment hash. + let hasher = external_crypto_default().createHash("sha1"); + const rustVersion = await getRustVersion(); + let keyRust = `${rustVersion.release} ${rustVersion.host}`; + hasher.update(keyRust); + hasher.update(rustVersion["commit-hash"]); + keyRust += ` (${rustVersion["commit-hash"]})`; + self.keyRust = keyRust; + // these prefixes should cover most of the compiler / rust / cargo keys + const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"]; + envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean)); + // sort the available env vars so we have a more stable hash + const keyEnvs = []; + const envKeys = Object.keys(process.env); + envKeys.sort((a, b) => a.localeCompare(b)); + for (const key of envKeys) { + const value = process.env[key]; + if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) { + hasher.update(`${key}=${value}`); + keyEnvs.push(key); + } + } + self.keyEnvs = keyEnvs; + key += `-${hasher.digest("hex")}`; + self.restoreKey = key; + // Construct the lockfiles portion of the key: + // This considers all the files found via globbing for various manifests + // and lockfiles. + // This part is computed in the "pre"/"restore" part of the job and persisted + // into the `state`. That state is loaded in the "post"/"save" part of the + // job so we have consistent values even though the "main" actions run + // might create/overwrite lockfiles. + let lockHash = core.getState(STATE_LOCKFILE_HASH); + let keyFiles = JSON.parse(core.getState(STATE_LOCKFILES) || "[]"); + if (!lockHash) { + const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", { + followSymbolicLinks: false, + }); + keyFiles = await globber.glob(); + keyFiles.sort((a, b) => a.localeCompare(b)); + hasher = external_crypto_default().createHash("sha1"); + for (const file of keyFiles) { + for await (const chunk of external_fs_default().createReadStream(file)) { + hasher.update(chunk); + } + } + lockHash = hasher.digest("hex"); + core.saveState(STATE_LOCKFILE_HASH, lockHash); + core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles)); + } + self.keyFiles = keyFiles; + key += `-${lockHash}`; + self.cacheKey = key; + // Constructs the workspace config and paths to restore: + // The workspaces are given using a `$workspace -> $target` syntax. + const workspaces = []; + const workspacesInput = core.getInput("workspaces") || "."; + for (const workspace of workspacesInput.trim().split("\n")) { + let [root, target = "target"] = workspace.split("->").map((s) => s.trim()); + root = external_path_default().resolve(root); + target = external_path_default().join(root, target); + workspaces.push(new Workspace(root, target)); + } + self.workspaces = workspaces; + self.cachePaths = [CARGO_HOME, ...workspaces.map((ws) => ws.target)]; + const timeoutInput = core.getInput("timeout"); + self.timeout = timeoutInput ? parseFloat(timeoutInput) : null; + const maxRetryAttemptsInput = core.getInput("maxRetryAttempts"); + self.maxRetryAttempts = maxRetryAttemptsInput ? 
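+ // an empty input leaves the default of 0, which disables retries entirely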
parseFloat(maxRetryAttemptsInput) : 0; + return self; + } + printInfo() { + core.startGroup("Cache Configuration"); + core.info(`Workspaces:`); + for (const workspace of this.workspaces) { + core.info(` ${workspace.root}`); + } + core.info(`Cache Paths:`); + for (const path of this.cachePaths) { + core.info(` ${path}`); + } + core.info(`Restore Key:`); + core.info(` ${this.restoreKey}`); + core.info(`Cache Key:`); + core.info(` ${this.cacheKey}`); + core.info(`.. Prefix:`); + core.info(` - ${this.keyPrefix}`); + core.info(`.. Environment considered:`); + core.info(` - Rust Version: ${this.keyRust}`); + for (const env of this.keyEnvs) { + core.info(` - ${env}`); + } + core.info(`.. Lockfiles considered:`); + for (const file of this.keyFiles) { + core.info(` - ${file}`); + } + core.info(`Network operations timeout:`); + core.info(` ${this.timeout}`); + core.info(`Max retry attempts for the network operations:`); + core.info(` ${this.maxRetryAttempts}`); + core.endGroup(); + } +} +async function getRustVersion() { + const stdout = await getCmdOutput("rustc", ["-vV"]); + let splits = stdout + .split(/[\n\r]+/) + .filter(Boolean) + .map((s) => s.split(":").map((s) => s.trim())) + .filter((s) => s.length === 2); + return Object.fromEntries(splits); +} ;// CONCATENATED MODULE: ./src/cleanup.ts - - - - - -async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { - core.debug(`cleaning target directory "${targetDir}"`); - // remove all *files* from the profile directory - let dir = await external_fs_default().promises.opendir(targetDir); - for await (const dirent of dir) { - if (dirent.isDirectory()) { - let dirName = external_path_default().join(dir.path, dirent.name); - // is it a profile dir, or a nested target dir? - let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json"))); - try { - if (isNestedTarget) { - await cleanTargetDir(dirName, packages, checkTimestamp); - } - else { - await cleanProfileTarget(dirName, packages, checkTimestamp); - } - } - catch { } - } - else if (dirent.name !== "CACHEDIR.TAG") { - await rm(dir.path, dirent); - } - } -} -async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) { - core.debug(`cleaning profile directory "${profileDir}"`); - let keepProfile = new Set(["build", ".fingerprint", "deps"]); - await rmExcept(profileDir, keepProfile); - const keepPkg = new Set(packages.map((p) => p.name)); - await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp); - await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp); - const keepDeps = new Set(packages.flatMap((p) => { - const names = []; - for (const n of [p.name, ...p.targets]) { - const name = n.replace(/-/g, "_"); - names.push(name, `lib${name}`); - } - return names; - })); - await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp); -} -async function getCargoBins() { - const bins = new Set(); - try { - const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(CARGO_HOME, ".crates2.json"), "utf8")); - for (const pkg of Object.values(installs)) { - for (const bin of pkg.bins) { - bins.add(bin); - } - } - } - catch { } - return bins; -} -async function cleanBin() { - const bins = await getCargoBins(); - const oldBins = JSON.parse(core.getState(STATE_BINS)); - for (const bin of oldBins) { - bins.delete(bin); - } - 
const dir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "bin")); - for await (const dirent of dir) { - if (dirent.isFile() && !bins.has(dirent.name)) { - await rm(dir.path, dirent); - } - } -} -async function cleanRegistry(packages) { - // `.cargo/registry/src` - // we can remove this completely, as cargo will recreate this from `cache` - await rmRF(external_path_default().join(CARGO_HOME, "registry", "src")); - // `.cargo/registry/index` - const indexDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "index")); - for await (const dirent of indexDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` - // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` - const dirPath = external_path_default().join(indexDir.path, dirent.name); - // for a git registry, we can remove `.cache`, as cargo will recreate it from git - if (await exists(external_path_default().join(dirPath, ".git"))) { - await rmRF(external_path_default().join(dirPath, ".cache")); - } - // TODO: else, clean `.cache` based on the `packages` - } - } - const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); - // `.cargo/registry/cache` - const cacheDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "cache")); - for await (const dirent of cacheDir) { - if (dirent.isDirectory()) { - // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` - // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` - const dir = await external_fs_default().promises.opendir(external_path_default().join(cacheDir.path, dirent.name)); - for await (const dirent of dir) { - // here we check that the downloaded `.crate` matches one from our dependencies - if (dirent.isFile() && !pkgSet.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - } -} -async function cleanGit(packages) { - const coPath = external_path_default().join(CARGO_HOME, "git", "checkouts"); - const dbPath = external_path_default().join(CARGO_HOME, "git", "db"); - const repos = new Map(); - for (const p of packages) { - if (!p.path.startsWith(coPath)) { - continue; - } - const [repo, ref] = p.path.slice(coPath.length + 1).split((external_path_default()).sep); - const refs = repos.get(repo); - if (refs) { - refs.add(ref); - } - else { - repos.set(repo, new Set([ref])); - } - } - // we have to keep both the clone, and the checkout, removing either will - // trigger a rebuild - // clean the db - try { - let dir = await external_fs_default().promises.opendir(dbPath); - for await (const dirent of dir) { - if (!repos.has(dirent.name)) { - await rm(dir.path, dirent); - } - } - } - catch { } - // clean the checkouts - try { - let dir = await external_fs_default().promises.opendir(coPath); - for await (const dirent of dir) { - const refs = repos.get(dirent.name); - if (!refs) { - await rm(dir.path, dirent); - continue; - } - if (!dirent.isDirectory()) { - continue; - } - const refsDir = await external_fs_default().promises.opendir(external_path_default().join(dir.path, dirent.name)); - for await (const dirent of refsDir) { - if (!refs.has(dirent.name)) { - await rm(refsDir.path, dirent); - } - } - } - } - catch { } -} -const ONE_WEEK = 7 * 24 * 3600 * 1000; -/** - * Removes all files or directories in `dirName`, except the ones matching - * any string in the `keepPrefix` set. - * - * The matching strips and trailing `-$hash` suffix. 
- * - * When the `checkTimestamp` flag is set, this will also remove anything older - * than one week. - */ -async function rmExcept(dirName, keepPrefix, checkTimestamp = false) { - const dir = await external_fs_default().promises.opendir(dirName); - for await (const dirent of dir) { - let name = dirent.name; - // strip the trailing hash - const idx = name.lastIndexOf("-"); - if (idx !== -1) { - name = name.slice(0, idx); - } - let isOutdated = false; - if (checkTimestamp) { - const fileName = external_path_default().join(dir.path, dirent.name); - const { mtime } = await external_fs_default().promises.stat(fileName); - isOutdated = Date.now() - mtime.getTime() > ONE_WEEK; - } - if (!keepPrefix.has(name) || isOutdated) { - await rm(dir.path, dirent); - } - } -} -async function rm(parent, dirent) { - try { - const fileName = external_path_default().join(parent, dirent.name); - core.debug(`deleting "${fileName}"`); - if (dirent.isFile()) { - await external_fs_default().promises.unlink(fileName); - } - else if (dirent.isDirectory()) { - await io.rmRF(fileName); - } - } - catch { } -} -async function rmRF(dirName) { - core.debug(`deleting "${dirName}"`); - await io.rmRF(dirName); -} -async function exists(path) { - try { - await external_fs_default().promises.access(path); - return true; - } - catch { - return false; - } -} + + + + + +async function cleanTargetDir(targetDir, packages, checkTimestamp = false) { + core.debug(`cleaning target directory "${targetDir}"`); + // remove all *files* from the profile directory + let dir = await external_fs_default().promises.opendir(targetDir); + for await (const dirent of dir) { + if (dirent.isDirectory()) { + let dirName = external_path_default().join(dir.path, dirent.name); + // is it a profile dir, or a nested target dir? 
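+ // Cargo writes a `CACHEDIR.TAG` file (and rustc a `.rustc_info.json`) at the
+ // top of a target directory, so either marker identifies a nested target dir.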
+            let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json")));
+            try {
+                if (isNestedTarget) {
+                    await cleanTargetDir(dirName, packages, checkTimestamp);
+                }
+                else {
+                    await cleanProfileTarget(dirName, packages, checkTimestamp);
+                }
+            }
+            catch { }
+        }
+        else if (dirent.name !== "CACHEDIR.TAG") {
+            await rm(dir.path, dirent);
+        }
+    }
+}
+async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) {
+    core.debug(`cleaning profile directory "${profileDir}"`);
+    let keepProfile = new Set(["build", ".fingerprint", "deps"]);
+    await rmExcept(profileDir, keepProfile);
+    const keepPkg = new Set(packages.map((p) => p.name));
+    await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp);
+    await rmExcept(external_path_default().join(profileDir, ".fingerprint"), keepPkg, checkTimestamp);
+    const keepDeps = new Set(packages.flatMap((p) => {
+        const names = [];
+        for (const n of [p.name, ...p.targets]) {
+            const name = n.replace(/-/g, "_");
+            names.push(name, `lib${name}`);
+        }
+        return names;
+    }));
+    await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps, checkTimestamp);
+}
+async function getCargoBins() {
+    const bins = new Set();
+    try {
+        const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(CARGO_HOME, ".crates2.json"), "utf8"));
+        for (const pkg of Object.values(installs)) {
+            for (const bin of pkg.bins) {
+                bins.add(bin);
+            }
+        }
+    }
+    catch { }
+    return bins;
+}
+async function cleanBin() {
+    const bins = await getCargoBins();
+    const oldBins = JSON.parse(core.getState(STATE_BINS));
+    for (const bin of oldBins) {
+        bins.delete(bin);
+    }
+    const dir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "bin"));
+    for await (const dirent of dir) {
+        if (dirent.isFile() && !bins.has(dirent.name)) {
+            await rm(dir.path, dirent);
+        }
+    }
+}
+async function cleanRegistry(packages) {
+    // `.cargo/registry/src`
+    // we can remove this completely, as cargo will recreate this from `cache`
+    await rmRF(external_path_default().join(CARGO_HOME, "registry", "src"));
+    // `.cargo/registry/index`
+    const indexDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "index"));
+    for await (const dirent of indexDir) {
+        if (dirent.isDirectory()) {
+            // eg `.cargo/registry/index/github.com-1ecc6299db9ec823`
+            // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772`
+            const dirPath = external_path_default().join(indexDir.path, dirent.name);
+            // for a git registry, we can remove `.cache`, as cargo will recreate it from git
+            if (await exists(external_path_default().join(dirPath, ".git"))) {
+                await rmRF(external_path_default().join(dirPath, ".cache"));
+            }
+            // TODO: else, clean `.cache` based on the `packages`
+        }
+    }
+    const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
+    // `.cargo/registry/cache`
+    const cacheDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "cache"));
+    for await (const dirent of cacheDir) {
+        if (dirent.isDirectory()) {
+            // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823`
+            // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772`
+            const dir = await external_fs_default().promises.opendir(external_path_default().join(cacheDir.path, dirent.name));
+            for await (const dirent of dir) {
+                // here we check that the downloaded `.crate` matches one from our dependencies
+                if (dirent.isFile() && !pkgSet.has(dirent.name)) {
+                    await rm(dir.path, dirent);
+                }
+            }
+        }
+    }
+}
+async function cleanGit(packages) {
+    const coPath = external_path_default().join(CARGO_HOME, "git", "checkouts");
+    const dbPath = external_path_default().join(CARGO_HOME, "git", "db");
+    const repos = new Map();
+    for (const p of packages) {
+        if (!p.path.startsWith(coPath)) {
+            continue;
+        }
+        const [repo, ref] = p.path.slice(coPath.length + 1).split((external_path_default()).sep);
+        const refs = repos.get(repo);
+        if (refs) {
+            refs.add(ref);
+        }
+        else {
+            repos.set(repo, new Set([ref]));
+        }
+    }
+    // we have to keep both the clone, and the checkout, removing either will
+    // trigger a rebuild
+    // clean the db
+    try {
+        let dir = await external_fs_default().promises.opendir(dbPath);
+        for await (const dirent of dir) {
+            if (!repos.has(dirent.name)) {
+                await rm(dir.path, dirent);
+            }
+        }
+    }
+    catch { }
+    // clean the checkouts
+    try {
+        let dir = await external_fs_default().promises.opendir(coPath);
+        for await (const dirent of dir) {
+            const refs = repos.get(dirent.name);
+            if (!refs) {
+                await rm(dir.path, dirent);
+                continue;
+            }
+            if (!dirent.isDirectory()) {
+                continue;
+            }
+            const refsDir = await external_fs_default().promises.opendir(external_path_default().join(dir.path, dirent.name));
+            for await (const dirent of refsDir) {
+                if (!refs.has(dirent.name)) {
+                    await rm(refsDir.path, dirent);
+                }
+            }
+        }
+    }
+    catch { }
+}
+const ONE_WEEK = 7 * 24 * 3600 * 1000;
+/**
+ * Removes all files or directories in `dirName`, except the ones matching
+ * any string in the `keepPrefix` set.
+ *
+ * The matching strips any trailing `-$hash` suffix.
+ *
+ * When the `checkTimestamp` flag is set, this will also remove anything older
+ * than one week.
+ */
+async function rmExcept(dirName, keepPrefix, checkTimestamp = false) {
+    const dir = await external_fs_default().promises.opendir(dirName);
+    for await (const dirent of dir) {
+        let name = dirent.name;
+        // strip the trailing hash
+        const idx = name.lastIndexOf("-");
+        if (idx !== -1) {
+            name = name.slice(0, idx);
+        }
+        let isOutdated = false;
+        if (checkTimestamp) {
+            const fileName = external_path_default().join(dir.path, dirent.name);
+            const { mtime } = await external_fs_default().promises.stat(fileName);
+            isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
+        }
+        if (!keepPrefix.has(name) || isOutdated) {
+            await rm(dir.path, dirent);
+        }
+    }
+}
+async function rm(parent, dirent) {
+    try {
+        const fileName = external_path_default().join(parent, dirent.name);
+        core.debug(`deleting "${fileName}"`);
+        if (dirent.isFile()) {
+            await external_fs_default().promises.unlink(fileName);
+        }
+        else if (dirent.isDirectory()) {
+            await io.rmRF(fileName);
+        }
+    }
+    catch { }
+}
+async function rmRF(dirName) {
+    core.debug(`deleting "${dirName}"`);
+    await io.rmRF(dirName);
+}
+async function exists(path) {
+    try {
+        await external_fs_default().promises.access(path);
+        return true;
+    }
+    catch {
+        return false;
+    }
+}
;// CONCATENATED MODULE: ./src/save.ts
-
-
-
-
-
-process.on("uncaughtException", (e) => {
-    core.info(`[warning] ${e.message}`);
-    if (e.stack) {
-        core.info(e.stack);
-    }
-});
-async function run() {
-    if (!cache.isFeatureAvailable()) {
-        return;
-    }
-    try {
-        const config = await CacheConfig["new"]();
-        config.printInfo();
-        core.info("");
-        if (core.getState(STATE_KEY) === config.cacheKey) {
-            core.info(`Cache up-to-date.`);
-            return;
-        }
-        // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
-        await macOsWorkaround();
-        const allPackages = [];
-        for (const workspace of config.workspaces) {
-            const packages = await workspace.getPackages();
-            allPackages.push(...packages);
-            try {
-                core.info(`... Cleaning ${workspace.target} ...`);
-                await cleanTargetDir(workspace.target, packages);
-            }
-            catch (e) {
-                core.info(`[warning] ${e.stack}`);
-            }
-        }
-        try {
-            core.info(`... Cleaning cargo registry ...`);
-            await cleanRegistry(allPackages);
-        }
-        catch (e) {
-            core.info(`[warning] ${e.stack}`);
-        }
-        try {
-            core.info(`... Cleaning cargo/bin ...`);
-            await cleanBin();
-        }
-        catch (e) {
-            core.info(`[warning] ${e.stack}`);
-        }
-        try {
-            core.info(`... Cleaning cargo git cache ...`);
-            await cleanGit(allPackages);
-        }
-        catch (e) {
-            core.info(`[warning] ${e.stack}`);
-        }
-        core.info(`... Saving cache ...`);
-        await cache.saveCache(config.cachePaths, config.cacheKey);
-    }
-    catch (e) {
-        core.info(`[warning] ${e.stack}`);
-    }
-}
-run();
-async function macOsWorkaround() {
-    try {
-        // Workaround for https://github.com/actions/cache/issues/403
-        // Also see https://github.com/rust-lang/cargo/issues/8603
-        await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true });
-    }
-    catch { }
-}
+
+
+
+
+
+
+process.on("uncaughtException", (e) => {
+    core.info(`[warning] ${e.message}`);
+    if (e.stack) {
+        core.info(e.stack);
+    }
+});
+async function run() {
+    if (!cache.isFeatureAvailable()) {
+        return;
+    }
+    try {
+        const config = await CacheConfig["new"]();
+        config.printInfo();
+        core.info("");
+        if (core.getState(STATE_KEY) === config.cacheKey) {
+            core.info(`Cache up-to-date.`);
+            return;
+        }
+        // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
+        await macOsWorkaround();
+        const allPackages = [];
+        for (const workspace of config.workspaces) {
+            const packages = await workspace.getPackages();
+            allPackages.push(...packages);
+            try {
+                core.info(`... Cleaning ${workspace.target} ...`);
+                await cleanTargetDir(workspace.target, packages);
+            }
+            catch (e) {
+                core.info(`[warning] ${e.stack}`);
+            }
+        }
+        try {
+            core.info(`... Cleaning cargo registry ...`);
+            await cleanRegistry(allPackages);
+        }
+        catch (e) {
+            core.info(`[warning] ${e.stack}`);
+        }
+        try {
+            core.info(`... Cleaning cargo/bin ...`);
+            await cleanBin();
+        }
+        catch (e) {
+            core.info(`[warning] ${e.stack}`);
+        }
+        try {
+            core.info(`... Cleaning cargo git cache ...`);
+            await cleanGit(allPackages);
+        }
+        catch (e) {
+            core.info(`[warning] ${e.stack}`);
+        }
+        core.info(`... Saving cache ...`);
+        await withRetries(() => withTimeout(() => cache.saveCache(config.cachePaths, config.cacheKey), config.timeout), config.maxRetryAttempts, () => true);
+    }
+    catch (e) {
+        core.info(`[warning] ${e.stack}`);
+    }
+}
+run();
+async function macOsWorkaround() {
+    try {
+        // Workaround for https://github.com/actions/cache/issues/403
+        // Also see https://github.com/rust-lang/cargo/issues/8603
+        await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true });
+    }
+    catch { }
+}
})();
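
The substantive change in the hunk above is that the bare `cache.saveCache(...)` call is now wrapped in the `withRetries`/`withTimeout` helpers compiled in from `src/utils.ts`, so a hung or flaky cache upload no longer wedges the save step. A minimal TypeScript sketch of how the two helpers compose, assuming only the helper shapes visible in this patch; `saveWithRetries` and `uploadCache` are hypothetical illustration names, and `timeoutMs`/`maxRetryAttempts` correspond to the `config.timeout` and `config.maxRetryAttempts` fields read by the new code:

// Sketch only: bound each upload attempt with a timeout, retrying failures.
async function saveWithRetries(
    uploadCache: () => Promise<void>, // stands in for () => cache.saveCache(config.cachePaths, config.cacheKey)
    timeoutMs: number | undefined,    // config.timeout; undefined leaves the attempt unbounded
    maxRetryAttempts: number,         // config.maxRetryAttempts
): Promise<void> {
    await withRetries(
        // each attempt races the upload against the timer inside withTimeout;
        // when the timer wins, withTimeout rejects with TimeoutError
        () => withTimeout(() => uploadCache(), timeoutMs),
        maxRetryAttempts,
        // `() => true` treats every error, including TimeoutError, as retriable
        () => true,
    );
}

Passing `() => true` as the predicate mirrors the compiled call site: every failure mode of the upload is considered transient and retried until the attempt budget is exhausted.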