diff --git a/dist/restore/index.js b/dist/restore/index.js
index 697c5dc..7adf64f 100644
--- a/dist/restore/index.js
+++ b/dist/restore/index.js
@@ -86691,6 +86691,59 @@ class Workspace {
     }
 }
 
+;// CONCATENATED MODULE: ./src/incremental.ts
+
+// import * as io from "@actions/io";
+
+
+// import { CARGO_HOME } from "./config";
+
+// import { Packages } from "./workspace";
+let incremental_missing = false;
+function isIncrementalMissing() {
+    return incremental_missing;
+}
+async function restoreIncremental(targetDir) {
+    lib_core.debug(`restoring incremental directory "${targetDir}"`);
+    let dir = await external_fs_default().promises.opendir(targetDir);
+    for await (const dirent of dir) {
+        if (dirent.isDirectory()) {
+            let dirName = external_path_default().join(dir.path, dirent.name);
+            // is it a profile dir, or a nested target dir?
+            let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json")));
+            try {
+                if (isNestedTarget) {
+                    await restoreIncremental(dirName);
+                }
+                else {
+                    await restoreIncrementalProfile(dirName);
+                }
+                restoreIncrementalProfile;
+            }
+            catch { }
+        }
+    }
+}
+async function restoreIncrementalProfile(dirName) {
+    lib_core.debug(`restoring incremental profile directory "${dirName}"`);
+    const incrementalJson = external_path_default().join(dirName, "incremental-restore.json");
+    if (await utils_exists(incrementalJson)) {
+        const contents = await external_fs_default().promises.readFile(incrementalJson, "utf8");
+        const { modifiedTimes } = JSON.parse(contents);
+        lib_core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`);
+        // Write the mtimes to all the files in the profile directory
+        for (const fileName of Object.keys(modifiedTimes)) {
+            const mtime = modifiedTimes[fileName];
+            const filePath = external_path_default().join(dirName, fileName);
+            await external_fs_default().promises.utimes(filePath, new Date(mtime), new Date(mtime));
+        }
+    }
+    else {
+        lib_core.debug(`incremental-restore.json not found for ${dirName}`);
+        incremental_missing = true;
+    }
+}
+
 ;// CONCATENATED MODULE: ./src/config.ts
 
 
@@ -86703,6 +86756,7 @@ class Workspace {
 
 
 
+
 const HOME = external_os_default().homedir();
 const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
 const STATE_CONFIG = "RUST_CACHE_CONFIG";
@@ -86911,6 +86965,13 @@ class CacheConfig {
         for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
             self.cachePaths.push(dir);
         }
+        if (self.incremental) {
+            if (cacheTargets === "true") {
+                for (const target of self.workspaces.map((ws) => ws.target)) {
+                    self.cachePaths.push(external_path_default().join(target, "incremental"));
+                }
+            }
+        }
         const bins = await getCargoBins();
         self.cargoBins = Array.from(bins.values());
         return self;
@@ -86973,6 +87034,12 @@ class CacheConfig {
     saveState() {
         lib_core.saveState(STATE_CONFIG, this);
     }
+    isIncrementalMissing() {
+        if (this.incremental) {
+            return isIncrementalMissing();
+        }
+        return false;
+    }
 }
 /**
  * Checks if the cache is up to date.
@@ -87340,51 +87407,6 @@ async function rmRF(dirName) {
     await io.rmRF(dirName);
 }
 
-;// CONCATENATED MODULE: ./src/incremental.ts
-
-// import * as io from "@actions/io";
-
-
-// import { CARGO_HOME } from "./config";
-
-// import { Packages } from "./workspace";
-async function restoreIncremental(targetDir) {
-    lib_core.debug(`restoring incremental directory "${targetDir}"`);
-    let dir = await external_fs_default().promises.opendir(targetDir);
-    for await (const dirent of dir) {
-        if (dirent.isDirectory()) {
-            let dirName = external_path_default().join(dir.path, dirent.name);
-            // is it a profile dir, or a nested target dir?
-            let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json")));
-            try {
-                if (isNestedTarget) {
-                    await restoreIncremental(dirName);
-                }
-                else {
-                    await restoreIncrementalProfile(dirName);
-                }
-                restoreIncrementalProfile;
-            }
-            catch { }
-        }
-    }
-}
-async function restoreIncrementalProfile(dirName) {
-    lib_core.debug(`restoring incremental profile directory "${dirName}"`);
-    const incrementalJson = external_path_default().join(dirName, "incremental-restore.json");
-    if (await utils_exists(incrementalJson)) {
-        const contents = await external_fs_default().promises.readFile(incrementalJson, "utf8");
-        const { modifiedTimes } = JSON.parse(contents);
-        lib_core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`);
-        // Write the mtimes to all the files in the profile directory
-        for (const fileName of Object.keys(modifiedTimes)) {
-            const mtime = modifiedTimes[fileName];
-            const filePath = external_path_default().join(dirName, fileName);
-            await external_fs_default().promises.utimes(filePath, new Date(mtime), new Date(mtime));
-        }
-    }
-}
-
 ;// CONCATENATED MODULE: ./src/restore.ts
 
 
@@ -87433,7 +87455,7 @@ async function run() {
                 await restoreIncremental(workspace.target);
             }
         }
-        if (!match) {
+        if (!match || config.isIncrementalMissing()) {
            // pre-clean the target directory on cache mismatch
            for (const workspace of config.workspaces) {
                try {
diff --git a/dist/save/index.js b/dist/save/index.js
index 15ddf1e..714a7c6 100644
--- a/dist/save/index.js
+++ b/dist/save/index.js
@@ -85430,7 +85430,7 @@ var __webpack_exports__ = {};
 "use strict";
 
 // EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js
-var core = __nccwpck_require__(7484);
+var lib_core = __nccwpck_require__(7484);
 // EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js
 var exec = __nccwpck_require__(5236);
 // EXTERNAL MODULE: ./node_modules/@actions/io/lib/io.js
@@ -86601,11 +86601,11 @@ var cache_lib_cache = __nccwpck_require__(5116);
 function reportError(e) {
     const { commandFailed } = e;
     if (commandFailed) {
-        core.error(`Command failed: ${commandFailed.command}`);
-        core.error(commandFailed.stderr);
+        lib_core.error(`Command failed: ${commandFailed.command}`);
+        lib_core.error(commandFailed.stderr);
     }
     else {
-        core.error(`${e.stack}`);
+        lib_core.error(`${e.stack}`);
     }
 }
 async function getCmdOutput(cmd, args = [], options = {}) {
@@ -86635,7 +86635,7 @@ async function getCmdOutput(cmd, args = [], options = {}) {
     return stdout;
 }
 function getCacheProvider() {
-    const cacheProvider = core.getInput("cache-provider");
+    const cacheProvider = lib_core.getInput("cache-provider");
     const cache = cacheProvider === "github" ? cache_lib_cache : cacheProvider === "buildjet" ? lib_cache : undefined;
     if (!cache) {
         throw new Error(`The \`cache-provider\` \`{cacheProvider}\` is not valid.`);
     }
@@ -86645,7 +86645,7 @@ function getCacheProvider() {
         cache: cache,
     };
 }
-async function exists(path) {
+async function utils_exists(path) {
     try {
         await external_fs_default().promises.access(path);
         return true;
     }
@@ -86668,11 +86668,11 @@ class Workspace {
     async getPackages(filter, ...extraArgs) {
         let packages = [];
         try {
-            core.debug(`collecting metadata for "${this.root}"`);
+            lib_core.debug(`collecting metadata for "${this.root}"`);
             const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], {
                 cwd: this.root,
             }));
-            core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
+            lib_core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
             for (const pkg of meta.packages.filter(filter)) {
                 const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name);
                 packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) });
@@ -86691,6 +86691,59 @@ class Workspace {
     }
 }
 
+;// CONCATENATED MODULE: ./src/incremental.ts
+
+// import * as io from "@actions/io";
+
+
+// import { CARGO_HOME } from "./config";
+
+// import { Packages } from "./workspace";
+let incremental_missing = false;
+function isIncrementalMissing() {
+    return incremental_missing;
+}
+async function restoreIncremental(targetDir) {
+    core.debug(`restoring incremental directory "${targetDir}"`);
+    let dir = await fs.promises.opendir(targetDir);
+    for await (const dirent of dir) {
+        if (dirent.isDirectory()) {
+            let dirName = path.join(dir.path, dirent.name);
+            // is it a profile dir, or a nested target dir?
+            let isNestedTarget = (await exists(path.join(dirName, "CACHEDIR.TAG"))) || (await exists(path.join(dirName, ".rustc_info.json")));
+            try {
+                if (isNestedTarget) {
+                    await restoreIncremental(dirName);
+                }
+                else {
+                    await restoreIncrementalProfile(dirName);
+                }
+                restoreIncrementalProfile;
+            }
+            catch { }
+        }
+    }
+}
+async function restoreIncrementalProfile(dirName) {
+    core.debug(`restoring incremental profile directory "${dirName}"`);
+    const incrementalJson = path.join(dirName, "incremental-restore.json");
+    if (await exists(incrementalJson)) {
+        const contents = await fs.promises.readFile(incrementalJson, "utf8");
+        const { modifiedTimes } = JSON.parse(contents);
+        core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`);
+        // Write the mtimes to all the files in the profile directory
+        for (const fileName of Object.keys(modifiedTimes)) {
+            const mtime = modifiedTimes[fileName];
+            const filePath = path.join(dirName, fileName);
+            await fs.promises.utimes(filePath, new Date(mtime), new Date(mtime));
+        }
+    }
+    else {
+        core.debug(`incremental-restore.json not found for ${dirName}`);
+        incremental_missing = true;
+    }
+}
+
 ;// CONCATENATED MODULE: ./src/config.ts
 
 
@@ -86703,6 +86756,7 @@ class Workspace {
 
 
 
+
 const HOME = external_os_default().homedir();
 const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
 const STATE_CONFIG = "RUST_CACHE_CONFIG";
@@ -86742,13 +86796,13 @@ class CacheConfig {
         // Construct key prefix:
         // This uses either the `shared-key` input,
        // or the `key` input combined with the `job` key.
-        let key = core.getInput("prefix-key") || "v0-rust";
-        const sharedKey = core.getInput("shared-key");
+        let key = lib_core.getInput("prefix-key") || "v0-rust";
+        const sharedKey = lib_core.getInput("shared-key");
         if (sharedKey) {
             key += `-${sharedKey}`;
         }
         else {
-            const inputKey = core.getInput("key");
+            const inputKey = lib_core.getInput("key");
             if (inputKey) {
                 key += `-${inputKey}`;
             }
@@ -86776,7 +86830,7 @@ class CacheConfig {
         self.keyRust = keyRust;
         // these prefixes should cover most of the compiler / rust / cargo keys
         const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"];
-        envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean));
+        envPrefixes.push(...lib_core.getInput("env-vars").split(/\s+/).filter(Boolean));
         // sort the available env vars so we have a more stable hash
         const keyEnvs = [];
         const envKeys = Object.keys(process.env);
@@ -86790,18 +86844,18 @@ class CacheConfig {
         }
         self.keyEnvs = keyEnvs;
         // Make sure we consider incremental builds
-        self.incremental = core.getInput("incremental").toLowerCase() == "true";
+        self.incremental = lib_core.getInput("incremental").toLowerCase() == "true";
         hasher.update(`incremental=${self.incremental}`);
         key += `-${digest(hasher)}`;
         self.restoreKey = key;
         // Construct the lockfiles portion of the key:
         // This considers all the files found via globbing for various manifests
         // and lockfiles.
-        self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true";
+        self.cacheBin = lib_core.getInput("cache-bin").toLowerCase() == "true";
         // Constructs the workspace config and paths to restore:
         // The workspaces are given using a `$workspace -> $target` syntax.
         const workspaces = [];
-        const workspacesInput = core.getInput("workspaces") || ".";
+        const workspacesInput = lib_core.getInput("workspaces") || ".";
         for (const workspace of workspacesInput.trim().split("\n")) {
             let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
             root = external_path_default().resolve(root);
@@ -86854,19 +86908,19 @@ class CacheConfig {
             }
             catch (e) {
                 // Fallback to caching them as regular file
-                core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
+                lib_core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
                 keyFiles.push(cargo_manifest);
             }
         }
         const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock");
-        if (await exists(cargo_lock)) {
+        if (await utils_exists(cargo_lock)) {
             try {
                 const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" });
                 const parsed = parse(content);
                 if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
                     // Fallback to caching them as regular file since this action
                     // can only handle Cargo.lock format version 3
-                    core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
+                    lib_core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
                     keyFiles.push(cargo_lock);
                     continue;
                 }
@@ -86878,7 +86932,7 @@ class CacheConfig {
             }
             catch (e) {
                 // Fallback to caching them as regular file
-                core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
+                lib_core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
                 keyFiles.push(cargo_lock);
             }
         }
@@ -86903,14 +86957,21 @@ class CacheConfig {
                 ...self.cachePaths,
             ];
         }
-        const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true";
+        const cacheTargets = lib_core.getInput("cache-targets").toLowerCase() || "true";
         if (cacheTargets === "true") {
             self.cachePaths.push(...workspaces.map((ws) => ws.target));
         }
-        const cacheDirectories = core.getInput("cache-directories");
+        const cacheDirectories = lib_core.getInput("cache-directories");
         for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
             self.cachePaths.push(dir);
         }
+        if (self.incremental) {
+            if (cacheTargets === "true") {
+                for (const target of self.workspaces.map((ws) => ws.target)) {
+                    self.cachePaths.push(external_path_default().join(target, "incremental"));
+                }
+            }
+        }
         const bins = await getCargoBins();
         self.cargoBins = Array.from(bins.values());
         return self;
@@ -86924,7 +86985,7 @@ class CacheConfig {
      * @see {@link CacheConfig#new}
      */
     static fromState() {
-        const source = core.getState(STATE_CONFIG);
+        const source = lib_core.getState(STATE_CONFIG);
         if (!source) {
             throw new Error("Cache configuration not found in state");
         }
@@ -86937,41 +86998,47 @@ class CacheConfig {
      * Prints the configuration to the action log.
      */
     printInfo(cacheProvider) {
-        core.startGroup("Cache Configuration");
-        core.info(`Cache Provider:`);
-        core.info(`    ${cacheProvider.name}`);
-        core.info(`Workspaces:`);
+        lib_core.startGroup("Cache Configuration");
+        lib_core.info(`Cache Provider:`);
+        lib_core.info(`    ${cacheProvider.name}`);
+        lib_core.info(`Workspaces:`);
         for (const workspace of this.workspaces) {
-            core.info(`    ${workspace.root}`);
+            lib_core.info(`    ${workspace.root}`);
         }
-        core.info(`Cache Paths:`);
+        lib_core.info(`Cache Paths:`);
         for (const path of this.cachePaths) {
-            core.info(`    ${path}`);
+            lib_core.info(`    ${path}`);
         }
-        core.info(`Restore Key:`);
-        core.info(`    ${this.restoreKey}`);
-        core.info(`Cache Key:`);
-        core.info(`    ${this.cacheKey}`);
-        core.info(`.. Prefix:`);
-        core.info(`  - ${this.keyPrefix}`);
-        core.info(`.. Environment considered:`);
-        core.info(`  - Rust Version: ${this.keyRust}`);
+        lib_core.info(`Restore Key:`);
+        lib_core.info(`    ${this.restoreKey}`);
+        lib_core.info(`Cache Key:`);
+        lib_core.info(`    ${this.cacheKey}`);
+        lib_core.info(`.. Prefix:`);
+        lib_core.info(`  - ${this.keyPrefix}`);
+        lib_core.info(`.. Environment considered:`);
+        lib_core.info(`  - Rust Version: ${this.keyRust}`);
         for (const env of this.keyEnvs) {
-            core.info(`  - ${env}`);
+            lib_core.info(`  - ${env}`);
         }
-        core.info(`.. Lockfiles considered:`);
+        lib_core.info(`.. Lockfiles considered:`);
         for (const file of this.keyFiles) {
-            core.info(`  - ${file}`);
+            lib_core.info(`  - ${file}`);
        }
-        core.info(`.. Incremental: ${this.incremental}`);
-        core.endGroup();
+        lib_core.info(`.. Incremental: ${this.incremental}`);
+        lib_core.endGroup();
     }
     /**
      * Saves the configuration to the state store.
      * This is used to restore the configuration in the post action.
      */
     saveState() {
-        core.saveState(STATE_CONFIG, this);
+        lib_core.saveState(STATE_CONFIG, this);
+    }
+    isIncrementalMissing() {
+        if (this.incremental) {
+            return isIncrementalMissing();
+        }
+        return false;
     }
 }
 /**
@@ -86980,7 +87047,7 @@ class CacheConfig {
  * @returns `true` if the cache is up to date, `false` otherwise.
  */
 function isCacheUpToDate() {
-    return core.getState(STATE_CONFIG) === "";
+    return lib_core.getState(STATE_CONFIG) === "";
 }
 /**
  * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`.
@@ -87034,14 +87101,14 @@ function sort_and_uniq(a) {
 
 async function cleanTargetDir(targetDir, packages, checkTimestamp, incremental) {
-    core.debug(`cleaning target directory "${targetDir}"`);
+    lib_core.debug(`cleaning target directory "${targetDir}"`);
     // remove all *files* from the profile directory
     let dir = await external_fs_default().promises.opendir(targetDir);
     for await (const dirent of dir) {
         if (dirent.isDirectory()) {
             let dirName = external_path_default().join(dir.path, dirent.name);
             // is it a profile dir, or a nested target dir?
-            let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json")));
+            let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json")));
             try {
                 if (isNestedTarget) {
                     await cleanTargetDir(dirName, packages, checkTimestamp, incremental);
@@ -87058,7 +87125,7 @@ async function cleanTargetDir(targetDir, packages, checkTimestamp, incremental)
     }
 }
 async function cleanProfileTarget(profileDir, packages, checkTimestamp, incremental) {
-    core.debug(`cleaning profile directory "${profileDir}"`);
+    lib_core.debug(`cleaning profile directory "${profileDir}"`);
     // Quite a few testing utility crates store compilation artifacts as nested
     // workspaces under `target/tests`. Notably, `target/tests/target` and
     // `target/tests/trybuild`.
@@ -87100,7 +87167,7 @@ async function cleanProfileTarget(profileDir, packages, checkTimestamp, incremen
         };
         await fillModifiedTimes(incrementalDir);
         // Write the modified times to the incremental folder
-        core.debug(`writing incremental-restore.json for ${incrementalDir} with ${modifiedTimes} files`);
+        lib_core.debug(`writing incremental-restore.json for ${incrementalDir} with ${modifiedTimes} files`);
         const contents = JSON.stringify({ modifiedTimes });
         await external_fs_default().promises.writeFile(external_path_default().join(incrementalDir, "incremental-restore.json"), contents);
     }
@@ -87153,7 +87220,7 @@ async function cleanRegistry(packages, crates = true) {
     // remove `.cargo/credentials.toml`
     try {
         const credentials = external_path_default().join(CARGO_HOME, ".cargo", "credentials.toml");
-        core.debug(`deleting "${credentials}"`);
+        lib_core.debug(`deleting "${credentials}"`);
         await external_fs_default().promises.unlink(credentials);
     }
     catch { }
@@ -87166,7 +87233,7 @@ async function cleanRegistry(packages, crates = true) {
         // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772`
         const dirPath = external_path_default().join(indexDir.path, dirent.name);
         // for a git registry, we can remove `.cache`, as cargo will recreate it from git
-        if (await exists(external_path_default().join(dirPath, ".git"))) {
+        if (await utils_exists(external_path_default().join(dirPath, ".git"))) {
             await rmRF(external_path_default().join(dirPath, ".cache"));
         }
         else {
@@ -87175,7 +87242,7 @@ async function cleanRegistry(packages, crates = true) {
         }
     }
     if (!crates) {
-        core.debug("skipping registry cache and src cleanup");
+        lib_core.debug("skipping registry cache and src cleanup");
         return;
     }
     // `.cargo/registry/src`
@@ -87325,7 +87392,7 @@ async function rmExcept(dirName, keepPrefix, checkTimestamp = false) {
 async function rm(parent, dirent) {
     try {
         const fileName = external_path_default().join(parent, dirent.name);
-        core.debug(`deleting "${fileName}"`);
+        lib_core.debug(`deleting "${fileName}"`);
         if (dirent.isFile()) {
             await external_fs_default().promises.unlink(fileName);
         }
@@ -87336,7 +87403,7 @@
     catch { }
 }
 async function rmRF(dirName) {
-    core.debug(`deleting "${dirName}"`);
+    lib_core.debug(`deleting "${dirName}"`);
     await io.rmRF(dirName);
 }
@@ -87347,25 +87414,25 @@ async function rmRF(dirName) {
 
 
 process.on("uncaughtException", (e) => {
-    core.error(e.message);
+    lib_core.error(e.message);
     if (e.stack) {
-        core.error(e.stack);
+        lib_core.error(e.stack);
     }
 });
 async function run() {
     const cacheProvider = getCacheProvider();
-    const save = core.getInput("save-if").toLowerCase() || "true";
+    const save = lib_core.getInput("save-if").toLowerCase() || "true";
     if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) {
         return;
     }
     try {
         if (isCacheUpToDate()) {
-            core.info(`Cache up-to-date.`);
+            lib_core.info(`Cache up-to-date.`);
             return;
         }
         const config = CacheConfig.fromState();
         config.printInfo(cacheProvider);
-        core.info("");
+        lib_core.info("");
         // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
         if (process.env["RUNNER_OS"] == "macOS") {
             await macOsWorkaround();
@@ -87375,38 +87442,38 @@ async function run() {
             const packages = await workspace.getPackagesOutsideWorkspaceRoot();
             allPackages.push(...packages);
             try {
-                core.info(`... Cleaning ${workspace.target} ...`);
+                lib_core.info(`... Cleaning ${workspace.target} ...`);
                 await cleanTargetDir(workspace.target, packages, false, config.incremental);
             }
             catch (e) {
-                core.debug(`${e.stack}`);
+                lib_core.debug(`${e.stack}`);
             }
         }
         try {
-            const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
-            core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
+            const crates = lib_core.getInput("cache-all-crates").toLowerCase() || "false";
+            lib_core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
             await cleanRegistry(allPackages, crates !== "true");
         }
         catch (e) {
-            core.debug(`${e.stack}`);
+            lib_core.debug(`${e.stack}`);
         }
         if (config.cacheBin) {
             try {
-                core.info(`... Cleaning cargo/bin ...`);
+                lib_core.info(`... Cleaning cargo/bin ...`);
                 await cleanBin(config.cargoBins);
             }
             catch (e) {
-                core.debug(`${e.stack}`);
+                lib_core.debug(`${e.stack}`);
             }
         }
         try {
-            core.info(`... Cleaning cargo git cache ...`);
+            lib_core.info(`... Cleaning cargo git cache ...`);
             await cleanGit(allPackages);
         }
         catch (e) {
-            core.debug(`${e.stack}`);
         }
-        core.info(`... Saving cache ...`);
+        lib_core.info(`... Saving cache ...`);
         // Pass a copy of cachePaths to avoid mutating the original array as reported by:
         // https://github.com/actions/toolkit/pull/1378
        // TODO: remove this once the underlying bug is fixed.
diff --git a/src/config.ts b/src/config.ts
index ab9d311..ecec9c6 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -10,6 +10,7 @@ import * as toml from "smol-toml";
 import { getCargoBins } from "./cleanup";
 import { CacheProvider, exists, getCmdOutput } from "./utils";
 import { Workspace } from "./workspace";
+import { isIncrementalMissing } from "./incremental";
 
 const HOME = os.homedir();
 export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");
@@ -62,8 +63,6 @@ export class CacheConfig {
 
     let key = core.getInput("prefix-key") || "v0-rust";
-
-
     const sharedKey = core.getInput("shared-key");
     if (sharedKey) {
       key += `-${sharedKey}`;
@@ -120,7 +119,6 @@ export class CacheConfig {
 
     self.keyEnvs = keyEnvs;
 
-    // Make sure we consider incremental builds
     self.incremental = core.getInput("incremental").toLowerCase() == "true";
     hasher.update(`incremental=${self.incremental}`);
 
@@ -275,6 +273,14 @@ export class CacheConfig {
       self.cachePaths.push(dir);
     }
 
+    if (self.incremental) {
+      if (cacheTargets === "true") {
+        for (const target of self.workspaces.map((ws) => ws.target)) {
+          self.cachePaths.push(path.join(target, "incremental"));
+        }
+      }
+    }
+
     const bins = await getCargoBins();
     self.cargoBins = Array.from(bins.values());
 
@@ -343,6 +349,14 @@ export class CacheConfig {
   saveState() {
     core.saveState(STATE_CONFIG, this);
   }
+
+  isIncrementalMissing(): boolean {
+    if (this.incremental) {
+      return isIncrementalMissing();
+    }
+
+    return false;
+  }
 }
 
 /**
diff --git a/src/incremental.ts b/src/incremental.ts
index 0a5fed1..43303f0 100644
--- a/src/incremental.ts
+++ b/src/incremental.ts
@@ -7,11 +7,15 @@ import path from "path";
 import { exists } from "./utils";
 
 // import { Packages } from "./workspace";
+let incremental_missing = false;
+
+export function isIncrementalMissing(): boolean {
+  return incremental_missing;
+}
 
 export async function restoreIncremental(targetDir: string) {
   core.debug(`restoring incremental directory "${targetDir}"`);
-
   let dir = await fs.promises.opendir(targetDir);
   for await (const dirent of dir) {
     if (dirent.isDirectory()) {
@@ -46,5 +50,8 @@ async function restoreIncrementalProfile(dirName: string) {
       const filePath = path.join(dirName, fileName);
       await fs.promises.utimes(filePath, new Date(mtime), new Date(mtime));
     }
+  } else {
+    core.debug(`incremental-restore.json not found for ${dirName}`);
+    incremental_missing = true;
   }
 }
diff --git a/src/restore.ts b/src/restore.ts
index a1d8a5d..f592d26 100644
--- a/src/restore.ts
+++ b/src/restore.ts
@@ -56,7 +56,7 @@ async function run() {
       }
     }
 
-    if (!match) {
+    if (!match || config.isIncrementalMissing()) {
       // pre-clean the target directory on cache mismatch
       for (const workspace of config.workspaces) {
         try {