Mirror of https://github.com/Swatinem/rust-cache

fix: cache key stability (#142)

Ensure the main and post steps use a consistent configuration by storing it
in the action state and restoring it from there, which in turn ensures cache
key stability.

Also:
* Fix some typos.
* Use core.error for logging errors.
* Fix the inverted condition on cache-all-crates.

Reverts: #138
Fixes #140
Author: Steven Hartland, 2023-05-18 21:48:40 +01:00, committed by GitHub
Parent: 060bda31e0
Commit: ad97570a01
Signature: GPG key ID 4AEE18F83AFDEB23 (no known key found for this signature in database)
9 changed files with 260 additions and 175 deletions
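
The heart of the fix is a state round-trip: the main (restore) step computes the cache configuration once, serializes it with core.saveState, and the post (save) step rehydrates it with core.getState instead of recomputing values the build may have changed (rewritten lockfiles, newly installed cargo binaries). Below is a minimal TypeScript sketch of that pattern using the @actions/core API; the trimmed-down CacheConfig fields and the commented-out call sites are illustrative, not the action's full implementation.

// Minimal sketch of the state round-trip (illustrative, trimmed-down config).
import * as core from "@actions/core";

const STATE_CONFIG = "RUST_CACHE_CONFIG";

class CacheConfig {
  cacheKey = "";
  restoreKey = "";
  cachePaths: string[] = [];
  cargoBins: string[] = [];

  /** Persist the whole configuration so the post step sees identical values. */
  saveState(): void {
    // core.saveState JSON-stringifies non-string values.
    core.saveState(STATE_CONFIG, this);
  }

  /** Rehydrate the configuration computed and stored by the main step. */
  static fromState(): CacheConfig {
    const source = core.getState(STATE_CONFIG);
    if (!source) {
      throw new Error("Cache configuration not found in state");
    }
    const self = new CacheConfig();
    Object.assign(self, JSON.parse(source));
    return self;
  }
}

// Main (restore) step: compute the key once, then persist everything:
//   const config = await CacheConfig.new();
//   config.saveState();
//
// Post (save) step: never recompute; reuse the stored key so it cannot drift:
//   const config = CacheConfig.fromState();

An empty STATE_CONFIG doubles as the "cache up to date" signal: isCacheUpToDate() in the diff below simply checks whether that state is empty, presumably because the restore step skips saving it on an exact cache hit.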

dist/save/index.js vendored (124 changed lines)

@@ -59977,8 +59977,8 @@ async function getCmdOutput(cmd, args = [], options = {}) {
});
}
catch (e) {
core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`);
core.info(`[warning] ${stderr}`);
core.error(`Command failed: ${cmd} ${args.join(" ")}`);
core.error(stderr);
throw e;
}
return stdout;
@@ -60024,12 +60024,10 @@ class Workspace {
const HOME = external_os_default().homedir();
const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH";
const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
const STATE_BINS = "RUST_CACHE_BINS";
const STATE_KEY = "RUST_CACHE_KEY";
const STATE_CONFIG = "RUST_CACHE_CONFIG";
class CacheConfig {
constructor() {
/** All the paths we want to cache */
@@ -60040,6 +60038,8 @@ class CacheConfig {
this.restoreKey = "";
/** The workspace configurations */
this.workspaces = [];
/** The cargo binaries present during main step */
this.cargoBins = [];
/** The prefix portion of the cache key */
this.keyPrefix = "";
/** The rust version considered for the cache key */
@@ -60103,20 +60103,11 @@ class CacheConfig {
}
}
self.keyEnvs = keyEnvs;
// Installed packages and their versions are also considered for the key.
const packages = await getPackages();
hasher.update(packages);
key += `-${hasher.digest("hex")}`;
self.restoreKey = key;
// Construct the lockfiles portion of the key:
// This considers all the files found via globbing for various manifests
// and lockfiles.
// This part is computed in the "pre"/"restore" part of the job and persisted
// into the `state`. That state is loaded in the "post"/"save" part of the
// job so we have consistent values even though the "main" actions run
// might create/overwrite lockfiles.
let lockHash = core.getState(STATE_LOCKFILE_HASH);
let keyFiles = JSON.parse(core.getState(STATE_LOCKFILES) || "[]");
// Constructs the workspace config and paths to restore:
// The workspaces are given using a `$workspace -> $target` syntax.
const workspaces = [];
@@ -60128,24 +60119,20 @@ class CacheConfig {
workspaces.push(new Workspace(root, target));
}
self.workspaces = workspaces;
if (!lockHash) {
keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml"));
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
}
keyFiles = keyFiles.filter(file => !external_fs_default().statSync(file).isDirectory());
keyFiles.sort((a, b) => a.localeCompare(b));
hasher = external_crypto_default().createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of external_fs_default().createReadStream(file)) {
hasher.update(chunk);
}
}
lockHash = hasher.digest("hex");
core.saveState(STATE_LOCKFILE_HASH, lockHash);
core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles));
let keyFiles = await globFiles("rust-toolchain\nrust-toolchain.toml");
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
}
keyFiles = keyFiles.filter(file => !external_fs_default().statSync(file).isDirectory());
keyFiles.sort((a, b) => a.localeCompare(b));
hasher = external_crypto_default().createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of external_fs_default().createReadStream(file)) {
hasher.update(chunk);
}
}
let lockHash = hasher.digest("hex");
self.keyFiles = keyFiles;
key += `-${lockHash}`;
self.cacheKey = key;
@@ -60158,8 +60145,32 @@ class CacheConfig {
for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
self.cachePaths.push(dir);
}
const bins = await getCargoBins();
self.cargoBins = Array.from(bins.values());
return self;
}
/**
* Reads and returns the cache config from the action `state`.
*
* @throws {Error} if the state is not present.
* @returns {CacheConfig} the configuration.
* @see {@link CacheConfig#saveState}
* @see {@link CacheConfig#new}
*/
static fromState() {
const source = core.getState(STATE_CONFIG);
if (!source) {
throw new Error("Cache configuration not found in state");
}
const self = new CacheConfig();
Object.assign(self, JSON.parse(source));
self.workspaces = self.workspaces
.map((w) => new Workspace(w.root, w.target));
return self;
}
/**
* Prints the configuration to the action log.
*/
printInfo() {
core.startGroup("Cache Configuration");
core.info(`Workspaces:`);
@@ -60187,6 +60198,21 @@ class CacheConfig {
}
core.endGroup();
}
/**
* Saves the configuration to the state store.
* This is used to restore the configuration in the post action.
*/
saveState() {
core.saveState(STATE_CONFIG, this);
}
}
/**
* Checks if the cache is up to date.
*
* @returns `true` if the cache is up to date, `false` otherwise.
*/
function isCacheUpToDate() {
return core.getState(STATE_CONFIG) === "";
}
async function getRustVersion() {
const stdout = await getCmdOutput("rustc", ["-vV"]);
@@ -60197,11 +60223,6 @@ async function getRustVersion() {
.filter((s) => s.length === 2);
return Object.fromEntries(splits);
}
async function getPackages() {
let stdout = await getCmdOutput("cargo", ["install", "--list"]);
// Make OS independent.
return stdout.split(/[\n\r]+/).join("\n");
}
async function globFiles(pattern) {
const globber = await glob.create(pattern, {
followSymbolicLinks: false,
@@ -60269,9 +60290,14 @@ async function getCargoBins() {
catch { }
return bins;
}
async function cleanBin() {
/**
* Clean the cargo bin directory, removing the binaries that existed
* when the action started, as they were not created by the build.
*
* @param oldBins The binaries that existed when the action started.
*/
async function cleanBin(oldBins) {
const bins = await getCargoBins();
const oldBins = JSON.parse(core.getState(STATE_BINS));
for (const bin of oldBins) {
bins.delete(bin);
}
@@ -60440,9 +60466,9 @@ async function exists(path) {
process.on("uncaughtException", (e) => {
core.info(`[warning] ${e.message}`);
core.error(e.message);
if (e.stack) {
core.info(e.stack);
core.error(e.stack);
}
});
async function run() {
@@ -60451,13 +60477,13 @@ async function run() {
return;
}
try {
const config = await CacheConfig["new"]();
config.printInfo();
core.info("");
if (core.getState(STATE_KEY) === config.cacheKey) {
if (isCacheUpToDate()) {
core.info(`Cache up-to-date.`);
return;
}
const config = CacheConfig.fromState();
config.printInfo();
core.info("");
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
await macOsWorkaround();
const allPackages = [];
@@ -60473,16 +60499,16 @@
}
}
try {
const creates = core.getInput("cache-all-crates").toLowerCase() || "false";
core.info(`... Cleaning cargo registry cache-all-crates: ${creates} ...`);
await cleanRegistry(allPackages, creates === "true");
const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
await cleanRegistry(allPackages, crates !== "true");
}
catch (e) {
core.error(`${e.stack}`);
}
try {
core.info(`... Cleaning cargo/bin ...`);
await cleanBin();
await cleanBin(config.cargoBins);
}
catch (e) {
core.error(`${e.stack}`);