3
0
Fork 0
mirror of https://github.com/Swatinem/rust-cache synced 2025-04-05 21:24:07 +00:00

fix: cache key stability (#142)

Ensure consistency of main and post configuration by storing and
restoring it from state, which in turn ensures cache key stability.

Also:
* Fixed some typos.
* Use core.error for logging errors.
* Fix inverted condition on cache-all-crates.

Reverts: #138
Fixes #140
This commit is contained in:
Steven Hartland 2023-05-18 21:48:40 +01:00 committed by GitHub
parent 060bda31e0
commit ad97570a01
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
9 changed files with 260 additions and 175 deletions

View file

@@ -1,5 +1,9 @@
# Changelog # Changelog
## 2.3.1
- Fix cache key stability.
## 2.3.0 ## 2.3.0
- Add `cache-all-crates` option, which enables caching of crates installed by workflows. - Add `cache-all-crates` option, which enables caching of crates installed by workflows.

View file

@@ -101,7 +101,6 @@ This cache is automatically keyed by:
- the value of some compiler-specific environment variables (eg. RUSTFLAGS, etc), and - the value of some compiler-specific environment variables (eg. RUSTFLAGS, etc), and
- a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present). - a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
- a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present). - a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
- a hash of installed packages as generated by `cargo install --list`.
An additional input `key` can be provided if the builtin keys are not sufficient. An additional input `key` can be provided if the builtin keys are not sufficient.
@@ -137,7 +136,7 @@ otherwise corrupt the cache on macOS builds.
This specialized cache action is built on top of the upstream cache action This specialized cache action is built on top of the upstream cache action
maintained by GitHub. The same restrictions and limits apply, which are maintained by GitHub. The same restrictions and limits apply, which are
documented here: documented here:
https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows [Caching dependencies to speed up workflows](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows)
In particular, caches are currently limited to 10 GB in total and exceeding that In particular, caches are currently limited to 10 GB in total and exceeding that
limit will cause eviction of older caches. limit will cause eviction of older caches.

123
dist/restore/index.js vendored
View file

@@ -59977,8 +59977,8 @@ async function getCmdOutput(cmd, args = [], options = {}) {
}); });
} }
catch (e) { catch (e) {
lib_core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); lib_core.error(`Command failed: ${cmd} ${args.join(" ")}`);
lib_core.info(`[warning] ${stderr}`); lib_core.error(stderr);
throw e; throw e;
} }
return stdout; return stdout;
@@ -60024,12 +60024,10 @@ class Workspace {
const HOME = external_os_default().homedir(); const HOME = external_os_default().homedir();
const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; const STATE_CONFIG = "RUST_CACHE_CONFIG";
const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
const config_STATE_BINS = "RUST_CACHE_BINS";
const STATE_KEY = "RUST_CACHE_KEY";
class CacheConfig { class CacheConfig {
constructor() { constructor() {
/** All the paths we want to cache */ /** All the paths we want to cache */
@@ -60040,6 +60038,8 @@ class CacheConfig {
this.restoreKey = ""; this.restoreKey = "";
/** The workspace configurations */ /** The workspace configurations */
this.workspaces = []; this.workspaces = [];
/** The cargo binaries present during main step */
this.cargoBins = [];
/** The prefix portion of the cache key */ /** The prefix portion of the cache key */
this.keyPrefix = ""; this.keyPrefix = "";
/** The rust version considered for the cache key */ /** The rust version considered for the cache key */
@@ -60103,20 +60103,11 @@ class CacheConfig {
} }
} }
self.keyEnvs = keyEnvs; self.keyEnvs = keyEnvs;
// Installed packages and their versions are also considered for the key.
const packages = await getPackages();
hasher.update(packages);
key += `-${hasher.digest("hex")}`; key += `-${hasher.digest("hex")}`;
self.restoreKey = key; self.restoreKey = key;
// Construct the lockfiles portion of the key: // Construct the lockfiles portion of the key:
// This considers all the files found via globbing for various manifests // This considers all the files found via globbing for various manifests
// and lockfiles. // and lockfiles.
// This part is computed in the "pre"/"restore" part of the job and persisted
// into the `state`. That state is loaded in the "post"/"save" part of the
// job so we have consistent values even though the "main" actions run
// might create/overwrite lockfiles.
let lockHash = lib_core.getState(STATE_LOCKFILE_HASH);
let keyFiles = JSON.parse(lib_core.getState(STATE_LOCKFILES) || "[]");
// Constructs the workspace config and paths to restore: // Constructs the workspace config and paths to restore:
// The workspaces are given using a `$workspace -> $target` syntax. // The workspaces are given using a `$workspace -> $target` syntax.
const workspaces = []; const workspaces = [];
@@ -60128,24 +60119,20 @@ class CacheConfig {
workspaces.push(new Workspace(root, target)); workspaces.push(new Workspace(root, target));
} }
self.workspaces = workspaces; self.workspaces = workspaces;
if (!lockHash) { let keyFiles = await globFiles("rust-toolchain\nrust-toolchain.toml");
keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml")); for (const workspace of workspaces) {
for (const workspace of workspaces) { const root = workspace.root;
const root = workspace.root; keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
}
keyFiles = keyFiles.filter(file => !external_fs_default().statSync(file).isDirectory());
keyFiles.sort((a, b) => a.localeCompare(b));
hasher = external_crypto_default().createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of external_fs_default().createReadStream(file)) {
hasher.update(chunk);
}
}
lockHash = hasher.digest("hex");
lib_core.saveState(STATE_LOCKFILE_HASH, lockHash);
lib_core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles));
} }
keyFiles = keyFiles.filter(file => !external_fs_default().statSync(file).isDirectory());
keyFiles.sort((a, b) => a.localeCompare(b));
hasher = external_crypto_default().createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of external_fs_default().createReadStream(file)) {
hasher.update(chunk);
}
}
let lockHash = hasher.digest("hex");
self.keyFiles = keyFiles; self.keyFiles = keyFiles;
key += `-${lockHash}`; key += `-${lockHash}`;
self.cacheKey = key; self.cacheKey = key;
@@ -60158,8 +60145,32 @@ class CacheConfig {
for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
self.cachePaths.push(dir); self.cachePaths.push(dir);
} }
const bins = await getCargoBins();
self.cargoBins = Array.from(bins.values());
return self; return self;
} }
/**
* Reads and returns the cache config from the action `state`.
*
* @throws {Error} if the state is not present.
* @returns {CacheConfig} the configuration.
* @see {@link CacheConfig#saveState}
* @see {@link CacheConfig#new}
*/
static fromState() {
const source = lib_core.getState(STATE_CONFIG);
if (!source) {
throw new Error("Cache configuration not found in state");
}
const self = new CacheConfig();
Object.assign(self, JSON.parse(source));
self.workspaces = self.workspaces
.map((w) => new Workspace(w.root, w.target));
return self;
}
/**
* Prints the configuration to the action log.
*/
printInfo() { printInfo() {
lib_core.startGroup("Cache Configuration"); lib_core.startGroup("Cache Configuration");
lib_core.info(`Workspaces:`); lib_core.info(`Workspaces:`);
@@ -60187,6 +60198,21 @@ class CacheConfig {
} }
lib_core.endGroup(); lib_core.endGroup();
} }
/**
* Saves the configuration to the state store.
* This is used to restore the configuration in the post action.
*/
saveState() {
lib_core.saveState(STATE_CONFIG, this);
}
}
/**
* Checks if the cache is up to date.
*
* @returns `true` if the cache is up to date, `false` otherwise.
*/
function isCacheUpToDate() {
return core.getState(STATE_CONFIG) === "";
} }
async function getRustVersion() { async function getRustVersion() {
const stdout = await getCmdOutput("rustc", ["-vV"]); const stdout = await getCmdOutput("rustc", ["-vV"]);
@@ -60197,11 +60223,6 @@ async function getRustVersion() {
.filter((s) => s.length === 2); .filter((s) => s.length === 2);
return Object.fromEntries(splits); return Object.fromEntries(splits);
} }
async function getPackages() {
let stdout = await getCmdOutput("cargo", ["install", "--list"]);
// Make OS independent.
return stdout.split(/[\n\r]+/).join("\n");
}
async function globFiles(pattern) { async function globFiles(pattern) {
const globber = await glob.create(pattern, { const globber = await glob.create(pattern, {
followSymbolicLinks: false, followSymbolicLinks: false,
@@ -60269,9 +60290,14 @@ async function getCargoBins() {
catch { } catch { }
return bins; return bins;
} }
async function cleanBin() { /**
* Clean the cargo bin directory, removing the binaries that existed
* when the action started, as they were not created by the build.
*
* @param oldBins The binaries that existed when the action started.
*/
async function cleanBin(oldBins) {
const bins = await getCargoBins(); const bins = await getCargoBins();
const oldBins = JSON.parse(core.getState(STATE_BINS));
for (const bin of oldBins) { for (const bin of oldBins) {
bins.delete(bin); bins.delete(bin);
} }
@@ -60439,9 +60465,9 @@ async function exists(path) {
process.on("uncaughtException", (e) => { process.on("uncaughtException", (e) => {
lib_core.info(`[warning] ${e.message}`); lib_core.error(e.message);
if (e.stack) { if (e.stack) {
lib_core.info(e.stack); lib_core.error(e.stack);
} }
}); });
async function run() { async function run() {
@@ -60459,8 +60485,6 @@ async function run() {
const config = await CacheConfig["new"](); const config = await CacheConfig["new"]();
config.printInfo(); config.printInfo();
lib_core.info(""); lib_core.info("");
const bins = await getCargoBins();
lib_core.saveState(config_STATE_BINS, JSON.stringify([...bins]));
lib_core.info(`... Restoring cache ...`); lib_core.info(`... Restoring cache ...`);
const key = config.cacheKey; const key = config.cacheKey;
// Pass a copy of cachePaths to avoid mutating the original array as reported by: // Pass a copy of cachePaths to avoid mutating the original array as reported by:
@@ -60468,9 +60492,9 @@ async function run() {
// TODO: remove this once the underlying bug is fixed. // TODO: remove this once the underlying bug is fixed.
const restoreKey = await cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]); const restoreKey = await cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
if (restoreKey) { if (restoreKey) {
lib_core.info(`Restored from cache key "${restoreKey}".`); const match = restoreKey === key;
lib_core.saveState(STATE_KEY, restoreKey); lib_core.info(`Restored from cache key "${restoreKey}" full match: ${match}.`);
if (restoreKey !== key) { if (!match) {
// pre-clean the target directory on cache mismatch // pre-clean the target directory on cache mismatch
for (const workspace of config.workspaces) { for (const workspace of config.workspaces) {
try { try {
@@ -60478,17 +60502,20 @@ async function run() {
} }
catch { } catch { }
} }
// We restored the cache but it is not a full match.
config.saveState();
} }
setCacheHitOutput(restoreKey === key); setCacheHitOutput(match);
} }
else { else {
lib_core.info("No cache found."); lib_core.info("No cache found.");
config.saveState();
setCacheHitOutput(false); setCacheHitOutput(false);
} }
} }
catch (e) { catch (e) {
setCacheHitOutput(false); setCacheHitOutput(false);
lib_core.info(`[warning] ${e.stack}`); lib_core.error(`${e.stack}`);
} }
} }
function setCacheHitOutput(cacheHit) { function setCacheHitOutput(cacheHit) {

124
dist/save/index.js vendored
View file

@@ -59977,8 +59977,8 @@ async function getCmdOutput(cmd, args = [], options = {}) {
}); });
} }
catch (e) { catch (e) {
core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); core.error(`Command failed: ${cmd} ${args.join(" ")}`);
core.info(`[warning] ${stderr}`); core.error(stderr);
throw e; throw e;
} }
return stdout; return stdout;
@@ -60024,12 +60024,10 @@ class Workspace {
const HOME = external_os_default().homedir(); const HOME = external_os_default().homedir();
const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; const STATE_CONFIG = "RUST_CACHE_CONFIG";
const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
const STATE_BINS = "RUST_CACHE_BINS";
const STATE_KEY = "RUST_CACHE_KEY";
class CacheConfig { class CacheConfig {
constructor() { constructor() {
/** All the paths we want to cache */ /** All the paths we want to cache */
@@ -60040,6 +60038,8 @@ class CacheConfig {
this.restoreKey = ""; this.restoreKey = "";
/** The workspace configurations */ /** The workspace configurations */
this.workspaces = []; this.workspaces = [];
/** The cargo binaries present during main step */
this.cargoBins = [];
/** The prefix portion of the cache key */ /** The prefix portion of the cache key */
this.keyPrefix = ""; this.keyPrefix = "";
/** The rust version considered for the cache key */ /** The rust version considered for the cache key */
@@ -60103,20 +60103,11 @@ class CacheConfig {
} }
} }
self.keyEnvs = keyEnvs; self.keyEnvs = keyEnvs;
// Installed packages and their versions are also considered for the key.
const packages = await getPackages();
hasher.update(packages);
key += `-${hasher.digest("hex")}`; key += `-${hasher.digest("hex")}`;
self.restoreKey = key; self.restoreKey = key;
// Construct the lockfiles portion of the key: // Construct the lockfiles portion of the key:
// This considers all the files found via globbing for various manifests // This considers all the files found via globbing for various manifests
// and lockfiles. // and lockfiles.
// This part is computed in the "pre"/"restore" part of the job and persisted
// into the `state`. That state is loaded in the "post"/"save" part of the
// job so we have consistent values even though the "main" actions run
// might create/overwrite lockfiles.
let lockHash = core.getState(STATE_LOCKFILE_HASH);
let keyFiles = JSON.parse(core.getState(STATE_LOCKFILES) || "[]");
// Constructs the workspace config and paths to restore: // Constructs the workspace config and paths to restore:
// The workspaces are given using a `$workspace -> $target` syntax. // The workspaces are given using a `$workspace -> $target` syntax.
const workspaces = []; const workspaces = [];
@@ -60128,24 +60119,20 @@ class CacheConfig {
workspaces.push(new Workspace(root, target)); workspaces.push(new Workspace(root, target));
} }
self.workspaces = workspaces; self.workspaces = workspaces;
if (!lockHash) { let keyFiles = await globFiles("rust-toolchain\nrust-toolchain.toml");
keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml")); for (const workspace of workspaces) {
for (const workspace of workspaces) { const root = workspace.root;
const root = workspace.root; keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
keyFiles.push(...(await globFiles(`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
}
keyFiles = keyFiles.filter(file => !external_fs_default().statSync(file).isDirectory());
keyFiles.sort((a, b) => a.localeCompare(b));
hasher = external_crypto_default().createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of external_fs_default().createReadStream(file)) {
hasher.update(chunk);
}
}
lockHash = hasher.digest("hex");
core.saveState(STATE_LOCKFILE_HASH, lockHash);
core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles));
} }
keyFiles = keyFiles.filter(file => !external_fs_default().statSync(file).isDirectory());
keyFiles.sort((a, b) => a.localeCompare(b));
hasher = external_crypto_default().createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of external_fs_default().createReadStream(file)) {
hasher.update(chunk);
}
}
let lockHash = hasher.digest("hex");
self.keyFiles = keyFiles; self.keyFiles = keyFiles;
key += `-${lockHash}`; key += `-${lockHash}`;
self.cacheKey = key; self.cacheKey = key;
@@ -60158,8 +60145,32 @@ class CacheConfig {
for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) { for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
self.cachePaths.push(dir); self.cachePaths.push(dir);
} }
const bins = await getCargoBins();
self.cargoBins = Array.from(bins.values());
return self; return self;
} }
/**
* Reads and returns the cache config from the action `state`.
*
* @throws {Error} if the state is not present.
* @returns {CacheConfig} the configuration.
* @see {@link CacheConfig#saveState}
* @see {@link CacheConfig#new}
*/
static fromState() {
const source = core.getState(STATE_CONFIG);
if (!source) {
throw new Error("Cache configuration not found in state");
}
const self = new CacheConfig();
Object.assign(self, JSON.parse(source));
self.workspaces = self.workspaces
.map((w) => new Workspace(w.root, w.target));
return self;
}
/**
* Prints the configuration to the action log.
*/
printInfo() { printInfo() {
core.startGroup("Cache Configuration"); core.startGroup("Cache Configuration");
core.info(`Workspaces:`); core.info(`Workspaces:`);
@@ -60187,6 +60198,21 @@ class CacheConfig {
} }
core.endGroup(); core.endGroup();
} }
/**
* Saves the configuration to the state store.
* This is used to restore the configuration in the post action.
*/
saveState() {
core.saveState(STATE_CONFIG, this);
}
}
/**
* Checks if the cache is up to date.
*
* @returns `true` if the cache is up to date, `false` otherwise.
*/
function isCacheUpToDate() {
return core.getState(STATE_CONFIG) === "";
} }
async function getRustVersion() { async function getRustVersion() {
const stdout = await getCmdOutput("rustc", ["-vV"]); const stdout = await getCmdOutput("rustc", ["-vV"]);
@@ -60197,11 +60223,6 @@ async function getRustVersion() {
.filter((s) => s.length === 2); .filter((s) => s.length === 2);
return Object.fromEntries(splits); return Object.fromEntries(splits);
} }
async function getPackages() {
let stdout = await getCmdOutput("cargo", ["install", "--list"]);
// Make OS independent.
return stdout.split(/[\n\r]+/).join("\n");
}
async function globFiles(pattern) { async function globFiles(pattern) {
const globber = await glob.create(pattern, { const globber = await glob.create(pattern, {
followSymbolicLinks: false, followSymbolicLinks: false,
@@ -60269,9 +60290,14 @@ async function getCargoBins() {
catch { } catch { }
return bins; return bins;
} }
async function cleanBin() { /**
* Clean the cargo bin directory, removing the binaries that existed
* when the action started, as they were not created by the build.
*
* @param oldBins The binaries that existed when the action started.
*/
async function cleanBin(oldBins) {
const bins = await getCargoBins(); const bins = await getCargoBins();
const oldBins = JSON.parse(core.getState(STATE_BINS));
for (const bin of oldBins) { for (const bin of oldBins) {
bins.delete(bin); bins.delete(bin);
} }
@@ -60440,9 +60466,9 @@ async function exists(path) {
process.on("uncaughtException", (e) => { process.on("uncaughtException", (e) => {
core.info(`[warning] ${e.message}`); core.error(e.message);
if (e.stack) { if (e.stack) {
core.info(e.stack); core.error(e.stack);
} }
}); });
async function run() { async function run() {
@@ -60451,13 +60477,13 @@ async function run() {
return; return;
} }
try { try {
const config = await CacheConfig["new"](); if (isCacheUpToDate()) {
config.printInfo();
core.info("");
if (core.getState(STATE_KEY) === config.cacheKey) {
core.info(`Cache up-to-date.`); core.info(`Cache up-to-date.`);
return; return;
} }
const config = CacheConfig.fromState();
config.printInfo();
core.info("");
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
await macOsWorkaround(); await macOsWorkaround();
const allPackages = []; const allPackages = [];
@@ -60473,16 +60499,16 @@ async function run() {
} }
} }
try { try {
const creates = core.getInput("cache-all-crates").toLowerCase() || "false"; const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
core.info(`... Cleaning cargo registry cache-all-crates: ${creates} ...`); core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
await cleanRegistry(allPackages, creates === "true"); await cleanRegistry(allPackages, crates !== "true");
} }
catch (e) { catch (e) {
core.error(`${e.stack}`); core.error(`${e.stack}`);
} }
try { try {
core.info(`... Cleaning cargo/bin ...`); core.info(`... Cleaning cargo/bin ...`);
await cleanBin(); await cleanBin(config.cargoBins);
} }
catch (e) { catch (e) {
core.error(`${e.stack}`); core.error(`${e.stack}`);

View file

@@ -3,7 +3,7 @@ import * as io from "@actions/io";
import fs from "fs"; import fs from "fs";
import path from "path"; import path from "path";
import { CARGO_HOME, STATE_BINS } from "./config"; import { CARGO_HOME } from "./config";
import { Packages } from "./workspace"; import { Packages } from "./workspace";
export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp = false) { export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp = false) {
@@ -69,9 +69,14 @@ export async function getCargoBins(): Promise<Set<string>> {
return bins; return bins;
} }
export async function cleanBin() { /**
* Clean the cargo bin directory, removing the binaries that existed
* when the action started, as they were not created by the build.
*
* @param oldBins The binaries that existed when the action started.
*/
export async function cleanBin(oldBins: Array<string>) {
const bins = await getCargoBins(); const bins = await getCargoBins();
const oldBins = JSON.parse(core.getState(STATE_BINS));
for (const bin of oldBins) { for (const bin of oldBins) {
bins.delete(bin); bins.delete(bin);
@@ -186,10 +191,10 @@ const ONE_WEEK = 7 * 24 * 3600 * 1000;
/** /**
* Removes all files or directories in `dirName` matching some criteria. * Removes all files or directories in `dirName` matching some criteria.
* *
* When the `checkTimestamp` flag is set, this will also remove anything older * When the `checkTimestamp` flag is set, this will also remove anything older
* than one week. * than one week.
* *
* Otherwise, it will remove everything that does not match any string in the * Otherwise, it will remove everything that does not match any string in the
* `keepPrefix` set. * `keepPrefix` set.
* The matching strips and trailing `-$hash` suffix. * The matching strips and trailing `-$hash` suffix.

View file

@@ -7,14 +7,12 @@ import path from "path";
import { getCmdOutput } from "./utils"; import { getCmdOutput } from "./utils";
import { Workspace } from "./workspace"; import { Workspace } from "./workspace";
import { getCargoBins } from "./cleanup";
const HOME = os.homedir(); const HOME = os.homedir();
export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo"); export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");
const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; const STATE_CONFIG = "RUST_CACHE_CONFIG";
const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
export const STATE_BINS = "RUST_CACHE_BINS";
export const STATE_KEY = "RUST_CACHE_KEY";
export class CacheConfig { export class CacheConfig {
/** All the paths we want to cache */ /** All the paths we want to cache */
@@ -27,6 +25,9 @@ export class CacheConfig {
/** The workspace configurations */ /** The workspace configurations */
public workspaces: Array<Workspace> = []; public workspaces: Array<Workspace> = [];
/** The cargo binaries present during main step */
public cargoBins: Array<string> = [];
/** The prefix portion of the cache key */ /** The prefix portion of the cache key */
private keyPrefix = ""; private keyPrefix = "";
/** The rust version considered for the cache key */ /** The rust version considered for the cache key */
@@ -104,10 +105,6 @@ export class CacheConfig {
self.keyEnvs = keyEnvs; self.keyEnvs = keyEnvs;
// Installed packages and their versions are also considered for the key.
const packages = await getPackages();
hasher.update(packages);
key += `-${hasher.digest("hex")}`; key += `-${hasher.digest("hex")}`;
self.restoreKey = key; self.restoreKey = key;
@@ -115,13 +112,6 @@ export class CacheConfig {
// Construct the lockfiles portion of the key: // Construct the lockfiles portion of the key:
// This considers all the files found via globbing for various manifests // This considers all the files found via globbing for various manifests
// and lockfiles. // and lockfiles.
// This part is computed in the "pre"/"restore" part of the job and persisted
// into the `state`. That state is loaded in the "post"/"save" part of the
// job so we have consistent values even though the "main" actions run
// might create/overwrite lockfiles.
let lockHash = core.getState(STATE_LOCKFILE_HASH);
let keyFiles: Array<string> = JSON.parse(core.getState(STATE_LOCKFILES) || "[]");
// Constructs the workspace config and paths to restore: // Constructs the workspace config and paths to restore:
// The workspaces are given using a `$workspace -> $target` syntax. // The workspaces are given using a `$workspace -> $target` syntax.
@@ -136,30 +126,25 @@ export class CacheConfig {
} }
self.workspaces = workspaces; self.workspaces = workspaces;
if (!lockHash) { let keyFiles = await globFiles("rust-toolchain\nrust-toolchain.toml");
keyFiles = keyFiles.concat(await globFiles("rust-toolchain\nrust-toolchain.toml")); for (const workspace of workspaces) {
for (const workspace of workspaces) { const root = workspace.root;
const root = workspace.root; keyFiles.push(
keyFiles.push( ...(await globFiles(
...(await globFiles( `${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
`${root}/**/Cargo.toml\n${root}/**/Cargo.lock\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`, )),
)), );
);
}
keyFiles = keyFiles.filter(file => !fs.statSync(file).isDirectory());
keyFiles.sort((a, b) => a.localeCompare(b));
hasher = crypto.createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of fs.createReadStream(file)) {
hasher.update(chunk);
}
}
lockHash = hasher.digest("hex");
core.saveState(STATE_LOCKFILE_HASH, lockHash);
core.saveState(STATE_LOCKFILES, JSON.stringify(keyFiles));
} }
keyFiles = keyFiles.filter(file => !fs.statSync(file).isDirectory());
keyFiles.sort((a, b) => a.localeCompare(b));
hasher = crypto.createHash("sha1");
for (const file of keyFiles) {
for await (const chunk of fs.createReadStream(file)) {
hasher.update(chunk);
}
}
let lockHash = hasher.digest("hex");
self.keyFiles = keyFiles; self.keyFiles = keyFiles;
@@ -177,9 +162,37 @@ export class CacheConfig {
self.cachePaths.push(dir); self.cachePaths.push(dir);
} }
const bins = await getCargoBins();
self.cargoBins = Array.from(bins.values());
return self; return self;
} }
/**
* Reads and returns the cache config from the action `state`.
*
* @throws {Error} if the state is not present.
* @returns {CacheConfig} the configuration.
* @see {@link CacheConfig#saveState}
* @see {@link CacheConfig#new}
*/
static fromState(): CacheConfig {
const source = core.getState(STATE_CONFIG);
if (!source) {
throw new Error("Cache configuration not found in state");
}
const self = new CacheConfig();
Object.assign(self, JSON.parse(source));
self.workspaces = self.workspaces
.map((w: any) => new Workspace(w.root, w.target));
return self;
}
/**
* Prints the configuration to the action log.
*/
printInfo() { printInfo() {
core.startGroup("Cache Configuration"); core.startGroup("Cache Configuration");
core.info(`Workspaces:`); core.info(`Workspaces:`);
@@ -207,6 +220,23 @@ export class CacheConfig {
} }
core.endGroup(); core.endGroup();
} }
/**
* Saves the configuration to the state store.
* This is used to restore the configuration in the post action.
*/
saveState() {
core.saveState(STATE_CONFIG, this);
}
}
/**
* Checks if the cache is up to date.
*
* @returns `true` if the cache is up to date, `false` otherwise.
*/
export function isCacheUpToDate(): boolean {
return core.getState(STATE_CONFIG) === "";
} }
interface RustVersion { interface RustVersion {
@@ -225,12 +255,6 @@ async function getRustVersion(): Promise<RustVersion> {
return Object.fromEntries(splits); return Object.fromEntries(splits);
} }
async function getPackages(): Promise<string> {
let stdout = await getCmdOutput("cargo", ["install", "--list"]);
// Make OS independent.
return stdout.split(/[\n\r]+/).join("\n");
}
async function globFiles(pattern: string): Promise<string[]> { async function globFiles(pattern: string): Promise<string[]> {
const globber = await glob.create(pattern, { const globber = await glob.create(pattern, {
followSymbolicLinks: false, followSymbolicLinks: false,

View file

@@ -1,13 +1,13 @@
import * as cache from "@actions/cache"; import * as cache from "@actions/cache";
import * as core from "@actions/core"; import * as core from "@actions/core";
import { cleanTargetDir, getCargoBins } from "./cleanup"; import { cleanTargetDir } from "./cleanup";
import { CacheConfig, STATE_BINS, STATE_KEY } from "./config"; import { CacheConfig } from "./config";
process.on("uncaughtException", (e) => { process.on("uncaughtException", (e) => {
core.info(`[warning] ${e.message}`); core.error(e.message);
if (e.stack) { if (e.stack) {
core.info(e.stack); core.error(e.stack);
} }
}); });
@@ -29,9 +29,6 @@ async function run() {
config.printInfo(); config.printInfo();
core.info(""); core.info("");
const bins = await getCargoBins();
core.saveState(STATE_BINS, JSON.stringify([...bins]));
core.info(`... Restoring cache ...`); core.info(`... Restoring cache ...`);
const key = config.cacheKey; const key = config.cacheKey;
// Pass a copy of cachePaths to avoid mutating the original array as reported by: // Pass a copy of cachePaths to avoid mutating the original array as reported by:
@@ -39,28 +36,31 @@ async function run() {
// TODO: remove this once the underlying bug is fixed. // TODO: remove this once the underlying bug is fixed.
const restoreKey = await cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]); const restoreKey = await cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey]);
if (restoreKey) { if (restoreKey) {
core.info(`Restored from cache key "${restoreKey}".`); const match = restoreKey === key;
core.saveState(STATE_KEY, restoreKey); core.info(`Restored from cache key "${restoreKey}" full match: ${match}.`);
if (!match) {
if (restoreKey !== key) {
// pre-clean the target directory on cache mismatch // pre-clean the target directory on cache mismatch
for (const workspace of config.workspaces) { for (const workspace of config.workspaces) {
try { try {
await cleanTargetDir(workspace.target, [], true); await cleanTargetDir(workspace.target, [], true);
} catch {} } catch {}
} }
// We restored the cache but it is not a full match.
config.saveState();
} }
setCacheHitOutput(restoreKey === key); setCacheHitOutput(match);
} else { } else {
core.info("No cache found."); core.info("No cache found.");
config.saveState();
setCacheHitOutput(false); setCacheHitOutput(false);
} }
} catch (e) { } catch (e) {
setCacheHitOutput(false); setCacheHitOutput(false);
core.info(`[warning] ${(e as any).stack}`); core.error(`${(e as any).stack}`);
} }
} }

View file

@@ -3,12 +3,12 @@ import * as core from "@actions/core";
import * as exec from "@actions/exec"; import * as exec from "@actions/exec";
import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup"; import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
import { CacheConfig, STATE_KEY } from "./config"; import { CacheConfig, isCacheUpToDate } from "./config";
process.on("uncaughtException", (e) => { process.on("uncaughtException", (e) => {
core.info(`[warning] ${e.message}`); core.error(e.message);
if (e.stack) { if (e.stack) {
core.info(e.stack); core.error(e.stack);
} }
}); });
@@ -20,15 +20,15 @@ async function run() {
} }
try { try {
const config = await CacheConfig.new(); if (isCacheUpToDate()) {
config.printInfo();
core.info("");
if (core.getState(STATE_KEY) === config.cacheKey) {
core.info(`Cache up-to-date.`); core.info(`Cache up-to-date.`);
return; return;
} }
const config = CacheConfig.fromState();
config.printInfo();
core.info("");
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
await macOsWorkaround(); await macOsWorkaround();
@@ -45,16 +45,16 @@ async function run() {
} }
try { try {
const creates = core.getInput("cache-all-crates").toLowerCase() || "false"; const crates = core.getInput("cache-all-crates").toLowerCase() || "false"
core.info(`... Cleaning cargo registry cache-all-crates: ${creates} ...`); core.info(`... Cleaning cargo registry cache-all-crates: ${crates} ...`);
await cleanRegistry(allPackages, creates === "true"); await cleanRegistry(allPackages, crates !== "true");
} catch (e) { } catch (e) {
core.error(`${(e as any).stack}`); core.error(`${(e as any).stack}`);
} }
try { try {
core.info(`... Cleaning cargo/bin ...`); core.info(`... Cleaning cargo/bin ...`);
await cleanBin(); await cleanBin(config.cargoBins);
} catch (e) { } catch (e) {
core.error(`${(e as any).stack}`); core.error(`${(e as any).stack}`);
} }

View file

@@ -22,8 +22,8 @@ export async function getCmdOutput(
...options, ...options,
}); });
} catch (e) { } catch (e) {
core.info(`[warning] Command failed: ${cmd} ${args.join(" ")}`); core.error(`Command failed: ${cmd} ${args.join(" ")}`);
core.info(`[warning] ${stderr}`); core.error(stderr);
throw e; throw e;
} }
return stdout; return stdout;