3
0
Fork 0
mirror of https://github.com/Swatinem/rust-cache synced 2025-11-04 14:39:11 +00:00

feat: Implement caching mechanism for Rust projects

- Added CacheConfig class to manage cache paths, keys, and workspace configurations.
- Implemented cache restoration logic in restore.js, including handling cache misses and mismatches.
- Developed save.js to handle cache saving, including cleaning up target directories and registry.
- Introduced utility functions in utils.js for command execution and cache provider selection.
- Created Workspace class in workspace.js to manage Rust workspace metadata and package retrieval.
This commit is contained in:
Ryan-Brice 2025-10-21 17:40:45 +08:00
parent d1bc4f9b16
commit 74f2e4d3dd
No known key found for this signature in database
GPG key ID: 3702BD1113AFDD13
12 changed files with 5053 additions and 4034 deletions

View file

@ -32,6 +32,10 @@ sensible defaults.
# default: "true"
use-job-key: ""
# If the automatic `job`-based cache key should include a hash of the job's contents.
# default: "true"
add-job-hash: ""
# A whitespace separated list of env-var *prefixes* whose value contributes
# to the environment cache key.
# The env-vars are matched by *prefix*, so the default `RUST` var will

View file

@ -16,6 +16,10 @@ inputs:
description: "If the automatic `job`-based cache key should be used for the cache name. Defaults to true."
required: false
default: "true"
add-job-hash:
description: "If the automatic `job`-based cache key should include a hash of the job's contents. Defaults to true."
required: false
default: "true"
env-vars:
description: "Additional environment variables to include in the cache key, separated by spaces."
required: false

3953
dist/restore/index.js vendored

File diff suppressed because one or more lines are too long

3999
dist/save/index.js vendored

File diff suppressed because one or more lines are too long

330
src/cleanup.js Normal file
View file

@ -0,0 +1,330 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.cleanTargetDir = cleanTargetDir;
exports.getCargoBins = getCargoBins;
exports.cleanBin = cleanBin;
exports.cleanRegistry = cleanRegistry;
exports.cleanGit = cleanGit;
const core = __importStar(require("@actions/core"));
const io = __importStar(require("@actions/io"));
const fs_1 = __importDefault(require("fs"));
const path_1 = __importDefault(require("path"));
const config_1 = require("./config");
const utils_1 = require("./utils");
/**
 * Recursively cleans a cargo target directory, keeping only artifacts
 * that belong to `packages`.
 *
 * @param targetDir directory to clean (a `target` dir or a profile dir inside one).
 * @param packages packages whose artifacts should be kept.
 * @param checkTimestamp when true, also drop entries older than one week.
 */
async function cleanTargetDir(targetDir, packages, checkTimestamp = false) {
    core.debug(`cleaning target directory "${targetDir}"`);
    // remove all *files* from the profile directory
    const dir = await fs_1.default.promises.opendir(targetDir);
    for await (const entry of dir) {
        if (!entry.isDirectory()) {
            // keep the `CACHEDIR.TAG` marker, delete every other stray file
            if (entry.name !== "CACHEDIR.TAG") {
                await rm(dir.path, entry);
            }
            continue;
        }
        const subDir = path_1.default.join(dir.path, entry.name);
        // is it a profile dir, or a nested target dir?
        const isNestedTarget = (await (0, utils_1.exists)(path_1.default.join(subDir, "CACHEDIR.TAG"))) ||
            (await (0, utils_1.exists)(path_1.default.join(subDir, ".rustc_info.json")));
        try {
            if (isNestedTarget) {
                await cleanTargetDir(subDir, packages, checkTimestamp);
            }
            else {
                await cleanProfileTarget(subDir, packages, checkTimestamp);
            }
        }
        catch { }
    }
}
/**
 * Cleans a single profile directory (e.g. `target/debug`), keeping only the
 * `build`/`.fingerprint`/`deps` artifacts that belong to `packages`.
 *
 * @param profileDir the profile directory to clean.
 * @param packages packages whose artifacts should be kept.
 * @param checkTimestamp when true, also drop entries older than one week.
 */
async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) {
    core.debug(`cleaning profile directory "${profileDir}"`);
    // Quite a few testing utility crates store compilation artifacts as nested
    // workspaces under `target/tests`. Notably, `target/tests/target` and
    // `target/tests/trybuild`.
    if (path_1.default.basename(profileDir) === "tests") {
        try {
            // https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25
            // https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27
            // BUGFIX: this call was missing `await`, so failures escaped the
            // try/catch as unhandled promise rejections.
            await cleanTargetDir(path_1.default.join(profileDir, "target"), packages, checkTimestamp);
        }
        catch { }
        try {
            // https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50
            // BUGFIX: missing `await` here as well.
            await cleanTargetDir(path_1.default.join(profileDir, "trybuild"), packages, checkTimestamp);
        }
        catch { }
        // Delete everything else.
        await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp);
        return;
    }
    // Keep only the directories cargo actually reuses between builds.
    let keepProfile = new Set(["build", ".fingerprint", "deps"]);
    await rmExcept(profileDir, keepProfile);
    // Inside those, keep only entries belonging to the given packages.
    const keepPkg = new Set(packages.map((p) => p.name));
    await rmExcept(path_1.default.join(profileDir, "build"), keepPkg, checkTimestamp);
    await rmExcept(path_1.default.join(profileDir, ".fingerprint"), keepPkg, checkTimestamp);
    const keepDeps = new Set(packages.flatMap((p) => {
        const names = [];
        for (const n of [p.name, ...p.targets]) {
            // cargo normalizes `-` to `_` in artifact file names,
            // and libraries additionally get a `lib` prefix
            const name = n.replace(/-/g, "_");
            names.push(name, `lib${name}`);
        }
        return names;
    }));
    await rmExcept(path_1.default.join(profileDir, "deps"), keepDeps, checkTimestamp);
}
/**
 * Returns the set of binary names installed via `cargo install`, as recorded
 * in `$CARGO_HOME/.crates2.json`. Returns an empty set when the manifest is
 * missing or unreadable.
 */
async function getCargoBins() {
    const bins = new Set();
    try {
        const manifestPath = path_1.default.join(config_1.CARGO_HOME, ".crates2.json");
        const { installs } = JSON.parse(await fs_1.default.promises.readFile(manifestPath, "utf8"));
        for (const pkg of Object.values(installs)) {
            pkg.bins.forEach((bin) => bins.add(bin));
        }
    }
    catch { }
    return bins;
}
/**
 * Clean the cargo bin directory, removing the binaries that existed
 * when the action started, as they were not created by the build.
 *
 * @param oldBins The binaries that existed when the action started.
 */
async function cleanBin(oldBins) {
    // Binaries present now but absent at action start were produced by the build.
    const newBins = await getCargoBins();
    for (const bin of oldBins) {
        newBins.delete(bin);
    }
    const binDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "bin"));
    for await (const entry of binDir) {
        if (!entry.isFile()) {
            continue;
        }
        if (!newBins.has(entry.name)) {
            await rm(binDir.path, entry);
        }
    }
}
/**
 * Cleans `$CARGO_HOME/registry`, keeping only the index entries, sources and
 * `.crate` archives belonging to `packages`, and always deleting saved
 * registry credentials so they never end up in the cache.
 *
 * @param packages the dependency packages to keep.
 * @param crates when `false`, skip the `registry/src` and `registry/cache`
 *   cleanup (i.e. keep all downloaded crates).
 */
async function cleanRegistry(packages, crates = true) {
    // remove `credentials.toml`
    try {
        // BUGFIX: cargo stores credentials directly under CARGO_HOME; the
        // previous `path.join(CARGO_HOME, ".cargo", "credentials.toml")`
        // resolved to `~/.cargo/.cargo/credentials.toml` and never matched.
        const credentials = path_1.default.join(config_1.CARGO_HOME, "credentials.toml");
        core.debug(`deleting "${credentials}"`);
        await fs_1.default.promises.unlink(credentials);
    }
    catch { }
    // `.cargo/registry/index`
    let pkgSet = new Set(packages.map((p) => p.name));
    const indexDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "index"));
    for await (const dirent of indexDir) {
        if (dirent.isDirectory()) {
            // eg `.cargo/registry/index/github.com-1ecc6299db9ec823`
            // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772`
            const dirPath = path_1.default.join(indexDir.path, dirent.name);
            // for a git registry, we can remove `.cache`, as cargo will recreate it from git
            if (await (0, utils_1.exists)(path_1.default.join(dirPath, ".git"))) {
                await rmRF(path_1.default.join(dirPath, ".cache"));
            }
            else {
                // otherwise prune cached index entries of unused packages
                await cleanRegistryIndexCache(dirPath, pkgSet);
            }
        }
    }
    if (!crates) {
        core.debug("skipping registry cache and src cleanup");
        return;
    }
    // `.cargo/registry/src`
    // Cargo usually re-creates these from the `.crate` cache below,
    // but for some reason that does not work for `-sys` crates that check timestamps
    // to decide if rebuilds are necessary.
    pkgSet = new Set(packages.filter((p) => p.name.endsWith("-sys")).map((p) => `${p.name}-${p.version}`));
    const srcDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "src"));
    for await (const registry of srcDir) {
        if (registry.isDirectory()) {
            // eg `.cargo/registry/src/github.com-1ecc6299db9ec823`
            // or `.cargo/registry/src/index.crates.io-e139d0d48fed7772`
            const dir = await fs_1.default.promises.opendir(path_1.default.join(srcDir.path, registry.name));
            for await (const dirent of dir) {
                if (dirent.isDirectory() && !pkgSet.has(dirent.name)) {
                    await rmRF(path_1.default.join(dir.path, dirent.name));
                }
            }
        }
    }
    // `.cargo/registry/cache`
    pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
    const cacheDir = await fs_1.default.promises.opendir(path_1.default.join(config_1.CARGO_HOME, "registry", "cache"));
    for await (const registry of cacheDir) {
        if (registry.isDirectory()) {
            // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823`
            // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772`
            const dir = await fs_1.default.promises.opendir(path_1.default.join(cacheDir.path, registry.name));
            for await (const dirent of dir) {
                // here we check that the downloaded `.crate` matches one from our dependencies
                if (dirent.isFile() && !pkgSet.has(dirent.name)) {
                    await rm(dir.path, dirent);
                }
            }
        }
    }
}
/// Recursively walks and cleans the index `.cache`.
/// Returns `true` when `dirName` contains no entries worth keeping.
async function cleanRegistryIndexCache(dirName, keepPkg) {
    let isEmpty = true;
    const dir = await fs_1.default.promises.opendir(dirName);
    for await (const entry of dir) {
        if (entry.isDirectory()) {
            const childPath = path_1.default.join(dirName, entry.name);
            // prune the subtree first; remove it entirely if nothing was kept
            if (await cleanRegistryIndexCache(childPath, keepPkg)) {
                await rm(dirName, entry);
            }
            else {
                isEmpty = false;
            }
        }
        else if (keepPkg.has(entry.name)) {
            isEmpty = false;
        }
        else {
            await rm(dirName, entry);
        }
    }
    return isEmpty;
}
/**
 * Cleans the cargo git caches (`$CARGO_HOME/git/db` and
 * `$CARGO_HOME/git/checkouts`), keeping only the repositories and refs that
 * the given `packages` were checked out from.
 *
 * @param packages packages whose `path` may point into the checkouts dir.
 */
async function cleanGit(packages) {
    const coPath = path_1.default.join(config_1.CARGO_HOME, "git", "checkouts");
    const dbPath = path_1.default.join(config_1.CARGO_HOME, "git", "db");
    // Map of repo directory name -> set of checked-out ref directory names.
    const repos = new Map();
    for (const p of packages) {
        if (!p.path.startsWith(coPath)) {
            continue;
        }
        // checkouts are laid out as `<coPath>/<repo>/<ref>/...`
        const [repo, ref] = p.path.slice(coPath.length + 1).split(path_1.default.sep);
        const refs = repos.get(repo);
        if (refs) {
            refs.add(ref);
        }
        else {
            repos.set(repo, new Set([ref]));
        }
    }
    // we have to keep both the clone, and the checkout, removing either will
    // trigger a rebuild
    // clean the db
    try {
        let dir = await fs_1.default.promises.opendir(dbPath);
        for await (const dirent of dir) {
            if (!repos.has(dirent.name)) {
                await rm(dir.path, dirent);
            }
        }
    }
    catch { }
    // clean the checkouts
    try {
        let dir = await fs_1.default.promises.opendir(coPath);
        for await (const dirent of dir) {
            const refs = repos.get(dirent.name);
            // unknown repo: drop the whole checkout directory
            if (!refs) {
                await rm(dir.path, dirent);
                continue;
            }
            if (!dirent.isDirectory()) {
                continue;
            }
            // known repo: drop only the refs that are no longer referenced
            const refsDir = await fs_1.default.promises.opendir(path_1.default.join(dir.path, dirent.name));
            for await (const dirent of refsDir) {
                if (!refs.has(dirent.name)) {
                    await rm(refsDir.path, dirent);
                }
            }
        }
    }
    catch { }
}
const ONE_WEEK = 7 * 24 * 3600 * 1000;
/**
 * Removes all files or directories in `dirName` matching some criteria.
 *
 * When the `checkTimestamp` flag is set, this will also remove anything older
 * than one week.
 *
 * Otherwise, it will remove everything that does not match any string in the
 * `keepPrefix` set.
 * The matching strips any trailing `-$hash` suffix.
 */
async function rmExcept(dirName, keepPrefix, checkTimestamp = false) {
    const dir = await fs_1.default.promises.opendir(dirName);
    for await (const dirent of dir) {
        if (checkTimestamp) {
            const fileName = path_1.default.join(dir.path, dirent.name);
            const { mtime } = await fs_1.default.promises.stat(fileName);
            const isOutdated = Date.now() - mtime.getTime() > ONE_WEEK;
            if (isOutdated) {
                await rm(dir.path, dirent);
            }
            // BUGFIX: this used to `return`, aborting the scan after the first
            // entry so only one item was ever timestamp-checked; `continue`
            // matches the documented "remove anything older than one week".
            continue;
        }
        let name = dirent.name;
        // strip the trailing hash, e.g. `serde-ab12cd34` -> `serde`
        const idx = name.lastIndexOf("-");
        if (idx !== -1) {
            name = name.slice(0, idx);
        }
        if (!keepPrefix.has(name)) {
            await rm(dir.path, dirent);
        }
    }
}
/**
 * Best-effort deletion of a single directory entry: files are unlinked,
 * directories are removed recursively; all errors are swallowed.
 */
async function rm(parent, dirent) {
    try {
        const target = path_1.default.join(parent, dirent.name);
        core.debug(`deleting "${target}"`);
        if (dirent.isDirectory()) {
            await io.rmRF(target);
        }
        else if (dirent.isFile()) {
            await fs_1.default.promises.unlink(target);
        }
    }
    catch { }
}
/** Recursively force-removes `dirName`, logging the deletion. */
async function rmRF(dirName) {
    core.debug(`deleting "${dirName}"`);
    return io.rmRF(dirName);
}

372
src/config.js Normal file
View file

@ -0,0 +1,372 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.CacheConfig = exports.CARGO_HOME = void 0;
exports.isCacheUpToDate = isCacheUpToDate;
const core = __importStar(require("@actions/core"));
const glob = __importStar(require("@actions/glob"));
const crypto_1 = __importDefault(require("crypto"));
const fs_1 = __importDefault(require("fs"));
const promises_1 = __importDefault(require("fs/promises"));
const os_1 = __importDefault(require("os"));
const path_1 = __importDefault(require("path"));
const toml = __importStar(require("smol-toml"));
const cleanup_1 = require("./cleanup");
const utils_1 = require("./utils");
const workspace_1 = require("./workspace");
const HOME = os_1.default.homedir();
// Cargo's home directory; respects an explicit CARGO_HOME override.
exports.CARGO_HOME = process.env.CARGO_HOME || path_1.default.join(HOME, ".cargo");
// State key under which the serialized CacheConfig is carried from the
// main (restore) step to the post (save) step.
const STATE_CONFIG = "RUST_CACHE_CONFIG";
// Cache-key hashes are truncated to this many hex characters.
const HASH_LENGTH = 8;
/**
 * Holds everything cache-related: the paths to cache, the primary and
 * restore keys, and the metadata (rust version, env vars, lockfiles) that
 * was folded into those keys. Instances are serialized into the action
 * state by `saveState()` and re-hydrated by `fromState()`.
 */
class CacheConfig {
    constructor() {
        /** All the paths we want to cache */
        this.cachePaths = [];
        /** The primary cache key */
        this.cacheKey = "";
        /** The secondary (restore) key that only contains the prefix and environment */
        this.restoreKey = "";
        /** Whether to cache CARGO_HOME/.bin */
        this.cacheBin = true;
        /** The workspace configurations */
        this.workspaces = [];
        /** The cargo binaries present during main step */
        this.cargoBins = [];
        /** The prefix portion of the cache key */
        this.keyPrefix = "";
        /** The rust version considered for the cache key */
        this.keyRust = "";
        /** The environment variables considered for the cache key */
        this.keyEnvs = [];
        /** The files considered for the cache key */
        this.keyFiles = [];
    }
    /**
     * Constructs a [`CacheConfig`] with all the paths and keys.
     *
     * This will read the action `input`s, and read and persist `state` as necessary.
     */
    static async new() {
        const self = new CacheConfig();
        // Construct key prefix:
        // This uses either the `shared-key` input,
        // or the `key` input combined with the `job` key.
        let key = core.getInput("prefix-key") || "v0-rust";
        const sharedKey = core.getInput("shared-key");
        if (sharedKey) {
            key += `-${sharedKey}`;
        }
        else {
            const inputKey = core.getInput("key");
            if (inputKey) {
                key += `-${inputKey}`;
            }
            const job = process.env.GITHUB_JOB;
            if ((job) && core.getInput("use-job-key").toLowerCase() == "true") {
                key += `-${job}`;
            }
        }
        // Add runner OS and CPU architecture to the key to avoid cross-contamination of cache
        const runnerOS = os_1.default.type();
        const runnerArch = os_1.default.arch();
        key += `-${runnerOS}-${runnerArch}`;
        self.keyPrefix = key;
        // Construct environment portion of the key:
        // This consists of a hash that considers the rust version
        // as well as all the environment variables as given by a default list
        // and the `env-vars` input.
        // The env vars are sorted, matched by prefix and hashed into the
        // resulting environment hash.
        let hasher = crypto_1.default.createHash("sha1");
        const rustVersion = await getRustVersion();
        let keyRust = `${rustVersion.release} ${rustVersion.host}`;
        hasher.update(keyRust);
        hasher.update(rustVersion["commit-hash"]);
        keyRust += ` (${rustVersion["commit-hash"]})`;
        self.keyRust = keyRust;
        // these prefixes should cover most of the compiler / rust / cargo keys
        const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"];
        envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean));
        // sort the available env vars so we have a more stable hash
        const keyEnvs = [];
        const envKeys = Object.keys(process.env);
        envKeys.sort((a, b) => a.localeCompare(b));
        for (const key of envKeys) {
            const value = process.env[key];
            if (envPrefixes.some((prefix) => key.startsWith(prefix)) && value) {
                hasher.update(`${key}=${value}`);
                keyEnvs.push(key);
            }
        }
        self.keyEnvs = keyEnvs;
        // Add job hash suffix if 'add-job-hash' is true
        // NOTE(review): when `add-job-hash` is false, the rust-version/env
        // hash computed above is never folded into the key at all, so
        // toolchain or env changes would not change the restore key —
        // confirm this is intentional.
        if (core.getInput("add-job-hash").toLowerCase() == "true") {
            key += `-${digest(hasher)}`;
        }
        self.restoreKey = key;
        // Construct the lockfiles portion of the key:
        // This considers all the files found via globbing for various manifests
        // and lockfiles.
        self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true";
        // Constructs the workspace config and paths to restore:
        // The workspaces are given using a `$workspace -> $target` syntax.
        const workspaces = [];
        const workspacesInput = core.getInput("workspaces") || ".";
        for (const workspace of workspacesInput.trim().split("\n")) {
            let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
            root = path_1.default.resolve(root);
            target = path_1.default.join(root, target);
            workspaces.push(new workspace_1.Workspace(root, target));
        }
        self.workspaces = workspaces;
        let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
        const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed
        hasher = crypto_1.default.createHash("sha1");
        for (const workspace of workspaces) {
            const root = workspace.root;
            keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
            const workspaceMembers = await workspace.getWorkspaceMembers();
            const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => path_1.default.join(member.path, "Cargo.toml")));
            for (const cargo_manifest of cargo_manifests) {
                try {
                    const content = await promises_1.default.readFile(cargo_manifest, { encoding: "utf8" });
                    // Use any since TomlPrimitive is not exposed
                    const parsed = toml.parse(content);
                    if ("package" in parsed) {
                        const pack = parsed.package;
                        // Neutralize the version so bumping it alone does not
                        // invalidate the cache.
                        if ("version" in pack) {
                            pack["version"] = "0.0.0";
                        }
                    }
                    for (const prefix of ["", "build-", "dev-"]) {
                        const section_name = `${prefix}dependencies`;
                        if (!(section_name in parsed)) {
                            continue;
                        }
                        const deps = parsed[section_name];
                        for (const key of Object.keys(deps)) {
                            const dep = deps[key];
                            try {
                                // Neutralize path dependencies for the same reason.
                                if ("path" in dep) {
                                    dep.version = "0.0.0";
                                    dep.path = "";
                                }
                            }
                            catch (_e) {
                                // Not an object, probably a string (version),
                                // continue.
                                continue;
                            }
                        }
                    }
                    hasher.update(JSON.stringify(parsed));
                    parsedKeyFiles.push(cargo_manifest);
                }
                catch (e) {
                    // Fallback to caching them as regular file
                    core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
                    keyFiles.push(cargo_manifest);
                }
            }
            const cargo_lock = path_1.default.join(workspace.root, "Cargo.lock");
            if (await (0, utils_1.exists)(cargo_lock)) {
                try {
                    const content = await promises_1.default.readFile(cargo_lock, { encoding: "utf8" });
                    const parsed = toml.parse(content);
                    if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
                        // Fallback to caching them as regular file since this action
                        // can only handle Cargo.lock format version 3
                        core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
                        keyFiles.push(cargo_lock);
                        continue;
                    }
                    // Package without `[[package]].source` and `[[package]].checksum`
                    // are the one with `path = "..."` to crates within the workspace.
                    const packages = parsed.package.filter((p) => "source" in p || "checksum" in p);
                    hasher.update(JSON.stringify(packages));
                    parsedKeyFiles.push(cargo_lock);
                }
                catch (e) {
                    // Fallback to caching them as regular file
                    core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
                    keyFiles.push(cargo_lock);
                }
            }
        }
        keyFiles = sort_and_uniq(keyFiles);
        // Hash the remaining (unparsed) key files byte-by-byte.
        for (const file of keyFiles) {
            for await (const chunk of fs_1.default.createReadStream(file)) {
                hasher.update(chunk);
            }
        }
        let lockHash = digest(hasher);
        keyFiles.push(...parsedKeyFiles);
        self.keyFiles = sort_and_uniq(keyFiles);
        key += `-${lockHash}`;
        self.cacheKey = key;
        self.cachePaths = [path_1.default.join(exports.CARGO_HOME, "registry"), path_1.default.join(exports.CARGO_HOME, "git")];
        if (self.cacheBin) {
            self.cachePaths = [
                path_1.default.join(exports.CARGO_HOME, "bin"),
                path_1.default.join(exports.CARGO_HOME, ".crates.toml"),
                path_1.default.join(exports.CARGO_HOME, ".crates2.json"),
                ...self.cachePaths,
            ];
        }
        const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true";
        if (cacheTargets === "true") {
            self.cachePaths.push(...workspaces.map((ws) => ws.target));
        }
        const cacheDirectories = core.getInput("cache-directories");
        for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
            self.cachePaths.push(dir);
        }
        // Snapshot the currently installed cargo binaries so the post step
        // can tell which ones the build added.
        const bins = await (0, cleanup_1.getCargoBins)();
        self.cargoBins = Array.from(bins.values());
        return self;
    }
    /**
     * Reads and returns the cache config from the action `state`.
     *
     * @throws {Error} if the state is not present.
     * @returns {CacheConfig} the configuration.
     * @see {@link CacheConfig#saveState}
     * @see {@link CacheConfig#new}
     */
    static fromState() {
        const source = core.getState(STATE_CONFIG);
        if (!source) {
            throw new Error("Cache configuration not found in state");
        }
        const self = new CacheConfig();
        Object.assign(self, JSON.parse(source));
        // JSON round-tripping loses the class, so re-wrap the workspaces.
        self.workspaces = self.workspaces.map((w) => new workspace_1.Workspace(w.root, w.target));
        return self;
    }
    /**
     * Prints the configuration to the action log.
     */
    printInfo(cacheProvider) {
        core.startGroup("Cache Configuration");
        core.info(`Cache Provider:`);
        core.info(` ${cacheProvider.name}`);
        core.info(`Workspaces:`);
        for (const workspace of this.workspaces) {
            core.info(` ${workspace.root}`);
        }
        core.info(`Cache Paths:`);
        for (const path of this.cachePaths) {
            core.info(` ${path}`);
        }
        core.info(`Restore Key:`);
        core.info(` ${this.restoreKey}`);
        core.info(`Cache Key:`);
        core.info(` ${this.cacheKey}`);
        core.info(`.. Prefix:`);
        core.info(` - ${this.keyPrefix}`);
        core.info(`.. Environment considered:`);
        core.info(` - Rust Version: ${this.keyRust}`);
        for (const env of this.keyEnvs) {
            core.info(` - ${env}`);
        }
        core.info(`.. Lockfiles considered:`);
        for (const file of this.keyFiles) {
            core.info(` - ${file}`);
        }
        core.endGroup();
    }
    /**
     * Saves the configuration to the state store.
     * This is used to restore the configuration in the post action.
     */
    saveState() {
        core.saveState(STATE_CONFIG, this);
    }
}
exports.CacheConfig = CacheConfig;
/**
 * Checks if the cache is up to date.
 *
 * @returns `true` if the cache is up to date, `false` otherwise.
 */
function isCacheUpToDate() {
    // The config is only persisted to state when the cache needs saving,
    // so an empty state means nothing has to be done.
    const savedState = core.getState(STATE_CONFIG);
    return savedState === "";
}
/**
 * Returns a hex digest of the given hasher truncated to `HASH_LENGTH`.
 *
 * @param hasher The hasher to digest.
 * @returns The hex digest.
 */
function digest(hasher) {
    const hex = hasher.digest("hex");
    return hex.slice(0, HASH_LENGTH);
}
/**
 * Runs `rustc -vV` and parses its `key: value` output lines into an object
 * (fields such as `release`, `host`, `commit-hash`).
 */
async function getRustVersion() {
    const stdout = await (0, utils_1.getCmdOutput)("rustc", ["-vV"]);
    const entries = [];
    for (const line of stdout.split(/[\n\r]+/)) {
        if (!line) {
            continue;
        }
        const parts = line.split(":").map((part) => part.trim());
        // keep only well-formed `key: value` pairs
        if (parts.length === 2) {
            entries.push(parts);
        }
    }
    return Object.fromEntries(entries);
}
/**
 * Expands a newline-separated glob `pattern` and returns the matches that
 * are regular files (symlinks are not followed, directories are dropped).
 */
async function globFiles(pattern) {
    const globber = await glob.create(pattern, {
        followSymbolicLinks: false,
    });
    const matches = await globber.glob();
    // fs.statSync resolves symbolic links and returns the stat for the file
    // pointed to, so isFile() makes sure the resolved entry is a regular file.
    return matches.filter((file) => fs_1.default.statSync(file).isFile());
}
/**
 * Sorts `a` in place (locale-aware) and returns a new array with duplicate
 * values removed.
 */
function sort_and_uniq(a) {
    a.sort((x, y) => x.localeCompare(y));
    const unique = [];
    for (const value of a) {
        // After sorting, equal values are contiguous, so comparing against
        // the most recently kept element is enough to drop duplicates.
        if (unique.length === 0 || unique[unique.length - 1].localeCompare(value) !== 0) {
            unique.push(value);
        }
    }
    return unique;
}

View file

@ -116,7 +116,10 @@ export class CacheConfig {
self.keyEnvs = keyEnvs;
key += `-${digest(hasher)}`;
// Add job hash suffix if 'add-job-hash' is true
if (core.getInput("add-job-hash").toLowerCase() == "true") {
key += `-${digest(hasher)}`;
}
self.restoreKey = key;

102
src/restore.js Normal file
View file

@ -0,0 +1,102 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const cleanup_1 = require("./cleanup");
const config_1 = require("./config");
const utils_1 = require("./utils");
// Surface uncaught exceptions in the action log instead of failing silently.
process.on("uncaughtException", (e) => {
    core.error(e.message);
    if (e.stack) {
        core.error(e.stack);
    }
});
/**
 * Main-step entry point: computes the cache configuration, attempts to
 * restore (or, with `lookup-only`, just probe) the cache, and publishes the
 * `cache-hit` output.
 */
async function run() {
    const cacheProvider = (0, utils_1.getCacheProvider)();
    if (!cacheProvider.cache.isFeatureAvailable()) {
        setCacheHitOutput(false);
        return;
    }
    try {
        // Normalize the input to the literal strings "true"/"false".
        const cacheOnFailure = core.getInput("cache-on-failure").toLowerCase() === "true" ? "true" : "false";
        const lookupOnly = core.getInput("lookup-only").toLowerCase() === "true";
        core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
        core.exportVariable("CARGO_INCREMENTAL", 0);
        const config = await config_1.CacheConfig.new();
        config.printInfo(cacheProvider);
        core.info("");
        core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`);
        const key = config.cacheKey;
        // Pass a copy of cachePaths to avoid mutating the original array as reported by:
        // https://github.com/actions/toolkit/pull/1378
        // TODO: remove this once the underlying bug is fixed.
        const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], {
            lookupOnly,
        });
        if (!restoreKey) {
            core.info("No cache found.");
            config.saveState();
            setCacheHitOutput(false);
        }
        else {
            const match = restoreKey === key;
            core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);
            if (!match) {
                // pre-clean the target directory on cache mismatch
                for (const workspace of config.workspaces) {
                    try {
                        await (0, cleanup_1.cleanTargetDir)(workspace.target, [], true);
                    }
                    catch { }
                }
                // We restored the cache but it is not a full match.
                config.saveState();
            }
            setCacheHitOutput(match);
        }
    }
    catch (e) {
        setCacheHitOutput(false);
        (0, utils_1.reportError)(e);
    }
    process.exit();
}
/** Publishes the boolean `cache-hit` action output as a string. */
function setCacheHitOutput(cacheHit) {
    core.setOutput("cache-hit", String(cacheHit));
}
run();

125
src/save.js Normal file
View file

@ -0,0 +1,125 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
Object.defineProperty(exports, "__esModule", { value: true });
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const cleanup_1 = require("./cleanup");
const config_1 = require("./config");
const utils_1 = require("./utils");
// Surface uncaught exceptions in the action log instead of failing silently.
process.on("uncaughtException", (e) => {
    core.error(e.message);
    if (e.stack) {
        core.error(e.stack);
    }
});
/**
 * Post-step entry point: cleans the cargo home and target directories and
 * saves the cache under the key computed during the main (restore) step.
 * Does nothing when saving is disabled or the cache was a full hit.
 */
async function run() {
    const cacheProvider = (0, utils_1.getCacheProvider)();
    const save = core.getInput("save-if").toLowerCase() || "true";
    if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) {
        return;
    }
    try {
        if ((0, config_1.isCacheUpToDate)()) {
            core.info(`Cache up-to-date.`);
            return;
        }
        // Re-hydrate the configuration persisted by the restore step.
        const config = config_1.CacheConfig.fromState();
        config.printInfo(cacheProvider);
        core.info("");
        // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
        if (process.env["RUNNER_OS"] == "macOS") {
            await macOsWorkaround();
        }
        const workspaceCrates = core.getInput("cache-workspace-crates").toLowerCase() || "false";
        const allPackages = [];
        for (const workspace of config.workspaces) {
            const packages = await workspace.getPackagesOutsideWorkspaceRoot();
            // optionally also keep artifacts of the workspace's own member crates
            if (workspaceCrates === "true") {
                const wsMembers = await workspace.getWorkspaceMembers();
                packages.push(...wsMembers);
            }
            allPackages.push(...packages);
            // Each cleanup below is best-effort: on failure, log at debug level
            // and still save whatever is left.
            try {
                core.info(`... Cleaning ${workspace.target} ...`);
                await (0, cleanup_1.cleanTargetDir)(workspace.target, packages);
            }
            catch (e) {
                core.debug(`${e.stack}`);
            }
        }
        try {
            const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
            core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
            await (0, cleanup_1.cleanRegistry)(allPackages, crates !== "true");
        }
        catch (e) {
            core.debug(`${e.stack}`);
        }
        if (config.cacheBin) {
            try {
                core.info(`... Cleaning cargo/bin ...`);
                await (0, cleanup_1.cleanBin)(config.cargoBins);
            }
            catch (e) {
                core.debug(`${e.stack}`);
            }
        }
        try {
            core.info(`... Cleaning cargo git cache ...`);
            await (0, cleanup_1.cleanGit)(allPackages);
        }
        catch (e) {
            core.debug(`${e.stack}`);
        }
        core.info(`... Saving cache ...`);
        // Pass a copy of cachePaths to avoid mutating the original array as reported by:
        // https://github.com/actions/toolkit/pull/1378
        // TODO: remove this once the underlying bug is fixed.
        await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey);
    }
    catch (e) {
        (0, utils_1.reportError)(e);
    }
    process.exit();
}
run();
async function macOsWorkaround() {
    // Workaround for https://github.com/actions/cache/issues/403
    // Also see https://github.com/rust-lang/cargo/issues/8603
    const purgeCmd = "sudo";
    const purgeArgs = ["/usr/sbin/purge"];
    try {
        await exec.exec(purgeCmd, purgeArgs, { silent: true });
    }
    catch {
        // Best-effort only: any failure is deliberately ignored.
    }
}

114
src/utils.js Normal file
View file

@ -0,0 +1,114 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(target, mod, key, alias) {
    // Re-export property `key` of module `mod` onto `target` under `alias`
    // (defaulting to the same name), preserving the liveness of the binding.
    if (alias === undefined) alias = key;
    var descriptor = Object.getOwnPropertyDescriptor(mod, key);
    var needsGetter = !descriptor ||
        ("get" in descriptor ? !mod.__esModule : (descriptor.writable || descriptor.configurable));
    if (needsGetter) {
        descriptor = { enumerable: true, get: function() { return mod[key]; } };
    }
    Object.defineProperty(target, alias, descriptor);
}) : (function(target, mod, key, alias) {
    // Legacy fallback: plain copy (loses liveness of the binding).
    if (alias === undefined) alias = key;
    target[alias] = mod[key];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    // Attach the original module as a non-writable, enumerable `default` export.
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    // Legacy fallback for environments without property descriptors.
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    // Lazily resolved own-key enumerator: prefers Object.getOwnPropertyNames,
    // falling back to a for..in scan filtered to own properties. The first call
    // replaces `ownKeys` with the resolved implementation.
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        // ES module namespaces pass through untouched.
        if (mod && mod.__esModule) return mod;
        // CommonJS modules: re-export every own key except `default` onto a
        // fresh namespace object, then attach the module itself as `default`.
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
    // ES modules pass through; CommonJS values are wrapped as `{ default: mod }`.
    if (mod && mod.__esModule) {
        return mod;
    }
    return { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.reportError = reportError;
exports.getCmdOutput = getCmdOutput;
exports.getCacheProvider = getCacheProvider;
exports.exists = exists;
const core = __importStar(require("@actions/core"));
const exec = __importStar(require("@actions/exec"));
const buildjetCache = __importStar(require("@actions/buildjet-cache"));
const warpbuildCache = __importStar(require("@actions/warpbuild-cache"));
const ghCache = __importStar(require("@actions/cache"));
const fs_1 = __importDefault(require("fs"));
function reportError(e) {
    // Errors thrown by getCmdOutput carry a `commandFailed` tag with the
    // command line and captured stderr; prefer that over a bare stack trace.
    const failure = e.commandFailed;
    if (failure) {
        core.error(`Command failed: ${failure.command}`);
        core.error(failure.stderr);
    }
    else {
        core.error(`${e.stack}`);
    }
}
/**
 * Runs `cmd` with `args` (silently) and returns its captured stdout.
 * On failure, the thrown error is tagged with a `commandFailed` object
 * ({ command, stderr }) that reportError knows how to render.
 */
async function getCmdOutput(cmd, args = [], options = {}) {
    const stdoutChunks = [];
    const stderrChunks = [];
    try {
        await exec.exec(cmd, args, {
            silent: true,
            listeners: {
                stdout: (data) => {
                    stdoutChunks.push(data.toString());
                },
                stderr: (data) => {
                    stderrChunks.push(data.toString());
                },
            },
            ...options,
        });
    }
    catch (e) {
        // Attach the failing command line and its stderr for better reporting.
        e.commandFailed = {
            command: `${cmd} ${args.join(" ")}`,
            stderr: stderrChunks.join(""),
        };
        throw e;
    }
    return stdoutChunks.join("");
}
/**
 * Resolves the `cache-provider` action input to its backing cache module.
 * Throws for any unknown provider name.
 */
function getCacheProvider() {
    const providerName = core.getInput("cache-provider");
    // A Map keeps lookups safe for arbitrary input strings (no prototype keys).
    const providers = new Map([
        ["github", ghCache],
        ["buildjet", buildjetCache],
        ["warpbuild", warpbuildCache],
    ]);
    const cache = providers.get(providerName);
    if (cache === undefined) {
        throw new Error(`The \`cache-provider\` \`${providerName}\` is not valid.`);
    }
    return {
        name: providerName,
        cache: cache,
    };
}
/** Returns whether `path` is accessible, mapping fs access errors to `false`. */
async function exists(path) {
    try {
        await fs_1.default.promises.access(path);
    }
    catch {
        return false;
    }
    return true;
}

75
src/workspace.js Normal file
View file

@ -0,0 +1,75 @@
"use strict";
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    // Re-export property `k` of module `m` onto `o` under name `k2`
    // (defaulting to `k`), preserving the liveness of the binding via a getter.
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    // Legacy fallback: plain copy (loses liveness of the binding).
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    // Attach the original module as a non-writable, enumerable `default` export.
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
    // Lazily resolved own-key enumerator; the first call replaces `ownKeys`
    // with Object.getOwnPropertyNames or a filtered for..in fallback.
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        // ES module namespaces pass through; CommonJS modules get every own key
        // except `default` re-exported, plus the module itself as `default`.
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
    // ES modules pass through; CommonJS values are wrapped as `{ default: mod }`.
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.Workspace = void 0;
const core = __importStar(require("@actions/core"));
const path_1 = __importDefault(require("path"));
const utils_1 = require("./utils");
// Cargo target kinds recorded per package by Workspace.getPackages; only
// `lib` and `proc-macro` target names are tracked.
const SAVE_TARGETS = new Set(["lib", "proc-macro"]);
// Represents one Cargo workspace (its root directory plus `target` directory)
// and wraps `cargo metadata` queries against it.
class Workspace {
    constructor(root, target) {
        this.root = root;
        this.target = target;
    }
    /**
     * Runs `cargo metadata` in the workspace root and returns one record per
     * package accepted by `filter`: `{ name, version, targets, path }`, where
     * `targets` holds the names of lib/proc-macro targets (see SAVE_TARGETS)
     * and `path` is the directory containing the package's Cargo.toml.
     * Any failure is printed and yields an empty list (best-effort).
     */
    async getPackages(filter, ...extraArgs) {
        let packages = [];
        try {
            core.debug(`collecting metadata for "${this.root}"`);
            // CARGO_ENCODED_RUSTFLAGS is cleared for the metadata run --
            // NOTE(review): presumably so user RUSTFLAGS cannot break it; confirm.
            const meta = JSON.parse(await (0, utils_1.getCmdOutput)("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], {
                cwd: this.root,
                env: { "CARGO_ENCODED_RUSTFLAGS": "" },
            }));
            core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
            for (const pkg of meta.packages.filter(filter)) {
                // Keep only the names of targets whose kind is in SAVE_TARGETS.
                const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name);
                packages.push({ name: pkg.name, version: pkg.version, targets, path: path_1.default.dirname(pkg.manifest_path) });
            }
        }
        catch (err) {
            console.error(err);
        }
        return packages;
    }
    // Packages whose manifest path lies outside this workspace root, i.e.
    // everything that is not defined inside the workspace directory itself.
    async getPackagesOutsideWorkspaceRoot() {
        return await this.getPackages((pkg) => !pkg.manifest_path.startsWith(this.root));
    }
    // The workspace's own member crates (`--no-deps` restricts metadata to them).
    async getWorkspaceMembers() {
        return await this.getPackages((_) => true, "--no-deps");
    }
}
exports.Workspace = Workspace;

View file

@ -7,8 +7,8 @@
"target": "es2020",
"resolveJsonModule": true,
"moduleResolution": "node",
"module": "esnext",
"moduleResolution": "nodenext",
"module": "NodeNext",
"esModuleInterop": true,
"strict": true,