mirror of https://github.com/Swatinem/rust-cache
synced 2025-06-08 17:43:25 +00:00

Merge 829dfd6b08 into 9d47c6ad4b
This commit is contained in: commit c0745ebe3f

8 changed files with 323 additions and 24 deletions
action.yml

@@ -48,6 +48,14 @@ inputs:
     description: "Check if a cache entry exists without downloading the cache"
     required: false
     default: "false"
+  incremental:
+    description: "Determines whether to cache incremental builds - speeding up builds for more disk usage. Defaults to false."
+    required: false
+    default: "false"
+  incremental-key:
+    description: "The key to use for incremental builds. Used on a per-branch basis"
+    required: false
+    default: ${{ github.ref }}
 outputs:
   cache-hit:
     description: "A boolean value that indicates an exact match was found."
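For orientation, here is a minimal sketch of how an action consumes these two new inputs at runtime. The helper name readIncrementalInputs is illustrative and not part of this diff; the getInput calls mirror the ones the patch adds to CacheConfig.new() below.

import * as core from "@actions/core";

// Hypothetical helper; mirrors the parsing added in this patch.
function readIncrementalInputs(): { incremental: boolean; incrementalKey: string } {
  // "incremental" defaults to "false" in action.yml, so a plain string compare suffices
  const incremental = core.getInput("incremental").toLowerCase() === "true";
  // "incremental-key" defaults to ${{ github.ref }}, giving one incremental cache per branch
  const incrementalKey = core.getInput("incremental-key");
  return { incremental, incrementalKey };
}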
dist/restore/index.js (vendored, 59 changes)
@@ -86711,9 +86711,17 @@ class CacheConfig {
     constructor() {
         /** All the paths we want to cache */
         this.cachePaths = [];
+        /** All the paths we want to cache for incremental builds */
+        // public incrementalPaths: Array<string> = [];
         /** The primary cache key */
         this.cacheKey = "";
-        /** The secondary (restore) key that only contains the prefix and environment */
+        /** The primary cache key for incremental builds */
+        this.incrementalKey = "";
+        /**
+         * The secondary (restore) key that only contains the prefix and environment
+         * This should be used if the primary cacheKey is not available - IE pulling from main on a branch
+         * instead of the branch itself
+         * */
         this.restoreKey = "";
         /** Whether to cache CARGO_HOME/.bin */
         this.cacheBin = true;
@@ -86721,6 +86729,8 @@ class CacheConfig {
         this.workspaces = [];
         /** The cargo binaries present during main step */
         this.cargoBins = [];
+        /** Whether to cache incremental builds */
+        this.incremental = false;
         /** The prefix portion of the cache key */
         this.keyPrefix = "";
         /** The rust version considered for the cache key */
@@ -86788,6 +86798,9 @@ class CacheConfig {
             }
         }
         self.keyEnvs = keyEnvs;
+        // Make sure we consider incremental builds
+        self.incremental = lib_core.getInput("incremental").toLowerCase() == "true";
+        hasher.update(`incremental=${self.incremental}`);
         key += `-${digest(hasher)}`;
         self.restoreKey = key;
         // Construct the lockfiles portion of the key:
@@ -86909,6 +86922,14 @@ class CacheConfig {
         }
         const bins = await getCargoBins();
         self.cargoBins = Array.from(bins.values());
+        if (self.incremental) {
+            // wire the incremental key to be just for this branch
+            const branchName = lib_core.getInput("incremental-key") || "-shared";
+            const incrementalKey = key + `-incremental--` + branchName;
+            self.incrementalKey = incrementalKey;
+            // Add the incremental cache to the cachePaths so we can restore it
+            self.cachePaths.push(external_path_default().join(config_CARGO_HOME, "incremental-restore.json"));
+        }
         return self;
     }
     /**
@@ -86959,6 +86980,7 @@ class CacheConfig {
         for (const file of this.keyFiles) {
            lib_core.info(`  - ${file}`);
         }
+        lib_core.info(`.. Incremental: ${this.incremental}`);
         lib_core.endGroup();
     }
     /**
@@ -87315,6 +87337,9 @@ async function rmRF(dirName) {
 
 
 
+
+// import { saveMtimes } from "./incremental";
+
 process.on("uncaughtException", (e) => {
     lib_core.error(e.message);
     if (e.stack) {
@@ -87334,10 +87359,12 @@ async function run() {
     }
     var lookupOnly = lib_core.getInput("lookup-only").toLowerCase() === "true";
     lib_core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
-    lib_core.exportVariable("CARGO_INCREMENTAL", 0);
     const config = await CacheConfig.new();
     config.printInfo(cacheProvider);
     lib_core.info("");
+    if (!config.incremental) {
+        lib_core.exportVariable("CARGO_INCREMENTAL", 0);
+    }
     lib_core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`);
     const key = config.cacheKey;
     // Pass a copy of cachePaths to avoid mutating the original array as reported by:
@@ -87347,7 +87374,7 @@ async function run() {
         lookupOnly,
     });
     if (restoreKey) {
-        const match = restoreKey === key;
+        let match = restoreKey === key;
         lib_core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);
         if (!match) {
             // pre-clean the target directory on cache mismatch
@@ -87360,10 +87387,34 @@ async function run() {
             // We restored the cache but it is not a full match.
             config.saveState();
         }
+        // Restore the incremental-restore.json file and write the mtimes to all the files in the list
+        if (config.incremental) {
+            try {
+                const restoreJson = external_path_default().join(config_CARGO_HOME, "incremental-restore.json");
+                const restoreString = await external_fs_default().promises.readFile(restoreJson, "utf8");
+                const restoreData = JSON.parse(restoreString);
+                lib_core.debug(`restoreData: ${JSON.stringify(restoreData)}`);
+                if (restoreData.roots.length == 0) {
+                    throw new Error("No incremental roots found");
+                }
+                const incrementalKey = await cacheProvider.cache.restoreCache(restoreData.roots, config.incrementalKey, [config.restoreKey], { lookupOnly });
+                lib_core.debug(`restoring incremental builds from ${incrementalKey}`);
+                for (const [file, mtime] of Object.entries(restoreData.times)) {
+                    lib_core.debug(`restoring ${file} with mtime ${mtime}`);
+                    await external_fs_default().promises.utimes(file, new Date(mtime), new Date(mtime));
+                }
+            }
+            catch (err) {
+                lib_core.debug(`Could not restore incremental cache - ${err}`);
+                lib_core.debug(`${err.stack}`);
+                match = false;
+            }
+            config.saveState();
+        }
         setCacheHitOutput(match);
     }
     else {
-        lib_core.info("No cache found.");
+        lib_core.info(`No cache found for ${config.cacheKey} - this key was found ${restoreKey}`);
         config.saveState();
         setCacheHitOutput(false);
     }
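The hunk at 86798 feeds the new flag into the key hasher, so toggling incremental yields a different -<digest> suffix and the incremental and non-incremental cache families never collide. A rough sketch of that effect, assuming a Node crypto hasher and the HASH_LENGTH = 8 truncation declared in src/config.ts; the actual hash algorithm used by digest() is an assumption here.

import crypto from "crypto";

// Illustrative only: `incremental=true` and `incremental=false` produce
// different truncated digests, hence different cache keys.
function keySuffix(envInputs: string[], incremental: boolean): string {
  const hasher = crypto.createHash("sha256"); // algorithm assumed
  for (const input of envInputs) hasher.update(input);
  hasher.update(`incremental=${incremental}`);
  return hasher.digest("hex").slice(0, 8); // HASH_LENGTH = 8
}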
dist/save/index.js (vendored, 95 changes)
@@ -86711,9 +86711,17 @@ class CacheConfig {
     constructor() {
         /** All the paths we want to cache */
         this.cachePaths = [];
+        /** All the paths we want to cache for incremental builds */
+        // public incrementalPaths: Array<string> = [];
         /** The primary cache key */
         this.cacheKey = "";
-        /** The secondary (restore) key that only contains the prefix and environment */
+        /** The primary cache key for incremental builds */
+        this.incrementalKey = "";
+        /**
+         * The secondary (restore) key that only contains the prefix and environment
+         * This should be used if the primary cacheKey is not available - IE pulling from main on a branch
+         * instead of the branch itself
+         * */
         this.restoreKey = "";
         /** Whether to cache CARGO_HOME/.bin */
         this.cacheBin = true;
@@ -86721,6 +86729,8 @@ class CacheConfig {
         this.workspaces = [];
         /** The cargo binaries present during main step */
         this.cargoBins = [];
+        /** Whether to cache incremental builds */
+        this.incremental = false;
         /** The prefix portion of the cache key */
         this.keyPrefix = "";
         /** The rust version considered for the cache key */
@@ -86788,6 +86798,9 @@ class CacheConfig {
             }
         }
         self.keyEnvs = keyEnvs;
+        // Make sure we consider incremental builds
+        self.incremental = core.getInput("incremental").toLowerCase() == "true";
+        hasher.update(`incremental=${self.incremental}`);
         key += `-${digest(hasher)}`;
         self.restoreKey = key;
         // Construct the lockfiles portion of the key:
@@ -86909,6 +86922,14 @@ class CacheConfig {
         }
         const bins = await getCargoBins();
         self.cargoBins = Array.from(bins.values());
+        if (self.incremental) {
+            // wire the incremental key to be just for this branch
+            const branchName = core.getInput("incremental-key") || "-shared";
+            const incrementalKey = key + `-incremental--` + branchName;
+            self.incrementalKey = incrementalKey;
+            // Add the incremental cache to the cachePaths so we can restore it
+            self.cachePaths.push(external_path_default().join(CARGO_HOME, "incremental-restore.json"));
+        }
         return self;
     }
     /**
@@ -86959,6 +86980,7 @@ class CacheConfig {
         for (const file of this.keyFiles) {
            core.info(`  - ${file}`);
         }
+        core.info(`.. Incremental: ${this.incremental}`);
         core.endGroup();
     }
     /**
@@ -87310,12 +87332,59 @@ async function rmRF(dirName) {
     await io.rmRF(dirName);
 }
 
+;// CONCATENATED MODULE: ./src/incremental.ts
+// import * as core from "@actions/core";
+// import * as io from "@actions/io";
+// import { CARGO_HOME } from "./config";
+// import { exists } from "./utils";
+// import { Packages } from "./workspace";
+
+
+async function saveMtimes(targetDirs) {
+    let data = {
+        roots: [],
+        times: {},
+    };
+    let stack = [];
+    // Collect all the incremental files
+    for (const dir of targetDirs) {
+        for (const maybeProfile of await external_fs_default().promises.readdir(dir)) {
+            const profileDir = external_path_default().join(dir, maybeProfile);
+            const incrementalDir = external_path_default().join(profileDir, "incremental");
+            if (external_fs_default().existsSync(incrementalDir)) {
+                stack.push(incrementalDir);
+            }
+        }
+    }
+    // Save the stack as the roots - we cache these directly
+    data.roots = stack.slice();
+    while (stack.length > 0) {
+        const dirName = stack.pop();
+        const dir = await external_fs_default().promises.opendir(dirName);
+        for await (const dirent of dir) {
+            if (dirent.isDirectory()) {
+                stack.push(external_path_default().join(dirName, dirent.name));
+            }
+            else {
+                const fileName = external_path_default().join(dirName, dirent.name);
+                const { mtime } = await external_fs_default().promises.stat(fileName);
+                data.times[fileName] = mtime.getTime();
+            }
+        }
+    }
+    return data;
+}
+
 ;// CONCATENATED MODULE: ./src/save.ts
 
 
 process.on("uncaughtException", (e) => {
     core.error(e.message);
     if (e.stack) {
@@ -87340,6 +87409,28 @@ async function run() {
     if (process.env["RUNNER_OS"] == "macOS") {
         await macOsWorkaround();
     }
+    // Save the incremental cache before we delete it
+    if (config.incremental) {
+        core.info(`... Saving incremental cache ...`);
+        try {
+            const targetDirs = config.workspaces.map((ws) => ws.target);
+            const cache = await saveMtimes(targetDirs);
+            const saved = await cacheProvider.cache.saveCache(cache.roots, config.incrementalKey);
+            core.debug(`saved incremental cache with key ${saved} with contents ${cache.roots}, ${cache.times}`);
+            // write the incremental-restore.json file
+            const serialized = JSON.stringify(cache);
+            await external_fs_default().promises.writeFile(external_path_default().join(CARGO_HOME, "incremental-restore.json"), serialized);
+            // Delete the incremental roots before proceeding - they are directories, so remove recursively
+            for (const root of cache.roots) {
+                core.debug(`  deleting ${root}`);
+                await (0,promises_.rm)(root, { recursive: true });
+            }
+        }
+        catch (e) {
+            core.debug(`Failed to save incremental cache`);
+            core.debug(`${e.stack}`);
+        }
+    }
     const allPackages = [];
     for (const workspace of config.workspaces) {
         const packages = await workspace.getPackagesOutsideWorkspaceRoot();
@@ -87376,7 +87467,7 @@ async function run() {
     catch (e) {
         core.debug(`${e.stack}`);
     }
-    core.info(`... Saving cache ...`);
+    core.info(`... Saving cache with key ${config.cacheKey}`);
     // Pass a copy of cachePaths to avoid mutating the original array as reported by:
     // https://github.com/actions/toolkit/pull/1378
     // TODO: remove this once the underlying bug is fixed.
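Both bundles above serialize the result of saveMtimes into incremental-restore.json inside CARGO_HOME. A hedged illustration of what that manifest holds on disk; the paths are invented, and the shape matches the MtimeData type declared in src/incremental.ts further down.

// Shape per MtimeData: cached directory roots plus per-file mtimes in ms.
const manifest: { roots: string[]; times: { [file: string]: number } } = {
  roots: ["/home/runner/work/app/app/target/debug/incremental"],
  times: {
    "/home/runner/work/app/app/target/debug/incremental/app-1abc/s-xyz.lock": 1717171717000,
  },
};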
src/cleanup.ts

@@ -25,7 +25,7 @@ export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp = false) {
       } else {
         await cleanProfileTarget(dirName, packages, checkTimestamp);
       }
-    } catch {}
+    } catch { }
   } else if (dirent.name !== "CACHEDIR.TAG") {
     await rm(dir.path, dirent);
   }
@@ -43,11 +43,11 @@ async function cleanProfileTarget(profileDir: string, packages: Packages, checkTimestamp = false) {
     // https://github.com/vertexclique/kaos/blob/9876f6c890339741cc5be4b7cb9df72baa5a6d79/src/cargo.rs#L25
     // https://github.com/eupn/macrotest/blob/c4151a5f9f545942f4971980b5d264ebcd0b1d11/src/cargo.rs#L27
     cleanTargetDir(path.join(profileDir, "target"), packages, checkTimestamp);
-  } catch {}
+  } catch { }
   try {
     // https://github.com/dtolnay/trybuild/blob/eec8ca6cb9b8f53d0caf1aa499d99df52cae8b40/src/cargo.rs#L50
     cleanTargetDir(path.join(profileDir, "trybuild"), packages, checkTimestamp);
-  } catch {}
+  } catch { }
 
   // Delete everything else.
   await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp);
@@ -86,7 +86,7 @@ export async function getCargoBins(): Promise<Set<string>> {
         bins.add(bin);
       }
     }
-  } catch {}
+  } catch { }
   return bins;
 }
 
@@ -117,7 +117,7 @@ export async function cleanRegistry(packages: Packages, crates = true) {
     const credentials = path.join(CARGO_HOME, ".cargo", "credentials.toml");
     core.debug(`deleting "${credentials}"`);
     await fs.promises.unlink(credentials);
-  } catch {}
+  } catch { }
 
   // `.cargo/registry/index`
   let pkgSet = new Set(packages.map((p) => p.name));
@@ -229,7 +229,7 @@ export async function cleanGit(packages: Packages) {
         await rm(dir.path, dirent);
       }
     }
-  } catch {}
+  } catch { }
 
   // clean the checkouts
   try {
@@ -250,7 +250,7 @@ export async function cleanGit(packages: Packages) {
       }
     }
   }
-  } catch {}
+  } catch { }
 }
 
 const ONE_WEEK = 7 * 24 * 3600 * 1000;
@@ -302,7 +302,7 @@ async function rm(parent: string, dirent: fs.Dirent) {
   } else if (dirent.isDirectory()) {
     await io.rmRF(fileName);
   }
-  } catch {}
+  } catch { }
 }
 
 async function rmRF(dirName: string) {
src/config.ts

@@ -20,9 +20,21 @@ const HASH_LENGTH = 8;
 export class CacheConfig {
   /** All the paths we want to cache */
   public cachePaths: Array<string> = [];
 
+  /** All the paths we want to cache for incremental builds */
+  // public incrementalPaths: Array<string> = [];
+
   /** The primary cache key */
   public cacheKey = "";
-  /** The secondary (restore) key that only contains the prefix and environment */
+
+  /** The primary cache key for incremental builds */
+  public incrementalKey = "";
+
+  /**
+   * The secondary (restore) key that only contains the prefix and environment
+   * This should be used if the primary cacheKey is not available - IE pulling from main on a branch
+   * instead of the branch itself
+   * */
   public restoreKey = "";
 
   /** Whether to cache CARGO_HOME/.bin */
@@ -34,6 +46,9 @@ export class CacheConfig {
   /** The cargo binaries present during main step */
   public cargoBins: Array<string> = [];
 
+  /** Whether to cache incremental builds */
+  public incremental: boolean = false;
+
   /** The prefix portion of the cache key */
   private keyPrefix = "";
   /** The rust version considered for the cache key */
@@ -43,7 +58,7 @@ export class CacheConfig {
   /** The files considered for the cache key */
   private keyFiles: Array<string> = [];
 
-  private constructor() {}
+  private constructor() { }
 
   /**
    * Constructs a [`CacheConfig`] with all the paths and keys.
@@ -116,6 +131,10 @@ export class CacheConfig {
 
     self.keyEnvs = keyEnvs;
 
+    // Make sure we consider incremental builds
+    self.incremental = core.getInput("incremental").toLowerCase() == "true";
+    hasher.update(`incremental=${self.incremental}`);
+
     key += `-${digest(hasher)}`;
 
     self.restoreKey = key;
@@ -268,6 +287,16 @@ export class CacheConfig {
     const bins = await getCargoBins();
     self.cargoBins = Array.from(bins.values());
 
+    if (self.incremental) {
+      // wire the incremental key to be just for this branch
+      const branchName = core.getInput("incremental-key") || "-shared";
+      const incrementalKey = key + `-incremental--` + branchName;
+      self.incrementalKey = incrementalKey;
+
+      // Add the incremental cache to the cachePaths so we can restore it
+      self.cachePaths.push(path.join(CARGO_HOME, "incremental-restore.json"));
+    }
+
     return self;
   }
 
@@ -322,6 +351,7 @@ export class CacheConfig {
     for (const file of this.keyFiles) {
       core.info(`  - ${file}`);
     }
+    core.info(`.. Incremental: ${this.incremental}`);
     core.endGroup();
 }
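Putting these config hunks together: the incremental key is just the restore key with a branch-scoped suffix. A worked example under assumed values, using the exact concatenation from the hunk at 287:

// Values assumed for illustration only.
const key = "v0-rust-ubuntu-22.04-1a2b3c4d";        // restore key computed above
const branchName = "refs/heads/my-feature";         // incremental-key input, default ${{ github.ref }}
const incrementalKey = key + `-incremental--` + branchName;
// => "v0-rust-ubuntu-22.04-1a2b3c4d-incremental--refs/heads/my-feature"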
src/incremental.ts (new file, 54 lines)

@@ -0,0 +1,54 @@
+// import * as core from "@actions/core";
+// import * as io from "@actions/io";
+// import { CARGO_HOME } from "./config";
+// import { exists } from "./utils";
+// import { Packages } from "./workspace";
+
+import fs from "fs";
+import path from "path";
+
+export type MtimeData = {
+  roots: string[],
+  times: {
+    [key: string]: number
+  }
+};
+
+export async function saveMtimes(targetDirs: string[]): Promise<MtimeData> {
+  let data: MtimeData = {
+    roots: [],
+    times: {},
+  };
+  let stack: string[] = [];
+
+  // Collect all the incremental files
+  for (const dir of targetDirs) {
+    for (const maybeProfile of await fs.promises.readdir(dir)) {
+      const profileDir = path.join(dir, maybeProfile);
+      const incrementalDir = path.join(profileDir, "incremental");
+      if (fs.existsSync(incrementalDir)) {
+        stack.push(incrementalDir);
+      }
+    }
+  }
+
+  // Save the stack as the roots - we cache these directly
+  data.roots = stack.slice();
+
+  while (stack.length > 0) {
+    const dirName = stack.pop()!;
+    const dir = await fs.promises.opendir(dirName);
+
+    for await (const dirent of dir) {
+      if (dirent.isDirectory()) {
+        stack.push(path.join(dirName, dirent.name));
+      } else {
+        const fileName = path.join(dirName, dirent.name);
+        const { mtime } = await fs.promises.stat(fileName);
+        data.times[fileName] = mtime.getTime();
+      }
+    }
+  }
+
+  return data;
+}
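A usage sketch for the new module; the target path is invented, and the returned shape is the MtimeData declared above (roots are the <profile>/incremental directories, times map each file to its mtime in milliseconds).

import { saveMtimes } from "./incremental";

async function demo() {
  const data = await saveMtimes(["/home/runner/work/app/app/target"]);
  // data.roots: every `<target>/<profile>/incremental` directory found
  // data.times: absolute file path -> mtime in milliseconds since the epoch
  console.log(data.roots.length, Object.keys(data.times).length);
}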
src/restore.ts

@@ -1,8 +1,12 @@
 import * as core from "@actions/core";
 
 import { cleanTargetDir } from "./cleanup";
-import { CacheConfig } from "./config";
+import { CacheConfig, CARGO_HOME } from "./config";
 import { getCacheProvider, reportError } from "./utils";
+// import { saveMtimes } from "./incremental";
+import path from "path";
+import fs from "fs";
+import { MtimeData } from "./incremental";
 
 process.on("uncaughtException", (e) => {
   core.error(e.message);
@@ -27,12 +31,15 @@ async function run() {
   var lookupOnly = core.getInput("lookup-only").toLowerCase() === "true";
 
   core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
-  core.exportVariable("CARGO_INCREMENTAL", 0);
 
   const config = await CacheConfig.new();
   config.printInfo(cacheProvider);
   core.info("");
 
+  if (!config.incremental) {
+    core.exportVariable("CARGO_INCREMENTAL", 0);
+  }
+
   core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`);
   const key = config.cacheKey;
   // Pass a copy of cachePaths to avoid mutating the original array as reported by:
@@ -42,23 +49,53 @@ async function run() {
     lookupOnly,
   });
   if (restoreKey) {
-    const match = restoreKey === key;
+    let match = restoreKey === key;
     core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);
 
     if (!match) {
       // pre-clean the target directory on cache mismatch
       for (const workspace of config.workspaces) {
         try {
           await cleanTargetDir(workspace.target, [], true);
-        } catch {}
+        } catch { }
       }
 
       // We restored the cache but it is not a full match.
       config.saveState();
     }
 
+    // Restore the incremental-restore.json file and write the mtimes to all the files in the list
+    if (config.incremental) {
+      try {
+        const restoreJson = path.join(CARGO_HOME, "incremental-restore.json");
+        const restoreString = await fs.promises.readFile(restoreJson, "utf8");
+        const restoreData: MtimeData = JSON.parse(restoreString);
+
+        core.debug(`restoreData: ${JSON.stringify(restoreData)}`);
+
+        if (restoreData.roots.length == 0) {
+          throw new Error("No incremental roots found");
+        }
+
+        const incrementalKey = await cacheProvider.cache.restoreCache(restoreData.roots, config.incrementalKey, [config.restoreKey], { lookupOnly });
+        core.debug(`restoring incremental builds from ${incrementalKey}`);
+
+        for (const [file, mtime] of Object.entries(restoreData.times)) {
+          core.debug(`restoring ${file} with mtime ${mtime}`);
+          await fs.promises.utimes(file, new Date(mtime), new Date(mtime));
+        }
+      } catch (err) {
+        core.debug(`Could not restore incremental cache - ${err}`);
+        core.debug(`${(err as any).stack}`);
+        match = false;
+      }
+      config.saveState();
+    }
+
     setCacheHitOutput(match);
   } else {
-    core.info("No cache found.");
+    core.info(`No cache found for ${config.cacheKey} - this key was found ${restoreKey}`);
    config.saveState();
 
     setCacheHitOutput(false);
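The reason restore rewrites mtimes: files extracted from a cache archive get fresh timestamps, and cargo's freshness checks would then treat the restored incremental artifacts as stale. A minimal sketch of the core call the loop above performs; the helper name is illustrative.

import fs from "fs";

// Set both atime and mtime back to the recorded value, as the restore loop does.
async function restoreMtime(file: string, mtimeMs: number): Promise<void> {
  const when = new Date(mtimeMs);
  await fs.promises.utimes(file, when, when);
}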
src/save.ts (34 changes)

@@ -2,8 +2,12 @@ import * as core from "@actions/core";
 import * as exec from "@actions/exec";
 
 import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
-import { CacheConfig, isCacheUpToDate } from "./config";
+import { CacheConfig, CARGO_HOME, isCacheUpToDate } from "./config";
 import { getCacheProvider, reportError } from "./utils";
+import { rm } from "fs/promises";
+import fs from "fs";
+import path from "path";
+import { saveMtimes } from "./incremental";
 
 process.on("uncaughtException", (e) => {
   core.error(e.message);
@@ -36,6 +40,30 @@ async function run() {
     await macOsWorkaround();
   }
 
+  // Save the incremental cache before we delete it
+  if (config.incremental) {
+    core.info(`... Saving incremental cache ...`);
+    try {
+      const targetDirs = config.workspaces.map((ws) => ws.target);
+      const cache = await saveMtimes(targetDirs);
+      const saved = await cacheProvider.cache.saveCache(cache.roots, config.incrementalKey);
+      core.debug(`saved incremental cache with key ${saved} with contents ${cache.roots}, ${cache.times}`);
+
+      // write the incremental-restore.json file
+      const serialized = JSON.stringify(cache);
+      await fs.promises.writeFile(path.join(CARGO_HOME, "incremental-restore.json"), serialized);
+
+      // Delete the incremental roots before proceeding - they are directories, so remove recursively
+      for (const root of cache.roots) {
+        core.debug(`  deleting ${root}`);
+        await rm(root, { recursive: true });
+      }
+    } catch (e) {
+      core.debug(`Failed to save incremental cache`);
+      core.debug(`${(e as any).stack}`);
+    }
+  }
+
   const allPackages = [];
   for (const workspace of config.workspaces) {
     const packages = await workspace.getPackagesOutsideWorkspaceRoot();
@@ -72,7 +100,7 @@ async function run() {
     core.debug(`${(e as any).stack}`);
   }
 
-  core.info(`... Saving cache ...`);
+  core.info(`... Saving cache with key ${config.cacheKey}`);
   // Pass a copy of cachePaths to avoid mutating the original array as reported by:
   // https://github.com/actions/toolkit/pull/1378
   // TODO: remove this once the underlying bug is fixed.
@@ -90,5 +118,5 @@ async function macOsWorkaround() {
     // Workaround for https://github.com/actions/cache/issues/403
     // Also see https://github.com/rust-lang/cargo/issues/8603
     await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true });
-  } catch {}
+  } catch { }
 }
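End to end, the save side is: scan, upload, record, delete. A condensed sketch under the names this diff introduces, with error handling elided; the cacheProvider parameter type is assumed (it is whatever getCacheProvider() returns), and the recursive flag reflects that the cached roots are directories.

import fs from "fs";
import path from "path";
import { rm } from "fs/promises";
import { CacheConfig, CARGO_HOME } from "./config";
import { saveMtimes } from "./incremental";

async function saveIncremental(
  config: CacheConfig,
  cacheProvider: { cache: { saveCache(paths: string[], key: string): Promise<unknown> } },
) {
  const dirs = config.workspaces.map((ws) => ws.target);
  const cache = await saveMtimes(dirs);                                    // 1. scan incremental dirs
  await cacheProvider.cache.saveCache(cache.roots, config.incrementalKey); // 2. upload them
  await fs.promises.writeFile(                                             // 3. record the manifest
    path.join(CARGO_HOME, "incremental-restore.json"),
    JSON.stringify(cache),
  );
  for (const root of cache.roots) {                                        // 4. drop the roots from
    await rm(root, { recursive: true });                                   //    the main cache payload
  }
}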