mirror of https://github.com/Swatinem/rust-cache synced 2025-04-11 07:03:37 +00:00

remove invalidation

Jonathan Kelley 2025-01-28 21:58:20 -08:00
parent 07fbca13c8
commit b37d2821f8
7 changed files with 135 additions and 222 deletions
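As the diffs below show, the change drops the module-level incremental_missing flag and the exported isIncrementalMissing() helper from src/incremental.ts, together with the CacheConfig.isIncrementalMissing() wrapper in src/config.ts. Condensed into one sketch (assembled from the deleted lines shown below, not a verbatim copy of the source), the removed invalidation hook amounted to:

// src/incremental.ts side: a module-level flag, flipped when a profile
// directory had no incremental-restore.json to restore from.
let incremental_missing = false;

export function isIncrementalMissing(): boolean {
  return incremental_missing;
}

// src/config.ts side: CacheConfig only reported the flag when the
// `incremental` input was enabled for the run.
class CacheConfig {
  incremental = false;

  isIncrementalMissing(): boolean {
    if (this.incremental) {
      return isIncrementalMissing();
    }
    return false;
  }
}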

dist/restore/index.js vendored (116 changed lines)

@ -86691,59 +86691,6 @@ class Workspace {
}
}
;// CONCATENATED MODULE: ./src/incremental.ts
// import * as io from "@actions/io";
// import { CARGO_HOME } from "./config";
// import { Packages } from "./workspace";
let incremental_missing = false;
function isIncrementalMissing() {
return incremental_missing;
}
async function restoreIncremental(targetDir) {
lib_core.debug(`restoring incremental directory "${targetDir}"`);
let dir = await external_fs_default().promises.opendir(targetDir);
for await (const dirent of dir) {
if (dirent.isDirectory()) {
let dirName = external_path_default().join(dir.path, dirent.name);
// is it a profile dir, or a nested target dir?
let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json")));
try {
if (isNestedTarget) {
await restoreIncremental(dirName);
}
else {
await restoreIncrementalProfile(dirName);
}
restoreIncrementalProfile;
}
catch { }
}
}
}
async function restoreIncrementalProfile(dirName) {
lib_core.debug(`restoring incremental profile directory "${dirName}"`);
const incrementalJson = external_path_default().join(dirName, "incremental-restore.json");
if (await utils_exists(incrementalJson)) {
const contents = await external_fs_default().promises.readFile(incrementalJson, "utf8");
const { modifiedTimes } = JSON.parse(contents);
lib_core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`);
// Write the mtimes to all the files in the profile directory
for (const fileName of Object.keys(modifiedTimes)) {
const mtime = modifiedTimes[fileName];
const filePath = external_path_default().join(dirName, fileName);
await external_fs_default().promises.utimes(filePath, new Date(mtime), new Date(mtime));
}
}
else {
lib_core.debug(`incremental-restore.json not found for ${dirName}`);
incremental_missing = true;
}
}
;// CONCATENATED MODULE: ./src/config.ts
@ -86756,7 +86703,6 @@ async function restoreIncrementalProfile(dirName) {
const HOME = external_os_default().homedir();
const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
const STATE_CONFIG = "RUST_CACHE_CONFIG";
@ -87046,12 +86992,6 @@ class CacheConfig {
saveState() {
lib_core.saveState(STATE_CONFIG, this);
}
isIncrementalMissing() {
if (this.incremental) {
return isIncrementalMissing();
}
return false;
}
}
/**
* Checks if the cache is up to date.
@ -87112,7 +87052,7 @@ function sort_and_uniq(a) {
async function cleanTargetDir(targetDir, packages, checkTimestamp) {
async function cleanTargetDir(targetDir, packages, checkTimestamp = false) {
lib_core.debug(`cleaning target directory "${targetDir}"`);
// remove all *files* from the profile directory
let dir = await external_fs_default().promises.opendir(targetDir);
@ -87136,7 +87076,7 @@ async function cleanTargetDir(targetDir, packages, checkTimestamp) {
}
}
}
async function cleanProfileTarget(profileDir, packages, checkTimestamp) {
async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) {
lib_core.debug(`cleaning profile directory "${profileDir}"`);
// Quite a few testing utility crates store compilation artifacts as nested
// workspaces under `target/tests`. Notably, `target/tests/target` and
@ -87394,6 +87334,54 @@ async function rmRF(dirName) {
await io.rmRF(dirName);
}
;// CONCATENATED MODULE: ./src/incremental.ts
// import * as io from "@actions/io";
// import { CARGO_HOME } from "./config";
// import { Packages } from "./workspace";
async function restoreIncremental(targetDir) {
lib_core.debug(`restoring incremental directory "${targetDir}"`);
let dir = await external_fs_default().promises.opendir(targetDir);
for await (const dirent of dir) {
if (dirent.isDirectory()) {
let dirName = external_path_default().join(dir.path, dirent.name);
// is it a profile dir, or a nested target dir?
let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json")));
try {
if (isNestedTarget) {
await restoreIncremental(dirName);
}
else {
await restoreIncrementalProfile(dirName);
}
restoreIncrementalProfile;
}
catch { }
}
}
}
async function restoreIncrementalProfile(dirName) {
lib_core.debug(`restoring incremental profile directory "${dirName}"`);
const incrementalJson = external_path_default().join(dirName, "incremental-restore.json");
if (await utils_exists(incrementalJson)) {
const contents = await external_fs_default().promises.readFile(incrementalJson, "utf8");
const { modifiedTimes } = JSON.parse(contents);
lib_core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`);
// Write the mtimes to all the files in the profile directory
for (const fileName of Object.keys(modifiedTimes)) {
const mtime = modifiedTimes[fileName];
const filePath = external_path_default().join(dirName, fileName);
await external_fs_default().promises.utimes(filePath, new Date(mtime), new Date(mtime));
}
}
else {
lib_core.debug(`incremental-restore.json not found for ${dirName}`);
}
}
;// CONCATENATED MODULE: ./src/restore.ts
@ -87430,7 +87418,9 @@ async function run() {
// Pass a copy of cachePaths to avoid mutating the original array as reported by:
// https://github.com/actions/toolkit/pull/1378
// TODO: remove this once the underlying bug is fixed.
const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], { lookupOnly });
const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], {
lookupOnly,
});
if (restoreKey) {
const match = restoreKey === key;
lib_core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);
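For reference, the restoreIncrementalProfile logic above round-trips file mtimes through an incremental-restore.json manifest written at save time. A minimal, self-contained sketch of the restore step (the manifest shape and paths follow the code above; applySavedMtimes is an illustrative name and error handling is omitted):

import * as fs from "fs";
import * as path from "path";

// Shape of incremental-restore.json as consumed above: a map from file name
// (relative to the profile directory) to its recorded mtime in milliseconds.
interface IncrementalManifest {
  modifiedTimes: Record<string, number>;
}

async function applySavedMtimes(profileDir: string): Promise<void> {
  const manifestPath = path.join(profileDir, "incremental-restore.json");
  const contents = await fs.promises.readFile(manifestPath, "utf8");
  const { modifiedTimes }: IncrementalManifest = JSON.parse(contents);
  for (const [fileName, mtime] of Object.entries(modifiedTimes)) {
    // Re-apply the recorded timestamp to the restored artifact.
    const filePath = path.join(profileDir, fileName);
    await fs.promises.utimes(filePath, new Date(mtime), new Date(mtime));
  }
}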

dist/save/index.js vendored (210 changed lines)

@ -85430,7 +85430,7 @@ var __webpack_exports__ = {};
"use strict";
// EXTERNAL MODULE: ./node_modules/@actions/core/lib/core.js
var lib_core = __nccwpck_require__(7484);
var core = __nccwpck_require__(7484);
// EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js
var exec = __nccwpck_require__(5236);
// EXTERNAL MODULE: ./node_modules/@actions/io/lib/io.js
@ -86601,11 +86601,11 @@ var cache_lib_cache = __nccwpck_require__(5116);
function reportError(e) {
const { commandFailed } = e;
if (commandFailed) {
lib_core.error(`Command failed: ${commandFailed.command}`);
lib_core.error(commandFailed.stderr);
core.error(`Command failed: ${commandFailed.command}`);
core.error(commandFailed.stderr);
}
else {
lib_core.error(`${e.stack}`);
core.error(`${e.stack}`);
}
}
async function getCmdOutput(cmd, args = [], options = {}) {
@ -86635,7 +86635,7 @@ async function getCmdOutput(cmd, args = [], options = {}) {
return stdout;
}
function getCacheProvider() {
const cacheProvider = lib_core.getInput("cache-provider");
const cacheProvider = core.getInput("cache-provider");
const cache = cacheProvider === "github" ? cache_lib_cache : cacheProvider === "buildjet" ? lib_cache : undefined;
if (!cache) {
throw new Error(`The \`cache-provider\` \`${cacheProvider}\` is not valid.`);
@ -86645,7 +86645,7 @@ function getCacheProvider() {
cache: cache,
};
}
async function utils_exists(path) {
async function exists(path) {
try {
await external_fs_default().promises.access(path);
return true;
@ -86668,11 +86668,11 @@ class Workspace {
async getPackages(filter, ...extraArgs) {
let packages = [];
try {
lib_core.debug(`collecting metadata for "${this.root}"`);
core.debug(`collecting metadata for "${this.root}"`);
const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1", ...extraArgs], {
cwd: this.root,
}));
lib_core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
core.debug(`workspace "${this.root}" has ${meta.packages.length} packages`);
for (const pkg of meta.packages.filter(filter)) {
const targets = pkg.targets.filter((t) => t.kind.some((kind) => SAVE_TARGETS.has(kind))).map((t) => t.name);
packages.push({ name: pkg.name, version: pkg.version, targets, path: external_path_default().dirname(pkg.manifest_path) });
@ -86691,59 +86691,6 @@ class Workspace {
}
}
;// CONCATENATED MODULE: ./src/incremental.ts
// import * as io from "@actions/io";
// import { CARGO_HOME } from "./config";
// import { Packages } from "./workspace";
let incremental_missing = false;
function isIncrementalMissing() {
return incremental_missing;
}
async function restoreIncremental(targetDir) {
core.debug(`restoring incremental directory "${targetDir}"`);
let dir = await fs.promises.opendir(targetDir);
for await (const dirent of dir) {
if (dirent.isDirectory()) {
let dirName = path.join(dir.path, dirent.name);
// is it a profile dir, or a nested target dir?
let isNestedTarget = (await exists(path.join(dirName, "CACHEDIR.TAG"))) || (await exists(path.join(dirName, ".rustc_info.json")));
try {
if (isNestedTarget) {
await restoreIncremental(dirName);
}
else {
await restoreIncrementalProfile(dirName);
}
restoreIncrementalProfile;
}
catch { }
}
}
}
async function restoreIncrementalProfile(dirName) {
core.debug(`restoring incremental profile directory "${dirName}"`);
const incrementalJson = path.join(dirName, "incremental-restore.json");
if (await exists(incrementalJson)) {
const contents = await fs.promises.readFile(incrementalJson, "utf8");
const { modifiedTimes } = JSON.parse(contents);
core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`);
// Write the mtimes to all the files in the profile directory
for (const fileName of Object.keys(modifiedTimes)) {
const mtime = modifiedTimes[fileName];
const filePath = path.join(dirName, fileName);
await fs.promises.utimes(filePath, new Date(mtime), new Date(mtime));
}
}
else {
core.debug(`incremental-restore.json not found for ${dirName}`);
incremental_missing = true;
}
}
;// CONCATENATED MODULE: ./src/config.ts
@ -86756,7 +86703,6 @@ async function restoreIncrementalProfile(dirName) {
const HOME = external_os_default().homedir();
const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo");
const STATE_CONFIG = "RUST_CACHE_CONFIG";
@ -86804,13 +86750,13 @@ class CacheConfig {
// Construct key prefix:
// This uses either the `shared-key` input,
// or the `key` input combined with the `job` key.
let key = lib_core.getInput("prefix-key") || "v0-rust";
const sharedKey = lib_core.getInput("shared-key");
let key = core.getInput("prefix-key") || "v0-rust";
const sharedKey = core.getInput("shared-key");
if (sharedKey) {
key += `-${sharedKey}`;
}
else {
const inputKey = lib_core.getInput("key");
const inputKey = core.getInput("key");
if (inputKey) {
key += `-${inputKey}`;
}
@ -86838,7 +86784,7 @@ class CacheConfig {
self.keyRust = keyRust;
// these prefixes should cover most of the compiler / rust / cargo keys
const envPrefixes = ["CARGO", "CC", "CFLAGS", "CXX", "CMAKE", "RUST"];
envPrefixes.push(...lib_core.getInput("env-vars").split(/\s+/).filter(Boolean));
envPrefixes.push(...core.getInput("env-vars").split(/\s+/).filter(Boolean));
// sort the available env vars so we have a more stable hash
const keyEnvs = [];
const envKeys = Object.keys(process.env);
@ -86852,18 +86798,18 @@ class CacheConfig {
}
self.keyEnvs = keyEnvs;
// Make sure we consider incremental builds
self.incremental = lib_core.getInput("incremental").toLowerCase() == "true";
self.incremental = core.getInput("incremental").toLowerCase() == "true";
hasher.update(`incremental=${self.incremental}`);
key += `-${digest(hasher)}`;
self.restoreKey = key;
// Construct the lockfiles portion of the key:
// This considers all the files found via globbing for various manifests
// and lockfiles.
self.cacheBin = lib_core.getInput("cache-bin").toLowerCase() == "true";
self.cacheBin = core.getInput("cache-bin").toLowerCase() == "true";
// Constructs the workspace config and paths to restore:
// The workspaces are given using a `$workspace -> $target` syntax.
const workspaces = [];
const workspacesInput = lib_core.getInput("workspaces") || ".";
const workspacesInput = core.getInput("workspaces") || ".";
for (const workspace of workspacesInput.trim().split("\n")) {
let [root, target = "target"] = workspace.split("->").map((s) => s.trim());
root = external_path_default().resolve(root);
@ -86916,19 +86862,19 @@ class CacheConfig {
}
catch (e) {
// Fallback to caching them as regular file
lib_core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_manifest);
}
}
const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock");
if (await utils_exists(cargo_lock)) {
if (await exists(cargo_lock)) {
try {
const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" });
const parsed = parse(content);
if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
// Fallback to caching them as regular file since this action
// can only handle Cargo.lock format version 3
lib_core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
keyFiles.push(cargo_lock);
continue;
}
@ -86940,7 +86886,7 @@ class CacheConfig {
}
catch (e) {
// Fallback to caching them as regular file
lib_core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_lock);
}
}
@ -86965,11 +86911,11 @@ class CacheConfig {
...self.cachePaths,
];
}
const cacheTargets = lib_core.getInput("cache-targets").toLowerCase() || "true";
const cacheTargets = core.getInput("cache-targets").toLowerCase() || "true";
if (cacheTargets === "true") {
self.cachePaths.push(...workspaces.map((ws) => ws.target));
}
const cacheDirectories = lib_core.getInput("cache-directories");
const cacheDirectories = core.getInput("cache-directories");
for (const dir of cacheDirectories.trim().split(/\s+/).filter(Boolean)) {
self.cachePaths.push(dir);
}
@ -86977,7 +86923,7 @@ class CacheConfig {
self.cargoBins = Array.from(bins.values());
if (self.incremental) {
// wire the incremental key to be just for this branch
const branchName = lib_core.getInput("incremental-key") || "-shared";
const branchName = core.getInput("incremental-key") || "-shared";
const incrementalKey = key + `-incremental--` + branchName;
self.incrementalKey = incrementalKey;
if (cacheTargets === "true") {
@ -86997,7 +86943,7 @@ class CacheConfig {
* @see {@link CacheConfig#new}
*/
static fromState() {
const source = lib_core.getState(STATE_CONFIG);
const source = core.getState(STATE_CONFIG);
if (!source) {
throw new Error("Cache configuration not found in state");
}
@ -87010,47 +86956,41 @@ class CacheConfig {
* Prints the configuration to the action log.
*/
printInfo(cacheProvider) {
lib_core.startGroup("Cache Configuration");
lib_core.info(`Cache Provider:`);
lib_core.info(` ${cacheProvider.name}`);
lib_core.info(`Workspaces:`);
core.startGroup("Cache Configuration");
core.info(`Cache Provider:`);
core.info(` ${cacheProvider.name}`);
core.info(`Workspaces:`);
for (const workspace of this.workspaces) {
lib_core.info(` ${workspace.root}`);
core.info(` ${workspace.root}`);
}
lib_core.info(`Cache Paths:`);
core.info(`Cache Paths:`);
for (const path of this.cachePaths) {
lib_core.info(` ${path}`);
core.info(` ${path}`);
}
lib_core.info(`Restore Key:`);
lib_core.info(` ${this.restoreKey}`);
lib_core.info(`Cache Key:`);
lib_core.info(` ${this.cacheKey}`);
lib_core.info(`.. Prefix:`);
lib_core.info(` - ${this.keyPrefix}`);
lib_core.info(`.. Environment considered:`);
lib_core.info(` - Rust Version: ${this.keyRust}`);
core.info(`Restore Key:`);
core.info(` ${this.restoreKey}`);
core.info(`Cache Key:`);
core.info(` ${this.cacheKey}`);
core.info(`.. Prefix:`);
core.info(` - ${this.keyPrefix}`);
core.info(`.. Environment considered:`);
core.info(` - Rust Version: ${this.keyRust}`);
for (const env of this.keyEnvs) {
lib_core.info(` - ${env}`);
core.info(` - ${env}`);
}
lib_core.info(`.. Lockfiles considered:`);
core.info(`.. Lockfiles considered:`);
for (const file of this.keyFiles) {
lib_core.info(` - ${file}`);
core.info(` - ${file}`);
}
lib_core.info(`.. Incremental: ${this.incremental}`);
lib_core.endGroup();
core.info(`.. Incremental: ${this.incremental}`);
core.endGroup();
}
/**
* Saves the configuration to the state store.
* This is used to restore the configuration in the post action.
*/
saveState() {
lib_core.saveState(STATE_CONFIG, this);
}
isIncrementalMissing() {
if (this.incremental) {
return isIncrementalMissing();
}
return false;
core.saveState(STATE_CONFIG, this);
}
}
/**
@ -87059,7 +86999,7 @@ class CacheConfig {
* @returns `true` if the cache is up to date, `false` otherwise.
*/
function isCacheUpToDate() {
return lib_core.getState(STATE_CONFIG) === "";
return core.getState(STATE_CONFIG) === "";
}
/**
* Returns a hex digest of the given hasher truncated to `HASH_LENGTH`.
@ -87112,15 +87052,15 @@ function sort_and_uniq(a) {
async function cleanTargetDir(targetDir, packages, checkTimestamp) {
lib_core.debug(`cleaning target directory "${targetDir}"`);
async function cleanTargetDir(targetDir, packages, checkTimestamp = false) {
core.debug(`cleaning target directory "${targetDir}"`);
// remove all *files* from the profile directory
let dir = await external_fs_default().promises.opendir(targetDir);
for await (const dirent of dir) {
if (dirent.isDirectory()) {
let dirName = external_path_default().join(dir.path, dirent.name);
// is it a profile dir, or a nested target dir?
let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json")));
let isNestedTarget = (await exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await exists(external_path_default().join(dirName, ".rustc_info.json")));
try {
if (isNestedTarget) {
await cleanTargetDir(dirName, packages, checkTimestamp);
@ -87136,8 +87076,8 @@ async function cleanTargetDir(targetDir, packages, checkTimestamp) {
}
}
}
async function cleanProfileTarget(profileDir, packages, checkTimestamp) {
lib_core.debug(`cleaning profile directory "${profileDir}"`);
async function cleanProfileTarget(profileDir, packages, checkTimestamp = false) {
core.debug(`cleaning profile directory "${profileDir}"`);
// Quite a few testing utility crates store compilation artifacts as nested
// workspaces under `target/tests`. Notably, `target/tests/target` and
// `target/tests/trybuild`.
@ -87207,7 +87147,7 @@ async function cleanRegistry(packages, crates = true) {
// remove `.cargo/credentials.toml`
try {
const credentials = external_path_default().join(CARGO_HOME, ".cargo", "credentials.toml");
lib_core.debug(`deleting "${credentials}"`);
core.debug(`deleting "${credentials}"`);
await external_fs_default().promises.unlink(credentials);
}
catch { }
@ -87220,7 +87160,7 @@ async function cleanRegistry(packages, crates = true) {
// or `.cargo/registry/index/index.crates.io-e139d0d48fed7772`
const dirPath = external_path_default().join(indexDir.path, dirent.name);
// for a git registry, we can remove `.cache`, as cargo will recreate it from git
if (await utils_exists(external_path_default().join(dirPath, ".git"))) {
if (await exists(external_path_default().join(dirPath, ".git"))) {
await rmRF(external_path_default().join(dirPath, ".cache"));
}
else {
@ -87229,7 +87169,7 @@ async function cleanRegistry(packages, crates = true) {
}
}
if (!crates) {
lib_core.debug("skipping registry cache and src cleanup");
core.debug("skipping registry cache and src cleanup");
return;
}
// `.cargo/registry/src`
@ -87379,7 +87319,7 @@ async function rmExcept(dirName, keepPrefix, checkTimestamp = false) {
async function rm(parent, dirent) {
try {
const fileName = external_path_default().join(parent, dirent.name);
lib_core.debug(`deleting "${fileName}"`);
core.debug(`deleting "${fileName}"`);
if (dirent.isFile()) {
await external_fs_default().promises.unlink(fileName);
}
@ -87390,7 +87330,7 @@ async function rm(parent, dirent) {
catch { }
}
async function rmRF(dirName) {
lib_core.debug(`deleting "${dirName}"`);
core.debug(`deleting "${dirName}"`);
await io.rmRF(dirName);
}
@ -87404,39 +87344,39 @@ async function rmRF(dirName) {
process.on("uncaughtException", (e) => {
lib_core.error(e.message);
core.error(e.message);
if (e.stack) {
lib_core.error(e.stack);
core.error(e.stack);
}
});
async function run() {
const cacheProvider = getCacheProvider();
const save = lib_core.getInput("save-if").toLowerCase() || "true";
const save = core.getInput("save-if").toLowerCase() || "true";
if (!(cacheProvider.cache.isFeatureAvailable() && save === "true")) {
return;
}
try {
if (isCacheUpToDate()) {
lib_core.info(`Cache up-to-date.`);
core.info(`Cache up-to-date.`);
return;
}
const config = CacheConfig.fromState();
config.printInfo(cacheProvider);
lib_core.info("");
core.info("");
// TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
if (process.env["RUNNER_OS"] == "macOS") {
await macOsWorkaround();
}
// Save the incremental cache before we delete it
if (config.incremental) {
lib_core.info(`... Saving incremental cache ...`);
lib_core.debug(`paths include ${config.incrementalPaths} with key ${config.incrementalKey}`);
core.info(`... Saving incremental cache ...`);
core.debug(`paths include ${config.incrementalPaths} with key ${config.incrementalKey}`);
for (const paths of config.incrementalPaths) {
await saveIncrementalDirs(paths);
}
await cacheProvider.cache.saveCache(config.incrementalPaths.slice(), config.incrementalKey);
for (const path of config.incrementalPaths) {
lib_core.debug(` deleting ${path}`);
core.debug(` deleting ${path}`);
await (0,promises_.rm)(path);
}
}
@ -87445,38 +87385,38 @@ async function run() {
const packages = await workspace.getPackagesOutsideWorkspaceRoot();
allPackages.push(...packages);
try {
lib_core.info(`... Cleaning ${workspace.target} ...`);
core.info(`... Cleaning ${workspace.target} ...`);
await cleanTargetDir(workspace.target, packages, false);
}
catch (e) {
lib_core.debug(`${e.stack}`);
core.debug(`${e.stack}`);
}
}
try {
const crates = lib_core.getInput("cache-all-crates").toLowerCase() || "false";
lib_core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
const crates = core.getInput("cache-all-crates").toLowerCase() || "false";
core.info(`... Cleaning cargo registry (cache-all-crates: ${crates}) ...`);
await cleanRegistry(allPackages, crates !== "true");
}
catch (e) {
lib_core.debug(`${e.stack}`);
core.debug(`${e.stack}`);
}
if (config.cacheBin) {
try {
lib_core.info(`... Cleaning cargo/bin ...`);
core.info(`... Cleaning cargo/bin ...`);
await cleanBin(config.cargoBins);
}
catch (e) {
lib_core.debug(`${e.stack}`);
core.debug(`${e.stack}`);
}
}
try {
lib_core.info(`... Cleaning cargo git cache ...`);
core.info(`... Cleaning cargo git cache ...`);
await cleanGit(allPackages);
}
catch (e) {
lib_core.debug(`${e.stack}`);
core.debug(`${e.stack}`);
}
lib_core.info(`... Saving cache ...`);
core.info(`... Saving cache with key ${config.cacheKey}`);
// Pass a copy of cachePaths to avoid mutating the original array as reported by:
// https://github.com/actions/toolkit/pull/1378
// TODO: remove this once the underlying bug is fixed.
@ -87515,9 +87455,9 @@ async function saveIncrementalDirs(profileDir) {
};
await fillModifiedTimes(incrementalDir);
// Write the modified times to the incremental folder
lib_core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
for (const file of modifiedTimes.keys()) {
lib_core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
}
const contents = JSON.stringify({ modifiedTimes });
await external_fs_default().promises.writeFile(external_path_default().join(incrementalDir, "incremental-restore.json"), contents);
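The save-side counterpart above (saveIncrementalDirs) records mtimes into that same manifest before the incremental directories are cached and deleted. A rough sketch under the same manifest shape, flattened to a single directory level since fillModifiedTimes is only partially shown in this hunk (writeMtimeManifest is an illustrative name):

import * as fs from "fs";
import * as path from "path";

async function writeMtimeManifest(incrementalDir: string): Promise<void> {
  const modifiedTimes: Record<string, number> = {};
  const dir = await fs.promises.opendir(incrementalDir);
  for await (const dirent of dir) {
    if (dirent.isFile()) {
      const filePath = path.join(incrementalDir, dirent.name);
      const { mtimeMs } = await fs.promises.stat(filePath);
      modifiedTimes[dirent.name] = mtimeMs;
    }
  }
  // Written next to the artifacts so the restore step can re-apply the
  // recorded mtimes after the cache is unpacked.
  const contents = JSON.stringify({ modifiedTimes });
  await fs.promises.writeFile(path.join(incrementalDir, "incremental-restore.json"), contents);
}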

src/cleanup.ts

@ -7,7 +7,7 @@ import { CARGO_HOME } from "./config";
import { exists } from "./utils";
import { Packages } from "./workspace";
export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp: boolean) {
export async function cleanTargetDir(targetDir: string, packages: Packages, checkTimestamp = false) {
core.debug(`cleaning target directory "${targetDir}"`);
// remove all *files* from the profile directory
@ -32,7 +32,7 @@ export async function cleanTargetDir(targetDir: string, packages: Packages, chec
}
}
async function cleanProfileTarget(profileDir: string, packages: Packages, checkTimestamp: boolean) {
async function cleanProfileTarget(profileDir: string, packages: Packages, checkTimestamp = false) {
core.debug(`cleaning profile directory "${profileDir}"`);
// Quite a few testing utility crates store compilation artifacts as nested
@ -51,12 +51,11 @@ async function cleanProfileTarget(profileDir: string, packages: Packages, checkT
// Delete everything else.
await rmExcept(profileDir, new Set(["target", "trybuild"]), checkTimestamp);
return;
}
let keepProfile = new Set(["build", ".fingerprint", "deps"]);
await rmExcept(profileDir, keepProfile);
const keepPkg = new Set(packages.map((p) => p.name));
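The signature change above turns the required checkTimestamp parameter into a default of false, so timestamp-aware cleanup stays opt-in while callers may now omit the argument. Call shapes, taken from the diffs above (the explicit true call is illustrative):

// Before: every caller had to pass the flag explicitly.
await cleanTargetDir(workspace.target, packages, false);

// After: omitting the argument behaves the same as passing false;
// timestamp-aware cleanup is requested explicitly where needed.
await cleanTargetDir(workspace.target, packages);
await cleanTargetDir(workspace.target, packages, true);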

src/config.ts

@ -10,7 +10,6 @@ import * as toml from "smol-toml";
import { getCargoBins } from "./cleanup";
import { CacheProvider, exists, getCmdOutput } from "./utils";
import { Workspace } from "./workspace";
import { isIncrementalMissing } from "./incremental";
const HOME = os.homedir();
export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");
@ -365,14 +364,6 @@ export class CacheConfig {
saveState() {
core.saveState(STATE_CONFIG, this);
}
isIncrementalMissing(): boolean {
if (this.incremental) {
return isIncrementalMissing();
}
return false;
}
}
/**

src/incremental.ts

@ -7,12 +7,6 @@ import path from "path";
import { exists } from "./utils";
// import { Packages } from "./workspace";
let incremental_missing = false;
export function isIncrementalMissing(): boolean {
return incremental_missing;
}
export async function restoreIncremental(targetDir: string) {
core.debug(`restoring incremental directory "${targetDir}"`);
@ -52,6 +46,5 @@ async function restoreIncrementalProfile(dirName: string) {
}
} else {
core.debug(`incremental-restore.json not found for ${dirName}`);
incremental_missing = true;
}
}

src/restore.ts

@ -39,12 +39,12 @@ async function run() {
core.info(`... ${lookupOnly ? "Checking" : "Restoring"} cache ...`);
const key = config.cacheKey;
// Pass a copy of cachePaths to avoid mutating the original array as reported by:
// https://github.com/actions/toolkit/pull/1378
// TODO: remove this once the underlying bug is fixed.
const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], { lookupOnly });
const restoreKey = await cacheProvider.cache.restoreCache(config.cachePaths.slice(), key, [config.restoreKey], {
lookupOnly,
});
if (restoreKey) {
const match = restoreKey === key;
core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);
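The reformatted call above keeps two details worth noting: config.cachePaths is passed as a copy via .slice() because restoreCache mutates the array it receives (per the actions/toolkit#1378 workaround noted in the comment), and lookupOnly makes restoreCache only probe for a matching key without downloading the archive. The call shape, annotated (cacheProvider wiring as in the surrounding code):

const restoreKey = await cacheProvider.cache.restoreCache(
  config.cachePaths.slice(), // defensive copy: restoreCache mutates its paths argument
  key,
  [config.restoreKey],
  { lookupOnly }, // check for a matching cache entry without restoring it
);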

src/save.ts

@ -89,7 +89,7 @@ async function run() {
core.debug(`${(e as any).stack}`);
}
core.info(`... Saving cache ...`);
core.info(`... Saving cache with key ${config.cacheKey}`);
// Pass a copy of cachePaths to avoid mutating the original array as reported by:
// https://github.com/actions/toolkit/pull/1378
// TODO: remove this once the underlying bug is fixed.