mirror of https://github.com/Swatinem/rust-cache, synced 2025-06-27 02:28:48 +00:00
save before clearing
This commit is contained in:
parent
8c8c35255e
commit
6f58383b83
4 changed files with 86 additions and 105 deletions
28  dist/restore/index.js (vendored)
@@ -87160,34 +87160,6 @@ async function cleanProfileTarget(profileDir, packages, checkTimestamp, incremen
         return;
     }
     let keepProfile = new Set(["build", ".fingerprint", "deps"]);
-    // Keep the incremental folder if incremental builds are enabled
-    if (incremental) {
-        keepProfile.add("incremental");
-        // Traverse the incremental folder recursively and collect the modified times in a map
-        const incrementalDir = external_path_default().join(profileDir, "incremental");
-        const modifiedTimes = new Map();
-        const fillModifiedTimes = async (dir) => {
-            const dirEntries = await external_fs_default().promises.opendir(dir);
-            for await (const dirent of dirEntries) {
-                if (dirent.isDirectory()) {
-                    await fillModifiedTimes(external_path_default().join(dir, dirent.name));
-                }
-                else {
-                    const fileName = external_path_default().join(dir, dirent.name);
-                    const { mtime } = await external_fs_default().promises.stat(fileName);
-                    modifiedTimes.set(fileName, mtime.getTime());
-                }
-            }
-        };
-        await fillModifiedTimes(incrementalDir);
-        // Write the modified times to the incremental folder
-        lib_core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
-        for (const file of modifiedTimes.keys()) {
-            lib_core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
-        }
-        const contents = JSON.stringify({ modifiedTimes });
-        await external_fs_default().promises.writeFile(external_path_default().join(incrementalDir, "incremental-restore.json"), contents);
-    }
     await rmExcept(profileDir, keepProfile);
     const keepPkg = new Set(packages.map((p) => p.name));
     await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp);
78  dist/save/index.js (vendored)
@@ -87160,34 +87160,6 @@ async function cleanProfileTarget(profileDir, packages, checkTimestamp, incremen
         return;
     }
     let keepProfile = new Set(["build", ".fingerprint", "deps"]);
-    // Keep the incremental folder if incremental builds are enabled
-    if (incremental) {
-        keepProfile.add("incremental");
-        // Traverse the incremental folder recursively and collect the modified times in a map
-        const incrementalDir = external_path_default().join(profileDir, "incremental");
-        const modifiedTimes = new Map();
-        const fillModifiedTimes = async (dir) => {
-            const dirEntries = await external_fs_default().promises.opendir(dir);
-            for await (const dirent of dirEntries) {
-                if (dirent.isDirectory()) {
-                    await fillModifiedTimes(external_path_default().join(dir, dirent.name));
-                }
-                else {
-                    const fileName = external_path_default().join(dir, dirent.name);
-                    const { mtime } = await external_fs_default().promises.stat(fileName);
-                    modifiedTimes.set(fileName, mtime.getTime());
-                }
-            }
-        };
-        await fillModifiedTimes(incrementalDir);
-        // Write the modified times to the incremental folder
-        lib_core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
-        for (const file of modifiedTimes.keys()) {
-            lib_core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
-        }
-        const contents = JSON.stringify({ modifiedTimes });
-        await external_fs_default().promises.writeFile(external_path_default().join(incrementalDir, "incremental-restore.json"), contents);
-    }
     await rmExcept(profileDir, keepProfile);
     const keepPkg = new Set(packages.map((p) => p.name));
     await rmExcept(external_path_default().join(profileDir, "build"), keepPkg, checkTimestamp);

@@ -87431,6 +87403,8 @@ async function rmRF(dirName) {
 
 
 
+
+
 process.on("uncaughtException", (e) => {
     lib_core.error(e.message);
     if (e.stack) {

@@ -87455,6 +87429,19 @@ async function run() {
         if (process.env["RUNNER_OS"] == "macOS") {
             await macOsWorkaround();
         }
+        // Save the incremental cache before we delete it
+        if (config.incremental) {
+            lib_core.info(`... Saving incremental cache ...`);
+            lib_core.debug(`paths include ${config.incrementalPaths} with key ${config.incrementalKey}`);
+            for (const paths of config.incrementalPaths) {
+                await saveIncrementalDirs(paths);
+            }
+            await cacheProvider.cache.saveCache(config.incrementalPaths.slice(), config.incrementalKey);
+            for (const path of config.incrementalPaths) {
+                lib_core.debug(` deleting ${path}`);
+                await (0,promises_.rm)(path);
+            }
+        }
         const allPackages = [];
         for (const workspace of config.workspaces) {
             const packages = await workspace.getPackagesOutsideWorkspaceRoot();

@@ -87491,15 +87478,6 @@ async function run() {
         catch (e) {
             lib_core.debug(`${e.stack}`);
         }
-        // Save the incremental cache before we delete it
-        if (config.incremental) {
-            lib_core.info(`... Saving incremental cache ...`);
-            await cacheProvider.cache.saveCache(config.incrementalPaths.slice(), config.incrementalKey);
-            for (const path of config.incrementalPaths) {
-                lib_core.debug(` deleting ${path}`);
-                await (0,promises_.rm)(path);
-            }
-        }
         lib_core.info(`... Saving cache ...`);
         // Pass a copy of cachePaths to avoid mutating the original array as reported by:
         // https://github.com/actions/toolkit/pull/1378

@@ -87520,6 +87498,32 @@ async function macOsWorkaround() {
     }
     catch { }
 }
+async function saveIncrementalDirs(profileDir) {
+    // Traverse the incremental folder recursively and collect the modified times in a map
+    const incrementalDir = external_path_default().join(profileDir, "incremental");
+    const modifiedTimes = new Map();
+    const fillModifiedTimes = async (dir) => {
+        const dirEntries = await external_fs_default().promises.opendir(dir);
+        for await (const dirent of dirEntries) {
+            if (dirent.isDirectory()) {
+                await fillModifiedTimes(external_path_default().join(dir, dirent.name));
+            }
+            else {
+                const fileName = external_path_default().join(dir, dirent.name);
+                const { mtime } = await external_fs_default().promises.stat(fileName);
+                modifiedTimes.set(fileName, mtime.getTime());
+            }
+        }
+    };
+    await fillModifiedTimes(incrementalDir);
+    // Write the modified times to the incremental folder
+    lib_core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
+    for (const file of modifiedTimes.keys()) {
+        lib_core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
+    }
+    const contents = JSON.stringify({ modifiedTimes });
+    await external_fs_default().promises.writeFile(external_path_default().join(incrementalDir, "incremental-restore.json"), contents);
+}
 
 })();
 
30  src/cleanup.ts

@@ -57,36 +57,6 @@ async function cleanProfileTarget(profileDir: string, packages: Packages, checkT
 
     let keepProfile = new Set(["build", ".fingerprint", "deps"]);
 
-    // Keep the incremental folder if incremental builds are enabled
-    if (incremental) {
-        keepProfile.add("incremental");
-
-        // Traverse the incremental folder recursively and collect the modified times in a map
-        const incrementalDir = path.join(profileDir, "incremental");
-        const modifiedTimes = new Map<string, number>();
-        const fillModifiedTimes = async (dir: string) => {
-            const dirEntries = await fs.promises.opendir(dir);
-            for await (const dirent of dirEntries) {
-                if (dirent.isDirectory()) {
-                    await fillModifiedTimes(path.join(dir, dirent.name));
-                } else {
-                    const fileName = path.join(dir, dirent.name);
-                    const { mtime } = await fs.promises.stat(fileName);
-                    modifiedTimes.set(fileName, mtime.getTime());
-                }
-            }
-        };
-        await fillModifiedTimes(incrementalDir);
-
-        // Write the modified times to the incremental folder
-        core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
-        for (const file of modifiedTimes.keys()) {
-            core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
-        }
-        const contents = JSON.stringify({ modifiedTimes });
-        await fs.promises.writeFile(path.join(incrementalDir, "incremental-restore.json"), contents);
-    }
-
 
     await rmExcept(profileDir, keepProfile);
 
55  src/save.ts
@@ -5,6 +5,8 @@ import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
 import { CacheConfig, isCacheUpToDate } from "./config";
 import { getCacheProvider, reportError } from "./utils";
 import { rm } from "fs/promises";
+import fs from "fs";
+import path from "path";
 
 process.on("uncaughtException", (e) => {
     core.error(e.message);

@@ -37,6 +39,20 @@ async function run() {
             await macOsWorkaround();
         }
 
+        // Save the incremental cache before we delete it
+        if (config.incremental) {
+            core.info(`... Saving incremental cache ...`);
+            core.debug(`paths include ${config.incrementalPaths} with key ${config.incrementalKey}`);
+            for (const paths of config.incrementalPaths) {
+                await saveIncrementalDirs(paths);
+            }
+            await cacheProvider.cache.saveCache(config.incrementalPaths.slice(), config.incrementalKey);
+            for (const path of config.incrementalPaths) {
+                core.debug(` deleting ${path}`);
+                await rm(path);
+            }
+        }
+
         const allPackages = [];
         for (const workspace of config.workspaces) {
             const packages = await workspace.getPackagesOutsideWorkspaceRoot();

@@ -73,16 +89,6 @@ async function run() {
             core.debug(`${(e as any).stack}`);
         }
 
-        // Save the incremental cache before we delete it
-        if (config.incremental) {
-            core.info(`... Saving incremental cache ...`);
-            await cacheProvider.cache.saveCache(config.incrementalPaths.slice(), config.incrementalKey);
-            for (const path of config.incrementalPaths) {
-                core.debug(` deleting ${path}`);
-                await rm(path);
-            }
-        }
-
         core.info(`... Saving cache ...`);
         // Pass a copy of cachePaths to avoid mutating the original array as reported by:
         // https://github.com/actions/toolkit/pull/1378

@@ -104,3 +110,32 @@ async function macOsWorkaround() {
         await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true });
     } catch { }
 }
+
+
+async function saveIncrementalDirs(profileDir: string) {
+    // Traverse the incremental folder recursively and collect the modified times in a map
+    const incrementalDir = path.join(profileDir, "incremental");
+    const modifiedTimes = new Map<string, number>();
+    const fillModifiedTimes = async (dir: string) => {
+        const dirEntries = await fs.promises.opendir(dir);
+        for await (const dirent of dirEntries) {
+            if (dirent.isDirectory()) {
+                await fillModifiedTimes(path.join(dir, dirent.name));
+            } else {
+                const fileName = path.join(dir, dirent.name);
+                const { mtime } = await fs.promises.stat(fileName);
+                modifiedTimes.set(fileName, mtime.getTime());
+            }
+        }
+    };
+    await fillModifiedTimes(incrementalDir);
+
+    // Write the modified times to the incremental folder
+    core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
+    for (const file of modifiedTimes.keys()) {
+        core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
+    }
+    const contents = JSON.stringify({ modifiedTimes });
+    await fs.promises.writeFile(path.join(incrementalDir, "incremental-restore.json"), contents);
+
+}
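The diff above only records the incremental state at save time and uploads it under config.incrementalKey; the restore-side consumer of incremental-restore.json is not part of this commit. As a rough sketch of what such a consumer could look like, assuming the file ends up holding a plain fileName to epoch-milliseconds map and presumably so that cargo's timestamp-based dirty checking still treats the restored incremental artifacts as fresh (the helper name restoreIncrementalDirs is hypothetical and not from this commit):

import fs from "fs";
import path from "path";

// Hypothetical restore-side counterpart to saveIncrementalDirs: read the
// incremental-restore.json written at save time and re-apply the recorded
// mtimes to the unpacked incremental files.
// Assumes a plain { [fileName]: mtimeMs } object; note that
// JSON.stringify({ modifiedTimes }) serializes a Map as {}, so the save side
// would need something like Object.fromEntries(modifiedTimes) for this shape
// to round-trip.
async function restoreIncrementalDirs(profileDir: string) {
    const incrementalDir = path.join(profileDir, "incremental");
    const contents = await fs.promises.readFile(
        path.join(incrementalDir, "incremental-restore.json"),
        "utf8",
    );
    const { modifiedTimes } = JSON.parse(contents) as { modifiedTimes: Record<string, number> };
    for (const [fileName, mtimeMs] of Object.entries(modifiedTimes)) {
        const mtime = new Date(mtimeMs);
        // utimes takes (path, atime, mtime); reuse the recorded mtime for both.
        await fs.promises.utimes(fileName, mtime, mtime);
    }
}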