Mirror of https://github.com/Swatinem/rust-cache, synced 2025-04-13 07:48:17 +00:00
cache the roots not the files
commit 6095cc4363
parent 01addf7215
5  dist/restore/index.js (vendored)
@@ -87381,7 +87381,10 @@ async function run() {
             const restoreJson = external_path_default().join(config_CARGO_HOME, "incremental-restore.json");
             const restoreString = await external_fs_default().promises.readFile(restoreJson, "utf8");
             const restoreData = JSON.parse(restoreString);
-            for (const [file, mtime] of Object.entries(restoreData)) {
+            const incrementalKey = await cacheProvider.cache.restoreCache(restoreData.roots, config.incrementalKey, [config.restoreKey], { lookupOnly });
+            lib_core.debug(`restoring incremental builds from ${incrementalKey}`);
+            for (const [file, mtime] of Object.entries(restoreData.times)) {
                 lib_core.debug(`restoring ${file} with mtime ${mtime}`);
                 await external_fs_default().promises.utimes(file, new Date(mtime), new Date(mtime));
             }
         }

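For orientation, a minimal sketch of the incremental-restore.json shape the restore path now consumes and how the recorded mtimes are replayed. It uses plain fs/path instead of the bundled external_* aliases; the field names roots and times come from the diff, while the type and function names and the assumption that times arrives as a plain path-to-milliseconds object after JSON.parse are illustrative.

import fs from "fs";
import path from "path";

// Assumed on-disk shape of $CARGO_HOME/incremental-restore.json after this change.
type RestoreData = {
    roots: string[];                // incremental dirs that were cached directly
    times: Record<string, number>;  // file path -> mtime in milliseconds
};

// Re-apply the recorded mtimes once the cached roots have been unpacked.
async function replayMtimes(cargoHome: string): Promise<void> {
    const raw = await fs.promises.readFile(path.join(cargoHome, "incremental-restore.json"), "utf8");
    const data: RestoreData = JSON.parse(raw);
    for (const [file, mtime] of Object.entries(data.times)) {
        const stamp = new Date(mtime);
        await fs.promises.utimes(file, stamp, stamp);
    }
}
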
30  dist/save/index.js (vendored)
@@ -87336,12 +87336,21 @@ async function rmRF(dirName) {
 // import * as io from "@actions/io";
 
 
 // import { CARGO_HOME } from "./config";
 // import { exists } from "./utils";
 // import { Packages } from "./workspace";
 async function saveMtimes(targetDirs) {
-    let cache = new Map();
-    let stack = targetDirs.slice();
+    let times = new Map();
+    let stack = new Array();
+    // Collect all the incremental files
+    for (const dir of targetDirs) {
+        for (const maybeProfile of await external_fs_default().promises.readdir(dir)) {
+            const profileDir = external_path_default().join(dir, maybeProfile);
+            const incrementalDir = external_path_default().join(profileDir, "incremental");
+            if (external_fs_default().existsSync(incrementalDir)) {
+                stack.push(incrementalDir);
+            }
+        }
+    }
+    // Save the stack as the roots - we cache these directly
+    let roots = stack.slice();
     while (stack.length > 0) {
         const dirName = stack.pop();
         const dir = await external_fs_default().promises.opendir(dirName);

@@ -87352,11 +87361,11 @@ async function saveMtimes(targetDirs) {
             else {
                 const fileName = external_path_default().join(dirName, dirent.name);
                 const { mtime } = await external_fs_default().promises.stat(fileName);
-                cache.set(fileName, mtime.getTime());
+                times.set(fileName, mtime.getTime());
             }
         }
     }
-    return cache;
+    return { roots, times: times };
 }
 
 ;// CONCATENATED MODULE: ./src/save.ts

@@ -87399,14 +87408,13 @@ async function run() {
     try {
         const targetDirs = config.workspaces.map((ws) => ws.target);
         const cache = await saveMtimes(targetDirs);
-        const paths = Array.from(cache.keys());
-        const saved = await cacheProvider.cache.saveCache(paths, config.incrementalKey);
-        core.debug(`saved incremental cache with key ${saved} with contents ${paths}`);
+        const saved = await cacheProvider.cache.saveCache(cache.roots, config.incrementalKey);
+        core.debug(`saved incremental cache with key ${saved} with contents ${cache.roots}, ${cache.times}`);
+        // write the incremental-restore.json file
+        const serialized = JSON.stringify(cache);
+        await external_fs_default().promises.writeFile(external_path_default().join(CARGO_HOME, "incremental-restore.json"), serialized);
         // Delete the incremental cache before proceeding
-        for (const [path, _mtime] of cache) {
+        for (const [path, _mtime] of cache.roots) {
             core.debug(` deleting ${path}`);
             await (0,promises_.rm)(path);
         }

src/incremental.ts

@@ -7,9 +7,28 @@ import path from "path";
 // import { exists } from "./utils";
 // import { Packages } from "./workspace";
 
-export async function saveMtimes(targetDirs: string[]): Promise<Map<string, number>> {
-    let cache = new Map<string, number>();
-    let stack = targetDirs.slice();
+export type MtimeData = {
+    roots: Array<string>,
+    times: Map<string, number>
+};
+
+export async function saveMtimes(targetDirs: string[]): Promise<MtimeData> {
+    let times = new Map<string, number>();
+    let stack = new Array<string>();
+
+    // Collect all the incremental files
+    for (const dir of targetDirs) {
+        for (const maybeProfile of await fs.promises.readdir(dir)) {
+            const profileDir = path.join(dir, maybeProfile);
+            const incrementalDir = path.join(profileDir, "incremental");
+            if (fs.existsSync(incrementalDir)) {
+                stack.push(incrementalDir);
+            }
+        }
+    }
+
+    // Save the stack as the roots - we cache these directly
+    let roots = stack.slice();
+
     while (stack.length > 0) {
         const dirName = stack.pop()!;

@@ -21,10 +40,10 @@ export async function saveMtimes(targetDirs: string[]): Promise<Map<string, number>> {
             } else {
                 const fileName = path.join(dirName, dirent.name);
                 const { mtime } = await fs.promises.stat(fileName);
-                cache.set(fileName, mtime.getTime());
+                times.set(fileName, mtime.getTime());
             }
         }
     }
 
-    return cache;
+    return { roots, times: times };
 }

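To make "cache the roots, not the files" concrete, a rough usage sketch of the new saveMtimes for a typical Cargo layout, where incremental artifacts live under <target>/<profile>/incremental. Only the MtimeData shape comes from the change above; the paths, crate name, and the example function are made up for illustration.

import { saveMtimes } from "./incremental";

async function example() {
    // Hypothetical workspace with a single target dir and a debug profile.
    const data = await saveMtimes(["/home/runner/work/app/target"]);

    // data.roots would then hold the per-profile incremental directories, e.g.
    //   ["/home/runner/work/app/target/debug/incremental"]
    // and these are the paths handed to the cache backend to archive.

    // data.times maps every file found under those roots to its mtime in ms, e.g.
    //   "/home/runner/work/app/target/debug/incremental/app-1a2b3c/dep-graph.bin" -> 1713000000000
    // and is only used later to restore timestamps after the archive is unpacked.
    return data;
}
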
src/restore.ts

@@ -6,6 +6,7 @@ import { getCacheProvider, reportError } from "./utils";
 // import { saveMtimes } from "./incremental";
 import path from "path";
 import fs from "fs";
+import { MtimeData } from "./incremental";
 
 process.on("uncaughtException", (e) => {
     core.error(e.message);

@@ -54,12 +55,18 @@ async function run() {
     // Restore the incremental-restore.json file and write the mtimes to all the files in the list
     if (config.incremental) {
         try {
-            const restoreJson = path.join(CARGO_HOME, "incremental-restore.json")
+            const restoreJson = path.join(CARGO_HOME, "incremental-restore.json");
             const restoreString = await fs.promises.readFile(restoreJson, "utf8");
-            const restoreData: Map<String, number> = JSON.parse(restoreString);
-            for (const [file, mtime] of Object.entries(restoreData)) {
+            const restoreData: MtimeData = JSON.parse(restoreString);
+
+            const incrementalKey = await cacheProvider.cache.restoreCache(restoreData.roots, config.incrementalKey, [config.restoreKey], { lookupOnly });
+            core.debug(`restoring incremental builds from ${incrementalKey}`);
+
+            for (const [file, mtime] of Object.entries(restoreData.times)) {
                 core.debug(`restoring ${file} with mtime ${mtime}`);
                 await fs.promises.utimes(file, new Date(mtime), new Date(mtime));
             }
+
         } catch (err) {
             core.debug(`Could not restore incremental cache - ${err}`);
             core.debug(`${(err as any).stack}`);

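The restore above goes through the action's cacheProvider wrapper and also forwards a lookupOnly download option; against the plain @actions/cache API the same lookup looks roughly like this. The helper and parameter names are placeholders, not part of the action.

import * as cache from "@actions/cache";

// Restore the archived incremental roots; restoreCache resolves to the matched
// cache key on a hit and to undefined on a miss.
async function restoreIncremental(roots: string[], primaryKey: string, restoreKey: string): Promise<string | undefined> {
    return cache.restoreCache(roots, primaryKey, [restoreKey]);
}
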
src/save.ts

@@ -46,16 +46,15 @@ async function run() {
     try {
         const targetDirs = config.workspaces.map((ws) => ws.target);
         const cache = await saveMtimes(targetDirs);
-        const paths = Array.from(cache.keys());
-        const saved = await cacheProvider.cache.saveCache(paths, config.incrementalKey);
-        core.debug(`saved incremental cache with key ${saved} with contents ${paths}`);
+        const saved = await cacheProvider.cache.saveCache(cache.roots, config.incrementalKey);
+        core.debug(`saved incremental cache with key ${saved} with contents ${cache.roots}, ${cache.times}`);
+
+        // write the incremental-restore.json file
+        const serialized = JSON.stringify(cache);
+        await fs.promises.writeFile(path.join(CARGO_HOME, "incremental-restore.json"), serialized);
+
         // Delete the incremental cache before proceeding
-        for (const [path, _mtime] of cache) {
+        for (const [path, _mtime] of cache.roots) {
             core.debug(` deleting ${path}`);
             await rm(path);
         }

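Putting the save side together: the new flow archives only the root directories, writes the root list plus the mtime table to incremental-restore.json under CARGO_HOME, and then deletes the roots before proceeding. A condensed sketch under two assumptions not made by the diff itself: times is kept as a plain object so it survives JSON.stringify, and the roots are removed recursively (the diff stores a Map and calls a bare rm(path)).

import fs from "fs";
import path from "path";
import * as cache from "@actions/cache";

type MtimeData = {
    roots: string[];
    times: Record<string, number>;  // plain object here so JSON round-trips it (assumption)
};

async function saveIncremental(data: MtimeData, key: string, cargoHome: string): Promise<void> {
    // 1. Archive the incremental root directories under the incremental key.
    await cache.saveCache(data.roots, key);

    // 2. Persist roots + mtimes so the restore job can replay timestamps.
    const restoreJson = path.join(cargoHome, "incremental-restore.json");
    await fs.promises.writeFile(restoreJson, JSON.stringify(data));

    // 3. Delete the roots before proceeding (recursive removal is an assumption here).
    for (const root of data.roots) {
        await fs.promises.rm(root, { recursive: true, force: true });
    }
}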