Mirror of https://github.com/Swatinem/rust-cache, synced 2025-04-10 22:57:07 +00:00
new restore technique

commit 01addf7215 (parent 64b8867183)
dist/restore/index.js (vendored), 75 changed lines
@@ -86712,7 +86712,7 @@ class CacheConfig {
        /** All the paths we want to cache */
        this.cachePaths = [];
        /** All the paths we want to cache for incremental builds */
        this.incrementalPaths = [];
        // public incrementalPaths: Array<string> = [];
        /** The primary cache key */
        this.cacheKey = "";
        /** The primary cache key for incremental builds */
@@ -86926,11 +86926,8 @@ class CacheConfig {
            const branchName = lib_core.getInput("incremental-key") || "-shared";
            const incrementalKey = key + `-incremental--` + branchName;
            self.incrementalKey = incrementalKey;
            if (cacheTargets === "true") {
                for (const target of self.workspaces.map((ws) => ws.target)) {
                    self.incrementalPaths.push(external_path_default().join(target, "incremental"));
                }
            }
            // Add the incremental cache to the cachePaths so we can restore it
            self.cachePaths.push(external_path_default().join(config_CARGO_HOME, "incremental-restore.json"));
        }
        return self;
    }
@@ -87334,59 +87331,13 @@ async function rmRF(dirName) {
    await io.rmRF(dirName);
}

;// CONCATENATED MODULE: ./src/incremental.ts

// import * as io from "@actions/io";


// import { CARGO_HOME } from "./config";

// import { Packages } from "./workspace";
async function restoreIncremental(targetDir) {
    lib_core.debug(`restoring incremental directory "${targetDir}"`);
    let dir = await external_fs_default().promises.opendir(targetDir);
    for await (const dirent of dir) {
        if (dirent.isDirectory()) {
            let dirName = external_path_default().join(dir.path, dirent.name);
            // is it a profile dir, or a nested target dir?
            let isNestedTarget = (await utils_exists(external_path_default().join(dirName, "CACHEDIR.TAG"))) || (await utils_exists(external_path_default().join(dirName, ".rustc_info.json")));
            try {
                if (isNestedTarget) {
                    await restoreIncremental(dirName);
                }
                else {
                    await restoreIncrementalProfile(dirName);
                }
                restoreIncrementalProfile;
            }
            catch { }
        }
    }
}
async function restoreIncrementalProfile(dirName) {
    lib_core.debug(`restoring incremental profile directory "${dirName}"`);
    const incrementalJson = external_path_default().join(dirName, "incremental-restore.json");
    if (await utils_exists(incrementalJson)) {
        const contents = await external_fs_default().promises.readFile(incrementalJson, "utf8");
        const { modifiedTimes } = JSON.parse(contents);
        lib_core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`);
        // Write the mtimes to all the files in the profile directory
        for (const fileName of Object.keys(modifiedTimes)) {
            const mtime = modifiedTimes[fileName];
            const filePath = external_path_default().join(dirName, fileName);
            await external_fs_default().promises.utimes(filePath, new Date(mtime), new Date(mtime));
        }
    }
    else {
        lib_core.debug(`incremental-restore.json not found for ${dirName}`);
    }
}

;// CONCATENATED MODULE: ./src/restore.ts


// import { saveMtimes } from "./incremental";


process.on("uncaughtException", (e) => {
    lib_core.error(e.message);
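This hunk shrinks from 59 lines to 13: the recursive restoreIncremental / restoreIncrementalProfile helpers are dropped from the restore bundle. For context, here is a minimal sketch of the per-profile metadata file the old code consumed; the modifiedTimes field name comes from the code above, while the concrete file names and timestamps are invented:

// Sketch of the per-profile incremental-restore.json the old technique read.
// Keys are file names inside a profile directory, values are mtimes in ms.
interface IncrementalRestore {
  modifiedTimes: Record<string, number>;
}

const example: IncrementalRestore = {
  modifiedTimes: {
    "query-cache.bin": 1712789000000, // hypothetical entries
    "dep-graph.bin": 1712789001000,
  },
};
// Restoring was then one utimes() call per entry, as in the removed
// restoreIncrementalProfile above.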
@@ -87424,14 +87375,20 @@ async function run() {
        if (restoreKey) {
            const match = restoreKey === key;
            lib_core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);
            // Restore the incremental-restore.json file and write the mtimes to all the files in the list
            if (config.incremental) {
                const incrementalKey = await cacheProvider.cache.restoreCache(config.incrementalPaths.slice(), config.incrementalKey, [config.restoreKey], { lookupOnly });
                lib_core.debug(`restoring incremental builds from ${incrementalKey}`);
                if (incrementalKey) {
                    for (const workspace of config.workspaces) {
                        await restoreIncremental(workspace.target);
                        try {
                            const restoreJson = external_path_default().join(config_CARGO_HOME, "incremental-restore.json");
                            const restoreString = await external_fs_default().promises.readFile(restoreJson, "utf8");
                            const restoreData = JSON.parse(restoreString);
                            for (const [file, mtime] of Object.entries(restoreData)) {
                                await external_fs_default().promises.utimes(file, new Date(mtime), new Date(mtime));
                            }
                        }
                        catch (err) {
                            lib_core.debug(`Could not restore incremental cache - ${err}`);
                            lib_core.debug(`${err.stack}`);
                        }
                    }
                }
            if (!match) {
                // pre-clean the target directory on cache mismatch
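The new technique replaces the per-profile walk: restore the incremental paths from the cache, then replay one global file-to-mtime map stored under CARGO_HOME. A condensed sketch under those assumptions (the cache client type is assumed here, error handling trimmed):

import fs from "fs";
import path from "path";

// Condensed sketch of the new restore path above; `cache` stands in for
// cacheProvider.cache and `cargoHome` for CARGO_HOME.
async function restoreIncrementalSketch(
  cache: { restoreCache(paths: string[], key: string, restoreKeys: string[], opts: { lookupOnly: boolean }): Promise<string | undefined> },
  incrementalPaths: string[],
  incrementalKey: string,
  restoreKey: string,
  lookupOnly: boolean,
  cargoHome: string,
) {
  const restored = await cache.restoreCache(incrementalPaths.slice(), incrementalKey, [restoreKey], { lookupOnly });
  if (!restored) return;
  // The save step recorded file -> mtime (ms) pairs; replay them so cargo
  // treats the restored incremental artifacts as fresh.
  const raw = await fs.promises.readFile(path.join(cargoHome, "incremental-restore.json"), "utf8");
  const mtimes: Record<string, number> = JSON.parse(raw);
  for (const [file, mtime] of Object.entries(mtimes)) {
    await fs.promises.utimes(file, new Date(mtime), new Date(mtime));
  }
}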
dist/save/index.js (vendored), 105 changed lines
@@ -86712,7 +86712,7 @@ class CacheConfig {
        /** All the paths we want to cache */
        this.cachePaths = [];
        /** All the paths we want to cache for incremental builds */
        this.incrementalPaths = [];
        // public incrementalPaths: Array<string> = [];
        /** The primary cache key */
        this.cacheKey = "";
        /** The primary cache key for incremental builds */
@@ -86926,11 +86926,8 @@ class CacheConfig {
            const branchName = core.getInput("incremental-key") || "-shared";
            const incrementalKey = key + `-incremental--` + branchName;
            self.incrementalKey = incrementalKey;
            if (cacheTargets === "true") {
                for (const target of self.workspaces.map((ws) => ws.target)) {
                    self.incrementalPaths.push(external_path_default().join(target, "incremental"));
                }
            }
            // Add the incremental cache to the cachePaths so we can restore it
            self.cachePaths.push(external_path_default().join(CARGO_HOME, "incremental-restore.json"));
        }
        return self;
    }
@@ -87334,6 +87331,34 @@ async function rmRF(dirName) {
    await io.rmRF(dirName);
}

;// CONCATENATED MODULE: ./src/incremental.ts
// import * as core from "@actions/core";
// import * as io from "@actions/io";


// import { CARGO_HOME } from "./config";
// import { exists } from "./utils";
// import { Packages } from "./workspace";
async function saveMtimes(targetDirs) {
    let cache = new Map();
    let stack = targetDirs.slice();
    while (stack.length > 0) {
        const dirName = stack.pop();
        const dir = await external_fs_default().promises.opendir(dirName);
        for await (const dirent of dir) {
            if (dirent.isDirectory()) {
                stack.push(external_path_default().join(dirName, dirent.name));
            }
            else {
                const fileName = external_path_default().join(dirName, dirent.name);
                const { mtime } = await external_fs_default().promises.stat(fileName);
                cache.set(fileName, mtime.getTime());
            }
        }
    }
    return cache;
}

;// CONCATENATED MODULE: ./src/save.ts


@@ -87343,6 +87368,7 @@ async function rmRF(dirName) {



process.on("uncaughtException", (e) => {
    core.error(e.message);
    if (e.stack) {
@@ -87371,12 +87397,16 @@ async function run() {
        if (config.incremental) {
            core.info(`... Saving incremental cache ...`);
            try {
                core.debug(`paths include ${config.incrementalPaths} with key ${config.incrementalKey}`);
                for (const paths of config.incrementalPaths) {
                    await saveIncrementalDirs(paths);
                }
                await cacheProvider.cache.saveCache(config.incrementalPaths.slice(), config.incrementalKey);
                for (const path of config.incrementalPaths) {
                    const targetDirs = config.workspaces.map((ws) => ws.target);
                    const cache = await saveMtimes(targetDirs);
                    const paths = Array.from(cache.keys());
                    const saved = await cacheProvider.cache.saveCache(paths, config.incrementalKey);
                    core.debug(`saved incremental cache with key ${saved} with contents ${paths}`);
                    // write the incremental-restore.json file
                    const serialized = JSON.stringify(cache);
                    await external_fs_default().promises.writeFile(external_path_default().join(CARGO_HOME, "incremental-restore.json"), serialized);
                    // Delete the incremental cache before proceeding
                    for (const [path, _mtime] of cache) {
                        core.debug(` deleting ${path}`);
                        await (0,promises_.rm)(path);
                    }
@@ -87392,7 +87422,7 @@ async function run() {
            allPackages.push(...packages);
            try {
                core.info(`... Cleaning ${workspace.target} ...`);
                await cleanTargetDir(workspace.target, packages, false);
                await cleanTargetDir(workspace.target, packages);
            }
            catch (e) {
                core.debug(`${e.stack}`);
@@ -87442,31 +87472,30 @@ async function macOsWorkaround() {
    }
    catch { }
}
async function saveIncrementalDirs(incrementalDir) {
    // Traverse the incremental folder recursively and collect the modified times in a map
    const modifiedTimes = new Map();
    const fillModifiedTimes = async (dir) => {
        const dirEntries = await external_fs_default().promises.opendir(dir);
        for await (const dirent of dirEntries) {
            if (dirent.isDirectory()) {
                await fillModifiedTimes(external_path_default().join(dir, dirent.name));
            }
            else {
                const fileName = external_path_default().join(dir, dirent.name);
                const { mtime } = await external_fs_default().promises.stat(fileName);
                modifiedTimes.set(fileName, mtime.getTime());
            }
        }
    };
    await fillModifiedTimes(incrementalDir);
    // Write the modified times to the incremental folder
    core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
    for (const file of modifiedTimes.keys()) {
        core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
    }
    const contents = JSON.stringify({ modifiedTimes });
    await external_fs_default().promises.writeFile(external_path_default().join(incrementalDir, "incremental-restore.json"), contents);
}
// async function saveIncrementalDirs(incrementalDir: string) {
//     // Traverse the incremental folder recursively and collect the modified times in a map
//     const modifiedTimes = new Map<string, number>();
//     const fillModifiedTimes = async (dir: string) => {
//         const dirEntries = await fs.promises.opendir(dir);
//         for await (const dirent of dirEntries) {
//             if (dirent.isDirectory()) {
//                 await fillModifiedTimes(path.join(dir, dirent.name));
//             } else {
//                 const fileName = path.join(dir, dirent.name);
//                 const { mtime } = await fs.promises.stat(fileName);
//                 modifiedTimes.set(fileName, mtime.getTime());
//             }
//         }
//     };
//     await fillModifiedTimes(incrementalDir);
//     // Write the modified times to the incremental folder
//     core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
//     for (const file of modifiedTimes.keys()) {
//         core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
//     }
//     const contents = JSON.stringify({ modifiedTimes });
//     await fs.promises.writeFile(path.join(incrementalDir, "incremental-restore.json"), contents);
// }

})();
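The save side mirrors the restore sketch above: collect mtimes from the target dirs, upload those files under the incremental key, persist the mtime map, then delete the originals. A self-contained sketch (names mirror the diff; the Object.fromEntries conversion is an addition here, not in the commit; see the note on the src/save.ts hunk below):

import fs from "fs";
import path from "path";
import { rm } from "fs/promises";

// Condensed sketch of the new save flow above.
async function saveIncrementalSketch(
  saveMtimes: (dirs: string[]) => Promise<Map<string, number>>,
  cache: { saveCache(paths: string[], key: string): Promise<unknown> },
  targetDirs: string[],
  incrementalKey: string,
  cargoHome: string,
) {
  const mtimes = await saveMtimes(targetDirs); // file -> mtime in ms
  const files = Array.from(mtimes.keys());
  await cache.saveCache(files, incrementalKey);
  // Persist the map for the restore side; a Map must be converted to a plain
  // object to survive JSON.stringify.
  const serialized = JSON.stringify(Object.fromEntries(mtimes));
  await fs.promises.writeFile(path.join(cargoHome, "incremental-restore.json"), serialized);
  // Delete the incremental cache before proceeding, as in the diff.
  for (const file of files) await rm(file);
}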
src/config.ts

@@ -22,7 +22,7 @@ export class CacheConfig {
  public cachePaths: Array<string> = [];

  /** All the paths we want to cache for incremental builds */
  public incrementalPaths: Array<string> = [];
  // public incrementalPaths: Array<string> = [];

  /** The primary cache key */
  public cacheKey = "";

@@ -292,11 +292,8 @@ export class CacheConfig {
    const incrementalKey = key + `-incremental--` + branchName;
    self.incrementalKey = incrementalKey;

    if (cacheTargets === "true") {
      for (const target of self.workspaces.map((ws) => ws.target)) {
        self.incrementalPaths.push(path.join(target, "incremental"));
      }
    }
    // Add the incremental cache to the cachePaths so we can restore it
    self.cachePaths.push(path.join(CARGO_HOME, "incremental-restore.json"));
  }

  return self;
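For illustration, the incremental key is a plain concatenation of the primary key, the literal "-incremental--", and the incremental-key input. A tiny sketch (the primary key value is hypothetical); note that the "-shared" fallback itself begins with a dash:

// Hypothetical values; only the concatenation is taken from the code above.
const key = "v0-rust";
const branchName = "-shared"; // default when the incremental-key input is empty
const incrementalKey = key + `-incremental--` + branchName;
// => "v0-rust-incremental---shared"
// With `incremental-key: main` it would be "v0-rust-incremental--main".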
src/incremental.ts

@@ -1,50 +1,30 @@
import * as core from "@actions/core";
// import * as core from "@actions/core";
// import * as io from "@actions/io";
import fs from "fs";
import path from "path";

// import { CARGO_HOME } from "./config";
import { exists } from "./utils";
// import { exists } from "./utils";
// import { Packages } from "./workspace";

export async function restoreIncremental(targetDir: string) {
  core.debug(`restoring incremental directory "${targetDir}"`);
export async function saveMtimes(targetDirs: string[]): Promise<Map<string, number>> {
  let cache = new Map<string, number>();
  let stack = targetDirs.slice();

  let dir = await fs.promises.opendir(targetDir);
  for await (const dirent of dir) {
    if (dirent.isDirectory()) {
      let dirName = path.join(dir.path, dirent.name);
      // is it a profile dir, or a nested target dir?
      let isNestedTarget =
        (await exists(path.join(dirName, "CACHEDIR.TAG"))) || (await exists(path.join(dirName, ".rustc_info.json")));
  while (stack.length > 0) {
    const dirName = stack.pop()!;
    const dir = await fs.promises.opendir(dirName);

      try {
        if (isNestedTarget) {
          await restoreIncremental(dirName);
        } else {
          await restoreIncrementalProfile(dirName);
        } restoreIncrementalProfile
      } catch { }
    for await (const dirent of dir) {
      if (dirent.isDirectory()) {
        stack.push(path.join(dirName, dirent.name));
      } else {
        const fileName = path.join(dirName, dirent.name);
        const { mtime } = await fs.promises.stat(fileName);
        cache.set(fileName, mtime.getTime());
      }
    }
  }
}

async function restoreIncrementalProfile(dirName: string) {
  core.debug(`restoring incremental profile directory "${dirName}"`);
  const incrementalJson = path.join(dirName, "incremental-restore.json");
  if (await exists(incrementalJson)) {
    const contents = await fs.promises.readFile(incrementalJson, "utf8");
    const { modifiedTimes } = JSON.parse(contents);

    core.debug(`restoring incremental profile directory "${dirName}" with ${modifiedTimes} files`);

    // Write the mtimes to all the files in the profile directory
    for (const fileName of Object.keys(modifiedTimes)) {
      const mtime = modifiedTimes[fileName];
      const filePath = path.join(dirName, fileName);
      await fs.promises.utimes(filePath, new Date(mtime), new Date(mtime));
    }
  } else {
    core.debug(`incremental-restore.json not found for ${dirName}`);
  }

  return cache;
}
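saveMtimes replaces the old recursive descent: it walks the given dirs with an explicit stack, no longer special-cases profile vs. nested target dirs, and records an mtime for every file it meets. A hypothetical usage (the paths are invented):

import { saveMtimes } from "./incremental";

// Walk two target dirs and print what would be recorded.
const mtimes = await saveMtimes(["target", "crates/foo/target"]);
for (const [file, ms] of mtimes) {
  console.log(`${file} -> ${new Date(ms).toISOString()}`);
}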
src/restore.ts

@@ -1,9 +1,11 @@
import * as core from "@actions/core";

import { cleanTargetDir } from "./cleanup";
import { CacheConfig } from "./config";
import { CacheConfig, CARGO_HOME } from "./config";
import { getCacheProvider, reportError } from "./utils";
import { restoreIncremental } from "./incremental";
// import { saveMtimes } from "./incremental";
import path from "path";
import fs from "fs";

process.on("uncaughtException", (e) => {
  core.error(e.message);

@@ -49,14 +51,18 @@ async function run() {
      const match = restoreKey === key;
      core.info(`${lookupOnly ? "Found" : "Restored from"} cache key "${restoreKey}" full match: ${match}.`);

      // Restore the incremental-restore.json file and write the mtimes to all the files in the list
      if (config.incremental) {
        const incrementalKey = await cacheProvider.cache.restoreCache(config.incrementalPaths.slice(), config.incrementalKey, [config.restoreKey], { lookupOnly });
        core.debug(`restoring incremental builds from ${incrementalKey}`);

        if (incrementalKey) {
          for (const workspace of config.workspaces) {
            await restoreIncremental(workspace.target);
            try {
              const restoreJson = path.join(CARGO_HOME, "incremental-restore.json")
              const restoreString = await fs.promises.readFile(restoreJson, "utf8");
              const restoreData: Map<String, number> = JSON.parse(restoreString);
              for (const [file, mtime] of Object.entries(restoreData)) {
                await fs.promises.utimes(file, new Date(mtime), new Date(mtime));
              }
            } catch (err) {
              core.debug(`Could not restore incremental cache - ${err}`);
              core.debug(`${(err as any).stack}`);
            }
          }
        }
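A note on the types in this hunk: JSON.parse returns a plain object, never a Map, so the Map<String, number> annotation is only nominal, and Object.entries on the parsed value is what actually iterates the pairs. The same loop with a type that matches the runtime value (illustrative only; the path is shortened):

import fs from "fs";

const raw = await fs.promises.readFile("incremental-restore.json", "utf8");
const restoreData: Record<string, number> = JSON.parse(raw);
for (const [file, mtime] of Object.entries(restoreData)) {
  await fs.promises.utimes(file, new Date(mtime), new Date(mtime));
}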
src/save.ts, 72 changed lines
@@ -2,11 +2,12 @@ import * as core from "@actions/core";
import * as exec from "@actions/exec";

import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
import { CacheConfig, isCacheUpToDate } from "./config";
import { CacheConfig, CARGO_HOME, isCacheUpToDate } from "./config";
import { getCacheProvider, reportError } from "./utils";
import { rm } from "fs/promises";
import fs from "fs";
import path from "path";
import { saveMtimes } from "./incremental";

process.on("uncaughtException", (e) => {
  core.error(e.message);
@@ -43,12 +44,18 @@ async function run() {
    if (config.incremental) {
      core.info(`... Saving incremental cache ...`);
      try {
        core.debug(`paths include ${config.incrementalPaths} with key ${config.incrementalKey}`);
        for (const paths of config.incrementalPaths) {
          await saveIncrementalDirs(paths);
        }
        await cacheProvider.cache.saveCache(config.incrementalPaths.slice(), config.incrementalKey);
        for (const path of config.incrementalPaths) {
          const targetDirs = config.workspaces.map((ws) => ws.target);
          const cache = await saveMtimes(targetDirs);
          const paths = Array.from(cache.keys());
          const saved = await cacheProvider.cache.saveCache(paths, config.incrementalKey);
          core.debug(`saved incremental cache with key ${saved} with contents ${paths}`);

          // write the incremental-restore.json file
          const serialized = JSON.stringify(cache);
          await fs.promises.writeFile(path.join(CARGO_HOME, "incremental-restore.json"), serialized);

          // Delete the incremental cache before proceeding
          for (const [path, _mtime] of cache) {
            core.debug(` deleting ${path}`);
            await rm(path);
          }
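One detail worth verifying in this hunk: JSON.stringify serializes a Map as "{}" and drops its entries, so serializing the cache Map directly would leave the restore side's Object.entries loop with nothing to iterate. A conversion such as Object.fromEntries keeps the data:

// Demonstration of the Map serialization pitfall (the entry is made up).
const cache = new Map([["target/incremental/foo", 1712789000000]]);
JSON.stringify(cache);                     // => '{}'
JSON.stringify(Object.fromEntries(cache)); // => '{"target/incremental/foo":1712789000000}'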
@@ -64,7 +71,7 @@ async function run() {
      allPackages.push(...packages);
      try {
        core.info(`... Cleaning ${workspace.target} ...`);
        await cleanTargetDir(workspace.target, packages, false);
        await cleanTargetDir(workspace.target, packages);
      } catch (e) {
        core.debug(`${(e as any).stack}`);
      }
@@ -99,7 +106,6 @@ async function run() {
    // https://github.com/actions/toolkit/pull/1378
    // TODO: remove this once the underlying bug is fixed.
    await cacheProvider.cache.saveCache(config.cachePaths.slice(), config.cacheKey);

  } catch (e) {
    reportError(e);
  }
@@ -117,29 +123,29 @@ async function macOsWorkaround() {
}

async function saveIncrementalDirs(incrementalDir: string) {
  // Traverse the incremental folder recursively and collect the modified times in a map
  const modifiedTimes = new Map<string, number>();
  const fillModifiedTimes = async (dir: string) => {
    const dirEntries = await fs.promises.opendir(dir);
    for await (const dirent of dirEntries) {
      if (dirent.isDirectory()) {
        await fillModifiedTimes(path.join(dir, dirent.name));
      } else {
        const fileName = path.join(dir, dirent.name);
        const { mtime } = await fs.promises.stat(fileName);
        modifiedTimes.set(fileName, mtime.getTime());
      }
    }
  };
  await fillModifiedTimes(incrementalDir);
// async function saveIncrementalDirs(incrementalDir: string) {
//   // Traverse the incremental folder recursively and collect the modified times in a map
//   const modifiedTimes = new Map<string, number>();
//   const fillModifiedTimes = async (dir: string) => {
//     const dirEntries = await fs.promises.opendir(dir);
//     for await (const dirent of dirEntries) {
//       if (dirent.isDirectory()) {
//         await fillModifiedTimes(path.join(dir, dirent.name));
//       } else {
//         const fileName = path.join(dir, dirent.name);
//         const { mtime } = await fs.promises.stat(fileName);
//         modifiedTimes.set(fileName, mtime.getTime());
//       }
//     }
//   };
//   await fillModifiedTimes(incrementalDir);

  // Write the modified times to the incremental folder
  core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
  for (const file of modifiedTimes.keys()) {
    core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
  }
  const contents = JSON.stringify({ modifiedTimes });
  await fs.promises.writeFile(path.join(incrementalDir, "incremental-restore.json"), contents);
// // Write the modified times to the incremental folder
// core.debug(`writing incremental-restore.json for ${incrementalDir} files`);
// for (const file of modifiedTimes.keys()) {
//   core.debug(` ${file} -> ${modifiedTimes.get(file)}`);
// }
// const contents = JSON.stringify({ modifiedTimes });
// await fs.promises.writeFile(path.join(incrementalDir, "incremental-restore.json"), contents);

}
// }