mirror of https://github.com/Swatinem/rust-cache
synced 2025-04-18 09:49:02 +00:00

Reset dist files

This commit is contained in:
parent a5df05bb52
commit ee6bad3219
460 dist/restore/index.js (vendored)
@@ -59794,242 +59794,234 @@ var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_);
 var external_path_ = __nccwpck_require__(5622);
 var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_);
 ;// CONCATENATED MODULE: ./src/common.ts
 
 process.on("uncaughtException", (e) => {
     core.info(`[warning] ${e.message}`);
-    if (e.stack) {
-        core.info(e.stack);
-    }
 });
 const cwd = core.getInput("working-directory");
 // TODO: this could be read from .cargo config file directly
 const targetDir = core.getInput("target-dir") || "./target";
 if (cwd) {
     process.chdir(cwd);
 }
 const stateBins = "RUST_CACHE_BINS";
 const stateKey = "RUST_CACHE_KEY";
 const stateHash = "RUST_CACHE_HASH";
 const home = external_os_default().homedir();
 const cargoHome = process.env.CARGO_HOME || external_path_default().join(home, ".cargo");
 const paths = {
     cargoHome,
     index: external_path_default().join(cargoHome, "registry/index"),
     cache: external_path_default().join(cargoHome, "registry/cache"),
     git: external_path_default().join(cargoHome, "git"),
     target: targetDir,
 };
 const RefKey = "GITHUB_REF";
 function isValidEvent() {
     return RefKey in process.env && Boolean(process.env[RefKey]);
 }
 async function getCacheConfig() {
     let lockHash = core.getState(stateHash);
     if (!lockHash) {
         lockHash = await getLockfileHash();
         core.saveState(stateHash, lockHash);
     }
     let key = `v0-rust-`;
     const sharedKey = core.getInput("sharedKey");
     if (sharedKey) {
         key += `${sharedKey}-`;
     }
     else {
         const inputKey = core.getInput("key");
         if (inputKey) {
             key += `${inputKey}-`;
         }
         const job = process.env.GITHUB_JOB;
         if (job) {
             key += `${job}-`;
         }
     }
     key += await getRustKey();
     return {
         paths: [
             external_path_default().join(cargoHome, "bin"),
             external_path_default().join(cargoHome, ".crates2.json"),
             external_path_default().join(cargoHome, ".crates.toml"),
             paths.git,
             paths.cache,
             paths.index,
             paths.target,
         ],
         key: `${key}-${lockHash}`,
         restoreKeys: [key],
     };
 }
 async function getCargoBins() {
     try {
         const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(paths.cargoHome, ".crates2.json"), "utf8"));
         const bins = new Set();
         for (const pkg of Object.values(installs)) {
             for (const bin of pkg.bins) {
                 bins.add(bin);
             }
         }
         return bins;
     }
     catch {
         return new Set();
     }
 }
 async function getRustKey() {
     const rustc = await getRustVersion();
     return `${rustc.release}-${rustc.host}-${rustc["commit-hash"].slice(0, 12)}`;
 }
 async function getRustVersion() {
     const stdout = await getCmdOutput("rustc", ["-vV"]);
     let splits = stdout
         .split(/[\n\r]+/)
         .filter(Boolean)
         .map((s) => s.split(":").map((s) => s.trim()))
         .filter((s) => s.length === 2);
     return Object.fromEntries(splits);
 }
 async function getCmdOutput(cmd, args = [], options = {}) {
     let stdout = "";
     await exec.exec(cmd, args, {
         silent: true,
         listeners: {
             stdout(data) {
                 stdout += data.toString();
             },
         },
         ...options,
     });
     return stdout;
 }
 async function getLockfileHash() {
     const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
         followSymbolicLinks: false,
     });
     const files = await globber.glob();
     files.sort((a, b) => a.localeCompare(b));
     const hasher = external_crypto_default().createHash("sha1");
     for (const file of files) {
         for await (const chunk of external_fs_default().createReadStream(file)) {
             hasher.update(chunk);
         }
     }
     return hasher.digest("hex").slice(0, 20);
 }
 async function getPackages() {
     const cwd = process.cwd();
     const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]));
     return meta.packages
         .filter((p) => !p.manifest_path.startsWith(cwd))
         .map((p) => {
         const targets = p.targets.filter((t) => t.kind[0] === "lib").map((t) => t.name);
         return { name: p.name, version: p.version, targets, path: external_path_default().dirname(p.manifest_path) };
     });
 }
 async function cleanTarget(packages) {
     await external_fs_default().promises.unlink(external_path_default().join(targetDir, "./.rustc_info.json"));
     await io.rmRF(external_path_default().join(targetDir, "./debug/examples"));
     await io.rmRF(external_path_default().join(targetDir, "./debug/incremental"));
     let dir;
     // remove all *files* from debug
     dir = await external_fs_default().promises.opendir(external_path_default().join(targetDir, "./debug"));
     for await (const dirent of dir) {
         if (dirent.isFile()) {
             await rm(dir.path, dirent);
         }
     }
     const keepPkg = new Set(packages.map((p) => p.name));
     await rmExcept(external_path_default().join(targetDir, "./debug/build"), keepPkg);
     await rmExcept(external_path_default().join(targetDir, "./debug/.fingerprint"), keepPkg);
     const keepDeps = new Set(packages.flatMap((p) => {
         const names = [];
         for (const n of [p.name, ...p.targets]) {
             const name = n.replace(/-/g, "_");
             names.push(name, `lib${name}`);
         }
         return names;
     }));
     await rmExcept(external_path_default().join(targetDir, "./debug/deps"), keepDeps);
 }
 const oneWeek = 7 * 24 * 3600 * 1000;
 async function rmExcept(dirName, keepPrefix) {
     const dir = await external_fs_default().promises.opendir(dirName);
     for await (const dirent of dir) {
         let name = dirent.name;
         const idx = name.lastIndexOf("-");
         if (idx !== -1) {
             name = name.slice(0, idx);
         }
         const fileName = external_path_default().join(dir.path, dirent.name);
         const { mtime } = await external_fs_default().promises.stat(fileName);
         // we don’t really know
         if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) {
             await rm(dir.path, dirent);
         }
     }
 }
 async function rm(parent, dirent) {
     try {
         const fileName = external_path_default().join(parent, dirent.name);
         core.debug(`deleting "${fileName}"`);
         if (dirent.isFile()) {
             await external_fs_default().promises.unlink(fileName);
         }
         else if (dirent.isDirectory()) {
             await io.rmRF(fileName);
         }
     }
     catch { }
 }
 
 ;// CONCATENATED MODULE: ./src/restore.ts
 
 async function run() {
     try {
-        var cacheOnFailure = core.getInput("cache-on-failure").toLowerCase();
-        if (cacheOnFailure !== "true") {
-            cacheOnFailure = "false";
-        }
-        core.exportVariable("CACHE_ON_FAILURE", cacheOnFailure);
         core.exportVariable("CARGO_INCREMENTAL", 0);
         const { paths, key, restoreKeys } = await getCacheConfig();
         const bins = await getCargoBins();
         core.saveState(stateBins, JSON.stringify([...bins]));
         core.info(`Restoring paths:\n ${paths.join("\n ")}`);
         core.info(`In directory:\n ${process.cwd()}`);
         core.info(`Using keys:\n ${[key, ...restoreKeys].join("\n ")}`);
         const restoreKey = await cache.restoreCache(paths, key, restoreKeys);
         if (restoreKey) {
             core.info(`Restored from cache key "${restoreKey}".`);
             core.saveState(stateKey, restoreKey);
             if (restoreKey !== key) {
                 // pre-clean the target directory on cache mismatch
                 const packages = await getPackages();
                 await cleanTarget(packages);
             }
             setCacheHitOutput(restoreKey === key);
         }
         else {
             core.info("No cache found.");
             setCacheHitOutput(false);
         }
     }
     catch (e) {
         setCacheHitOutput(false);
         core.info(`[warning] ${e.message}`);
     }
 }
 function setCacheHitOutput(cacheHit) {
     core.setOutput("cache-hit", cacheHit.toString());
 }
 run();
 
 })();
643 dist/save/index.js (vendored)
@@ -59794,331 +59794,328 @@ var external_crypto_default = /*#__PURE__*/__nccwpck_require__.n(external_crypto_);
 var external_os_ = __nccwpck_require__(2087);
 var external_os_default = /*#__PURE__*/__nccwpck_require__.n(external_os_);
 ;// CONCATENATED MODULE: ./src/common.ts
 
 process.on("uncaughtException", (e) => {
     core.info(`[warning] ${e.message}`);
-    if (e.stack) {
-        core.info(e.stack);
-    }
 });
 const cwd = core.getInput("working-directory");
 // TODO: this could be read from .cargo config file directly
 const targetDir = core.getInput("target-dir") || "./target";
 if (cwd) {
     process.chdir(cwd);
 }
 const stateBins = "RUST_CACHE_BINS";
 const stateKey = "RUST_CACHE_KEY";
 const stateHash = "RUST_CACHE_HASH";
 const home = external_os_default().homedir();
 const cargoHome = process.env.CARGO_HOME || external_path_default().join(home, ".cargo");
 const paths = {
     cargoHome,
     index: external_path_default().join(cargoHome, "registry/index"),
     cache: external_path_default().join(cargoHome, "registry/cache"),
     git: external_path_default().join(cargoHome, "git"),
     target: targetDir,
 };
 const RefKey = "GITHUB_REF";
 function isValidEvent() {
     return RefKey in process.env && Boolean(process.env[RefKey]);
 }
 async function getCacheConfig() {
     let lockHash = core.getState(stateHash);
     if (!lockHash) {
         lockHash = await getLockfileHash();
         core.saveState(stateHash, lockHash);
     }
     let key = `v0-rust-`;
     const sharedKey = core.getInput("sharedKey");
     if (sharedKey) {
         key += `${sharedKey}-`;
     }
     else {
         const inputKey = core.getInput("key");
         if (inputKey) {
             key += `${inputKey}-`;
         }
         const job = process.env.GITHUB_JOB;
         if (job) {
             key += `${job}-`;
         }
     }
     key += await getRustKey();
     return {
         paths: [
             external_path_default().join(cargoHome, "bin"),
             external_path_default().join(cargoHome, ".crates2.json"),
             external_path_default().join(cargoHome, ".crates.toml"),
             paths.git,
             paths.cache,
             paths.index,
             paths.target,
         ],
         key: `${key}-${lockHash}`,
         restoreKeys: [key],
     };
 }
 async function getCargoBins() {
     try {
         const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(paths.cargoHome, ".crates2.json"), "utf8"));
         const bins = new Set();
         for (const pkg of Object.values(installs)) {
             for (const bin of pkg.bins) {
                 bins.add(bin);
             }
         }
         return bins;
     }
     catch {
         return new Set();
     }
 }
 async function getRustKey() {
     const rustc = await getRustVersion();
     return `${rustc.release}-${rustc.host}-${rustc["commit-hash"].slice(0, 12)}`;
 }
 async function getRustVersion() {
     const stdout = await getCmdOutput("rustc", ["-vV"]);
     let splits = stdout
         .split(/[\n\r]+/)
         .filter(Boolean)
         .map((s) => s.split(":").map((s) => s.trim()))
         .filter((s) => s.length === 2);
     return Object.fromEntries(splits);
 }
 async function getCmdOutput(cmd, args = [], options = {}) {
     let stdout = "";
     await exec.exec(cmd, args, {
         silent: true,
         listeners: {
             stdout(data) {
                 stdout += data.toString();
             },
         },
         ...options,
     });
     return stdout;
 }
 async function getLockfileHash() {
     const globber = await glob.create("**/Cargo.toml\n**/Cargo.lock\nrust-toolchain\nrust-toolchain.toml", {
         followSymbolicLinks: false,
     });
     const files = await globber.glob();
     files.sort((a, b) => a.localeCompare(b));
     const hasher = external_crypto_default().createHash("sha1");
     for (const file of files) {
         for await (const chunk of external_fs_default().createReadStream(file)) {
             hasher.update(chunk);
         }
     }
     return hasher.digest("hex").slice(0, 20);
 }
 async function getPackages() {
     const cwd = process.cwd();
     const meta = JSON.parse(await getCmdOutput("cargo", ["metadata", "--all-features", "--format-version", "1"]));
     return meta.packages
         .filter((p) => !p.manifest_path.startsWith(cwd))
         .map((p) => {
         const targets = p.targets.filter((t) => t.kind[0] === "lib").map((t) => t.name);
         return { name: p.name, version: p.version, targets, path: external_path_default().dirname(p.manifest_path) };
     });
 }
 async function cleanTarget(packages) {
     await external_fs_default().promises.unlink(external_path_default().join(targetDir, "./.rustc_info.json"));
     await io.rmRF(external_path_default().join(targetDir, "./debug/examples"));
     await io.rmRF(external_path_default().join(targetDir, "./debug/incremental"));
     let dir;
     // remove all *files* from debug
     dir = await external_fs_default().promises.opendir(external_path_default().join(targetDir, "./debug"));
     for await (const dirent of dir) {
         if (dirent.isFile()) {
             await rm(dir.path, dirent);
         }
     }
     const keepPkg = new Set(packages.map((p) => p.name));
     await rmExcept(external_path_default().join(targetDir, "./debug/build"), keepPkg);
     await rmExcept(external_path_default().join(targetDir, "./debug/.fingerprint"), keepPkg);
     const keepDeps = new Set(packages.flatMap((p) => {
         const names = [];
         for (const n of [p.name, ...p.targets]) {
             const name = n.replace(/-/g, "_");
             names.push(name, `lib${name}`);
         }
         return names;
     }));
     await rmExcept(external_path_default().join(targetDir, "./debug/deps"), keepDeps);
 }
 const oneWeek = 7 * 24 * 3600 * 1000;
 async function rmExcept(dirName, keepPrefix) {
     const dir = await external_fs_default().promises.opendir(dirName);
     for await (const dirent of dir) {
         let name = dirent.name;
         const idx = name.lastIndexOf("-");
         if (idx !== -1) {
             name = name.slice(0, idx);
         }
         const fileName = external_path_default().join(dir.path, dirent.name);
         const { mtime } = await external_fs_default().promises.stat(fileName);
         // we don’t really know
         if (!keepPrefix.has(name) || Date.now() - mtime.getTime() > oneWeek) {
             await rm(dir.path, dirent);
         }
     }
 }
 async function rm(parent, dirent) {
     try {
         const fileName = external_path_default().join(parent, dirent.name);
         core.debug(`deleting "${fileName}"`);
         if (dirent.isFile()) {
             await external_fs_default().promises.unlink(fileName);
         }
         else if (dirent.isDirectory()) {
             await io.rmRF(fileName);
         }
     }
     catch { }
 }
 
 ;// CONCATENATED MODULE: ./src/save.ts
 
 async function run() {
     try {
         const { paths: savePaths, key } = await getCacheConfig();
         if (core.getState(stateKey) === key) {
             core.info(`Cache up-to-date.`);
             return;
         }
         // TODO: remove this once https://github.com/actions/toolkit/pull/553 lands
         await macOsWorkaround();
         const registryName = await getRegistryName();
         const packages = await getPackages();
         try {
             await cleanRegistry(registryName, packages);
         }
         catch { }
         try {
             await cleanBin();
         }
         catch { }
         try {
             await cleanGit(packages);
         }
         catch { }
         try {
             await cleanTarget(packages);
         }
         catch { }
         core.info(`Saving paths:\n ${savePaths.join("\n ")}`);
         core.info(`In directory:\n ${process.cwd()}`);
         core.info(`Using key:\n ${key}`);
         await cache.saveCache(savePaths, key);
     }
     catch (e) {
         core.info(`[warning] ${e.message}`);
     }
 }
 run();
 async function getRegistryName() {
     const globber = await glob.create(`${paths.index}/**/.last-updated`, { followSymbolicLinks: false });
     const files = await globber.glob();
     if (files.length > 1) {
         core.warning(`got multiple registries: "${files.join('", "')}"`);
     }
     const first = files.shift();
     return external_path_default().basename(external_path_default().dirname(first));
 }
 async function cleanBin() {
     const bins = await getCargoBins();
     const oldBins = JSON.parse(core.getState(stateBins));
     for (const bin of oldBins) {
         bins.delete(bin);
     }
     const dir = await external_fs_default().promises.opendir(external_path_default().join(paths.cargoHome, "bin"));
     for await (const dirent of dir) {
         if (dirent.isFile() && !bins.has(dirent.name)) {
             await rm(dir.path, dirent);
         }
     }
 }
 async function cleanRegistry(registryName, packages) {
     await io.rmRF(external_path_default().join(paths.index, registryName, ".cache"));
     const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
     const dir = await external_fs_default().promises.opendir(external_path_default().join(paths.cache, registryName));
     for await (const dirent of dir) {
         if (dirent.isFile() && !pkgSet.has(dirent.name)) {
             await rm(dir.path, dirent);
         }
     }
 }
 async function cleanGit(packages) {
     const coPath = external_path_default().join(paths.git, "checkouts");
     const dbPath = external_path_default().join(paths.git, "db");
     const repos = new Map();
     for (const p of packages) {
         if (!p.path.startsWith(coPath)) {
             continue;
         }
         const [repo, ref] = p.path.slice(coPath.length + 1).split((external_path_default()).sep);
         const refs = repos.get(repo);
         if (refs) {
             refs.add(ref);
         }
         else {
             repos.set(repo, new Set([ref]));
         }
     }
     // we have to keep both the clone, and the checkout, removing either will
     // trigger a rebuild
     let dir;
     // clean the db
     dir = await external_fs_default().promises.opendir(dbPath);
     for await (const dirent of dir) {
         if (!repos.has(dirent.name)) {
             await rm(dir.path, dirent);
         }
     }
     // clean the checkouts
     dir = await external_fs_default().promises.opendir(coPath);
     for await (const dirent of dir) {
         const refs = repos.get(dirent.name);
         if (!refs) {
             await rm(dir.path, dirent);
             continue;
         }
         if (!dirent.isDirectory()) {
             continue;
         }
         const refsDir = await external_fs_default().promises.opendir(external_path_default().join(dir.path, dirent.name));
         for await (const dirent of refsDir) {
             if (!refs.has(dirent.name)) {
                 await rm(refsDir.path, dirent);
             }
         }
     }
 }
 async function macOsWorkaround() {
     try {
         // Workaround for https://github.com/actions/cache/issues/403
         // Also see https://github.com/rust-lang/cargo/issues/8603
         await exec.exec("sudo", ["/usr/sbin/purge"], { silent: true });
     }
     catch { }
 }
 
 })();