diff --git a/TODO.md b/TODO.md new file mode 100644 index 0000000..c640d8b --- /dev/null +++ b/TODO.md @@ -0,0 +1,6 @@ +- Update readme with better docs ;-) +- better .cargo/bin handling: + - get a list of all the files on "pre"/"restore" + - move the files out of the way on "post"/"save" and move them back afterwards +- better .cargo/registry handling: + - rather implement better cleaning logic for the registry diff --git a/dist/restore/index.js b/dist/restore/index.js index cb97279..2f8ffa8 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -61667,6 +61667,8 @@ class Workspace { +const HOME = external_os_default().homedir(); +const config_CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES"; const config_STATE_BINS = "RUST_CACHE_BINS"; @@ -61679,14 +61681,6 @@ class CacheConfig { this.cacheKey = ""; /** The secondary (restore) key that only contains the prefix and environment */ this.restoreKey = ""; - /** The `~/.cargo` directory */ - this.cargoHome = ""; - /** The cargo registry index directory */ - this.cargoIndex = ""; - /** The cargo registry cache directory */ - this.cargoCache = ""; - /** The cargo git checkouts directory */ - this.cargoGit = ""; /** The workspace configurations */ this.workspaces = []; /** The prefix portion of the cache key */ @@ -61782,14 +61776,8 @@ class CacheConfig { self.keyFiles = keyFiles; key += `-${lockHash}`; self.cacheKey = key; - // Constructs some generic paths, workspace config and paths to restore: + // Constructs the workspace config and paths to restore: // The workspaces are given using a `$workspace -> $target` syntax. - const home = external_os_default().homedir(); - const cargoHome = process.env.CARGO_HOME || external_path_default().join(home, ".cargo"); - self.cargoHome = cargoHome; - self.cargoIndex = external_path_default().join(cargoHome, "registry/index"); - self.cargoCache = external_path_default().join(cargoHome, "registry/cache"); - self.cargoGit = external_path_default().join(cargoHome, "git"); const workspaces = []; const workspacesInput = lib_core.getInput("workspaces") || "."; for (const workspace of workspacesInput.trim().split("\n")) { @@ -61799,15 +61787,7 @@ class CacheConfig { workspaces.push(new Workspace(root, target)); } self.workspaces = workspaces; - self.cachePaths = [ - external_path_default().join(cargoHome, "bin"), - external_path_default().join(cargoHome, ".crates2.json"), - external_path_default().join(cargoHome, ".crates.toml"), - self.cargoIndex, - self.cargoCache, - self.cargoGit, - ...workspaces.map((ws) => ws.target), - ]; + self.cachePaths = [config_CARGO_HOME, ...workspaces.map((ws) => ws.target)]; return self; } printInfo() { @@ -61837,19 +61817,6 @@ class CacheConfig { } lib_core.endGroup(); } - async getCargoBins() { - const bins = new Set(); - try { - const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(this.cargoHome, ".crates2.json"), "utf8")); - for (const pkg of Object.values(installs)) { - for (const bin of pkg.bins) { - bins.add(bin); - } - } - } - catch { } - return bins; - } } async function getRustVersion() { const stdout = await getCmdOutput("rustc", ["-vV"]); @@ -61890,7 +61857,6 @@ async function cleanTargetDir(targetDir, packages) { await rm(dir.path, dirent); } } - await external_fs_default().promises.unlink(external_path_default().join(targetDir, "./.rustc_info.json")); } async function 
cleanProfileTarget(profileDir, packages) { await lib_io.rmRF(external_path_default().join(profileDir, "examples")); @@ -61916,32 +61882,67 @@ async function cleanProfileTarget(profileDir, packages) { })); await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps); } -async function cleanBin(config) { - const bins = await config.getCargoBins(); +async function getCargoBins() { + const bins = new Set(); + try { + const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(config_CARGO_HOME, ".crates2.json"), "utf8")); + for (const pkg of Object.values(installs)) { + for (const bin of pkg.bins) { + bins.add(bin); + } + } + } + catch { } + return bins; +} +async function cleanBin() { + const bins = await getCargoBins(); const oldBins = JSON.parse(core.getState(STATE_BINS)); for (const bin of oldBins) { bins.delete(bin); } - const dir = await fs.promises.opendir(path.join(config.cargoHome, "bin")); + const dir = await fs.promises.opendir(path.join(CARGO_HOME, "bin")); for await (const dirent of dir) { if (dirent.isFile() && !bins.has(dirent.name)) { await rm(dir.path, dirent); } } } -async function cleanRegistry(config, registryName, packages) { - await io.rmRF(path.join(config.cargoIndex, registryName, ".cache")); +async function cleanRegistry(packages) { + // `.cargo/registry/src` + const srcDir = path.join(CARGO_HOME, "registry", "src"); + await io.rmRF(srcDir); + // `.cargo/registry/index` + const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index")); + for await (const dirent of indexDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` + // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` + const dir = await fs.promises.opendir(path.join(indexDir.path, dirent.name)); + // TODO: check for `.git` etc, for now we just always remove the `.cache` + // and leave other stuff untouched. + await io.rmRF(path.join(dir.path, ".cache")); + } + } const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); - const dir = await fs.promises.opendir(path.join(config.cargoCache, registryName)); - for await (const dirent of dir) { - if (dirent.isFile() && !pkgSet.has(dirent.name)) { - await rm(dir.path, dirent); + // `.cargo/registry/cache` + const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache")); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` + // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` + const dir = await fs.promises.opendir(path.join(cacheDir.path, dirent.name)); + for await (const dirent of dir) { + if (dirent.isFile() && !pkgSet.has(dirent.name)) { + await rm(dir.path, dirent); + } + } } } } -async function cleanGit(config, packages) { - const coPath = path.join(config.cargoGit, "checkouts"); - const dbPath = path.join(config.cargoGit, "db"); +async function cleanGit(packages) { + const coPath = path.join(CARGO_HOME, "git", "checkouts"); + const dbPath = path.join(CARGO_HOME, "git", "db"); const repos = new Map(); for (const p of packages) { if (!p.path.startsWith(coPath)) { @@ -62051,7 +62052,7 @@ async function run() { const config = await CacheConfig["new"](); config.printInfo(); lib_core.info(""); - const bins = await config.getCargoBins(); + const bins = await getCargoBins(); lib_core.saveState(config_STATE_BINS, JSON.stringify([...bins])); lib_core.info(`... 
Restoring cache ...`); const key = config.cacheKey; diff --git a/dist/save/index.js b/dist/save/index.js index a0ec253..99c6aae 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -61587,16 +61587,16 @@ var cache = __nccwpck_require__(7799); var core = __nccwpck_require__(2186); // EXTERNAL MODULE: ./node_modules/@actions/exec/lib/exec.js var exec = __nccwpck_require__(1514); -// EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js -var glob = __nccwpck_require__(8090); -// EXTERNAL MODULE: external "path" -var external_path_ = __nccwpck_require__(1017); -var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_); // EXTERNAL MODULE: ./node_modules/@actions/io/lib/io.js var io = __nccwpck_require__(7436); // EXTERNAL MODULE: external "fs" var external_fs_ = __nccwpck_require__(7147); var external_fs_default = /*#__PURE__*/__nccwpck_require__.n(external_fs_); +// EXTERNAL MODULE: external "path" +var external_path_ = __nccwpck_require__(1017); +var external_path_default = /*#__PURE__*/__nccwpck_require__.n(external_path_); +// EXTERNAL MODULE: ./node_modules/@actions/glob/lib/glob.js +var glob = __nccwpck_require__(8090); // EXTERNAL MODULE: external "crypto" var external_crypto_ = __nccwpck_require__(6113); var external_crypto_default = /*#__PURE__*/__nccwpck_require__.n(external_crypto_); @@ -61667,6 +61667,8 @@ class Workspace { +const HOME = external_os_default().homedir(); +const CARGO_HOME = process.env.CARGO_HOME || external_path_default().join(HOME, ".cargo"); const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH"; const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES"; const STATE_BINS = "RUST_CACHE_BINS"; @@ -61679,14 +61681,6 @@ class CacheConfig { this.cacheKey = ""; /** The secondary (restore) key that only contains the prefix and environment */ this.restoreKey = ""; - /** The `~/.cargo` directory */ - this.cargoHome = ""; - /** The cargo registry index directory */ - this.cargoIndex = ""; - /** The cargo registry cache directory */ - this.cargoCache = ""; - /** The cargo git checkouts directory */ - this.cargoGit = ""; /** The workspace configurations */ this.workspaces = []; /** The prefix portion of the cache key */ @@ -61782,14 +61776,8 @@ class CacheConfig { self.keyFiles = keyFiles; key += `-${lockHash}`; self.cacheKey = key; - // Constructs some generic paths, workspace config and paths to restore: + // Constructs the workspace config and paths to restore: // The workspaces are given using a `$workspace -> $target` syntax. 
- const home = external_os_default().homedir(); - const cargoHome = process.env.CARGO_HOME || external_path_default().join(home, ".cargo"); - self.cargoHome = cargoHome; - self.cargoIndex = external_path_default().join(cargoHome, "registry/index"); - self.cargoCache = external_path_default().join(cargoHome, "registry/cache"); - self.cargoGit = external_path_default().join(cargoHome, "git"); const workspaces = []; const workspacesInput = core.getInput("workspaces") || "."; for (const workspace of workspacesInput.trim().split("\n")) { @@ -61799,15 +61787,7 @@ class CacheConfig { workspaces.push(new Workspace(root, target)); } self.workspaces = workspaces; - self.cachePaths = [ - external_path_default().join(cargoHome, "bin"), - external_path_default().join(cargoHome, ".crates2.json"), - external_path_default().join(cargoHome, ".crates.toml"), - self.cargoIndex, - self.cargoCache, - self.cargoGit, - ...workspaces.map((ws) => ws.target), - ]; + self.cachePaths = [CARGO_HOME, ...workspaces.map((ws) => ws.target)]; return self; } printInfo() { @@ -61837,19 +61817,6 @@ class CacheConfig { } core.endGroup(); } - async getCargoBins() { - const bins = new Set(); - try { - const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(this.cargoHome, ".crates2.json"), "utf8")); - for (const pkg of Object.values(installs)) { - for (const bin of pkg.bins) { - bins.add(bin); - } - } - } - catch { } - return bins; - } } async function getRustVersion() { const stdout = await getCmdOutput("rustc", ["-vV"]); @@ -61890,7 +61857,6 @@ async function cleanTargetDir(targetDir, packages) { await rm(dir.path, dirent); } } - await external_fs_default().promises.unlink(external_path_default().join(targetDir, "./.rustc_info.json")); } async function cleanProfileTarget(profileDir, packages) { await io.rmRF(external_path_default().join(profileDir, "examples")); @@ -61916,32 +61882,67 @@ async function cleanProfileTarget(profileDir, packages) { })); await rmExcept(external_path_default().join(profileDir, "deps"), keepDeps); } -async function cleanBin(config) { - const bins = await config.getCargoBins(); +async function getCargoBins() { + const bins = new Set(); + try { + const { installs } = JSON.parse(await external_fs_default().promises.readFile(external_path_default().join(CARGO_HOME, ".crates2.json"), "utf8")); + for (const pkg of Object.values(installs)) { + for (const bin of pkg.bins) { + bins.add(bin); + } + } + } + catch { } + return bins; +} +async function cleanBin() { + const bins = await getCargoBins(); const oldBins = JSON.parse(core.getState(STATE_BINS)); for (const bin of oldBins) { bins.delete(bin); } - const dir = await external_fs_default().promises.opendir(external_path_default().join(config.cargoHome, "bin")); + const dir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "bin")); for await (const dirent of dir) { if (dirent.isFile() && !bins.has(dirent.name)) { await rm(dir.path, dirent); } } } -async function cleanRegistry(config, registryName, packages) { - await io.rmRF(external_path_default().join(config.cargoIndex, registryName, ".cache")); +async function cleanRegistry(packages) { + // `.cargo/registry/src` + const srcDir = external_path_default().join(CARGO_HOME, "registry", "src"); + await io.rmRF(srcDir); + // `.cargo/registry/index` + const indexDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "index")); + for await (const dirent of indexDir) { + if 
(dirent.isDirectory()) { + // eg `.cargo/registry/index/github.com-1ecc6299db9ec823` + // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772` + const dir = await external_fs_default().promises.opendir(external_path_default().join(indexDir.path, dirent.name)); + // TODO: check for `.git` etc, for now we just always remove the `.cache` + // and leave other stuff untouched. + await io.rmRF(external_path_default().join(dir.path, ".cache")); + } + } const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`)); - const dir = await external_fs_default().promises.opendir(external_path_default().join(config.cargoCache, registryName)); - for await (const dirent of dir) { - if (dirent.isFile() && !pkgSet.has(dirent.name)) { - await rm(dir.path, dirent); + // `.cargo/registry/cache` + const cacheDir = await external_fs_default().promises.opendir(external_path_default().join(CARGO_HOME, "registry", "cache")); + for await (const dirent of cacheDir) { + if (dirent.isDirectory()) { + // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823` + // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772` + const dir = await external_fs_default().promises.opendir(external_path_default().join(cacheDir.path, dirent.name)); + for await (const dirent of dir) { + if (dirent.isFile() && !pkgSet.has(dirent.name)) { + await rm(dir.path, dirent); + } + } } } } -async function cleanGit(config, packages) { - const coPath = external_path_default().join(config.cargoGit, "checkouts"); - const dbPath = external_path_default().join(config.cargoGit, "db"); +async function cleanGit(packages) { + const coPath = external_path_default().join(CARGO_HOME, "git", "checkouts"); + const dbPath = external_path_default().join(CARGO_HOME, "git", "db"); const repos = new Map(); for (const p of packages) { if (!p.path.startsWith(coPath)) { @@ -62031,8 +62032,6 @@ async function exists(path) { - - process.on("uncaughtException", (e) => { core.info(`[warning] ${e.message}`); if (e.stack) { @@ -62065,26 +62064,23 @@ async function run() { core.info(`[warning] ${e.stack}`); } } - const registryName = await getRegistryName(config); - if (registryName) { - try { - core.info(`... Cleaning cargo registry ...`); - await cleanRegistry(config, registryName, allPackages); - } - catch (e) { - core.info(`[warning] ${e.stack}`); - } + try { + core.info(`... Cleaning cargo registry ...`); + await cleanRegistry(allPackages); + } + catch (e) { + core.info(`[warning] ${e.stack}`); } try { core.info(`... Cleaning cargo/bin ...`); - await cleanBin(config); + await cleanBin(); } catch (e) { core.info(`[warning] ${e.stack}`); } try { core.info(`... 
Cleaning cargo git cache ...`);
-        await cleanGit(config, allPackages);
+        await cleanGit(allPackages);
     }
     catch (e) {
         core.info(`[warning] ${e.stack}`);
@@ -62097,18 +62093,6 @@ async function run() {
     }
 }
 run();
-async function getRegistryName(config) {
-    const globber = await glob.create(`${config.cargoIndex}/**/.last-updated`, { followSymbolicLinks: false });
-    const files = await globber.glob();
-    if (files.length > 1) {
-        core.warning(`got multiple registries: "${files.join('", "')}"`);
-    }
-    const first = files.shift();
-    if (!first) {
-        return null;
-    }
-    return external_path_default().basename(external_path_default().dirname(first));
-}
 async function macOsWorkaround() {
     try {
         // Workaround for https://github.com/actions/cache/issues/403
diff --git a/src/cleanup.ts b/src/cleanup.ts
index bc30533..67ca48c 100644
--- a/src/cleanup.ts
+++ b/src/cleanup.ts
@@ -2,8 +2,8 @@ import * as core from "@actions/core";
 import * as io from "@actions/io";
 import fs from "fs";
 import path from "path";
 
-import { CacheConfig, STATE_BINS } from "./config";
+import { CARGO_HOME, STATE_BINS } from "./config";
 import { Packages } from "./workspace";
 
 export async function cleanTargetDir(targetDir: string, packages: Packages) {
@@ -60,15 +60,30 @@ async function cleanProfileTarget(profileDir: string, packages: Packages) {
   await rmExcept(path.join(profileDir, "deps"), keepDeps);
 }
 
-export async function cleanBin(config: CacheConfig) {
-  const bins = await config.getCargoBins();
+export async function getCargoBins(): Promise<Set<string>> {
+  const bins = new Set<string>();
+  try {
+    const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
+      await fs.promises.readFile(path.join(CARGO_HOME, ".crates2.json"), "utf8"),
+    );
+    for (const pkg of Object.values(installs)) {
+      for (const bin of pkg.bins) {
+        bins.add(bin);
+      }
+    }
+  } catch {}
+  return bins;
+}
+
+export async function cleanBin() {
+  const bins = await getCargoBins();
   const oldBins = JSON.parse(core.getState(STATE_BINS));
   for (const bin of oldBins) {
     bins.delete(bin);
   }
 
-  const dir = await fs.promises.opendir(path.join(config.cargoHome, "bin"));
+  const dir = await fs.promises.opendir(path.join(CARGO_HOME, "bin"));
   for await (const dirent of dir) {
     if (dirent.isFile() && !bins.has(dirent.name)) {
       await rm(dir.path, dirent);
@@ -76,22 +91,46 @@ export async function cleanBin(config: CacheConfig) {
   }
 }
 
-export async function cleanRegistry(config: CacheConfig, registryName: string, packages: Packages) {
-  await io.rmRF(path.join(config.cargoIndex, registryName, ".cache"));
+export async function cleanRegistry(packages: Packages) {
+  // `.cargo/registry/src`
+  const srcDir = path.join(CARGO_HOME, "registry", "src");
+  await io.rmRF(srcDir);
+
+  // `.cargo/registry/index`
+  const indexDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "index"));
+  for await (const dirent of indexDir) {
+    if (dirent.isDirectory()) {
+      // eg `.cargo/registry/index/github.com-1ecc6299db9ec823`
+      // or `.cargo/registry/index/index.crates.io-e139d0d48fed7772`
+      const dir = await fs.promises.opendir(path.join(indexDir.path, dirent.name));
+
+      // TODO: check for `.git` etc, for now we just always remove the `.cache`
+      // and leave other stuff untouched.
+      await io.rmRF(path.join(dir.path, ".cache"));
+    }
+  }
 
   const pkgSet = new Set(packages.map((p) => `${p.name}-${p.version}.crate`));
 
-  const dir = await fs.promises.opendir(path.join(config.cargoCache, registryName));
-  for await (const dirent of dir) {
-    if (dirent.isFile() && !pkgSet.has(dirent.name)) {
-      await rm(dir.path, dirent);
+  // `.cargo/registry/cache`
+  const cacheDir = await fs.promises.opendir(path.join(CARGO_HOME, "registry", "cache"));
+  for await (const dirent of cacheDir) {
+    if (dirent.isDirectory()) {
+      // eg `.cargo/registry/cache/github.com-1ecc6299db9ec823`
+      // or `.cargo/registry/cache/index.crates.io-e139d0d48fed7772`
+      const dir = await fs.promises.opendir(path.join(cacheDir.path, dirent.name));
+      for await (const dirent of dir) {
+        if (dirent.isFile() && !pkgSet.has(dirent.name)) {
+          await rm(dir.path, dirent);
+        }
+      }
     }
   }
 }
 
-export async function cleanGit(config: CacheConfig, packages: Packages) {
-  const coPath = path.join(config.cargoGit, "checkouts");
-  const dbPath = path.join(config.cargoGit, "db");
+export async function cleanGit(packages: Packages) {
+  const coPath = path.join(CARGO_HOME, "git", "checkouts");
+  const dbPath = path.join(CARGO_HOME, "git", "db");
   const repos = new Map<string, Set<string>>();
   for (const p of packages) {
     if (!p.path.startsWith(coPath)) {
diff --git a/src/config.ts b/src/config.ts
index 514f755..22a5ea8 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -8,6 +8,9 @@ import path from "path";
 import { getCmdOutput } from "./utils";
 import { Workspace } from "./workspace";
 
+const HOME = os.homedir();
+export const CARGO_HOME = process.env.CARGO_HOME || path.join(HOME, ".cargo");
+
 const STATE_LOCKFILE_HASH = "RUST_CACHE_LOCKFILE_HASH";
 const STATE_LOCKFILES = "RUST_CACHE_LOCKFILES";
 export const STATE_BINS = "RUST_CACHE_BINS";
@@ -21,14 +24,6 @@ export class CacheConfig {
   /** The secondary (restore) key that only contains the prefix and environment */
   public restoreKey = "";
 
-  /** The `~/.cargo` directory */
-  public cargoHome = "";
-  /** The cargo registry index directory */
-  public cargoIndex = "";
-  /** The cargo registry cache directory */
-  public cargoCache = "";
-  /** The cargo git checkouts directory */
-  public cargoGit = "";
   /** The workspace configurations */
   public workspaces: Array<Workspace> = [];
 
@@ -146,16 +141,9 @@ export class CacheConfig {
     key += `-${lockHash}`;
     self.cacheKey = key;
 
-    // Constructs some generic paths, workspace config and paths to restore:
+    // Constructs the workspace config and paths to restore:
     // The workspaces are given using a `$workspace -> $target` syntax.
 
-    const home = os.homedir();
-    const cargoHome = process.env.CARGO_HOME || path.join(home, ".cargo");
-    self.cargoHome = cargoHome;
-    self.cargoIndex = path.join(cargoHome, "registry/index");
-    self.cargoCache = path.join(cargoHome, "registry/cache");
-    self.cargoGit = path.join(cargoHome, "git");
-
     const workspaces: Array<Workspace> = [];
     const workspacesInput = core.getInput("workspaces") || ".";
     for (const workspace of workspacesInput.trim().split("\n")) {
@@ -166,15 +154,7 @@ export class CacheConfig {
     }
     self.workspaces = workspaces;
 
-    self.cachePaths = [
-      path.join(cargoHome, "bin"),
-      path.join(cargoHome, ".crates2.json"),
-      path.join(cargoHome, ".crates.toml"),
-      self.cargoIndex,
-      self.cargoCache,
-      self.cargoGit,
-      ...workspaces.map((ws) => ws.target),
-    ];
+    self.cachePaths = [CARGO_HOME, ...workspaces.map((ws) => ws.target)];
 
     return self;
   }
@@ -206,21 +186,6 @@ export class CacheConfig {
     }
     core.endGroup();
   }
-
-  public async getCargoBins(): Promise<Set<string>> {
-    const bins = new Set<string>();
-    try {
-      const { installs }: { installs: { [key: string]: { bins: Array<string> } } } = JSON.parse(
-        await fs.promises.readFile(path.join(this.cargoHome, ".crates2.json"), "utf8"),
-      );
-      for (const pkg of Object.values(installs)) {
-        for (const bin of pkg.bins) {
-          bins.add(bin);
-        }
-      }
-    } catch {}
-    return bins;
-  }
 }
 
 interface RustVersion {
diff --git a/src/restore.ts b/src/restore.ts
index 3c5d3b6..855144d 100644
--- a/src/restore.ts
+++ b/src/restore.ts
@@ -1,7 +1,7 @@
 import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 
-import { cleanTargetDir } from "./cleanup";
+import { cleanTargetDir, getCargoBins } from "./cleanup";
 import { CacheConfig, STATE_BINS, STATE_KEY } from "./config";
 
 process.on("uncaughtException", (e) => {
@@ -29,7 +29,7 @@ async function run() {
   config.printInfo();
   core.info("");
 
-  const bins = await config.getCargoBins();
+  const bins = await getCargoBins();
   core.saveState(STATE_BINS, JSON.stringify([...bins]));
 
   core.info(`... Restoring cache ...`);
diff --git a/src/save.ts b/src/save.ts
index d4ee0dc..261512f 100644
--- a/src/save.ts
+++ b/src/save.ts
@@ -1,8 +1,6 @@
 import * as cache from "@actions/cache";
 import * as core from "@actions/core";
 import * as exec from "@actions/exec";
-import * as glob from "@actions/glob";
-import path from "path";
 
 import { cleanBin, cleanGit, cleanRegistry, cleanTargetDir } from "./cleanup";
 import { CacheConfig, STATE_KEY } from "./config";
@@ -44,26 +42,23 @@ async function run() {
       }
     }
 
-    const registryName = await getRegistryName(config);
-    if (registryName) {
-      try {
-        core.info(`... Cleaning cargo registry ...`);
-        await cleanRegistry(config, registryName, allPackages);
-      } catch (e) {
-        core.info(`[warning] ${(e as any).stack}`);
-      }
+    try {
+      core.info(`... Cleaning cargo registry ...`);
+      await cleanRegistry(allPackages);
+    } catch (e) {
+      core.info(`[warning] ${(e as any).stack}`);
     }
 
     try {
       core.info(`... Cleaning cargo/bin ...`);
-      await cleanBin(config);
+      await cleanBin();
     } catch (e) {
       core.info(`[warning] ${(e as any).stack}`);
     }
 
     try {
       core.info(`... Cleaning cargo git cache ...`);
-      await cleanGit(config, allPackages);
+      await cleanGit(allPackages);
     } catch (e) {
       core.info(`[warning] ${(e as any).stack}`);
     }
@@ -77,20 +72,6 @@ async function run() {
   }
 }
 run();
-async function getRegistryName(config: CacheConfig): Promise<string | null> {
-  const globber = await glob.create(`${config.cargoIndex}/**/.last-updated`, { followSymbolicLinks: false });
-  const files = await globber.glob();
-  if (files.length > 1) {
-    core.warning(`got multiple registries: "${files.join('", "')}"`);
-  }
-
-  const first = files.shift()!;
-  if (!first) {
-    return null;
-  }
-  return path.basename(path.dirname(first));
-}
-
 async function macOsWorkaround() {
   try {
     // Workaround for https://github.com/actions/cache/issues/403