
feat: Implement ability to disable adding job ID + rust environment hashes to cache names (#279)

Ryan Brice 2025-11-03 21:24:00 +08:00 committed by GitHub
commit 27f6075dd2 (parent 94162284cf)
7 changed files with 440 additions and 272 deletions

.github/workflows/multi-job-cache.yml (new file, 74 lines)

@@ -0,0 +1,74 @@
name: multi-job-cache
on: [push, pull_request]
permissions: {}
jobs:
multi-job-cache-1:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test multi-job cache (1) on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
steps:
- name: checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: install rust toolchain
run: rustup toolchain install stable --profile minimal --target wasm32-unknown-unknown --no-self-update
- name: cache
uses: ./
with:
workspaces: |
tests
add-job-id-key: "false"
add-rust-environment-hash-key: "false"
- name: cargo check (tests)
working-directory: tests
run: cargo check
multi-job-cache-2:
if: github.repository == 'Swatinem/rust-cache'
strategy:
fail-fast: false
matrix:
os: [ubuntu-latest, macos-latest, windows-latest]
name: Test multi-job cache (2) on ${{ matrix.os }}
runs-on: ${{ matrix.os }}
env:
CARGO_TERM_COLOR: always
steps:
- name: checkout
uses: actions/checkout@08c6903cd8c0fde910a37f88322edcfb5dd907a8 # v5.0.0
with:
persist-credentials: false
- name: install rust toolchain
run: rustup toolchain install stable --profile minimal --target wasm32-unknown-unknown --no-self-update
- name: cache
uses: ./
with:
workspaces: |
tests/wasm-workspace
add-job-id-key: "false"
add-rust-environment-hash-key: "false"
- name: cargo check (tests/wasm-workspace)
working-directory: tests/wasm-workspace
run: cargo check

.gitignore (1 change)

@@ -1,5 +1,6 @@
node_modules/
target/
src/*.js
# Editors
.idea/

README.md

@@ -28,6 +28,16 @@ sensible defaults.
# default: empty
key: ""
# Whether the automatic `job`-based cache key should include the job id.
# default: "true"
add-job-id-key: ""
# Whether a hash of the rust environment should be included in the cache key.
# This includes a hash of all Cargo.toml/Cargo.lock files, rust-toolchain files,
# and .cargo/config.toml files (if present), as well as the specified 'env-vars'.
# default: "true"
add-rust-environment-hash-key: ""
# A whitespace-separated list of env-var *prefixes* whose values contribute
# to the environment cache key.
# The env-vars are matched by *prefix*, so the default `RUST` var will
@@ -121,12 +131,14 @@ This action currently caches the following files/directories:
This cache is automatically keyed by:
- the github [`job_id`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_id),
- the github [`job_id`](https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_id)
(if `add-job-id-key` is `"true"`),
- the rustc release / host / hash,
- the value of some compiler-specific environment variables (eg. RUSTFLAGS, etc), and
- a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
- a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
- a hash of all `.cargo/config.toml` files in the root of the repository (if present).
- the following values, if `add-rust-environment-hash-key` is `"true"`:
- the value of some compiler-specific environment variables (eg. RUSTFLAGS, etc), and
- a hash of all `Cargo.lock` / `Cargo.toml` files found anywhere in the repository (if present).
- a hash of all `rust-toolchain` / `rust-toolchain.toml` files in the root of the repository (if present).
- a hash of all `.cargo/config.toml` files in the root of the repository (if present).
An additional input `key` can be provided if the builtin keys are not sufficient.
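
To make the two new inputs concrete, here is a minimal TypeScript sketch of the key construction described above (not the action's actual code; `base`, `jobId`, and `envHash` are hypothetical placeholders):

// Minimal sketch; all names and values are hypothetical placeholders.
function buildCacheKey(opts: {
  base: string; // "v0-rust" plus the rustc release/host/hash component
  jobId: string | undefined; // process.env.GITHUB_JOB
  envHash: string; // digest over env-vars, manifests, and toolchain files
  addJobIdKey: boolean; // input `add-job-id-key`
  addRustEnvironmentHashKey: boolean; // input `add-rust-environment-hash-key`
}): string {
  let key = opts.base;
  if (opts.addJobIdKey && opts.jobId) {
    key += `-${opts.jobId}`; // differentiates caches per job
  }
  if (opts.addRustEnvironmentHashKey) {
    key += `-${opts.envHash}`; // invalidates the cache on environment changes
  }
  return key;
}

// With both inputs "false", two different jobs compute the same key and can
// share one cache entry, as the multi-job-cache workflow above demonstrates.
console.log(buildCacheKey({
  base: "v0-rust", jobId: "check", envHash: "0a1b2c",
  addJobIdKey: false, addRustEnvironmentHashKey: false,
})); // => "v0-rust"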

action.yml

@@ -12,6 +12,14 @@ inputs:
key:
description: "An additional cache key that is added alongside the automatic `job`-based cache key and can be used to further differentiate jobs."
required: false
add-job-id-key:
description: "If the automatic `job`-based cache key should include the job id. Defaults to true."
required: false
default: "true"
add-rust-environment-hash-key:
description: "Weather the a hash of the rust environment should be included in the cache key. This includes a hash of all Cargo.toml/Cargo.lock files, rust-toolchain files, and .cargo/config.toml files (if present), as well as the specified 'env-vars'. Defaults to true."
required: false
default: "true"
env-vars:
description: "Additional environment variables to include in the cache key, separated by spaces."
required: false
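
Because `core.getInput` always returns a string, the action lowercases the value and compares it to "true"; the `default: "true"` entries above are what make both behaviors opt-out rather than opt-in. A sketch of that pattern, assuming only `@actions/core`:

import * as core from "@actions/core";

// getInput returns "" for an input that is neither set by the caller nor
// given a default in action.yml, which this comparison would read as "false".
const addJobIdKey = core.getInput("add-job-id-key").toLowerCase() === "true";
const addRustEnvironmentHashKey =
  core.getInput("add-rust-environment-hash-key").toLowerCase() === "true";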

dist/restore/index.js (213 changes)

@@ -2737,7 +2737,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0;
exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.FinalizeCacheError = exports.ReserveCacheError = exports.ValidationError = void 0;
const core = __importStar(__nccwpck_require__(37484));
const path = __importStar(__nccwpck_require__(16928));
const utils = __importStar(__nccwpck_require__(98299));
@@ -2745,7 +2745,6 @@ const cacheHttpClient = __importStar(__nccwpck_require__(73171));
const cacheTwirpClient = __importStar(__nccwpck_require__(96819));
const config_1 = __nccwpck_require__(17606);
const tar_1 = __nccwpck_require__(95321);
const constants_1 = __nccwpck_require__(58287);
const http_client_1 = __nccwpck_require__(54844);
class ValidationError extends Error {
constructor(message) {
@@ -2763,6 +2762,14 @@ class ReserveCacheError extends Error {
}
}
exports.ReserveCacheError = ReserveCacheError;
class FinalizeCacheError extends Error {
constructor(message) {
super(message);
this.name = 'FinalizeCacheError';
Object.setPrototypeOf(this, FinalizeCacheError.prototype);
}
}
exports.FinalizeCacheError = FinalizeCacheError;
function checkPaths(paths) {
if (!paths || paths.length === 0) {
throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);
@@ -3139,10 +3146,6 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
// Set the archive size in the options, will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize;
core.debug('Reserving Cache');
@@ -3155,7 +3158,10 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
try {
const response = yield twirpClient.CreateCacheEntry(request);
if (!response.ok) {
throw new Error('Response was not ok');
if (response.message) {
core.warning(`Cache reservation failed: ${response.message}`);
}
throw new Error(response.message || 'Response was not ok');
}
signedUploadUrl = response.signedUploadUrl;
}
@@ -3173,6 +3179,9 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);
core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
if (!finalizeResponse.ok) {
if (finalizeResponse.message) {
throw new FinalizeCacheError(finalizeResponse.message);
}
throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`);
}
cacheId = parseInt(finalizeResponse.entryId);
@@ -3185,6 +3194,9 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`);
}
else if (typedError.name === FinalizeCacheError.name) {
core.warning(typedError.message);
}
else {
// Log server errors (5xx) as errors, all other errors as warnings
if (typedError instanceof http_client_1.HttpClientError &&
@@ -3296,11 +3308,12 @@ class CreateCacheEntryResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, signedUploadUrl: "" };
const message = { ok: false, signedUploadUrl: "", message: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3317,6 +3330,9 @@ class CreateCacheEntryResponse$Type extends runtime_5.MessageType {
case /* string signed_upload_url */ 2:
message.signedUploadUrl = reader.string();
break;
case /* string message */ 3:
message.message = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
@@ -3335,6 +3351,9 @@ class CreateCacheEntryResponse$Type extends runtime_5.MessageType {
/* string signed_upload_url = 2; */
if (message.signedUploadUrl !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
/* string message = 3; */
if (message.message !== "")
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -3418,11 +3437,12 @@ class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, entryId: "0" };
const message = { ok: false, entryId: "0", message: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3439,6 +3459,9 @@ class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {
case /* int64 entry_id */ 2:
message.entryId = reader.int64().toString();
break;
case /* string message */ 3:
message.message = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
@@ -3457,6 +3480,9 @@ class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {
/* int64 entry_id = 2; */
if (message.entryId !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
/* string message = 3; */
if (message.message !== "")
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -147306,7 +147332,7 @@ module.exports = axios;
/***/ ((module) => {
"use strict";
module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/cache","version":"4.0.5","preview":true,"description":"Actions cache lib","keywords":["github","actions","cache"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/cache","license":"MIT","main":"lib/cache.js","types":"lib/cache.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/cache"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"echo \\"Error: run tests from root\\" && exit 1","tsc":"tsc"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.11.1","@actions/exec":"^1.0.1","@actions/glob":"^0.1.0","@protobuf-ts/runtime-rpc":"^2.11.1","@actions/http-client":"^2.1.1","@actions/io":"^1.0.1","@azure/abort-controller":"^1.1.0","@azure/ms-rest-js":"^2.6.0","@azure/storage-blob":"^12.13.0","semver":"^6.3.1"},"devDependencies":{"@types/node":"^22.13.9","@types/semver":"^6.0.0","@protobuf-ts/plugin":"^2.9.4","typescript":"^5.2.2"}}');
module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/cache","version":"4.1.0","preview":true,"description":"Actions cache lib","keywords":["github","actions","cache"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/cache","license":"MIT","main":"lib/cache.js","types":"lib/cache.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/cache"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"echo \\"Error: run tests from root\\" && exit 1","tsc":"tsc"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.11.1","@actions/exec":"^1.0.1","@actions/glob":"^0.1.0","@protobuf-ts/runtime-rpc":"^2.11.1","@actions/http-client":"^2.1.1","@actions/io":"^1.0.1","@azure/abort-controller":"^1.1.0","@azure/ms-rest-js":"^2.6.0","@azure/storage-blob":"^12.13.0","semver":"^6.3.1"},"devDependencies":{"@types/node":"^22.13.9","@types/semver":"^6.0.0","@protobuf-ts/plugin":"^2.9.4","typescript":"^5.2.2"}}');
/***/ }),
@@ -148590,7 +148616,7 @@ var lib_cache = __nccwpck_require__(24318);
var warpbuild_cache_lib_cache = __nccwpck_require__(22343);
// EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js
var cache_lib_cache = __nccwpck_require__(5116);
;// CONCATENATED MODULE: ./src/utils.ts
;// CONCATENATED MODULE: ./src/utils.js
@@ -148664,7 +148690,7 @@ async function utils_exists(path) {
}
}
;// CONCATENATED MODULE: ./src/workspace.ts
;// CONCATENATED MODULE: ./src/workspace.js
@@ -148701,7 +148727,7 @@ class Workspace {
}
}
;// CONCATENATED MODULE: ./src/config.ts
;// CONCATENATED MODULE: ./src/config.js
@@ -148761,7 +148787,7 @@ class CacheConfig {
key += `-${inputKey}`;
}
const job = process.env.GITHUB_JOB;
if (job) {
if ((job) && lib_core.getInput("add-job-id-key").toLowerCase() == "true") {
key += `-${job}`;
}
}
@@ -148798,7 +148824,10 @@ class CacheConfig {
}
}
self.keyEnvs = keyEnvs;
key += `-${digest(hasher)}`;
// Add the rust environment hash suffix if 'add-rust-environment-hash-key' is true
if (lib_core.getInput("add-rust-environment-hash-key").toLowerCase() == "true") {
key += `-${digest(hasher)}`;
}
self.restoreKey = key;
// Construct the lockfiles portion of the key:
// This considers all the files found via globbing for various manifests
@@ -148815,90 +148844,94 @@ class CacheConfig {
workspaces.push(new Workspace(root, target));
}
self.workspaces = workspaces;
let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed
hasher = external_crypto_default().createHash("sha1");
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
const workspaceMembers = await workspace.getWorkspaceMembers();
const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => external_path_default().join(member.path, "Cargo.toml")));
for (const cargo_manifest of cargo_manifests) {
try {
const content = await promises_default().readFile(cargo_manifest, { encoding: "utf8" });
// Use any since TomlPrimitive is not exposed
const parsed = parse(content);
if ("package" in parsed) {
const pack = parsed.package;
if ("version" in pack) {
pack["version"] = "0.0.0";
}
}
for (const prefix of ["", "build-", "dev-"]) {
const section_name = `${prefix}dependencies`;
if (!(section_name in parsed)) {
continue;
}
const deps = parsed[section_name];
for (const key of Object.keys(deps)) {
const dep = deps[key];
try {
if ("path" in dep) {
dep.version = "0.0.0";
dep.path = "";
}
// Add hash suffix of all rust environment lockfiles + manifests if
// 'add-rust-environment-hash-key' is true
if (lib_core.getInput("add-rust-environment-hash-key").toLowerCase() == "true") {
let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed
hasher = external_crypto_default().createHash("sha1");
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
const workspaceMembers = await workspace.getWorkspaceMembers();
const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => external_path_default().join(member.path, "Cargo.toml")));
for (const cargo_manifest of cargo_manifests) {
try {
const content = await promises_default().readFile(cargo_manifest, { encoding: "utf8" });
// Use any since TomlPrimitive is not exposed
const parsed = parse(content);
if ("package" in parsed) {
const pack = parsed.package;
if ("version" in pack) {
pack["version"] = "0.0.0";
}
catch (_e) {
// Not an object, probably a string (version),
// continue.
}
for (const prefix of ["", "build-", "dev-"]) {
const section_name = `${prefix}dependencies`;
if (!(section_name in parsed)) {
continue;
}
const deps = parsed[section_name];
for (const key of Object.keys(deps)) {
const dep = deps[key];
try {
if ("path" in dep) {
dep.version = "0.0.0";
dep.path = "";
}
}
catch (_e) {
// Not an object, probably a string (version),
// continue.
continue;
}
}
}
hasher.update(JSON.stringify(parsed));
parsedKeyFiles.push(cargo_manifest);
}
catch (e) {
// Fallback to caching them as regular file
lib_core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_manifest);
}
hasher.update(JSON.stringify(parsed));
parsedKeyFiles.push(cargo_manifest);
}
catch (e) {
// Fallback to caching them as regular file
lib_core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_manifest);
}
}
const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock");
if (await utils_exists(cargo_lock)) {
try {
const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" });
const parsed = parse(content);
if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
// Fallback to caching them as regular file since this action
// can only handle Cargo.lock format version 3
lib_core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock");
if (await utils_exists(cargo_lock)) {
try {
const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" });
const parsed = parse(content);
if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
// Fallback to caching them as regular file since this action
// can only handle Cargo.lock format version 3
lib_core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
keyFiles.push(cargo_lock);
continue;
}
// Package without `[[package]].source` and `[[package]].checksum`
// are the one with `path = "..."` to crates within the workspace.
const packages = parsed.package.filter((p) => "source" in p || "checksum" in p);
hasher.update(JSON.stringify(packages));
parsedKeyFiles.push(cargo_lock);
}
catch (e) {
// Fallback to caching them as regular file
lib_core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_lock);
continue;
}
// Package without `[[package]].source` and `[[package]].checksum`
// are the one with `path = "..."` to crates within the workspace.
const packages = parsed.package.filter((p) => "source" in p || "checksum" in p);
hasher.update(JSON.stringify(packages));
parsedKeyFiles.push(cargo_lock);
}
catch (e) {
// Fallback to caching them as regular file
lib_core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_lock);
}
}
}
keyFiles = sort_and_uniq(keyFiles);
for (const file of keyFiles) {
for await (const chunk of external_fs_default().createReadStream(file)) {
hasher.update(chunk);
keyFiles = sort_and_uniq(keyFiles);
for (const file of keyFiles) {
for await (const chunk of external_fs_default().createReadStream(file)) {
hasher.update(chunk);
}
}
keyFiles.push(...parsedKeyFiles);
self.keyFiles = sort_and_uniq(keyFiles);
let lockHash = digest(hasher);
key += `-${lockHash}`;
}
let lockHash = digest(hasher);
keyFiles.push(...parsedKeyFiles);
self.keyFiles = sort_and_uniq(keyFiles);
key += `-${lockHash}`;
self.cacheKey = key;
self.cachePaths = [external_path_default().join(config_CARGO_HOME, "registry"), external_path_default().join(config_CARGO_HOME, "git")];
if (self.cacheBin) {
@@ -149031,7 +149064,7 @@ function sort_and_uniq(a) {
}, []);
}
;// CONCATENATED MODULE: ./src/cleanup.ts
;// CONCATENATED MODULE: ./src/cleanup.js

dist/save/index.js (213 changes)

@@ -2737,7 +2737,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.ReserveCacheError = exports.ValidationError = void 0;
exports.saveCache = exports.restoreCache = exports.isFeatureAvailable = exports.FinalizeCacheError = exports.ReserveCacheError = exports.ValidationError = void 0;
const core = __importStar(__nccwpck_require__(37484));
const path = __importStar(__nccwpck_require__(16928));
const utils = __importStar(__nccwpck_require__(98299));
@@ -2745,7 +2745,6 @@ const cacheHttpClient = __importStar(__nccwpck_require__(73171));
const cacheTwirpClient = __importStar(__nccwpck_require__(96819));
const config_1 = __nccwpck_require__(17606);
const tar_1 = __nccwpck_require__(95321);
const constants_1 = __nccwpck_require__(58287);
const http_client_1 = __nccwpck_require__(54844);
class ValidationError extends Error {
constructor(message) {
@@ -2763,6 +2762,14 @@ class ReserveCacheError extends Error {
}
}
exports.ReserveCacheError = ReserveCacheError;
class FinalizeCacheError extends Error {
constructor(message) {
super(message);
this.name = 'FinalizeCacheError';
Object.setPrototypeOf(this, FinalizeCacheError.prototype);
}
}
exports.FinalizeCacheError = FinalizeCacheError;
function checkPaths(paths) {
if (!paths || paths.length === 0) {
throw new ValidationError(`Path Validation Error: At least one directory or file path is required`);
@@ -3139,10 +3146,6 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
}
const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
core.debug(`File Size: ${archiveFileSize}`);
// For GHES, this check will take place in ReserveCache API with enterprise file size limit
if (archiveFileSize > constants_1.CacheFileSizeLimit && !(0, config_1.isGhes)()) {
throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
}
// Set the archive size in the options, will be used to display the upload progress
options.archiveSizeBytes = archiveFileSize;
core.debug('Reserving Cache');
@@ -3155,7 +3158,10 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
try {
const response = yield twirpClient.CreateCacheEntry(request);
if (!response.ok) {
throw new Error('Response was not ok');
if (response.message) {
core.warning(`Cache reservation failed: ${response.message}`);
}
throw new Error(response.message || 'Response was not ok');
}
signedUploadUrl = response.signedUploadUrl;
}
@@ -3173,6 +3179,9 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
const finalizeResponse = yield twirpClient.FinalizeCacheEntryUpload(finalizeRequest);
core.debug(`FinalizeCacheEntryUploadResponse: ${finalizeResponse.ok}`);
if (!finalizeResponse.ok) {
if (finalizeResponse.message) {
throw new FinalizeCacheError(finalizeResponse.message);
}
throw new Error(`Unable to finalize cache with key ${key}, another job may be finalizing this cache.`);
}
cacheId = parseInt(finalizeResponse.entryId);
@@ -3185,6 +3194,9 @@ function saveCacheV2(paths, key, options, enableCrossOsArchive = false) {
else if (typedError.name === ReserveCacheError.name) {
core.info(`Failed to save: ${typedError.message}`);
}
else if (typedError.name === FinalizeCacheError.name) {
core.warning(typedError.message);
}
else {
// Log server errors (5xx) as errors, all other errors as warnings
if (typedError instanceof http_client_1.HttpClientError &&
@@ -3296,11 +3308,12 @@ class CreateCacheEntryResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.CreateCacheEntryResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
{ no: 2, name: "signed_upload_url", kind: "scalar", T: 9 /*ScalarType.STRING*/ },
{ no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, signedUploadUrl: "" };
const message = { ok: false, signedUploadUrl: "", message: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3317,6 +3330,9 @@ class CreateCacheEntryResponse$Type extends runtime_5.MessageType {
case /* string signed_upload_url */ 2:
message.signedUploadUrl = reader.string();
break;
case /* string message */ 3:
message.message = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
@@ -3335,6 +3351,9 @@ class CreateCacheEntryResponse$Type extends runtime_5.MessageType {
/* string signed_upload_url = 2; */
if (message.signedUploadUrl !== "")
writer.tag(2, runtime_1.WireType.LengthDelimited).string(message.signedUploadUrl);
/* string message = 3; */
if (message.message !== "")
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -3418,11 +3437,12 @@ class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {
constructor() {
super("github.actions.results.api.v1.FinalizeCacheEntryUploadResponse", [
{ no: 1, name: "ok", kind: "scalar", T: 8 /*ScalarType.BOOL*/ },
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ }
{ no: 2, name: "entry_id", kind: "scalar", T: 3 /*ScalarType.INT64*/ },
{ no: 3, name: "message", kind: "scalar", T: 9 /*ScalarType.STRING*/ }
]);
}
create(value) {
const message = { ok: false, entryId: "0" };
const message = { ok: false, entryId: "0", message: "" };
globalThis.Object.defineProperty(message, runtime_4.MESSAGE_TYPE, { enumerable: false, value: this });
if (value !== undefined)
(0, runtime_3.reflectionMergePartial)(this, message, value);
@@ -3439,6 +3459,9 @@ class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {
case /* int64 entry_id */ 2:
message.entryId = reader.int64().toString();
break;
case /* string message */ 3:
message.message = reader.string();
break;
default:
let u = options.readUnknownField;
if (u === "throw")
@@ -3457,6 +3480,9 @@ class FinalizeCacheEntryUploadResponse$Type extends runtime_5.MessageType {
/* int64 entry_id = 2; */
if (message.entryId !== "0")
writer.tag(2, runtime_1.WireType.Varint).int64(message.entryId);
/* string message = 3; */
if (message.message !== "")
writer.tag(3, runtime_1.WireType.LengthDelimited).string(message.message);
let u = options.writeUnknownFields;
if (u !== false)
(u == true ? runtime_2.UnknownFieldHandler.onWrite : u)(this.typeName, message, writer);
@@ -147306,7 +147332,7 @@ module.exports = axios;
/***/ ((module) => {
"use strict";
module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/cache","version":"4.0.5","preview":true,"description":"Actions cache lib","keywords":["github","actions","cache"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/cache","license":"MIT","main":"lib/cache.js","types":"lib/cache.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/cache"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"echo \\"Error: run tests from root\\" && exit 1","tsc":"tsc"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.11.1","@actions/exec":"^1.0.1","@actions/glob":"^0.1.0","@protobuf-ts/runtime-rpc":"^2.11.1","@actions/http-client":"^2.1.1","@actions/io":"^1.0.1","@azure/abort-controller":"^1.1.0","@azure/ms-rest-js":"^2.6.0","@azure/storage-blob":"^12.13.0","semver":"^6.3.1"},"devDependencies":{"@types/node":"^22.13.9","@types/semver":"^6.0.0","@protobuf-ts/plugin":"^2.9.4","typescript":"^5.2.2"}}');
module.exports = /*#__PURE__*/JSON.parse('{"name":"@actions/cache","version":"4.1.0","preview":true,"description":"Actions cache lib","keywords":["github","actions","cache"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/cache","license":"MIT","main":"lib/cache.js","types":"lib/cache.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/cache"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"echo \\"Error: run tests from root\\" && exit 1","tsc":"tsc"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.11.1","@actions/exec":"^1.0.1","@actions/glob":"^0.1.0","@protobuf-ts/runtime-rpc":"^2.11.1","@actions/http-client":"^2.1.1","@actions/io":"^1.0.1","@azure/abort-controller":"^1.1.0","@azure/ms-rest-js":"^2.6.0","@azure/storage-blob":"^12.13.0","semver":"^6.3.1"},"devDependencies":{"@types/node":"^22.13.9","@types/semver":"^6.0.0","@protobuf-ts/plugin":"^2.9.4","typescript":"^5.2.2"}}');
/***/ }),
@@ -148590,7 +148616,7 @@ var lib_cache = __nccwpck_require__(24318);
var warpbuild_cache_lib_cache = __nccwpck_require__(22343);
// EXTERNAL MODULE: ./node_modules/@actions/cache/lib/cache.js
var cache_lib_cache = __nccwpck_require__(5116);
;// CONCATENATED MODULE: ./src/utils.ts
;// CONCATENATED MODULE: ./src/utils.js
@@ -148664,7 +148690,7 @@ async function exists(path) {
}
}
;// CONCATENATED MODULE: ./src/workspace.ts
;// CONCATENATED MODULE: ./src/workspace.js
@@ -148701,7 +148727,7 @@ class Workspace {
}
}
;// CONCATENATED MODULE: ./src/config.ts
;// CONCATENATED MODULE: ./src/config.js
@@ -148761,7 +148787,7 @@ class CacheConfig {
key += `-${inputKey}`;
}
const job = process.env.GITHUB_JOB;
if (job) {
if ((job) && core.getInput("add-job-id-key").toLowerCase() == "true") {
key += `-${job}`;
}
}
@@ -148798,7 +148824,10 @@ class CacheConfig {
}
}
self.keyEnvs = keyEnvs;
key += `-${digest(hasher)}`;
// Add the rust environment hash suffix if 'add-rust-environment-hash-key' is true
if (core.getInput("add-rust-environment-hash-key").toLowerCase() == "true") {
key += `-${digest(hasher)}`;
}
self.restoreKey = key;
// Construct the lockfiles portion of the key:
// This considers all the files found via globbing for various manifests
@@ -148815,90 +148844,94 @@ class CacheConfig {
workspaces.push(new Workspace(root, target));
}
self.workspaces = workspaces;
let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed
hasher = external_crypto_default().createHash("sha1");
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
const workspaceMembers = await workspace.getWorkspaceMembers();
const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => external_path_default().join(member.path, "Cargo.toml")));
for (const cargo_manifest of cargo_manifests) {
try {
const content = await promises_default().readFile(cargo_manifest, { encoding: "utf8" });
// Use any since TomlPrimitive is not exposed
const parsed = parse(content);
if ("package" in parsed) {
const pack = parsed.package;
if ("version" in pack) {
pack["version"] = "0.0.0";
}
}
for (const prefix of ["", "build-", "dev-"]) {
const section_name = `${prefix}dependencies`;
if (!(section_name in parsed)) {
continue;
}
const deps = parsed[section_name];
for (const key of Object.keys(deps)) {
const dep = deps[key];
try {
if ("path" in dep) {
dep.version = "0.0.0";
dep.path = "";
}
// Add hash suffix of all rust environment lockfiles + manifests if
// 'add-rust-environment-hash-key' is true
if (core.getInput("add-rust-environment-hash-key").toLowerCase() == "true") {
let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed
hasher = external_crypto_default().createHash("sha1");
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(...(await globFiles(`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`)));
const workspaceMembers = await workspace.getWorkspaceMembers();
const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => external_path_default().join(member.path, "Cargo.toml")));
for (const cargo_manifest of cargo_manifests) {
try {
const content = await promises_default().readFile(cargo_manifest, { encoding: "utf8" });
// Use any since TomlPrimitive is not exposed
const parsed = parse(content);
if ("package" in parsed) {
const pack = parsed.package;
if ("version" in pack) {
pack["version"] = "0.0.0";
}
catch (_e) {
// Not an object, probably a string (version),
// continue.
}
for (const prefix of ["", "build-", "dev-"]) {
const section_name = `${prefix}dependencies`;
if (!(section_name in parsed)) {
continue;
}
const deps = parsed[section_name];
for (const key of Object.keys(deps)) {
const dep = deps[key];
try {
if ("path" in dep) {
dep.version = "0.0.0";
dep.path = "";
}
}
catch (_e) {
// Not an object, probably a string (version),
// continue.
continue;
}
}
}
hasher.update(JSON.stringify(parsed));
parsedKeyFiles.push(cargo_manifest);
}
catch (e) {
// Fallback to caching them as regular file
core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_manifest);
}
hasher.update(JSON.stringify(parsed));
parsedKeyFiles.push(cargo_manifest);
}
catch (e) {
// Fallback to caching them as regular file
core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_manifest);
}
}
const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock");
if (await exists(cargo_lock)) {
try {
const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" });
const parsed = parse(content);
if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
// Fallback to caching them as regular file since this action
// can only handle Cargo.lock format version 3
core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
const cargo_lock = external_path_default().join(workspace.root, "Cargo.lock");
if (await exists(cargo_lock)) {
try {
const content = await promises_default().readFile(cargo_lock, { encoding: "utf8" });
const parsed = parse(content);
if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
// Fallback to caching them as regular file since this action
// can only handle Cargo.lock format version 3
core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
keyFiles.push(cargo_lock);
continue;
}
// Package without `[[package]].source` and `[[package]].checksum`
// are the one with `path = "..."` to crates within the workspace.
const packages = parsed.package.filter((p) => "source" in p || "checksum" in p);
hasher.update(JSON.stringify(packages));
parsedKeyFiles.push(cargo_lock);
}
catch (e) {
// Fallback to caching them as regular file
core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_lock);
continue;
}
// Package without `[[package]].source` and `[[package]].checksum`
// are the one with `path = "..."` to crates within the workspace.
const packages = parsed.package.filter((p) => "source" in p || "checksum" in p);
hasher.update(JSON.stringify(packages));
parsedKeyFiles.push(cargo_lock);
}
catch (e) {
// Fallback to caching them as regular file
core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_lock);
}
}
}
keyFiles = sort_and_uniq(keyFiles);
for (const file of keyFiles) {
for await (const chunk of external_fs_default().createReadStream(file)) {
hasher.update(chunk);
keyFiles = sort_and_uniq(keyFiles);
for (const file of keyFiles) {
for await (const chunk of external_fs_default().createReadStream(file)) {
hasher.update(chunk);
}
}
keyFiles.push(...parsedKeyFiles);
self.keyFiles = sort_and_uniq(keyFiles);
let lockHash = digest(hasher);
key += `-${lockHash}`;
}
let lockHash = digest(hasher);
keyFiles.push(...parsedKeyFiles);
self.keyFiles = sort_and_uniq(keyFiles);
key += `-${lockHash}`;
self.cacheKey = key;
self.cachePaths = [external_path_default().join(CARGO_HOME, "registry"), external_path_default().join(CARGO_HOME, "git")];
if (self.cacheBin) {
@@ -149031,7 +149064,7 @@ function sort_and_uniq(a) {
}, []);
}
;// CONCATENATED MODULE: ./src/cleanup.ts
;// CONCATENATED MODULE: ./src/cleanup.js

src/config.ts

@@ -69,7 +69,7 @@ export class CacheConfig {
}
const job = process.env.GITHUB_JOB;
if (job) {
if ((job) && core.getInput("add-job-id-key").toLowerCase() == "true") {
key += `-${job}`;
}
}
@@ -116,7 +116,10 @@ export class CacheConfig {
self.keyEnvs = keyEnvs;
key += `-${digest(hasher)}`;
// Add the rust environment hash suffix if 'add-rust-environment-hash-key' is true
if (core.getInput("add-rust-environment-hash-key").toLowerCase() == "true") {
key += `-${digest(hasher)}`;
}
self.restoreKey = key;
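
The split between `restoreKey` and `cacheKey` established here is what makes partial matches work: the restore key stops before the lockfile hash, so a run whose lockfiles changed can still restore the nearest cache. A small illustration with hypothetical digest values:

// Hypothetical values; the real digests come from the hashers in this file.
const prefix = "v0-rust-check-0a1b2c3d"; // base + job id + environment hash
const lockHash = "9f8e7d6c"; // digest over Cargo.lock / Cargo.toml contents

const restoreKey = prefix; // used for prefix matching on restore
const cacheKey = `${prefix}-${lockHash}`; // exact key used when saving
// With add-rust-environment-hash-key "false", neither hash is appended and
// restoreKey === cacheKey.
console.log(restoreKey, cacheKey);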
@@ -139,111 +142,115 @@ export class CacheConfig {
}
self.workspaces = workspaces;
let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed
// Add hash suffix of all rust environment lockfiles + manifests if
// 'add-rust-environment-hash-key' is true
if (core.getInput("add-rust-environment-hash-key").toLowerCase() == "true") {
let keyFiles = await globFiles(".cargo/config.toml\nrust-toolchain\nrust-toolchain.toml");
const parsedKeyFiles = []; // keyFiles that are parsed, pre-processed and hashed
hasher = crypto.createHash("sha1");
hasher = crypto.createHash("sha1");
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(
...(await globFiles(
`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
)),
);
for (const workspace of workspaces) {
const root = workspace.root;
keyFiles.push(
...(await globFiles(
`${root}/**/.cargo/config.toml\n${root}/**/rust-toolchain\n${root}/**/rust-toolchain.toml`,
)),
);
const workspaceMembers = await workspace.getWorkspaceMembers();
const workspaceMembers = await workspace.getWorkspaceMembers();
const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => path.join(member.path, "Cargo.toml")));
const cargo_manifests = sort_and_uniq(workspaceMembers.map((member) => path.join(member.path, "Cargo.toml")));
for (const cargo_manifest of cargo_manifests) {
try {
const content = await fs_promises.readFile(cargo_manifest, { encoding: "utf8" });
// Use any since TomlPrimitive is not exposed
const parsed = toml.parse(content) as { [key: string]: any };
for (const cargo_manifest of cargo_manifests) {
try {
const content = await fs_promises.readFile(cargo_manifest, { encoding: "utf8" });
// Use any since TomlPrimitive is not exposed
const parsed = toml.parse(content) as { [key: string]: any };
if ("package" in parsed) {
const pack = parsed.package;
if ("version" in pack) {
pack["version"] = "0.0.0";
}
}
for (const prefix of ["", "build-", "dev-"]) {
const section_name = `${prefix}dependencies`;
if (!(section_name in parsed)) {
continue;
}
const deps = parsed[section_name];
for (const key of Object.keys(deps)) {
const dep = deps[key];
try {
if ("path" in dep) {
dep.version = "0.0.0";
dep.path = "";
}
} catch (_e) {
// Not an object, probably a string (version),
// continue.
continue;
if ("package" in parsed) {
const pack = parsed.package;
if ("version" in pack) {
pack["version"] = "0.0.0";
}
}
for (const prefix of ["", "build-", "dev-"]) {
const section_name = `${prefix}dependencies`;
if (!(section_name in parsed)) {
continue;
}
const deps = parsed[section_name];
for (const key of Object.keys(deps)) {
const dep = deps[key];
try {
if ("path" in dep) {
dep.version = "0.0.0";
dep.path = "";
}
} catch (_e) {
// Not an object, probably a string (version),
// continue.
continue;
}
}
}
hasher.update(JSON.stringify(parsed));
parsedKeyFiles.push(cargo_manifest);
} catch (e) {
// Fallback to caching them as regular file
core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_manifest);
}
hasher.update(JSON.stringify(parsed));
parsedKeyFiles.push(cargo_manifest);
} catch (e) {
// Fallback to caching them as regular file
core.warning(`Error parsing Cargo.toml manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_manifest);
}
}
const cargo_lock = path.join(workspace.root, "Cargo.lock");
if (await exists(cargo_lock)) {
try {
const content = await fs_promises.readFile(cargo_lock, { encoding: "utf8" });
const parsed = toml.parse(content);
const cargo_lock = path.join(workspace.root, "Cargo.lock");
if (await exists(cargo_lock)) {
try {
const content = await fs_promises.readFile(cargo_lock, { encoding: "utf8" });
const parsed = toml.parse(content);
if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
// Fallback to caching them as regular file since this action
// can only handle Cargo.lock format version 3
core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
if ((parsed.version !== 3 && parsed.version !== 4) || !("package" in parsed)) {
// Fallback to caching them as regular file since this action
// can only handle Cargo.lock format version 3
core.warning("Unsupported Cargo.lock format, fallback to caching entire file");
keyFiles.push(cargo_lock);
continue;
}
// Package without `[[package]].source` and `[[package]].checksum`
// are the one with `path = "..."` to crates within the workspace.
const packages = (parsed.package as any[]).filter((p: any) => "source" in p || "checksum" in p);
hasher.update(JSON.stringify(packages));
parsedKeyFiles.push(cargo_lock);
} catch (e) {
// Fallback to caching them as regular file
core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_lock);
continue;
}
// Package without `[[package]].source` and `[[package]].checksum`
// are the one with `path = "..."` to crates within the workspace.
const packages = (parsed.package as any[]).filter((p: any) => "source" in p || "checksum" in p);
hasher.update(JSON.stringify(packages));
parsedKeyFiles.push(cargo_lock);
} catch (e) {
// Fallback to caching them as regular file
core.warning(`Error parsing Cargo.lock manifest, fallback to caching entire file: ${e}`);
keyFiles.push(cargo_lock);
}
}
}
keyFiles = sort_and_uniq(keyFiles);
keyFiles = sort_and_uniq(keyFiles);
for (const file of keyFiles) {
for await (const chunk of fs.createReadStream(file)) {
hasher.update(chunk);
for (const file of keyFiles) {
for await (const chunk of fs.createReadStream(file)) {
hasher.update(chunk);
}
}
keyFiles.push(...parsedKeyFiles);
self.keyFiles = sort_and_uniq(keyFiles);
let lockHash = digest(hasher);
key += `-${lockHash}`;
}
let lockHash = digest(hasher);
keyFiles.push(...parsedKeyFiles);
self.keyFiles = sort_and_uniq(keyFiles);
key += `-${lockHash}`;
self.cacheKey = key;
self.cachePaths = [path.join(CARGO_HOME, "registry"), path.join(CARGO_HOME, "git")];