mirror of https://code.forgejo.org/actions/cache.git synced 2025-04-11 06:13:35 +00:00
Commit 755333eb16 by Gregorio Litenstein, 2025-02-12 23:41:23 +00:00, committed by GitHub
No known key found for this signature in database; GPG key ID: B5690EEEBB952194
82 changed files with 239791 additions and 162634 deletions

Binary `.licenses/npm/*.dep.yml` dependency-license files (generated; diffs not shown). Named new files: @azure/core-xml, @fastify/busboy, @octokit/action, @octokit/auth-action, @octokit/auth-token, @octokit/core, @octokit/endpoint, @octokit/graphql, @octokit/request, @octokit/types-8.2.1, @octokit/types-9.3.2, agent-base-6.0.2, agent-base-7.1.3, debug, deprecation, fast-xml-parser, is-plain-object, ms, safe-buffer, strnum, undici, and universal-user-agent. Several other `.dep.yml` files also changed but are not listed by name here.

View file

@@ -38,6 +38,7 @@ Read more about the change & access the migration guide: [reference to the annou
### v3
* Added a workaround to allow updating/refreshing existing caches via the `refresh-cache` option, which requires a valid GitHub API token.
* Integrated with the new cache service (v2) APIs.
* Added support for caching in GHES 3.5+.
* Fixed download issue for files > 2GB during restore.
@@ -76,10 +77,12 @@ If you are using a `self-hosted` Windows runner, `GNU tar` and `zstd` are requir
* `enableCrossOsArchive` - An optional boolean when enabled, allows Windows runners to save or restore caches that can be restored or saved respectively on other platforms. Default: `false`
* `fail-on-cache-miss` - Fail the workflow if cache entry is not found. Default: `false`
* `lookup-only` - If true, only checks if cache entry exists and skips download. Does not change save cache behavior. Default: `false`
* `refresh-cache` - An optional boolean. When enabled, a matched key is deleted after being restored, allowing it to be reused with refreshed/updated content. Default: `false`
#### Environment Variables
* `SEGMENT_DOWNLOAD_TIMEOUT_MINS` - Segment download timeout (in minutes, default `10`) to abort download of the segment if not completed in the defined number of minutes. [Read more](https://github.com/actions/cache/blob/main/tips-and-workarounds.md#cache-segment-restore-timeout)
* `GITHUB_TOKEN` - A GitHub API token, required for authenticating to the API when the `refresh-cache` option is enabled.
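For illustration only (this snippet is not part of the diff; the action ref, path, and key are placeholders), a step that enables `refresh-cache` might look like the following sketch:

```yaml
- name: Cache dependencies, refreshing the entry on every hit
  uses: actions/cache@v3        # action ref is illustrative
  with:
    path: ~/.npm                # placeholder path
    key: ${{ runner.os }}-npm-cache
    refresh-cache: true
  env:
    # Required so the action can call the GitHub API to delete the matched entry before re-saving it
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```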
### Outputs

View file

@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";
import { Events, RefKey } from "../src/constants";
import * as actionUtils from "../src/utils/actionUtils";
@@ -12,9 +13,13 @@ let pristineEnv: NodeJS.ProcessEnv;
beforeAll(() => {
pristineEnv = process.env;
nock.disableNetConnect();
jest.spyOn(core, "getInput").mockImplementation((name, options) => {
return jest.requireActual("@actions/core").getInput(name, options);
});
testUtils.mockServer.listen({
onUnhandledRequest: "warn"
});
});
beforeEach(() => {
@@ -22,10 +27,15 @@ beforeEach(() => {
process.env = pristineEnv;
delete process.env[Events.Key];
delete process.env[RefKey];
delete process.env["GITHUB_REPOSITORY"];
delete process.env["GITHUB_TOKEN"];
delete process.env["GITHUB_ACTION"];
});
afterAll(() => {
process.env = pristineEnv;
testUtils.mockServer.close();
nock.enableNetConnect();
});
test("isGhes returns true if server url is not github.com", () => {
@@ -203,6 +213,133 @@ test("getInputAsBool throws if required and value missing", () => {
).toThrowError();
});
test("deleteCacheByKey produces 'HttpError: 404' when cache is not found.", async () => {
const event = Events.Push;
process.env["GITHUB_REPOSITORY"] = "owner/repo";
process.env["GITHUB_TOKEN"] =
"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";
process.env["GITHUB_ACTION"] = "__owner___run-repo";
process.env[Events.Key] = event;
process.env[RefKey] = "ref/heads/feature";
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const response = await actionUtils.deleteCacheByKey(
testUtils.failureCacheKey,
"owner",
"repo"
);
expect(logWarningMock).toHaveBeenCalledWith(
expect.stringMatching(/404: Not Found/i)
);
expect(response).toBe(undefined);
});
test("deleteCacheByKey does not delete anything if it finds more than one entry for the given key.", async () => {
const event = Events.Push;
process.env["GITHUB_REPOSITORY"] = "owner/repo";
process.env["GITHUB_TOKEN"] =
"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";
process.env["GITHUB_ACTION"] = "__owner___run-repo";
process.env[Events.Key] = event;
process.env[RefKey] = "";
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const response = await actionUtils.deleteCacheByKey(
testUtils.failureCacheKey,
"owner",
"repo"
);
expect(logWarningMock).toHaveBeenCalledWith(
`More than one cache entry found for key ${testUtils.failureCacheKey}`
);
expect(response).toBe(undefined);
});
test("deleteCacheByKey does not delete anything if the key matches a cache belonging to another ref.", async () => {
const event = Events.Push;
process.env["GITHUB_REPOSITORY"] = "owner/repo";
process.env["GITHUB_TOKEN"] =
"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";
process.env["GITHUB_ACTION"] = "__owner___run-repo";
process.env[Events.Key] = event;
process.env[RefKey] = "ref/heads/feature";
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const response = await actionUtils.deleteCacheByKey(
testUtils.wrongRefCacheKey,
"owner",
"repo"
);
expect(logWarningMock).toHaveBeenCalledWith(
`No cache entries for key ${testUtils.wrongRefCacheKey} belong to gitref ${process.env[RefKey]}.`
);
expect(response).toBe(undefined);
});
test("deleteCacheByKey produces 'HttpError: 404' when cache is not found.", async () => {
const event = Events.Push;
process.env["GITHUB_REPOSITORY"] = "owner/repo";
process.env["GITHUB_TOKEN"] =
"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";
process.env["GITHUB_ACTION"] = "__owner___run-repo";
process.env[Events.Key] = event;
process.env[RefKey] = "ref/heads/feature";
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const response = await actionUtils.deleteCacheByKey(
testUtils.failureCacheKey,
"owner",
"repo"
);
expect(logWarningMock).toHaveBeenCalledWith(
expect.stringMatching(/404: Not Found/i)
);
expect(response).toBe(undefined);
});
test("deleteCacheByKey produces 'HttpError: 401' on an invalid non-mocked request.", async () => {
const event = Events.Push;
process.env["GITHUB_REPOSITORY"] = "owner/repo";
process.env["GITHUB_TOKEN"] =
"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";
process.env["GITHUB_ACTION"] = "__owner___run-repo";
process.env[Events.Key] = event;
process.env[RefKey] = "ref/heads/feature";
await nock.enableNetConnect();
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const response = await actionUtils.deleteCacheByKey(
testUtils.passThroughCacheKey,
"owner",
"repo"
);
expect(logWarningMock).toHaveBeenCalledWith(
expect.stringMatching(/401: Bad Credentials/i)
);
expect(response).toBe(undefined);
nock.disableNetConnect();
});
test("deleteCacheByKey returns 204 / No Content when successful.", async () => {
const event = Events.Push;
process.env["GITHUB_REPOSITORY"] = "owner/repo";
process.env["GITHUB_TOKEN"] =
"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";
process.env["GITHUB_ACTION"] = "__owner___run-repo";
process.env[Events.Key] = event;
process.env[RefKey] = "ref/heads/feature";
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const response = await actionUtils.deleteCacheByKey(
testUtils.successCacheKey,
"owner",
"repo"
);
expect(response).toBe(204);
expect(logWarningMock).toHaveBeenCalledTimes(0);
});
test("isCacheFeatureAvailable for ac enabled", () => {
jest.spyOn(cache, "isFeatureAvailable").mockImplementation(() => true);

View file

@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";
import { Events, RefKey } from "../src/constants";
import { restoreRun } from "../src/restoreImpl";
@@ -9,6 +10,7 @@ import * as testUtils from "../src/utils/testUtils";
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
nock.disableNetConnect();
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
(key, cacheResult) => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -53,6 +55,10 @@ afterEach(() => {
delete process.env[RefKey];
});
afterAll(() => {
nock.enableNetConnect();
});
test("restore with no cache found", async () => {
const path = "node_modules";
const key = "node-test";

View file

@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";
import { Events, Inputs, RefKey } from "../src/constants";
import { restoreImpl } from "../src/restoreImpl";
@@ -10,6 +11,7 @@ import * as testUtils from "../src/utils/testUtils";
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
nock.disableNetConnect();
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
(key, cacheResult) => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -54,6 +56,10 @@ afterEach(() => {
delete process.env[RefKey];
});
afterAll(() => {
nock.enableNetConnect();
});
test("restore with invalid event outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");

View file

@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";
import { Events, RefKey } from "../src/constants";
import { restoreOnlyRun } from "../src/restoreImpl";
@@ -9,6 +10,7 @@ import * as testUtils from "../src/utils/testUtils";
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
nock.disableNetConnect();
jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
(key, cacheResult) => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -54,6 +56,10 @@ afterEach(() => {
delete process.env[RefKey];
});
afterAll(() => {
nock.enableNetConnect();
});
test("restore with no cache found", async () => {
const path = "node_modules";
const key = "node-test";

View file

@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";
import { Events, Inputs, RefKey } from "../src/constants";
import { saveRun } from "../src/saveImpl";
@@ -11,6 +12,7 @@ jest.mock("@actions/cache");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
nock.disableNetConnect();
jest.spyOn(core, "getInput").mockImplementation((name, options) => {
return jest.requireActual("@actions/core").getInput(name, options);
});
@@ -73,10 +75,14 @@ afterEach(() => {
delete process.env[RefKey];
});
afterAll(() => {
nock.enableNetConnect();
});
test("save with valid inputs uploads a cache", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey = testUtils.successCacheKey;
const savedCacheKey = "Linux-node-";
jest.spyOn(core, "getState")

View file

@@ -1,9 +1,10 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";
import { Events, Inputs, RefKey } from "../src/constants";
import { saveImpl } from "../src/saveImpl";
import { StateProvider } from "../src/stateProvider";
import { NullStateProvider, StateProvider } from "../src/stateProvider";
import * as actionUtils from "../src/utils/actionUtils";
import * as testUtils from "../src/utils/testUtils";
@@ -12,6 +13,19 @@ jest.mock("@actions/cache");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
nock.disableNetConnect();
testUtils.mockServer.listen({
onUnhandledRequest: "warn"
});
jest.spyOn(actionUtils, "deleteCacheByKey").mockImplementation(
(key: string, owner: string, repo: string) => {
return jest
.requireActual("../src/utils/actionUtils")
.deleteCacheByKey(key, owner, repo);
}
);
jest.spyOn(core, "getInput").mockImplementation((name, options) => {
return jest.requireActual("@actions/core").getInput(name, options);
});
@@ -52,6 +66,14 @@ beforeAll(() => {
const actualUtils = jest.requireActual("../src/utils/actionUtils");
return actualUtils.isValidEvent();
});
jest.spyOn(actionUtils, "logWarning").mockImplementation(
(message: string) => {
return jest
.requireActual("../src/utils/actionUtils")
.logWarning(message);
}
);
});
beforeEach(() => {
@@ -69,6 +91,13 @@ afterEach(() => {
testUtils.clearInputs();
delete process.env[Events.Key];
delete process.env[RefKey];
delete process.env["GITHUB_TOKEN"];
delete process.env["GITHUB_REPOSITORY"];
});
afterAll(() => {
testUtils.mockServer.close();
nock.enableNetConnect();
});
test("save with invalid event outputs warning", async () => {
@@ -88,7 +117,7 @@ test("save with no primary key in state outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const savedCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const savedCacheKey = testUtils.successCacheKey;
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
@@ -137,7 +166,7 @@ test("save on GHES with AC available", async () => {
jest.spyOn(actionUtils, "isGhes").mockImplementation(() => true);
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey = testUtils.successCacheKey;
const savedCacheKey = "Linux-node-";
jest.spyOn(core, "getState")
@@ -179,8 +208,10 @@ test("save on GHES with AC available", async () => {
test("save with exact match returns early", async () => {
const infoMock = jest.spyOn(core, "info");
const failedMock = jest.spyOn(core, "setFailed");
testUtils.setInput(Inputs.RefreshCache, "false");
const primaryKey = testUtils.successCacheKey;
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const savedCacheKey = primaryKey;
jest.spyOn(core, "getState")
@@ -207,7 +238,7 @@ test("save with missing input outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey = testUtils.successCacheKey;
const savedCacheKey = "Linux-node-";
jest.spyOn(core, "getState")
@@ -235,7 +266,7 @@ test("save with large cache outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey = testUtils.successCacheKey;
const savedCacheKey = "Linux-node-";
jest.spyOn(core, "getState")
@@ -280,7 +311,7 @@ test("save with reserve cache failure outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey = testUtils.successCacheKey;
const savedCacheKey = "Linux-node-";
jest.spyOn(core, "getState")
@@ -327,7 +358,7 @@ test("save with server error outputs warning", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey = testUtils.successCacheKey;
const savedCacheKey = "Linux-node-";
jest.spyOn(core, "getState")
@@ -368,7 +399,7 @@ test("save with server error outputs warning", async () => {
test("save with valid inputs uploads a cache", async () => {
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const primaryKey = testUtils.successCacheKey;
const savedCacheKey = "Linux-node-";
jest.spyOn(core, "getState")
@@ -406,3 +437,179 @@ test("save with valid inputs uploads a cache", async () => {
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with cache hit and refresh-cache will try to delete and re-create entry", async () => {
process.env["GITHUB_REPOSITORY"] = "owner/repo";
process.env["GITHUB_TOKEN"] =
"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";
process.env["GITHUB_ACTION"] = "__owner___run-repo";
const infoMock = jest.spyOn(core, "info");
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = testUtils.successCacheKey;
const savedCacheKey = primaryKey;
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return savedCacheKey;
})
.mockImplementationOnce(() => {
return primaryKey;
});
const inputPath = "node_modules";
testUtils.setInput(Inputs.RefreshCache, "true");
testUtils.setInput(Inputs.Path, inputPath);
testUtils.setInput(Inputs.UploadChunkSize, "4000000");
const cacheId = 4;
const saveCacheMock = jest
.spyOn(cache, "saveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
await saveImpl(new StateProvider());
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(
[inputPath],
primaryKey,
{
uploadChunkSize: 4000000
},
false
);
expect(logWarningMock).toHaveBeenCalledTimes(0);
expect(infoMock).toHaveBeenCalledTimes(3);
expect(infoMock).toHaveBeenNthCalledWith(
1,
`Cache hit occurred on the primary key ${primaryKey}, attempting to refresh the contents of the cache.`
);
expect(infoMock).toHaveBeenNthCalledWith(
2,
expect.stringMatching(
new RegExp(
`Successfully deleted cache with key: ${primaryKey}, id: \\d+`
)
)
);
expect(infoMock).toHaveBeenNthCalledWith(
3,
`Cache saved with key: ${primaryKey}`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("Granular save will use lookup to determine if cache needs to be updated or (not) saved.", async () => {
process.env["GITHUB_REPOSITORY"] = "owner/repo";
process.env["GITHUB_TOKEN"] =
"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";
process.env["GITHUB_ACTION"] = "__owner___run-repo";
const infoMock = jest.spyOn(core, "info");
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = testUtils.successCacheKey;
const inputPath = "node_modules";
testUtils.setInput(Inputs.Key, primaryKey);
testUtils.setInput(Inputs.RefreshCache, "true");
testUtils.setInput(Inputs.Path, inputPath);
testUtils.setInput(Inputs.UploadChunkSize, "4000000");
const restoreCacheMock = jest
.spyOn(cache, "restoreCache")
.mockImplementation(() => {
return Promise.resolve(primaryKey);
});
const cacheId = 4;
const saveCacheMock = jest
.spyOn(cache, "saveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
await saveImpl(new NullStateProvider());
expect(restoreCacheMock).toHaveBeenCalledTimes(1);
expect(restoreCacheMock).toHaveBeenCalledWith(
[inputPath],
primaryKey,
[],
{
lookupOnly: true
},
false
);
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(
[inputPath],
primaryKey,
{
uploadChunkSize: 4000000
},
false
);
expect(logWarningMock).toHaveBeenCalledTimes(0);
expect(infoMock).toHaveBeenCalledTimes(3);
expect(infoMock).toHaveBeenNthCalledWith(
1,
`Cache hit occurred on the primary key ${primaryKey}, attempting to refresh the contents of the cache.`
);
expect(infoMock).toHaveBeenNthCalledWith(
2,
expect.stringMatching(
new RegExp(
`Successfully deleted cache with key: ${primaryKey}, id: \\d+`
)
)
);
expect(infoMock).toHaveBeenNthCalledWith(
3,
`Cache saved with key: ${primaryKey}`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save with cache hit and refresh-cache will throw a warning if there's no GITHUB_TOKEN", async () => {
const logWarningMock = jest.spyOn(actionUtils, "logWarning");
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = testUtils.successCacheKey;
const savedCacheKey = primaryKey;
const inputPath = "node_modules";
testUtils.setInput(Inputs.Path, inputPath);
testUtils.setInput(Inputs.RefreshCache, "true");
jest.spyOn(core, "getState")
// Cache Entry State
.mockImplementationOnce(() => {
return savedCacheKey;
})
// Cache Key State
.mockImplementationOnce(() => {
return primaryKey;
});
const saveCacheMock = jest.spyOn(cache, "saveCache");
await saveImpl(new StateProvider());
expect(saveCacheMock).toHaveBeenCalledTimes(0);
expect(logWarningMock).toHaveBeenCalledWith(
`Can't refresh cache, either the repository info or a valid token are missing.`
);
expect(failedMock).toHaveBeenCalledTimes(0);
});

View file

@@ -1,5 +1,6 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import nock from "nock";
import { Events, Inputs, RefKey } from "../src/constants";
import { saveOnlyRun } from "../src/saveImpl";
@@ -11,6 +12,7 @@ jest.mock("@actions/cache");
jest.mock("../src/utils/actionUtils");
beforeAll(() => {
nock.disableNetConnect();
jest.spyOn(core, "getInput").mockImplementation((name, options) => {
return jest.requireActual("@actions/core").getInput(name, options);
});
@@ -73,6 +75,10 @@ afterEach(() => {
delete process.env[RefKey];
});
afterAll(() => {
nock.enableNetConnect();
});
test("save with valid inputs uploads a cache", async () => {
const failedMock = jest.spyOn(core, "setFailed");
@@ -105,6 +111,45 @@ test("save with valid inputs uploads a cache", async () => {
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("Granular save with refreshCache is able to save cache", async () => {
process.env["GITHUB_REPOSITORY"] = "owner/repo";
process.env["GITHUB_TOKEN"] =
"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";
process.env["GITHUB_ACTION"] = "__owner___run-repo";
const failedMock = jest.spyOn(core, "setFailed");
const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
const inputPath = "node_modules";
process.env.CACHE_RESTORE_ONLY_MATCHED_KEY = primaryKey;
testUtils.setInput(Inputs.Key, primaryKey);
testUtils.setInput(Inputs.RefreshCache, "true");
testUtils.setInput(Inputs.Path, inputPath);
testUtils.setInput(Inputs.UploadChunkSize, "4000000");
const cacheId = 4;
const saveCacheMock = jest
.spyOn(cache, "saveCache")
.mockImplementationOnce(() => {
return Promise.resolve(cacheId);
});
await saveOnlyRun();
expect(saveCacheMock).toHaveBeenCalledTimes(1);
expect(saveCacheMock).toHaveBeenCalledWith(
[inputPath],
primaryKey,
{
uploadChunkSize: 4000000
},
false
);
expect(failedMock).toHaveBeenCalledTimes(0);
});
test("save failing logs the warning message", async () => {
const warningMock = jest.spyOn(core, "warning");

View file

@@ -34,6 +34,10 @@ inputs:
save-always does not work as intended and will be removed in a future release.
A separate `actions/cache/restore` step should be used instead.
See https://github.com/actions/cache/tree/main/save#always-save-cache for more details.
refresh-cache:
description: 'An optional boolean. When enabled, a matched key will be deleted after being restored, allowing it to be reused with refreshed/updated content. Default: false'
required: false
default: 'false'
outputs:
cache-hit:
description: 'A boolean value to indicate an exact match was found for the primary key'

File diff suppressed because one or more lines are too long

96188
dist/restore/index.js vendored

File diff suppressed because one or more lines are too long

96223
dist/save-only/index.js vendored

File diff suppressed because one or more lines are too long

96223
dist/save/index.js vendored

File diff suppressed because one or more lines are too long

View file

@@ -1,5 +1,3 @@
require("nock").disableNetConnect();
module.exports = {
clearMocks: true,
moduleFileExtensions: ["js", "ts"],

16934
package-lock.json generated

File diff suppressed because it is too large

View file

@@ -26,12 +26,13 @@
"@actions/cache": "^4.0.0",
"@actions/core": "^1.11.1",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.3"
"@actions/io": "^1.1.2",
"@octokit/action": "^4.0.10"
},
"devDependencies": {
"@types/jest": "^27.5.2",
"@types/nock": "^11.1.0",
"@types/node": "^16.18.3",
"@types/node": "^20.14.8",
"@typescript-eslint/eslint-plugin": "^5.45.0",
"@typescript-eslint/parser": "^5.45.0",
"@vercel/ncc": "^0.38.3",
@@ -43,9 +44,10 @@
"eslint-plugin-simple-import-sort": "^7.0.0",
"jest": "^28.1.3",
"jest-circus": "^27.5.1",
"msw": "^1.3.5",
"nock": "^13.2.9",
"prettier": "^2.8.0",
"ts-jest": "^28.0.8",
"typescript": "^4.9.3"
"typescript": "^4.9.5"
}
}

View file

@@ -9,6 +9,11 @@ The save action saves a cache. It works similarly to the `cache` action except t
* `key` - An explicit key for a cache entry. See [creating a cache key](../README.md#creating-a-cache-key).
* `path` - A list of files, directories, and wildcard patterns to cache. See [`@actions/glob`](https://github.com/actions/toolkit/tree/main/packages/glob) for supported patterns.
* `upload-chunk-size` - The chunk size used to split up large files during upload, in bytes
* `refresh-cache` - An optional boolean. When enabled, a matched key is deleted after being restored, allowing it to be reused with refreshed/updated content. Default: false
#### Environment Variables
* `GITHUB_TOKEN` - A GitHub API token, required for authenticating to the API when the `refresh-cache` option is enabled.
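As a sketch only (not part of this diff; the action ref, path, and key are placeholders), a granular save step using `refresh-cache` could look like:

```yaml
- name: Save cache, refreshing the entry if the key already exists
  uses: actions/cache/save@v3   # action ref is illustrative
  with:
    path: node_modules          # placeholder path
    key: ${{ runner.os }}-node-${{ hashFiles('package-lock.json') }}
    refresh-cache: true
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```

Because a standalone save step has no restore state to compare against, the implementation later in this diff (`saveImpl.ts`) first performs a lookup-only restore to decide whether the matched entry needs to be deleted and re-created.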
### Outputs

View file

@@ -15,6 +15,10 @@ inputs:
description: 'An optional boolean when enabled, allows windows runners to save caches that can be restored on other platforms'
default: 'false'
required: false
refresh-cache:
description: 'An optional boolean. When enabled, a matched key will be deleted after being restored, allowing it to be reused with refreshed/updated content. Default: false'
required: false
default: 'false'
runs:
using: 'node20'
main: '../dist/save-only/index.js'

View file

@@ -5,7 +5,8 @@ export enum Inputs {
UploadChunkSize = "upload-chunk-size", // Input for cache, save action
EnableCrossOsArchive = "enableCrossOsArchive", // Input for cache, restore, save action
FailOnCacheMiss = "fail-on-cache-miss", // Input for cache, restore action
LookupOnly = "lookup-only" // Input for cache, restore action
LookupOnly = "lookup-only", // Input for cache, restore action
RefreshCache = "refresh-cache" // Input for cache, save action
}
export enum Outputs {

View file

@@ -82,7 +82,6 @@ export async function restoreImpl(
} else {
core.info(`Cache restored from key: ${cacheKey}`);
}
return cacheKey;
} catch (error: unknown) {
core.setFailed((error as Error).message);

View file

@@ -43,15 +43,55 @@ export async function saveImpl(
         return;
     }
 
-    // If matched restore key is same as primary key, then do not save cache
-    // NO-OP in case of SaveOnly action
-    const restoredKey = stateProvider.getCacheState();
-
-    if (utils.isExactKeyMatch(primaryKey, restoredKey)) {
-        core.info(
-            `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
-        );
-        return;
+    const refreshCache: boolean = utils.getInputAsBool(
+        Inputs.RefreshCache,
+        { required: false }
+    );
+
+    // If matched restore key is same as primary key, either try to refresh the cache, or just notify and do not save.
+    let restoredKey = stateProvider.getCacheState();
+
+    if (refreshCache && !restoredKey) {
+        // If getCacheState didn't give us a key, we're likely using granular actions. Do a lookup to see if we need to refresh or just do a regular save.
+        const cachePaths = utils.getInputAsArray(Inputs.Path, {
+            required: true
+        });
+        const enableCrossOsArchive = utils.getInputAsBool(
+            Inputs.EnableCrossOsArchive
+        );
+        restoredKey = await cache.restoreCache(
+            cachePaths,
+            primaryKey,
+            [],
+            { lookupOnly: true },
+            enableCrossOsArchive
+        );
+    }
+
+    if (utils.isExactKeyMatch(primaryKey, restoredKey)) {
+        /* istanbul ignore next */
+        const { GITHUB_TOKEN, GITHUB_REPOSITORY } = process.env || null;
+        if (GITHUB_TOKEN && GITHUB_REPOSITORY && refreshCache === true) {
+            core.info(
+                `Cache hit occurred on the primary key ${primaryKey}, attempting to refresh the contents of the cache.`
+            );
+            const [_owner, _repo] = GITHUB_REPOSITORY.split(`/`);
+            if (_owner && _repo) {
+                await utils.deleteCacheByKey(primaryKey, _owner, _repo);
+            }
+        } else {
+            if (refreshCache === true) {
+                utils.logWarning(
+                    `Can't refresh cache, either the repository info or a valid token are missing.`
+                );
+                return;
+            } else {
+                core.info(
+                    `Cache hit occurred on the primary key ${primaryKey}, not saving cache.`
+                );
+                return;
+            }
+        }
     }
 
     const cachePaths = utils.getInputAsArray(Inputs.Path, {

View file

@@ -1,7 +1,10 @@
import * as cache from "@actions/cache";
import * as core from "@actions/core";
import { RequestError } from "@octokit/request-error";
import { OctokitResponse } from "@octokit/types";
import { RefKey } from "../constants";
const { Octokit } = require("@octokit/action");
export function isGhes(): boolean {
const ghUrl = new URL(
@@ -30,6 +33,58 @@ export function logWarning(message: string): void {
core.info(`${warningPrefix}${message}`);
}
export async function deleteCacheByKey(key: string, owner: string, repo: string): Promise<number | void> {
const octokit = new Octokit();
let response;
try {
const gitRef = process.env[RefKey];
let cacheEntry = await octokit.rest.actions.getActionsCacheList({
owner: owner,
repo: repo,
key: key,
ref: gitRef
});
const { data: {
total_count,
actions_caches
}
} = cacheEntry;
// Leave all find logic to the actual cache implementation. We just want to make sure we're
// returned a single element, so we don't accidentally delete an entry that belongs to a different gitref.
if (total_count !== 1 || total_count !== actions_caches.length) {
if (total_count > 1) {
exports.logWarning(`More than one cache entry found for key ${key}`);
}
else if (total_count === 0 || actions_caches.length === 0) {
exports.logWarning(`No cache entries for key ${key} belong to gitref ${gitRef}.`);
}
// This situation is likely never actually going to come up,
// but Istanbul does not let us ignore this path.
else if (total_count !== actions_caches.length) {
exports.logWarning(`Reported cache entry matches for ${key} does not match length of 'actions_caches' array in API response.`);
}
core.info(`Skip trying to delete cache entry for key ${key}.`);
return;
}
let id = actions_caches[0].id;
response = await octokit.rest.actions.deleteActionsCacheById({
owner: owner,
repo: repo,
cache_id: id
});
if (response.status === 204) {
core.info(`Successfully deleted cache with key: ${key}, id: ${id}`);
return 204;
}
} catch (e) {
if (e instanceof RequestError) {
let err = e as RequestError;
let errData = err.response?.data as any | undefined;
exports.logWarning(`Github API reported error: ${err.name} '${err.status}: ${errData?.message}'`);
}
core.info(`Couldn't delete cache entry for key ${key}.`);
return;
}
}
// Cache token authorized for all events that are tied to a ref
// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context
export function isValidEvent(): boolean {

View file

@@ -1,4 +1,16 @@
/* istanbul ignore file */
import { Inputs } from "../constants";
import { rest } from "msw";
import { setupServer } from "msw/node";
import nock from "nock";
export const successCacheKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43";
export const wrongRefCacheKey = "Linux-latest-node-bb828da54c148048dd17899ba9fda624811cfb43";
export const failureCacheKey = "Windows-node-bb828da54c148048dd17899ba9fda624811cfb43";
export const passThroughCacheKey = "macOS-node-bb828da54c148048dd17899ba9fda624811cfb43";
const successCacheId = 1337;
const failureCacheId = 69;
// See: https://github.com/actions/toolkit/blob/master/packages/core/src/core.ts#L67
function getInputName(name: string): string {
@@ -16,6 +28,7 @@ interface CacheInput {
enableCrossOsArchive?: boolean;
failOnCacheMiss?: boolean;
lookupOnly?: boolean;
refreshCache?: boolean;
}
export function setInputs(input: CacheInput): void {
@@ -32,6 +45,8 @@ export function setInputs(input: CacheInput): void {
setInput(Inputs.FailOnCacheMiss, input.failOnCacheMiss.toString());
input.lookupOnly !== undefined &&
setInput(Inputs.LookupOnly, input.lookupOnly.toString());
input.refreshCache !== undefined &&
setInput(Inputs.RefreshCache, input.refreshCache.toString());
}
export function clearInputs(): void {
@@ -42,4 +57,83 @@ export function clearInputs(): void {
delete process.env[getInputName(Inputs.EnableCrossOsArchive)];
delete process.env[getInputName(Inputs.FailOnCacheMiss)];
delete process.env[getInputName(Inputs.LookupOnly)];
delete process.env[getInputName(Inputs.RefreshCache)];
}
export const mockServer = setupServer(
rest.delete('https://api.github.com/repos/owner/repo/actions/caches/', (req, res, ctx) => {
return res(ctx.status(422),
ctx.json({
message: "Invalid request.\n\nMissing required query parameter key",
documentation_url: "https://docs.github.com/rest/actions/cache#delete-github-actions-caches-for-a-repository-using-a-cache-key",
})
)
}),
rest.delete('https://api.github.com/repos/owner/repo/actions/caches/:id', (req, res, ctx) => {
const { id } = req.params;
if (parseInt(id as string) === failureCacheId) {
return res(ctx.status(404),
ctx.json({
message: "Not Found",
documentation_url: "https://docs.github.com/rest/actions/cache#delete-a-github-actions-cache-for-a-repository-using-a-cache-id"
}));
}
return res(ctx.status(204));
}),
// This endpoint always returns 200/OK; what we're checking here is whether we can get a unique cache ID, to avoid deleting the wrong entry.
rest.get('https://api.github.com/repos/owner/repo/actions/caches', (req, res, ctx) => {
let key : string = req.url?.searchParams?.get('key') || '';
let ref : string = req.url?.searchParams?.get('ref') || '';
if (key === '' || ref === '') {
return res(ctx.status(200),
ctx.json({
total_count: 2,
actions_caches: [{
id: 15,
ref: "refs/heads/main",
key: failureCacheKey,
version: "73885106f58cc52a7df9ec4d4a5622a5614813162cb516c759a30af6bf56e6f0",
last_accessed_at: "2022-12-29T22:06:42.683333300Z",
created_at: "2022-12-29T22:06:42.683333300Z",
size_in_bytes: 6057793
},
{
id: 16,
ref: "refs/heads/another-feature-branch",
key: failureCacheKey,
version: "73885106f58cc52a7df9ec4d4a5622a5614813162cb516c759a30af6bf56e6f0",
last_accessed_at: "2022-12-29T22:06:42.683333300Z",
created_at: "2022-12-29T22:06:42.683333300Z",
size_in_bytes: 6057793
}]
})
);
}
// This is the behavior seen when the search doesn't find anything: both when no key matches and when the key matches but the entry belongs to another (likely the base) branch.
else if (key === wrongRefCacheKey) {
return res(ctx.status(200),
ctx.json({
total_count: 0,
actions_caches: []
})
);
}
else if (key === successCacheKey || key === failureCacheKey) {
return res(ctx.status(200),
ctx.json({
total_count: 1,
actions_caches: [{
id: (key === successCacheKey ? successCacheId : failureCacheId),
ref: ref,
key: key,
version: "93a0f912fdb70083e929c1bf564bca2050be1c4e0932f7f9e78465ddcfbcc8f6",
last_accessed_at: "2022-12-29T22:06:42.683333300Z",
created_at: "2022-12-29T22:06:42.683333300Z",
size_in_bytes: 6057793
}]
})
);
}
return req.passthrough();
})
);

View file

@@ -21,7 +21,8 @@ A cache today is immutable and cannot be updated. But some use cases require the
```
Please note that this will create a new cache on every run and hence will consume the cache [quota](./README.md#cache-limits).
* As a way to get around this limitation, the `refresh-cache` option exists. After matching a key and restoring a cache, the action makes a request directly to the GitHub API and deletes that cache entry, leaving the key free to be saved again with updated content.
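For comparison, a sketch (values are placeholders, not taken from this diff) that keeps one fixed key and refreshes it in place instead of creating a new cache on every run:

```yaml
- name: update cache in place on every run
  uses: actions/cache@v3        # action ref is illustrative
  with:
    path: path/to/dependencies  # placeholder path
    key: deps-${{ runner.os }}  # fixed key; the matched entry is deleted and re-saved
    refresh-cache: true
  env:
    GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```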
## Use cache across feature branches
Reusing cache across feature branches is not allowed today to provide cache [isolation](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache). However if both feature branches are from the default branch, a good way to achieve this is to ensure that the default branch has a cache. This cache will then be consumable by both feature branches.