Mirror of https://github.com/Swatinem/rust-cache, synced 2025-06-25 09:43:43 +00:00

commit 50188d7e37 (parent e5591783d4)
configure blacksmith to be the default over github

6 changed files with 304 additions and 145 deletions
@@ -70,8 +70,8 @@ sensible defaults.
     save-if: ${{ github.ref == 'refs/heads/master' }}

     # Specifies what to use as the backend providing cache
-    # Can be set to either "github" or "buildjet"
-    # default: "github"
+    # Can be set to either "github" or "blacksmith"
+    # default: "blacksmith"
     cache-provider: ""
 ```

@@ -37,9 +37,9 @@ inputs:
     required: false
     default: "true"
   cache-provider:
-    description: "Determines which provider to use for caching. Options are github or buildjet, defaults to github."
+    description: "Determines which provider to use for caching. Options are github or blacksmith, defaults to blacksmith."
     required: false
-    default: "github"
+    default: "blacksmith"
 outputs:
   cache-hit:
     description: "A boolean value that indicates an exact match was found."
dist/restore/index.js (vendored): 184 lines changed
@@ -169,7 +169,7 @@ exports.restoreCache = restoreCache;
  * @returns number returns cacheId if the cache was saved successfully and throws an error if save fails
  */
 function saveCache(paths, key, options, enableCrossOsArchive = false) {
-    var _a, _b, _c, _d, _e;
+    var _a, _b, _c, _d, _e, _f, _g, _h, _j;
     return __awaiter(this, void 0, void 0, function* () {
         checkPaths(paths);
         checkKey(key);
@@ -189,12 +189,12 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
         if (core.isDebug()) {
             yield (0, tar_1.listTar)(archivePath, compressionMethod);
         }
-        const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+        const fileSizeLimit = 25 * 1024 * 1024 * 1024; // 25GB per repo limit
         const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.debug(`File Size: ${archiveFileSize}`);
         // For GHES, this check will take place in ReserveCache API with enterprise file size limit
         if (archiveFileSize > fileSizeLimit && !utils.isGhes()) {
-            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 25GB limit, not saving cache.`);
         }
         core.debug('Reserving Cache');
         const reserveCacheResponse = yield cacheHttpClient.reserveCache(key, paths, {
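For reference, a minimal sketch of the size check above with the raised limit written out; the constant and helper names here are illustrative, not from the vendored client:

```ts
// Illustrative only: the vendored client computes the same values inline.
const FILE_SIZE_LIMIT = 25 * 1024 * 1024 * 1024; // 26_843_545_600 bytes (25 GiB)

function assertWithinLimit(archiveFileSize: number): void {
    if (archiveFileSize > FILE_SIZE_LIMIT) {
        const mb = Math.round(archiveFileSize / (1024 * 1024));
        throw new Error(`Cache size of ~${mb} MB (${archiveFileSize} B) is over the 25GB limit, not saving cache.`);
    }
}
```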
@@ -212,7 +212,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
             throw new ReserveCacheError(`Unable to reserve cache with key ${key}, another job may be creating this cache. More details: ${(_e = reserveCacheResponse === null || reserveCacheResponse === void 0 ? void 0 : reserveCacheResponse.error) === null || _e === void 0 ? void 0 : _e.message}`);
         }
         core.debug(`Saving Cache (ID: ${cacheId})`);
-        yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+        yield cacheHttpClient.saveCache(cacheId, archivePath, (_g = (_f = reserveCacheResponse.result) === null || _f === void 0 ? void 0 : _f.uploadUrls) !== null && _g !== void 0 ? _g : [], (_j = (_h = reserveCacheResponse.result) === null || _h === void 0 ? void 0 : _h.uploadId) !== null && _j !== void 0 ? _j : '');
     }
     catch (error) {
         const typedError = error;
@@ -221,6 +221,7 @@ function saveCache(paths, key, options, enableCrossOsArchive = false) {
         }
         else if (typedError.name === ReserveCacheError.name) {
             core.info(`Failed to save: ${typedError.message}`);
+            core.debug(JSON.stringify(error));
         }
         else {
             core.warning(`Failed to save: ${typedError.message}`);
@@ -294,29 +295,32 @@ const options_1 = __nccwpck_require__(5006);
 const requestUtils_1 = __nccwpck_require__(5197);
 const versionSalt = '1.0';
 function getCacheApiUrl(resource) {
-    const baseUrl = process.env['ACTIONS_CACHE_URL'] || '';
+    const baseUrl = process.env['BLACKSMITH_CACHE_URL'] || 'https://api.blacksmith.sh/cache';
     if (!baseUrl) {
         throw new Error('Cache Service Url not found, unable to restore cache.');
     }
-    const url = `${baseUrl}_apis/artifactcache/${resource}`;
-    core.debug(`Resource Url: ${url}`);
+    const url = `${baseUrl}/${resource}`;
+    core.debug(`Blacksmith cache resource URL: ${url}; version: 3.2.40`);
     return url;
 }
 function createAcceptHeader(type, apiVersion) {
     return `${type};api-version=${apiVersion}`;
 }
 function getRequestOptions() {
+    core.debug(`Setting GITHUB_REPO_NAME: ${process.env['GITHUB_REPO_NAME']}`);
     const requestOptions = {
         headers: {
-            Accept: createAcceptHeader('application/json', '6.0-preview.1')
+            Accept: createAcceptHeader('application/json', '6.0-preview.1'),
+            'X-Github-Repo-Name': process.env['GITHUB_REPO_NAME']
         }
     };
     return requestOptions;
 }
 function createHttpClient() {
-    const token = process.env['ACTIONS_RUNTIME_TOKEN'] || '';
-    const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token);
-    return new http_client_1.HttpClient('actions/cache', [bearerCredentialHandler], getRequestOptions());
+    const token = process.env['BLACKSMITH_CACHE_TOKEN'];
+    core.debug(`BLACKSMITH_CACHE_TOKEN: ${token}`);
+    const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token !== null && token !== void 0 ? token : '');
+    return new http_client_1.HttpClient('useblacksmith/cache', [bearerCredentialHandler], getRequestOptions());
 }
 function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false) {
     const components = paths;
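Taken together, the rewritten client resolves its endpoint and credentials from Blacksmith-specific environment variables instead of the GitHub Actions runtime ones. A minimal sketch of the same resolution logic, where the helper name and returned shape are ours, not the vendored client's:

```ts
// Sketch of the env-driven client setup from the hunk above (names ours).
function blacksmithRequestConfig() {
    const baseUrl = process.env['BLACKSMITH_CACHE_URL'] ?? 'https://api.blacksmith.sh/cache';
    const token = process.env['BLACKSMITH_CACHE_TOKEN'] ?? '';
    return {
        // Resources hang directly off the base URL, e.g. `${baseUrl}/caches/123`,
        // instead of GitHub's `_apis/artifactcache/` prefix.
        url: (resource: string) => `${baseUrl}/${resource}`,
        headers: {
            Accept: 'application/json;api-version=6.0-preview.1',
            Authorization: `Bearer ${token}`,
            'X-Github-Repo-Name': process.env['GITHUB_REPO_NAME'] ?? '',
        },
    };
}
```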
@@ -338,7 +342,7 @@ function getCacheEntry(keys, paths, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
         const version = getCacheVersion(paths, options === null || options === void 0 ? void 0 : options.compressionMethod, options === null || options === void 0 ? void 0 : options.enableCrossOsArchive);
-        const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
+        const resource = `?keys=${encodeURIComponent(keys.join(','))}&version=${version}`;
         const response = yield (0, requestUtils_1.retryTypedResponse)('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); }));
         // Cache not found
         if (response.statusCode === 204) {
@@ -429,41 +433,47 @@ function getContentRange(start, end) {
     // Content-Range: bytes 0-199/*
     return `bytes ${start}-${end}/*`;
 }
-function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
+function uploadChunk(resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
         core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
-            'Content-Range': getContentRange(start, end)
+            'Content-Length': end - start + 1
         };
+        const s3HttpClient = new http_client_1.HttpClient('useblacksmith/cache');
         const uploadChunkResponse = yield (0, requestUtils_1.retryHttpClientResponse)(`uploadChunk (start: ${start}, end: ${end})`, () => __awaiter(this, void 0, void 0, function* () {
-            return httpClient.sendStream('PATCH', resourceUrl, openStream(), additionalHeaders);
+            return s3HttpClient.sendStream('PUT', resourceUrl, openStream(), additionalHeaders);
         }));
         if (!(0, requestUtils_1.isSuccessStatusCode)(uploadChunkResponse.message.statusCode)) {
+            core.debug(`Upload chunk failed with status message: ${JSON.stringify(uploadChunkResponse.message.statusMessage)}`);
+            core.debug(`Upload chunk failed with headers: ${JSON.stringify(uploadChunkResponse.message.headers)}`);
+            core.debug(`Upload chunk failed with response body: ${yield uploadChunkResponse.readBody()}`);
             throw new Error(`Cache service responded with ${uploadChunkResponse.message.statusCode} during upload chunk.`);
         }
+        // eslint-disable-next-line @typescript-eslint/no-non-null-assertion
+        return uploadChunkResponse.message.headers.etag;
     });
 }
-function uploadFile(httpClient, cacheId, archivePath, options) {
+function uploadFile(archivePath, urls) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
+        core.debug(`archivePath: ${archivePath}`);
         const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
-        const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
-        const uploadOptions = (0, options_1.getUploadOptions)(options);
-        const concurrency = utils.assertDefined('uploadConcurrency', uploadOptions.uploadConcurrency);
-        const maxChunkSize = utils.assertDefined('uploadChunkSize', uploadOptions.uploadChunkSize);
-        const parallelUploads = [...new Array(concurrency).keys()];
+        const maxChunkSize = 25 * 1024 * 1024; // Matches the chunkSize in our cache service.
         core.debug('Awaiting all uploads');
-        let offset = 0;
+        let eTags = [];
         try {
-            yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () {
-                while (offset < fileSize) {
+            eTags = yield Promise.all(urls.map((url, index) => __awaiter(this, void 0, void 0, function* () {
+                const offset = index * maxChunkSize;
                 const chunkSize = Math.min(fileSize - offset, maxChunkSize);
                 const start = offset;
-                const end = offset + chunkSize - 1;
-                offset += maxChunkSize;
-                yield uploadChunk(httpClient, resourceUrl, () => fs
+                let end = offset + chunkSize - 1;
+                if (chunkSize !== maxChunkSize) {
+                    end = fileSize - 1;
+                }
+                core.debug(`Uploading chunk to ${url}: ${start}-${end}/${fileSize}`);
+                const eTag = yield uploadChunk(url, () => fs
                     .createReadStream(archivePath, {
                     fd,
                     start,
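The new uploadFile maps each presigned URL to a fixed 25 MiB slice of the archive, with the final slice absorbing whatever remains. A minimal sketch of just that range computation (function and constant names are ours, assuming one URL per chunk as the hunk does):

```ts
// Compute the byte range covered by the chunk at a given URL index.
// Mirrors the hunk: fixed 25 MiB chunks; the last chunk runs to end of file.
const MAX_CHUNK_SIZE = 25 * 1024 * 1024;

function chunkRange(index: number, fileSize: number): { start: number; end: number } {
    const start = index * MAX_CHUNK_SIZE;
    const chunkSize = Math.min(fileSize - start, MAX_CHUNK_SIZE);
    let end = start + chunkSize - 1;
    if (chunkSize !== MAX_CHUNK_SIZE) {
        end = fileSize - 1; // short final chunk
    }
    return { start, end };
}

// e.g. a 60 MiB (62_914_560 byte) file across 3 URLs:
//   index 0 -> 0 .. 26214399
//   index 1 -> 26214400 .. 52428799
//   index 2 -> 52428800 .. 62914559
```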
@@ -473,33 +483,42 @@ function uploadFile(httpClient, cacheId, archivePath, options) {
                     .on('error', error => {
                     throw new Error(`Cache upload failed because file read failed with ${error.message}`);
                 }), start, end);
-            }
+                core.debug(`Upload to ${url} complete`);
+                return eTag !== null && eTag !== void 0 ? eTag : '';
             })));
         }
+        catch (error) {
+            core.debug(`Cache upload failed: ${JSON.stringify(error)}`);
+            throw error;
+        }
         finally {
             fs.closeSync(fd);
         }
-        return;
+        return eTags;
     });
 }
-function commitCache(httpClient, cacheId, filesize) {
+function commitCache(httpClient, cacheId, filesize, eTags, uploadId) {
     return __awaiter(this, void 0, void 0, function* () {
-        const commitCacheRequest = { size: filesize };
+        const commitCacheRequest = {
+            size: filesize,
+            eTags,
+            uploadId
+        };
         return yield (0, requestUtils_1.retryTypedResponse)('commitCache', () => __awaiter(this, void 0, void 0, function* () {
             return httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest);
         }));
     });
 }
-function saveCache(cacheId, archivePath, options) {
+function saveCache(cacheId, archivePath, urls, uploadId) {
     return __awaiter(this, void 0, void 0, function* () {
         const httpClient = createHttpClient();
         core.debug('Upload cache');
-        yield uploadFile(httpClient, cacheId, archivePath, options);
+        const eTags = yield uploadFile(archivePath, urls);
         // Commit Cache
         core.debug('Commiting cache');
         const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
-        const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
+        const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize, eTags, uploadId);
         if (!(0, requestUtils_1.isSuccessStatusCode)(commitCacheResponse.statusCode)) {
             throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`);
         }
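This is the classic S3-style multipart flow: reserve an upload to obtain presigned URLs plus an uploadId, PUT each part, collect the ETags the storage service returns, then commit the ordered ETags together with the uploadId. A condensed, hypothetical view of the whole save path (names and types are ours, not the vendored client's):

```ts
// Hypothetical condensed view of the save path in the hunks above.
async function saveMultipart(
    uploadUrls: string[],                       // one presigned URL per 25 MiB part
    uploadId: string,
    putPart: (url: string) => Promise<string>,  // PUT one part, resolve its ETag
    commit: (eTags: string[], uploadId: string) => Promise<void>,
): Promise<void> {
    // Upload all parts in parallel; Promise.all preserves ETag order by index.
    const eTags = await Promise.all(uploadUrls.map(url => putPart(url)));
    // The commit ties the ordered ETags to the uploadId so the service can
    // complete the multipart upload.
    await commit(eTags, uploadId);
}
```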
@@ -807,7 +826,6 @@ const buffer = __importStar(__nccwpck_require__(4300));
 const fs = __importStar(__nccwpck_require__(7147));
 const stream = __importStar(__nccwpck_require__(2781));
 const util = __importStar(__nccwpck_require__(3837));
-const utils = __importStar(__nccwpck_require__(1492));
 const constants_1 = __nccwpck_require__(2370);
 const requestUtils_1 = __nccwpck_require__(5197);
 const abort_controller_1 = __nccwpck_require__(978);
@@ -817,10 +835,19 @@ const abort_controller_1 = __nccwpck_require__(978);
  * @param response the HTTP response
  * @param output the writable stream
  */
-function pipeResponseToStream(response, output) {
+function pipeResponseToStream(response, output, progress) {
     return __awaiter(this, void 0, void 0, function* () {
         const pipeline = util.promisify(stream.pipeline);
-        yield pipeline(response.message, output);
+        const reportProgress = new stream.Transform({
+            transform(chunk, _encoding, callback) {
+                if (progress) {
+                    progress.setReceivedBytes(progress.getTransferredBytes() + chunk.length);
+                }
+                this.push(chunk);
+                callback();
+            }
+        });
+        yield pipeline(response.message, reportProgress, output);
     });
 }
 /**
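The pass-through Transform is a standard Node pattern for byte-counting a pipeline without buffering it. A self-contained example of the same counting logic; the DownloadProgress object is stubbed here with just the two methods the hunk calls, and the file names are placeholders:

```ts
import { pipeline, Transform } from "stream";
import { promisify } from "util";
import * as fs from "fs";

// Stub exposing the two methods the hunk calls on DownloadProgress.
const progress = {
    received: 0,
    getTransferredBytes() { return this.received; },
    setReceivedBytes(n: number) { this.received = n; },
};

const counter = new Transform({
    transform(chunk, _encoding, callback) {
        // Count bytes as they stream through, then pass them on unchanged.
        progress.setReceivedBytes(progress.getTransferredBytes() + chunk.length);
        this.push(chunk);
        callback();
    },
});

// e.g. copy a file while counting the bytes transferred:
promisify(pipeline)(fs.createReadStream("in.bin"), counter, fs.createWriteStream("out.bin"))
    .then(() => console.log(`transferred ${progress.received} bytes`));
```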
@@ -932,26 +959,67 @@ exports.DownloadProgress = DownloadProgress;
  */
 function downloadCacheHttpClient(archiveLocation, archivePath) {
     return __awaiter(this, void 0, void 0, function* () {
-        const writeStream = fs.createWriteStream(archivePath);
-        const httpClient = new http_client_1.HttpClient('actions/cache');
-        const downloadResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.get(archiveLocation); }));
+        const CONCURRENCY = 8;
+        const fdesc = yield fs.promises.open(archivePath, 'w+');
+        // Set file permissions so that other users can untar the cache
+        yield fdesc.chmod(0o644);
+        let progressLogger;
+        try {
+        core.debug(`Downloading from ${archiveLocation} to ${archivePath}`);
+        const httpClient = new http_client_1.HttpClient('useblacksmith/cache');
+        const metadataResponse = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () {
+            return httpClient.get(archiveLocation, {
+                Range: 'bytes=0-1'
+            });
+        }));
         // Abort download if no traffic received over the socket.
-        downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
-            downloadResponse.message.destroy();
+        metadataResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => {
+            metadataResponse.message.destroy();
             core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`);
         });
-        yield pipeResponseToStream(downloadResponse, writeStream);
-        // Validate download size.
-        const contentLengthHeader = downloadResponse.message.headers['content-length'];
-        if (contentLengthHeader) {
-            const expectedLength = parseInt(contentLengthHeader);
-            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
-            if (actualLength !== expectedLength) {
-                throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
-            }
+        const contentRangeHeader = metadataResponse.message.headers['content-range'];
+        if (!contentRangeHeader) {
+            throw new Error('Content-Range is not defined; unable to determine file size');
         }
-        else {
-            core.debug('Unable to validate download, no Content-Length header');
+        // Parse the total file size from the Content-Range header
+        const fileSize = parseInt(contentRangeHeader.split('/')[1]);
+        if (isNaN(fileSize)) {
+            throw new Error(`Content-Range is not a number; unable to determine file size: ${contentRangeHeader}`);
         }
+        core.debug(`fileSize: ${fileSize}`);
+        // Truncate the file to the correct size
+        yield fdesc.truncate(fileSize);
+        yield fdesc.sync();
+        progressLogger = new DownloadProgress(fileSize);
+        progressLogger.startDisplayTimer();
+        // Divvy up the download into chunks based on CONCURRENCY
+        const chunkSize = Math.ceil(fileSize / CONCURRENCY);
+        const chunkRanges = [];
+        for (let i = 0; i < CONCURRENCY; i++) {
+            const start = i * chunkSize;
+            const end = i === CONCURRENCY - 1 ? fileSize - 1 : (i + 1) * chunkSize - 1;
+            chunkRanges.push(`bytes=${start}-${end}`);
+        }
+        const downloads = chunkRanges.map((range) => __awaiter(this, void 0, void 0, function* () {
+            core.debug(`Downloading range: ${range}`);
+            const response = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCache', () => __awaiter(this, void 0, void 0, function* () {
+                return httpClient.get(archiveLocation, {
+                    Range: range
+                });
+            }));
+            const writeStream = fs.createWriteStream(archivePath, {
+                fd: fdesc.fd,
+                start: parseInt(range.split('=')[1].split('-')[0]),
+                autoClose: false
+            });
+            yield pipeResponseToStream(response, writeStream, progressLogger);
+            core.debug(`Finished downloading range: ${range}`);
+        }));
+        yield Promise.all(downloads);
+        }
+        finally {
+            yield fdesc.close();
+            progressLogger === null || progressLogger === void 0 ? void 0 : progressLogger.stopDisplayTimer();
+        }
     });
 }
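The restore path now probes the presigned URL with a two-byte Range request, reads the total size out of the Content-Range reply (bytes 0-1/TOTAL), preallocates the file, and fans out eight ranged GETs that each write to their own offset of the shared descriptor. A small sketch of just the range-splitting step (helper name ours):

```ts
// Split a file of `fileSize` bytes into `concurrency` HTTP Range headers,
// mirroring the loop in the hunk: the last range absorbs the remainder.
function splitRanges(fileSize: number, concurrency: number): string[] {
    const chunkSize = Math.ceil(fileSize / concurrency);
    const ranges: string[] = [];
    for (let i = 0; i < concurrency; i++) {
        const start = i * chunkSize;
        const end = i === concurrency - 1 ? fileSize - 1 : (i + 1) * chunkSize - 1;
        ranges.push(`bytes=${start}-${end}`);
    }
    return ranges;
}

// splitRanges(100, 4) -> ["bytes=0-24", "bytes=25-49", "bytes=50-74", "bytes=75-99"]
```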
@@ -966,6 +1034,7 @@ function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options
     var _a;
     return __awaiter(this, void 0, void 0, function* () {
         const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        core.debug(`Downloading from ${archiveLocation} to ${archivePath}`);
         const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
             socketTimeout: options.timeoutInMs,
             keepAlive: true
@@ -1265,6 +1334,7 @@ function retryTypedResponse(name, method, maxAttempts = constants_1.DefaultRetry
     // If the error object contains the statusCode property, extract it and return
     // an TypedResponse<T> so it can be processed by the retry logic.
     (error) => {
+        core.debug(`Error occurred during ${name}: ${JSON.stringify(error)}`);
         if (error instanceof http_client_1.HttpClientError) {
             return {
                 statusCode: error.statusCode,
@@ -1632,7 +1702,7 @@ function getDownloadOptions(copy) {
     const result = {
         useAzureSdk: false,
         concurrentBlobDownloads: true,
-        downloadConcurrency: 8,
+        downloadConcurrency: 10,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
         lookupOnly: false
@@ -90407,7 +90477,13 @@ async function getCmdOutput(cmd, args = [], options = {}) {
 }
 function getCacheProvider() {
     const cacheProvider = lib_core.getInput("cache-provider");
-    const cache = cacheProvider === "github" ? cache_lib_cache : cacheProvider === "buildjet" ? lib_cache : cacheProvider === "blacksmith" ? blacksmith_cache_lib_cache : undefined;
+    const cache = cacheProvider === "github"
+        ? cache_lib_cache
+        : cacheProvider === "buildjet"
+            ? lib_cache
+            : cacheProvider === "blacksmith"
+                ? blacksmith_cache_lib_cache
+                : undefined;
     if (!cache) {
         throw new Error(`The \`cache-provider\` \`{cacheProvider}\` is not valid.`);
     }
dist/save/index.js (vendored): 184 lines changed
The hunks in dist/save/index.js repeat the dist/restore/index.js hunks above line for line: the same vendored @useblacksmith/cache client is bundled into both entry points, so every change (the 25GB limit, the Blacksmith cache URL and token handling, the presigned-URL multipart upload with ETags and uploadId, the progress-reporting pipe, and the parallel ranged download) appears twice. The only divergence is in the final getCacheProvider hunk, where this bundle reads the input as core.getInput("cache-provider") rather than lib_core.getInput("cache-provider").
package-lock.json (generated): 8 lines changed
@@ -9,7 +9,7 @@
     "version": "2.7.2",
     "license": "LGPL-3.0",
     "dependencies": {
-        "@actions/blacksmith-cache": "npm:@useblacksmith/cache@3.2.4",
+        "@actions/blacksmith-cache": "npm:@useblacksmith/cache@3.2.41",
         "@actions/buildjet-cache": "npm:github-actions.cache-buildjet@0.2.0",
         "@actions/cache": "^3.2.3",
         "@actions/core": "^1.10.1",
@@ -29,9 +29,9 @@
     },
     "node_modules/@actions/blacksmith-cache": {
         "name": "@useblacksmith/cache",
-        "version": "3.2.4",
-        "resolved": "https://registry.npmjs.org/@useblacksmith/cache/-/cache-3.2.4.tgz",
-        "integrity": "sha512-dF6utEJF4l2Wu7jJq5+ghiol5YqgAHLpNqYXAdgWBI7+n323T5TjUjRnx2NrT9x62VHK8UltVgq7uehCbF5alA==",
+        "version": "3.2.41",
+        "resolved": "https://registry.npmjs.org/@useblacksmith/cache/-/cache-3.2.41.tgz",
+        "integrity": "sha512-uGMhrp+3wnRN67i96mCsf3OswU4UREuQxwujQE1Ti15ESJBdhuDp6mt5L57RbUhNEfLqIRxZMqQCDCj/OLqKXQ==",
         "dependencies": {
             "@actions/core": "^1.10.0",
             "@actions/exec": "^1.0.1",
src/utils.ts: 13 lines changed
@@ -2,7 +2,7 @@ import * as core from "@actions/core";
 import * as exec from "@actions/exec";
 import * as buildjetCache from "@actions/buildjet-cache";
 import * as ghCache from "@actions/cache";
-import * as bsCache from "@actions/blacksmith-cache"
+import * as bsCache from "@actions/blacksmith-cache";
 import fs from "fs";

 export function reportError(e: any) {
@@ -18,7 +18,7 @@ export function reportError(e: any) {
 export async function getCmdOutput(
   cmd: string,
   args: Array<string> = [],
-  options: exec.ExecOptions = {},
+  options: exec.ExecOptions = {}
 ): Promise<string> {
   let stdout = "";
   let stderr = "";
@@ -52,7 +52,14 @@ export interface CacheProvider {

 export function getCacheProvider(): CacheProvider {
   const cacheProvider = core.getInput("cache-provider");
-  const cache = cacheProvider === "github" ? ghCache : cacheProvider === "buildjet" ? buildjetCache : cacheProvider === "blacksmith" ? bsCache : undefined;
+  const cache =
+    cacheProvider === "github"
+      ? ghCache
+      : cacheProvider === "buildjet"
+        ? buildjetCache
+        : cacheProvider === "blacksmith"
+          ? bsCache
+          : undefined;

   if (!cache) {
     throw new Error(`The \`cache-provider\` \`{cacheProvider}\` is not valid.`);
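At three providers the chained ternary starts to read awkwardly; a lookup table expresses the same mapping. A sketch of an equivalent formulation (our restructuring, not from the commit; the imported cache modules are stubbed as strings to keep it self-contained):

```ts
// Equivalent provider selection via a lookup table instead of nested ternaries.
// The real code maps to imported cache modules; strings stand in here.
const providers: Record<string, string> = {
    github: "ghCache",
    buildjet: "buildjetCache",
    blacksmith: "bsCache",
};

function resolveProvider(name: string): string {
    const cache = providers[name];
    if (!cache) {
        throw new Error(`The \`cache-provider\` \`${name}\` is not valid.`);
    }
    return cache;
}
```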