Mirror of https://github.com/actions/setup-python.git (synced 2024-12-22 12:19:55 +00:00)

Update @actions/cache version to 1.0.8 (#283)

parent 3ef38b826b
commit f382193329

7 changed files with 96 additions and 58 deletions
BIN .licenses/npm/@actions/cache.dep.yml (generated)
Binary file not shown.

BIN .licenses/npm/@azure/core-http.dep.yml (generated)
Binary file not shown.

BIN .licenses/npm/node-fetch.dep.yml (generated)
Binary file not shown.
67 dist/cache-save/index.js (vendored)
@@ -1041,10 +1041,10 @@ function createTempDirectory() {
     });
 }
 exports.createTempDirectory = createTempDirectory;
-function getArchiveFileSizeIsBytes(filePath) {
+function getArchiveFileSizeInBytes(filePath) {
     return fs.statSync(filePath).size;
 }
-exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
+exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
     var e_1, _a;
     var _b;
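Both dist bundles pick up the same change because they are compiled outputs of the vendored @actions/cache sources. The rename above fixes the "IsBytes" typo in both the function and its export, so every call site has to move to the new name in the same commit — which is why the identical one-line substitution repeats through the hunks below. A minimal sketch of the fixed helper in plain Node (the temp-file check is illustrative, not part of the bundle):

const fs = require('fs');
const os = require('os');
const path = require('path');

// Same body as the vendored helper: stat the file and report its size in bytes.
function getArchiveFileSizeInBytes(filePath) {
    return fs.statSync(filePath).size;
}

// Quick check against a real file:
const tmp = path.join(os.tmpdir(), 'size-check.txt');
fs.writeFileSync(tmp, 'hello');
console.log(getArchiveFileSizeInBytes(tmp)); // 5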
@@ -3852,7 +3852,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 function uploadFile(httpClient, cacheId, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
+        const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
         const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
         const uploadOptions = options_1.getUploadOptions(options);
@@ -3902,7 +3902,7 @@ function saveCache(cacheId, archivePath, options) {
         yield uploadFile(httpClient, cacheId, archivePath, options);
         // Commit Cache
         core.debug('Commiting cache');
-        const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
         const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
         if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -5877,7 +5877,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
         const contentLengthHeader = downloadResponse.message.headers['content-length'];
         if (contentLengthHeader) {
             const expectedLength = parseInt(contentLengthHeader);
-            const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
+            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
             if (actualLength !== expectedLength) {
                 throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
             }
@@ -34322,7 +34322,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
 });
 
 const INTERNALS$2 = Symbol('Request internals');
-const URL = whatwgUrl.URL;
+const URL = Url.URL || whatwgUrl.URL;
 
 // fix an issue where "format", "parse" aren't a named export for node <10
 const parse_url = Url.parse;
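This node-fetch change prefers Node's built-in WHATWG URL class when it exists (require('url').URL) and only falls back to the bundled whatwg-url package on older runtimes. A minimal sketch of the fallback, with the whatwg-url require guarded so the snippet also runs where that package is absent:

const Url = require('url');
let whatwgUrl = {};
try { whatwgUrl = require('whatwg-url'); } catch (e) { /* package not installed; built-in URL is enough */ }

// Prefer the built-in implementation; fall back to the ponyfill otherwise.
const URL = Url.URL || whatwgUrl.URL;
console.log(new URL('https://example.com/a?b=c').searchParams.get('b')); // 'c'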
@@ -41451,7 +41451,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         if (core.isDebug()) {
             yield tar_1.listTar(archivePath, compressionMethod);
         }
-        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
         yield tar_1.extractTar(archivePath, compressionMethod);
         core.info('Cache restored successfully');
@@ -41496,18 +41496,29 @@ function saveCache(paths, key, options) {
         const archiveFolder = yield utils.createTempDirectory();
         const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
-        yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
-        if (core.isDebug()) {
-            yield tar_1.listTar(archivePath, compressionMethod);
+        try {
+            yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
+            if (core.isDebug()) {
+                yield tar_1.listTar(archivePath, compressionMethod);
+            }
+            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+            core.debug(`File Size: ${archiveFileSize}`);
+            if (archiveFileSize > fileSizeLimit) {
+                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+            }
+            core.debug(`Saving Cache (ID: ${cacheId})`);
+            yield cacheHttpClient.saveCache(cacheId, archivePath, options);
         }
-        const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
-        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
-        core.debug(`File Size: ${archiveFileSize}`);
-        if (archiveFileSize > fileSizeLimit) {
-            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
+        finally {
+            // Try to delete the archive to save space
+            try {
+                yield utils.unlinkFile(archivePath);
+            }
+            catch (error) {
+                core.debug(`Failed to delete archive: ${error}`);
+            }
         }
-        core.debug(`Saving Cache (ID: ${cacheId})`);
-        yield cacheHttpClient.saveCache(cacheId, archivePath, options);
         return cacheId;
     });
 }
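Two behavioral changes land in this hunk: the per-repo cache limit rises from 5 GB to 10 GB, and the archive work is wrapped in try/finally so the temporary tarball is deleted even when tar creation or the upload throws. A distilled sketch of the new control flow, using async/await instead of the compiled __awaiter form; createArchive and upload are stand-ins for the tar and HTTP-client calls, not the real API:

const fs = require('fs/promises');

async function saveCacheSketch(createArchive, upload, archivePath) {
    try {
        await createArchive(archivePath);
        const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
        const { size } = await fs.stat(archivePath);
        if (size > fileSizeLimit) {
            throw new Error(`Cache size of ~${Math.round(size / (1024 * 1024))} MB is over the 10GB limit, not saving cache.`);
        }
        await upload(archivePath);
    }
    finally {
        // Runs on success *and* on a throw above: try to delete the archive to save space.
        try {
            await fs.unlink(archivePath);
        }
        catch (error) {
            console.debug(`Failed to delete archive: ${error}`);
        }
    }
}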
@@ -53218,7 +53229,12 @@ class HttpHeaders {
      * Create a deep clone/copy of this HttpHeaders collection.
      */
     clone() {
-        return new HttpHeaders(this.rawHeaders());
+        const resultPreservingCasing = {};
+        for (const headerKey in this._headersMap) {
+            const header = this._headersMap[headerKey];
+            resultPreservingCasing[header.name] = header.value;
+        }
+        return new HttpHeaders(resultPreservingCasing);
     }
 }
 
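The old clone() rebuilt the collection from rawHeaders(), whose keys are lower-cased, so a cloned HttpHeaders forgot the casing the caller originally used; the new code walks the internal _headersMap, which also stores each header's original name. A self-contained mock (not the real @azure/core-http classes) that reproduces the difference:

class MockHeaders {
    constructor(raw = {}) {
        this._headersMap = {};
        for (const name of Object.keys(raw)) this.set(name, raw[name]);
    }
    set(name, value) {
        // Keyed by lower-case for case-insensitive lookup, but the original name is kept.
        this._headersMap[name.toLowerCase()] = { name, value };
    }
    rawHeaders() {
        // Lower-cased view -- this is what the old clone() rebuilt from.
        const result = {};
        for (const key in this._headersMap) result[key] = this._headersMap[key].value;
        return result;
    }
    cloneOld() { return new MockHeaders(this.rawHeaders()); }
    cloneNew() {
        const resultPreservingCasing = {};
        for (const headerKey in this._headersMap) {
            const header = this._headersMap[headerKey];
            resultPreservingCasing[header.name] = header.value;
        }
        return new MockHeaders(resultPreservingCasing);
    }
}

const h = new MockHeaders({ 'Content-Type': 'application/json' });
console.log(h.cloneOld()._headersMap['content-type'].name); // 'content-type' -- casing lost
console.log(h.cloneNew()._headersMap['content-type'].name); // 'Content-Type' -- casing preserved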
@@ -53255,7 +53271,7 @@ const Constants = {
     /**
      * The core-http version
      */
-    coreHttpVersion: "2.2.1",
+    coreHttpVersion: "2.2.2",
     /**
      * Specifies HTTP.
      */
@@ -55568,7 +55584,7 @@ class FetchHttpClient {
             }
             let downloadStreamDone = Promise.resolve();
             if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
-                downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
+                downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
             }
             Promise.all([uploadStreamDone, downloadStreamDone])
                 .then(() => {
@@ -55586,11 +55602,14 @@ class FetchHttpClient {
 function isReadableStream(body) {
     return body && typeof body.pipe === "function";
 }
-function isStreamComplete(stream) {
+function isStreamComplete(stream, aborter) {
     return new Promise((resolve) => {
-        stream.on("close", resolve);
-        stream.on("end", resolve);
-        stream.on("error", resolve);
+        stream.once("close", () => {
+            aborter === null || aborter === void 0 ? void 0 : aborter.abort();
+            resolve();
+        });
+        stream.once("end", resolve);
+        stream.once("error", resolve);
     });
 }
 function parseHeaders(headers) {
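Two fixes in one hunk: once() replaces on() so each settle path fires at most one time, and a premature "close" (the response stream torn down before "end") now aborts the underlying request instead of leaving it in flight. A runnable sketch using a PassThrough stream and a stand-in aborter object in place of @azure/abort-controller:

const { PassThrough } = require('stream');

function isStreamComplete(stream, aborter) {
    return new Promise((resolve) => {
        stream.once('close', () => {
            if (aborter) aborter.abort(); // closed before "end": cancel the request
            resolve();
        });
        stream.once('end', resolve);
        stream.once('error', resolve);
    });
}

const body = new PassThrough();
const fakeAborter = { abort: () => console.log('request aborted') };
isStreamComplete(body, fakeAborter).then(() => console.log('stream settled'));
body.destroy(); // emits "close" without "end" -> prints "request aborted", then "stream settled"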
67 dist/setup/index.js (vendored)
@@ -1041,10 +1041,10 @@ function createTempDirectory() {
     });
 }
 exports.createTempDirectory = createTempDirectory;
-function getArchiveFileSizeIsBytes(filePath) {
+function getArchiveFileSizeInBytes(filePath) {
     return fs.statSync(filePath).size;
 }
-exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
+exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
     var e_1, _a;
     var _b;
@@ -4552,7 +4552,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 function uploadFile(httpClient, cacheId, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
+        const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
         const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
         const uploadOptions = options_1.getUploadOptions(options);
@@ -4602,7 +4602,7 @@ function saveCache(cacheId, archivePath, options) {
         yield uploadFile(httpClient, cacheId, archivePath, options);
         // Commit Cache
         core.debug('Commiting cache');
-        const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
         const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
         if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -7551,7 +7551,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
         const contentLengthHeader = downloadResponse.message.headers['content-length'];
         if (contentLengthHeader) {
             const expectedLength = parseInt(contentLengthHeader);
-            const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
+            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
             if (actualLength !== expectedLength) {
                 throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
             }
@@ -36873,7 +36873,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
 });
 
 const INTERNALS$2 = Symbol('Request internals');
-const URL = whatwgUrl.URL;
+const URL = Url.URL || whatwgUrl.URL;
 
 // fix an issue where "format", "parse" aren't a named export for node <10
 const parse_url = Url.parse;
@@ -47198,7 +47198,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
         if (core.isDebug()) {
             yield tar_1.listTar(archivePath, compressionMethod);
         }
-        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
         yield tar_1.extractTar(archivePath, compressionMethod);
         core.info('Cache restored successfully');
@@ -47243,18 +47243,29 @@ function saveCache(paths, key, options) {
         const archiveFolder = yield utils.createTempDirectory();
         const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
-        yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
-        if (core.isDebug()) {
-            yield tar_1.listTar(archivePath, compressionMethod);
+        try {
+            yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
+            if (core.isDebug()) {
+                yield tar_1.listTar(archivePath, compressionMethod);
+            }
+            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+            core.debug(`File Size: ${archiveFileSize}`);
+            if (archiveFileSize > fileSizeLimit) {
+                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+            }
+            core.debug(`Saving Cache (ID: ${cacheId})`);
+            yield cacheHttpClient.saveCache(cacheId, archivePath, options);
         }
-        const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
-        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
-        core.debug(`File Size: ${archiveFileSize}`);
-        if (archiveFileSize > fileSizeLimit) {
-            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
+        finally {
+            // Try to delete the archive to save space
+            try {
+                yield utils.unlinkFile(archivePath);
+            }
+            catch (error) {
+                core.debug(`Failed to delete archive: ${error}`);
+            }
         }
-        core.debug(`Saving Cache (ID: ${cacheId})`);
-        yield cacheHttpClient.saveCache(cacheId, archivePath, options);
         return cacheId;
     });
 }
@@ -59947,7 +59958,12 @@ class HttpHeaders {
      * Create a deep clone/copy of this HttpHeaders collection.
      */
     clone() {
-        return new HttpHeaders(this.rawHeaders());
+        const resultPreservingCasing = {};
+        for (const headerKey in this._headersMap) {
+            const header = this._headersMap[headerKey];
+            resultPreservingCasing[header.name] = header.value;
+        }
+        return new HttpHeaders(resultPreservingCasing);
     }
 }
 
@@ -59984,7 +60000,7 @@ const Constants = {
     /**
      * The core-http version
      */
-    coreHttpVersion: "2.2.1",
+    coreHttpVersion: "2.2.2",
     /**
      * Specifies HTTP.
      */
@@ -62297,7 +62313,7 @@ class FetchHttpClient {
             }
             let downloadStreamDone = Promise.resolve();
             if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
-                downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
+                downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
             }
             Promise.all([uploadStreamDone, downloadStreamDone])
                 .then(() => {
@@ -62315,11 +62331,14 @@ class FetchHttpClient {
 function isReadableStream(body) {
     return body && typeof body.pipe === "function";
 }
-function isStreamComplete(stream) {
+function isStreamComplete(stream, aborter) {
     return new Promise((resolve) => {
-        stream.on("close", resolve);
-        stream.on("end", resolve);
-        stream.on("error", resolve);
+        stream.once("close", () => {
+            aborter === null || aborter === void 0 ? void 0 : aborter.abort();
+            resolve();
+        });
+        stream.once("end", resolve);
+        stream.once("error", resolve);
     });
 }
 function parseHeaders(headers) {
18 package-lock.json (generated)
@@ -5,9 +5,9 @@
   "requires": true,
   "dependencies": {
     "@actions/cache": {
-      "version": "1.0.7",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.7.tgz",
-      "integrity": "sha512-MY69kxuubqUFq84pFlu8m6Poxl5sR/xyhpC4JEvno7Yg9ASYdGizEmKgt0m8ovewpYKf15UAOcSC0hzS+DuosA==",
+      "version": "1.0.8",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-1.0.8.tgz",
+      "integrity": "sha512-GWNNB67w93HGJRQXlsV56YqrdAuDoP3esK/mo5mzU8WoDCVjtQgJGsTdkYUX7brswtT7xnI30bWNo1WLKQ8FZQ==",
       "requires": {
         "@actions/core": "^1.2.6",
         "@actions/exec": "^1.0.1",
@@ -136,9 +136,9 @@
       }
     },
     "@azure/core-http": {
-      "version": "2.2.1",
-      "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.1.tgz",
-      "integrity": "sha512-7ATnV3OGzCO2K9kMrh3NKUM8b4v+xasmlUhkNZz6uMbm+8XH/AexLkhRGsoo0GyKNlEGvyGEfytqTk0nUY2I4A==",
+      "version": "2.2.2",
+      "resolved": "https://registry.npmjs.org/@azure/core-http/-/core-http-2.2.2.tgz",
+      "integrity": "sha512-V1DdoO9V/sFimKpdWoNBgsE+QUjQgpXYnxrTdUp5RyhsTJjvEVn/HKmTQXIHuLUUo6IyIWj+B+Dg4VaXse9dIA==",
       "requires": {
         "@azure/abort-controller": "^1.0.0",
         "@azure/core-asynciterator-polyfill": "^1.0.0",
@@ -8137,9 +8137,9 @@
       "dev": true
     },
     "node-fetch": {
-      "version": "2.6.5",
-      "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.5.tgz",
-      "integrity": "sha512-mmlIVHJEu5rnIxgEgez6b9GgWXbkZj5YZ7fx+2r94a2E+Uirsp6HsPTPlomfdHtpt/B0cdKviwkoaM6pyvUOpQ==",
+      "version": "2.6.6",
+      "resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.6.tgz",
+      "integrity": "sha512-Z8/6vRlTUChSdIgMa51jxQ4lrw/Jy5SOW10ObaA47/RElsAN2c5Pn8bTgFGWn/ibwzXTE8qwr1Yzx28vsecXEA==",
       "requires": {
         "whatwg-url": "^5.0.0"
       },
2 package.json

@@ -23,7 +23,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "^1.0.7",
+    "@actions/cache": "^1.0.8",
     "@actions/core": "^1.2.3",
     "@actions/exec": "^1.1.0",
     "@actions/glob": "^0.2.0",
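Because package.json asks for a caret range rather than an exact version, this bump widens what future installs may resolve to, while the lockfile above pins this build to exactly 1.0.8. An illustrative check of the range semantics, assuming the semver package is installed (npm install semver):

const semver = require('semver');

console.log(semver.satisfies('1.0.8', '^1.0.8')); // true  -- the pinned version itself
console.log(semver.satisfies('1.1.0', '^1.0.8')); // true  -- later 1.x releases also match
console.log(semver.satisfies('2.0.0', '^1.0.8')); // false -- a major bump never matches a caret range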