Upgraded `actions/cache` to 3.0.4

commit 40635cc059
parent 88646268d9
Author: Sankalp Kotewar
Date:   2022-10-17 15:14:19 +05:30

5 changed files with 172 additions and 104 deletions


@@ -1,6 +1,6 @@
 ---
 name: "@actions/cache"
-version: 3.0.0
+version: 3.0.4
 type: npm
 summary: Actions cache lib
 homepage: https://github.com/actions/toolkit/tree/main/packages/cache
@@ -17,4 +17,4 @@ licenses:
 The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 notices: []

dist/cleanup/index.js (vendored, 128 lines changed)

@@ -525,7 +525,13 @@ function resolvePaths(patterns) {
                 .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
             core.debug(`Matched: ${relativeFile}`);
             // Paths are made relative so the tar entries are all relative to the root of the workspace.
-            paths.push(`${relativeFile}`);
+            if (relativeFile === '') {
+                // path.relative returns empty string if workspace and file are equal
+                paths.push('.');
+            }
+            else {
+                paths.push(`${relativeFile}`);
+            }
         }
     }
     catch (e_1_1) { e_1 = { error: e_1_1 }; }
@@ -683,6 +689,7 @@ const util = __importStar(__nccwpck_require__(3837));
 const utils = __importStar(__nccwpck_require__(1518));
 const constants_1 = __nccwpck_require__(8840);
 const requestUtils_1 = __nccwpck_require__(3981);
+const abort_controller_1 = __nccwpck_require__(2557);
 /**
  * Pipes the body of a HTTP response to a stream
  *
@@ -866,15 +873,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
         const fd = fs.openSync(archivePath, 'w');
         try {
             downloadProgress.startDisplayTimer();
+            const controller = new abort_controller_1.AbortController();
+            const abortSignal = controller.signal;
             while (!downloadProgress.isDone()) {
                 const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
                 const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
                 downloadProgress.nextSegment(segmentSize);
-                const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
+                const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
+                    abortSignal,
                     concurrency: options.downloadConcurrency,
                     onProgress: downloadProgress.onProgress()
-                });
-                fs.writeFileSync(fd, result);
+                }));
+                if (result === 'timeout') {
+                    controller.abort();
+                    throw new Error('Aborting cache download as the download time exceeded the timeout.');
+                }
+                else if (Buffer.isBuffer(result)) {
+                    fs.writeFileSync(fd, result);
+                }
             }
         }
         finally {
@@ -885,6 +901,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
     });
 }
 exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
+const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
+    let timeoutHandle;
+    const timeoutPromise = new Promise(resolve => {
+        timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
+    });
+    return Promise.race([promise, timeoutPromise]).then(result => {
+        clearTimeout(timeoutHandle);
+        return result;
+    });
+});
 //# sourceMappingURL=downloadUtils.js.map
 /***/ }),
@@ -1044,6 +1070,7 @@ const fs_1 = __nccwpck_require__(7147);
 const path = __importStar(__nccwpck_require__(1017));
 const utils = __importStar(__nccwpck_require__(1518));
 const constants_1 = __nccwpck_require__(8840);
+const IS_WINDOWS = process.platform === 'win32';
 function getTarPath(args, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
@@ -1091,26 +1118,43 @@ function getWorkingDirectory() {
     var _a;
     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
 }
+// Common function for extractTar and listTar to get the compression method
+function getCompressionProgram(compressionMethod) {
+    // -d: Decompress.
+    // unzstd is equivalent to 'zstd -d'
+    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+    // Using 30 here because we also support 32-bit self-hosted runners.
+    switch (compressionMethod) {
+        case constants_1.CompressionMethod.Zstd:
+            return [
+                '--use-compress-program',
+                IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
+            ];
+        case constants_1.CompressionMethod.ZstdWithoutLong:
+            return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
+        default:
+            return ['-z'];
+    }
+}
+function listTar(archivePath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const args = [
+            ...getCompressionProgram(compressionMethod),
+            '-tf',
+            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P'
+        ];
+        yield execTar(args, compressionMethod);
+    });
+}
+exports.listTar = listTar;
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        // --d: Decompress.
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstd -d --long=30'];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstd -d'];
-                default:
-                    return ['-z'];
-            }
-        }
         const args = [
-            ...getCompressionProgram(),
+            ...getCompressionProgram(compressionMethod),
             '-xf',
             archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
             '-P',
@@ -1129,15 +1173,19 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
         const workingDirectory = getWorkingDirectory();
         // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+        // zstdmt is equivalent to 'zstd -T0'
         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
         // Using 30 here because we also support 32-bit self-hosted runners.
         // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
         function getCompressionProgram() {
             switch (compressionMethod) {
                 case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstd -T0 --long=30'];
+                    return [
+                        '--use-compress-program',
+                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
+                    ];
                 case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstd -T0'];
+                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
                 default:
                     return ['-z'];
             }
@@ -1159,32 +1207,6 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     });
 }
 exports.createTar = createTar;
-function listTar(archivePath, compressionMethod) {
-    return __awaiter(this, void 0, void 0, function* () {
-        // --d: Decompress.
-        // --long=#: Enables long distance matching with # bits.
-        // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstd -d --long=30'];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstd -d'];
-                default:
-                    return ['-z'];
-            }
-        }
-        const args = [
-            ...getCompressionProgram(),
-            '-tf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P'
-        ];
-        yield execTar(args, compressionMethod);
-    });
-}
-exports.listTar = listTar;
 //# sourceMappingURL=tar.js.map
 /***/ }),
@@ -1235,7 +1257,8 @@ function getDownloadOptions(copy) {
     const result = {
         useAzureSdk: true,
         downloadConcurrency: 8,
-        timeoutInMs: 30000
+        timeoutInMs: 30000,
+        segmentTimeoutInMs: 3600000
     };
     if (copy) {
         if (typeof copy.useAzureSdk === 'boolean') {
@@ -1247,10 +1270,21 @@ function getDownloadOptions(copy) {
         if (typeof copy.timeoutInMs === 'number') {
             result.timeoutInMs = copy.timeoutInMs;
         }
+        if (typeof copy.segmentTimeoutInMs === 'number') {
+            result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
+        }
+    }
+    const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
+    if (segmentDownloadTimeoutMins &&
+        !isNaN(Number(segmentDownloadTimeoutMins)) &&
+        isFinite(Number(segmentDownloadTimeoutMins))) {
+        result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000;
     }
     core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
     core.debug(`Download concurrency: ${result.downloadConcurrency}`);
     core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
+    core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`);
+    core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
     return result;
 }
 exports.getDownloadOptions = getDownloadOptions;
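
A note on the segment timeout added above: the bundled promiseWithTimeout/AbortController pattern reads more easily as the standalone sketch below. This is an illustrative reconstruction, not part of the diff; it uses plain async/await instead of the compiled __awaiter form, and downloadSegmentWithTimeout is a hypothetical helper name.

const { AbortController } = require('abort-controller');

// Race one segment download against a timer. On timeout, abort the Azure SDK
// call and fail the cache download instead of letting it hang indefinitely.
async function downloadSegmentWithTimeout(client, segmentStart, segmentSize, options) {
    const controller = new AbortController();
    let timeoutHandle;
    const timeoutPromise = new Promise(resolve => {
        // Default to 1 hour (3600000 ms), matching the bundled code.
        timeoutHandle = setTimeout(() => resolve('timeout'), options.segmentTimeoutInMs || 3600000);
    });
    const download = client.downloadToBuffer(segmentStart, segmentSize, {
        abortSignal: controller.signal,
        concurrency: options.downloadConcurrency
    });
    const result = await Promise.race([download, timeoutPromise]);
    clearTimeout(timeoutHandle);
    if (result === 'timeout') {
        controller.abort();
        throw new Error('Aborting cache download as the download time exceeded the timeout.');
    }
    return result; // a Buffer on success
}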

dist/setup/index.js (vendored, 128 lines changed)

@@ -525,7 +525,13 @@ function resolvePaths(patterns) {
                 .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
             core.debug(`Matched: ${relativeFile}`);
             // Paths are made relative so the tar entries are all relative to the root of the workspace.
-            paths.push(`${relativeFile}`);
+            if (relativeFile === '') {
+                // path.relative returns empty string if workspace and file are equal
+                paths.push('.');
+            }
+            else {
+                paths.push(`${relativeFile}`);
+            }
         }
     }
     catch (e_1_1) { e_1 = { error: e_1_1 }; }
@@ -683,6 +689,7 @@ const util = __importStar(__nccwpck_require__(3837));
 const utils = __importStar(__nccwpck_require__(1518));
 const constants_1 = __nccwpck_require__(8840);
 const requestUtils_1 = __nccwpck_require__(3981);
+const abort_controller_1 = __nccwpck_require__(2557);
 /**
  * Pipes the body of a HTTP response to a stream
  *
@@ -866,15 +873,24 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
         const fd = fs.openSync(archivePath, 'w');
         try {
             downloadProgress.startDisplayTimer();
+            const controller = new abort_controller_1.AbortController();
+            const abortSignal = controller.signal;
             while (!downloadProgress.isDone()) {
                 const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
                 const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
                 downloadProgress.nextSegment(segmentSize);
-                const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
+                const result = yield promiseWithTimeout(options.segmentTimeoutInMs || 3600000, client.downloadToBuffer(segmentStart, segmentSize, {
+                    abortSignal,
                     concurrency: options.downloadConcurrency,
                     onProgress: downloadProgress.onProgress()
-                });
-                fs.writeFileSync(fd, result);
+                }));
+                if (result === 'timeout') {
+                    controller.abort();
+                    throw new Error('Aborting cache download as the download time exceeded the timeout.');
+                }
+                else if (Buffer.isBuffer(result)) {
+                    fs.writeFileSync(fd, result);
+                }
             }
         }
         finally {
@@ -885,6 +901,16 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
     });
 }
 exports.downloadCacheStorageSDK = downloadCacheStorageSDK;
+const promiseWithTimeout = (timeoutMs, promise) => __awaiter(void 0, void 0, void 0, function* () {
+    let timeoutHandle;
+    const timeoutPromise = new Promise(resolve => {
+        timeoutHandle = setTimeout(() => resolve('timeout'), timeoutMs);
+    });
+    return Promise.race([promise, timeoutPromise]).then(result => {
+        clearTimeout(timeoutHandle);
+        return result;
+    });
+});
 //# sourceMappingURL=downloadUtils.js.map
 /***/ }),
@@ -1044,6 +1070,7 @@ const fs_1 = __nccwpck_require__(7147);
 const path = __importStar(__nccwpck_require__(1017));
 const utils = __importStar(__nccwpck_require__(1518));
 const constants_1 = __nccwpck_require__(8840);
+const IS_WINDOWS = process.platform === 'win32';
 function getTarPath(args, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         switch (process.platform) {
@@ -1091,26 +1118,43 @@ function getWorkingDirectory() {
     var _a;
     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd();
 }
+// Common function for extractTar and listTar to get the compression method
+function getCompressionProgram(compressionMethod) {
+    // -d: Decompress.
+    // unzstd is equivalent to 'zstd -d'
+    // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
+    // Using 30 here because we also support 32-bit self-hosted runners.
+    switch (compressionMethod) {
+        case constants_1.CompressionMethod.Zstd:
+            return [
+                '--use-compress-program',
+                IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30'
+            ];
+        case constants_1.CompressionMethod.ZstdWithoutLong:
+            return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd'];
+        default:
+            return ['-z'];
+    }
+}
+function listTar(archivePath, compressionMethod) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const args = [
+            ...getCompressionProgram(compressionMethod),
+            '-tf',
+            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
+            '-P'
+        ];
+        yield execTar(args, compressionMethod);
+    });
+}
+exports.listTar = listTar;
 function extractTar(archivePath, compressionMethod) {
     return __awaiter(this, void 0, void 0, function* () {
         // Create directory to extract tar into
         const workingDirectory = getWorkingDirectory();
         yield io.mkdirP(workingDirectory);
-        // --d: Decompress.
-        // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstd -d --long=30'];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstd -d'];
-                default:
-                    return ['-z'];
-            }
-        }
         const args = [
-            ...getCompressionProgram(),
+            ...getCompressionProgram(compressionMethod),
            '-xf',
            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
            '-P',
@@ -1129,15 +1173,19 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n'));
         const workingDirectory = getWorkingDirectory();
         // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
+        // zstdmt is equivalent to 'zstd -T0'
         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
         // Using 30 here because we also support 32-bit self-hosted runners.
         // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
         function getCompressionProgram() {
             switch (compressionMethod) {
                 case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstd -T0 --long=30'];
+                    return [
+                        '--use-compress-program',
+                        IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30'
+                    ];
                 case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstd -T0'];
+                    return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt'];
                 default:
                     return ['-z'];
             }
@@ -1159,32 +1207,6 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
     });
 }
 exports.createTar = createTar;
-function listTar(archivePath, compressionMethod) {
-    return __awaiter(this, void 0, void 0, function* () {
-        // --d: Decompress.
-        // --long=#: Enables long distance matching with # bits.
-        // Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
-        // Using 30 here because we also support 32-bit self-hosted runners.
-        function getCompressionProgram() {
-            switch (compressionMethod) {
-                case constants_1.CompressionMethod.Zstd:
-                    return ['--use-compress-program', 'zstd -d --long=30'];
-                case constants_1.CompressionMethod.ZstdWithoutLong:
-                    return ['--use-compress-program', 'zstd -d'];
-                default:
-                    return ['-z'];
-            }
-        }
-        const args = [
-            ...getCompressionProgram(),
-            '-tf',
-            archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'),
-            '-P'
-        ];
-        yield execTar(args, compressionMethod);
-    });
-}
-exports.listTar = listTar;
 //# sourceMappingURL=tar.js.map
 /***/ }),
@@ -1235,7 +1257,8 @@ function getDownloadOptions(copy) {
     const result = {
         useAzureSdk: true,
         downloadConcurrency: 8,
-        timeoutInMs: 30000
+        timeoutInMs: 30000,
+        segmentTimeoutInMs: 3600000
     };
     if (copy) {
         if (typeof copy.useAzureSdk === 'boolean') {
@@ -1247,10 +1270,21 @@ function getDownloadOptions(copy) {
         if (typeof copy.timeoutInMs === 'number') {
             result.timeoutInMs = copy.timeoutInMs;
         }
+        if (typeof copy.segmentTimeoutInMs === 'number') {
+            result.segmentTimeoutInMs = copy.segmentTimeoutInMs;
+        }
+    }
+    const segmentDownloadTimeoutMins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
+    if (segmentDownloadTimeoutMins &&
+        !isNaN(Number(segmentDownloadTimeoutMins)) &&
+        isFinite(Number(segmentDownloadTimeoutMins))) {
+        result.segmentTimeoutInMs = Number(segmentDownloadTimeoutMins) * 60 * 1000;
     }
     core.debug(`Use Azure SDK: ${result.useAzureSdk}`);
     core.debug(`Download concurrency: ${result.downloadConcurrency}`);
     core.debug(`Request timeout (ms): ${result.timeoutInMs}`);
+    core.debug(`Cache segment download timeout mins env var: ${process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS']}`);
+    core.debug(`Segment download timeout (ms): ${result.segmentTimeoutInMs}`);
     return result;
 }
 exports.getDownloadOptions = getDownloadOptions;
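
The hunks above mirror dist/cleanup/index.js because both dist files are bundled from the same cache sources. For reference, the way getDownloadOptions now resolves the segment timeout boils down to the sketch below: a 1-hour default, an optional segmentTimeoutInMs override, and the SEGMENT_DOWNLOAD_TIMEOUT_MINS environment variable taking final precedence when it parses to a finite number. This is a simplified restatement, not part of the diff; resolveSegmentTimeoutInMs is a hypothetical name.

// Precedence: SEGMENT_DOWNLOAD_TIMEOUT_MINS env var > explicit option > 1-hour default.
function resolveSegmentTimeoutInMs(copy) {
    let segmentTimeoutInMs = 3600000; // 1 hour
    if (copy && typeof copy.segmentTimeoutInMs === 'number') {
        segmentTimeoutInMs = copy.segmentTimeoutInMs;
    }
    const mins = process.env['SEGMENT_DOWNLOAD_TIMEOUT_MINS'];
    if (mins && !isNaN(Number(mins)) && isFinite(Number(mins))) {
        segmentTimeoutInMs = Number(mins) * 60 * 1000;
    }
    return segmentTimeoutInMs;
}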

package-lock.json (generated, 14 lines changed)

@@ -9,7 +9,7 @@
       "version": "3.4.1",
       "license": "MIT",
       "dependencies": {
-        "@actions/cache": "^3.0.0",
+        "@actions/cache": "^3.0.4",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.4",
         "@actions/glob": "^0.2.0",
@@ -32,9 +32,9 @@
       }
     },
     "node_modules/@actions/cache": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
-      "integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
+      "version": "3.0.4",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz",
+      "integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==",
       "dependencies": {
         "@actions/core": "^1.2.6",
         "@actions/exec": "^1.0.1",
@@ -4790,9 +4790,9 @@
     },
     "dependencies": {
       "@actions/cache": {
-        "version": "3.0.0",
-        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
-        "integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
+        "version": "3.0.4",
+        "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.4.tgz",
+        "integrity": "sha512-9RwVL8/ISJoYWFNH1wR/C26E+M3HDkGPWmbFJMMCKwTkjbNZJreMT4XaR/EB1bheIvN4PREQxEQQVJ18IPnf/Q==",
         "requires": {
           "@actions/core": "^1.2.6",
           "@actions/exec": "^1.0.1",

package.json

@@ -24,7 +24,7 @@
   "author": "GitHub",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "^3.0.0",
+    "@actions/cache": "^3.0.4",
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.0.4",
     "@actions/glob": "^0.2.0",