build(deps): bump @actions/cache from 3.2.1 to 3.2.2 (#825)
Co-authored-by: Fernandez Ludovic <ldez@users.noreply.github.com>
Author: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
parent 18dad33d2e
commit 5e676315e9
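Beyond the version bump itself, the vendored dist bundles pick up a behavioural change in how @actions/cache downloads caches hosted on Azure Blob Storage: useAzureSdk now defaults to false, and a new concurrentBlobDownloads option (defaulting to true) selects a concurrent http-client download path, downloadCacheHttpClientConcurrent, added to work around an Azure blob SDK issue. The hunks below show the change in both bundles.

A hedged sketch of how a consumer of @actions/cache 3.2.2 could set these options explicitly when restoring a cache; the path and keys are illustrative placeholders, not values taken from this action:

import * as cache from '@actions/cache';

async function restoreWithExplicitOptions(): Promise<void> {
  const hitKey = await cache.restoreCache(
    ['.cache/golangci-lint'],        // hypothetical cache path
    'golangci-lint-cache-primary',   // hypothetical primary key
    [],                              // no restore keys
    {
      useAzureSdk: false,            // new default in 3.2.2 (was true in 3.2.1)
      concurrentBlobDownloads: true, // new option introduced in 3.2.2
      downloadConcurrency: 8,        // unchanged default
      timeoutInMs: 30000             // unchanged default
    }
  );
  console.log(hitKey ? `restored cache with key ${hitKey}` : 'cache miss');
}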
dist/post_run/index.js (generated, vendored; 183 changed lines):
@@ -331,10 +331,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
     }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
-    return crypto
-        .createHash('sha256')
-        .update(components.join('|'))
-        .digest('hex');
+    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
 }
 exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
@@ -387,13 +384,21 @@ function downloadCache(archiveLocation, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const archiveUrl = new url_1.URL(archiveLocation);
         const downloadOptions = (0, options_1.getDownloadOptions)(options);
-        if (downloadOptions.useAzureSdk &&
-            archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
-            // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
-            yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
+        if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+            if (downloadOptions.useAzureSdk) {
+                // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
+                yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
+            }
+            else if (downloadOptions.concurrentBlobDownloads) {
+                // Use concurrent implementation with HttpClient to work around blob SDK issue
+                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
+            }
+            else {
+                // Otherwise, download using the Actions http-client.
+                yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
+            }
         }
         else {
-            // Otherwise, download using the Actions http-client.
             yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
         }
     });
@@ -426,9 +431,7 @@ function getContentRange(start, end) {
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Uploading chunk of size ${end -
-            start +
-            1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
             'Content-Range': getContentRange(start, end)
@@ -596,35 +599,42 @@ function getArchiveFileSizeInBytes(filePath) {
 }
 exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
-    var e_1, _a;
-    var _b;
+    var _a, e_1, _b, _c;
+    var _d;
     return __awaiter(this, void 0, void 0, function* () {
         const paths = [];
-        const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+        const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
         const globber = yield glob.create(patterns.join('\n'), {
             implicitDescendants: false
         });
         try {
-            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
-                const file = _d.value;
-                const relativeFile = path
-                    .relative(workspace, file)
-                    .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
-                core.debug(`Matched: ${relativeFile}`);
-                // Paths are made relative so the tar entries are all relative to the root of the workspace.
-                if (relativeFile === '') {
-                    // path.relative returns empty string if workspace and file are equal
-                    paths.push('.');
-                }
-                else {
-                    paths.push(`${relativeFile}`);
+            for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
+                _c = _g.value;
+                _e = false;
+                try {
+                    const file = _c;
+                    const relativeFile = path
+                        .relative(workspace, file)
+                        .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
+                    core.debug(`Matched: ${relativeFile}`);
+                    // Paths are made relative so the tar entries are all relative to the root of the workspace.
+                    if (relativeFile === '') {
+                        // path.relative returns empty string if workspace and file are equal
+                        paths.push('.');
+                    }
+                    else {
+                        paths.push(`${relativeFile}`);
+                    }
+                }
+                finally {
+                    _e = true;
                 }
             }
         }
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
         finally {
             try {
-                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+                if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
             }
             finally { if (e_1) throw e_1.error; }
         }
@@ -789,7 +799,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
     });
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__nccwpck_require__(2186));
 const http_client_1 = __nccwpck_require__(6255);
 const storage_blob_1 = __nccwpck_require__(4100);
@@ -946,6 +956,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
     });
 }
 exports.downloadCacheHttpClient = downloadCacheHttpClient;
+/**
+ * Download the cache using the Actions toolkit http-client concurrently
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
+            socketTimeout: options.timeoutInMs,
+            keepAlive: true
+        });
+        try {
+            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
+            const lengthHeader = res.message.headers['content-length'];
+            if (lengthHeader === undefined || lengthHeader === null) {
+                throw new Error('Content-Length not found on blob response');
+            }
+            const length = parseInt(lengthHeader);
+            if (Number.isNaN(length)) {
+                throw new Error(`Could not interpret Content-Length: ${length}`);
+            }
+            const downloads = [];
+            const blockSize = 4 * 1024 * 1024;
+            for (let offset = 0; offset < length; offset += blockSize) {
+                const count = Math.min(blockSize, length - offset);
+                downloads.push({
+                    offset,
+                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
+                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
+                    })
+                });
+            }
+            // reverse to use .pop instead of .shift
+            downloads.reverse();
+            let actives = 0;
+            let bytesDownloaded = 0;
+            const progress = new DownloadProgress(length);
+            progress.startDisplayTimer();
+            const progressFn = progress.onProgress();
+            const activeDownloads = [];
+            let nextDownload;
+            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
+                const segment = yield Promise.race(Object.values(activeDownloads));
+                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
+                actives--;
+                delete activeDownloads[segment.offset];
+                bytesDownloaded += segment.count;
+                progressFn({ loadedBytes: bytesDownloaded });
+            });
+            while ((nextDownload = downloads.pop())) {
+                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
+                actives++;
+                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
+                    yield waitAndWrite();
+                }
+            }
+            while (actives > 0) {
+                yield waitAndWrite();
+            }
+        }
+        finally {
+            httpClient.dispose();
+            yield archiveDescriptor.close();
+        }
+    });
+}
+exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
+function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const retries = 5;
+        let failures = 0;
+        while (true) {
+            try {
+                const timeout = 30000;
+                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
+                if (typeof result === 'string') {
+                    throw new Error('downloadSegmentRetry failed due to timeout');
+                }
+                return result;
+            }
+            catch (err) {
+                if (failures >= retries) {
+                    throw err;
+                }
+                failures++;
+            }
+        }
+    });
+}
+function downloadSegment(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
+            return yield httpClient.get(archiveLocation, {
+                Range: `bytes=${offset}-${offset + count - 1}`
+            });
+        }));
+        if (!partRes.readBodyBuffer) {
+            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
+        }
+        return {
+            offset,
+            count,
+            buffer: yield partRes.readBodyBuffer()
+        };
+    });
+}
 /**
  * Download the cache using the Azure Storage SDK.  Only call this method if the
  * URL points to an Azure Storage endpoint.
@@ -1511,7 +1630,8 @@ exports.getUploadOptions = getUploadOptions;
  */
 function getDownloadOptions(copy) {
     const result = {
-        useAzureSdk: true,
+        useAzureSdk: false,
+        concurrentBlobDownloads: true,
         downloadConcurrency: 8,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
@@ -1521,6 +1641,9 @@ function getDownloadOptions(copy) {
         if (typeof copy.useAzureSdk === 'boolean') {
             result.useAzureSdk = copy.useAzureSdk;
         }
+        if (typeof copy.concurrentBlobDownloads === 'boolean') {
+            result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
+        }
         if (typeof copy.downloadConcurrency === 'number') {
             result.downloadConcurrency = copy.downloadConcurrency;
         }
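The new downloadCacheHttpClientConcurrent above keeps a pool of in-flight Range requests and writes each 4 MiB segment at its own file offset as soon as it completes, so segments may finish out of order without corrupting the archive. A minimal TypeScript sketch of the same bounded-concurrency pattern, assuming Node 18+ for the built-in fetch; fetchSegment and downloadConcurrent are hypothetical names, not part of @actions/cache:

import { promises as fs } from 'fs';

interface Segment {
  offset: number;
  count: number;
  buffer: Buffer;
}

// One HTTP Range request for `count` bytes starting at `offset`.
async function fetchSegment(url: string, offset: number, count: number): Promise<Segment> {
  const res = await fetch(url, {
    headers: { Range: `bytes=${offset}-${offset + count - 1}` }
  });
  if (!res.ok) {
    throw new Error(`Range request failed with status ${res.status}`);
  }
  return { offset, count, buffer: Buffer.from(await res.arrayBuffer()) };
}

async function downloadConcurrent(url: string, dest: string, length: number, concurrency = 8): Promise<void> {
  // Split the file into 4 MiB blocks, matching the vendored code.
  const blockSize = 4 * 1024 * 1024;
  const work: Array<{ offset: number; run: () => Promise<Segment> }> = [];
  for (let offset = 0; offset < length; offset += blockSize) {
    const count = Math.min(blockSize, length - offset);
    work.push({ offset, run: () => fetchSegment(url, offset, count) });
  }
  work.reverse(); // so pop() hands out segments in ascending offset order

  const fd = await fs.open(dest, 'w');
  const active = new Map<number, Promise<Segment>>();
  // Wait for whichever in-flight segment settles first, then write it at its
  // offset; out-of-order completion still produces a correct file.
  const drainOne = async (): Promise<void> => {
    const seg = await Promise.race(active.values());
    active.delete(seg.offset);
    await fd.write(seg.buffer, 0, seg.count, seg.offset);
  };
  try {
    let next: { offset: number; run: () => Promise<Segment> } | undefined;
    while ((next = work.pop())) {
      active.set(next.offset, next.run());
      if (active.size >= concurrency) {
        await drainOne();
      }
    }
    while (active.size > 0) {
      await drainOne();
    }
  } finally {
    await fd.close();
  }
}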
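Each segment fetch is additionally wrapped by downloadSegmentRetry, which races the request against a 30-second timer and retries up to five times before giving up. A sketch of that timeout-plus-retry pattern; this promiseWithTimeout is a hypothetical re-implementation that rejects on timeout, whereas the vendored helper resolves to the string 'timeout' (hence the typeof check in the diff above), and withRetries is an invented name:

// Race `promise` against a timer that rejects after `timeoutMs`.
function promiseWithTimeout<T>(timeoutMs: number, promise: Promise<T>): Promise<T> {
  let timer: NodeJS.Timeout | undefined;
  const timeout = new Promise<never>((_, reject) => {
    timer = setTimeout(() => reject(new Error('segment download timed out')), timeoutMs);
  });
  return Promise.race([promise, timeout]).finally(() => {
    if (timer !== undefined) clearTimeout(timer);
  });
}

// Retry `fn` until it succeeds within the timeout or the retry budget is spent.
async function withRetries<T>(fn: () => Promise<T>, retries = 5, timeoutMs = 30000): Promise<T> {
  let failures = 0;
  for (;;) {
    try {
      return await promiseWithTimeout(timeoutMs, fn());
    } catch (err) {
      if (failures >= retries) {
        throw err; // same budget as upstream: up to 5 retries
      }
      failures++;
    }
  }
}

In the sketch above, the concurrent downloader would schedule withRetries(() => fetchSegment(url, offset, count)) for each block.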
dist/run/index.js (generated, vendored; 183 changed lines): the hunks are identical to those in dist/post_run/index.js above.
package-lock.json (generated; 18 changed lines):
@@ -9,7 +9,7 @@
       "version": "3.1.0",
       "license": "MIT",
       "dependencies": {
-        "@actions/cache": "^3.2.1",
+        "@actions/cache": "^3.2.2",
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.1.1",
         "@actions/github": "^5.1.1",
@@ -43,14 +43,14 @@
       }
     },
     "node_modules/@actions/cache": {
-      "version": "3.2.1",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz",
-      "integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==",
+      "version": "3.2.2",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.2.tgz",
+      "integrity": "sha512-6D0Jq5JrLZRQ3VApeQwQkkV20ZZXjXsHNYXd9VjNUdi9E0h93wESpxfMJ2JWLCUCgHNLcfY0v3GjNM+2FdRMlg==",
       "dependencies": {
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",
         "@actions/glob": "^0.1.0",
-        "@actions/http-client": "^2.0.1",
+        "@actions/http-client": "^2.1.1",
         "@actions/io": "^1.0.1",
         "@azure/abort-controller": "^1.1.0",
         "@azure/ms-rest-js": "^2.6.0",
@@ -3937,14 +3937,14 @@
       "dev": true
     },
     "@actions/cache": {
-      "version": "3.2.1",
-      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.1.tgz",
-      "integrity": "sha512-QurbMiY//02+0kN1adJkMHN44RcZ5kAXfhSnKUZmtSmhMTNqLitGArG1xOkt93NNyByTlLGAc5wIOF/dZ2ENOQ==",
+      "version": "3.2.2",
+      "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.2.2.tgz",
+      "integrity": "sha512-6D0Jq5JrLZRQ3VApeQwQkkV20ZZXjXsHNYXd9VjNUdi9E0h93wESpxfMJ2JWLCUCgHNLcfY0v3GjNM+2FdRMlg==",
       "requires": {
         "@actions/core": "^1.10.0",
         "@actions/exec": "^1.0.1",
         "@actions/glob": "^0.1.0",
-        "@actions/http-client": "^2.0.1",
+        "@actions/http-client": "^2.1.1",
         "@actions/io": "^1.0.1",
         "@azure/abort-controller": "^1.1.0",
         "@azure/ms-rest-js": "^2.6.0",
package.json:

@@ -24,7 +24,7 @@
   "author": "golangci",
   "license": "MIT",
   "dependencies": {
-    "@actions/cache": "^3.2.1",
+    "@actions/cache": "^3.2.2",
     "@actions/core": "^1.10.0",
     "@actions/exec": "^1.1.1",
     "@actions/github": "^5.1.1",
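The lockfile also moves @actions/http-client from ^2.0.1 to ^2.1.1, which matters for the new concurrent path: the vendored downloadSegment guards on partRes.readBodyBuffer being present, and that method is optional in the http-client typings, which is presumably why the dependency range moved. A hedged sketch of a ranged request through that API; fetchFirstKiB and the user-agent string are illustrative:

import { HttpClient } from '@actions/http-client';

// Fetch the first KiB of a URL with a Range header and read it as a Buffer.
async function fetchFirstKiB(url: string): Promise<Buffer> {
  const client = new HttpClient('http-client-range-example'); // illustrative user agent
  try {
    const res = await client.get(url, { Range: 'bytes=0-1023' });
    if (!res.readBodyBuffer) {
      // readBodyBuffer is optional in the typings; older releases lack it.
      throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
    }
    return await res.readBodyBuffer();
  } finally {
    client.dispose();
  }
}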