mirror of https://github.com/actions/cache.git
	Consume latest toolkit and fix dangling promise bug (#1217)
* Consume latest toolkit and fix dangling promise bug
* Pass earlyExit parameter to run method so tests don't hang
* Pass earlyExit parameter to run method so tests don't hang
* Refactor restore files to have better patterns for testing
* style
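The "dangling promise" in the title matters because any promise or timer the toolkit leaves unsettled keeps Node's event loop alive, so the action process (and the Jest tests that drive run()) never exits. A minimal sketch of the earlyExit pattern the message describes; run and saveCache here are illustrative stand-ins, not the repository's actual entrypoints:

// Illustrative sketch only: saveCache stands in for work that may leave
// timers or sockets open inside the toolkit.
const saveCache = async () => { /* cache upload elided */ };

async function run(earlyExit) {
    try {
        await saveCache();
    } catch (error) {
        console.error(error.message);
    }
    if (earlyExit) {
        // Force the process down instead of waiting on dangling promises.
        process.exit(0);
    }
}

run(true); // the action entrypoint exits eagerly; tests call run(false)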
This commit is contained in:
parent 67b839edb6
commit f7ebb81a3f
14 changed files with 922 additions and 239 deletions

221 dist/save/index.js (vendored)
@@ -1127,35 +1127,42 @@ function getArchiveFileSizeInBytes(filePath) {
 }
 exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
-    var e_1, _a;
-    var _b;
+    var _a, e_1, _b, _c;
+    var _d;
     return __awaiter(this, void 0, void 0, function* () {
         const paths = [];
-        const workspace = (_b = process.env['GITHUB_WORKSPACE']) !== null && _b !== void 0 ? _b : process.cwd();
+        const workspace = (_d = process.env['GITHUB_WORKSPACE']) !== null && _d !== void 0 ? _d : process.cwd();
         const globber = yield glob.create(patterns.join('\n'), {
             implicitDescendants: false
         });
         try {
-            for (var _c = __asyncValues(globber.globGenerator()), _d; _d = yield _c.next(), !_d.done;) {
-                const file = _d.value;
-                const relativeFile = path
-                    .relative(workspace, file)
-                    .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
-                core.debug(`Matched: ${relativeFile}`);
-                // Paths are made relative so the tar entries are all relative to the root of the workspace.
-                if (relativeFile === '') {
-                    // path.relative returns empty string if workspace and file are equal
-                    paths.push('.');
+            for (var _e = true, _f = __asyncValues(globber.globGenerator()), _g; _g = yield _f.next(), _a = _g.done, !_a;) {
+                _c = _g.value;
+                _e = false;
+                try {
+                    const file = _c;
+                    const relativeFile = path
+                        .relative(workspace, file)
+                        .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
+                    core.debug(`Matched: ${relativeFile}`);
+                    // Paths are made relative so the tar entries are all relative to the root of the workspace.
+                    if (relativeFile === '') {
+                        // path.relative returns empty string if workspace and file are equal
+                        paths.push('.');
+                    }
+                    else {
+                        paths.push(`${relativeFile}`);
+                    }
                 }
-                else {
-                    paths.push(`${relativeFile}`);
+                finally {
+                    _e = true;
                 }
             }
         }
         catch (e_1_1) { e_1 = { error: e_1_1 }; }
         finally {
             try {
-                if (_d && !_d.done && (_a = _c.return)) yield _a.call(_c);
+                if (!_e && !_a && (_b = _f.return)) yield _b.call(_f);
             }
             finally { if (e_1) throw e_1.error; }
         }
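The heart of the hunk above, separated from the compiler-generated async-iterator plumbing: each glob match is rewritten relative to GITHUB_WORKSPACE with forward slashes so the tar entries are portable, and a match equal to the workspace itself becomes '.'. A standalone restatement (not the bundled code):

const path = require('path');

// Mirror of the rewrite in resolvePaths: make a matched file relative to
// the workspace and normalize separators for tar.
function toTarEntry(workspace, file) {
    const relativeFile = path
        .relative(workspace, file)
        .replace(new RegExp(`\\${path.sep}`, 'g'), '/');
    // path.relative returns '' when workspace and file are the same path
    return relativeFile === '' ? '.' : relativeFile;
}

console.log(toTarEntry('/repo', '/repo/node_modules')); // "node_modules"
console.log(toTarEntry('/repo', '/repo'));              // "."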
@@ -3394,10 +3401,7 @@ function getCacheVersion(paths, compressionMethod, enableCrossOsArchive = false)
     }
     // Add salt to cache version to support breaking changes in cache entry
     components.push(versionSalt);
-    return crypto
-        .createHash('sha256')
-        .update(components.join('|'))
-        .digest('hex');
+    return crypto.createHash('sha256').update(components.join('|')).digest('hex');
 }
 exports.getCacheVersion = getCacheVersion;
 function getCacheEntry(keys, paths, options) {
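The reflowed return statement above is pure formatting; the derivation is unchanged: the cache version is the SHA-256 of the components joined by '|'. A standalone equivalent, with illustrative component values (the real components are the path list, the compression method, and the toolkit's version salt):

const crypto = require('crypto');

// Illustrative inputs: cache paths, compression method, version salt.
const components = ['node_modules', 'zstd-without-long', '1.0'];
const version = crypto.createHash('sha256').update(components.join('|')).digest('hex');
console.log(version); // deterministic 64-char hex cache version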
@@ -3450,13 +3454,21 @@ function downloadCache(archiveLocation, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         const archiveUrl = new url_1.URL(archiveLocation);
         const downloadOptions = (0, options_1.getDownloadOptions)(options);
-        if (downloadOptions.useAzureSdk &&
-            archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
-            // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
-            yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
+        if (archiveUrl.hostname.endsWith('.blob.core.windows.net')) {
+            if (downloadOptions.useAzureSdk) {
+                // Use Azure storage SDK to download caches hosted on Azure to improve speed and reliability.
+                yield (0, downloadUtils_1.downloadCacheStorageSDK)(archiveLocation, archivePath, downloadOptions);
+            }
+            else if (downloadOptions.concurrentBlobDownloads) {
+                // Use concurrent implementation with HttpClient to work around blob SDK issue
+                yield (0, downloadUtils_1.downloadCacheHttpClientConcurrent)(archiveLocation, archivePath, downloadOptions);
+            }
+            else {
+                // Otherwise, download using the Actions http-client.
+                yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
+            }
         }
         else {
             // Otherwise, download using the Actions http-client.
             yield (0, downloadUtils_1.downloadCacheHttpClient)(archiveLocation, archivePath);
         }
     });
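The control-flow change above reads more easily as a decision table: Azure-hosted archives now have three download paths instead of two, and the new concurrentBlobDownloads branch is the default (see the option defaults further down). Reduced to a sketch:

// Decision order for Azure-hosted archives after this change:
//   1. useAzureSdk             -> Azure Storage SDK download
//   2. concurrentBlobDownloads -> segmented HttpClient download (new)
//   3. otherwise               -> plain Actions http-client
// Non-Azure URLs always use the plain http-client.
function pickDownloader(hostname, opts) {
    if (!hostname.endsWith('.blob.core.windows.net')) return 'httpClient';
    if (opts.useAzureSdk) return 'azureSdk';
    if (opts.concurrentBlobDownloads) return 'httpClientConcurrent';
    return 'httpClient';
}

console.log(pickDownloader('example.blob.core.windows.net', {
    useAzureSdk: false,
    concurrentBlobDownloads: true
})); // "httpClientConcurrent"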
@@ -3489,9 +3501,7 @@ function getContentRange(start, end) {
 }
 function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
     return __awaiter(this, void 0, void 0, function* () {
-        core.debug(`Uploading chunk of size ${end -
-            start +
-            1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
+        core.debug(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
         const additionalHeaders = {
             'Content-Type': 'application/octet-stream',
             'Content-Range': getContentRange(start, end)
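This hunk only collapses a multi-line template literal into one line. For reference, assuming getContentRange builds the "bytes <start>-<end>/*" form that the Content-Range header name suggests (its body is not shown in this diff), the debug line evaluates like so:

// Assumed shape of getContentRange; treat as a sketch, not the toolkit source.
function getContentRange(start, end) {
    return `bytes ${start}-${end}/*`;
}

const start = 0;
const end = 4 * 1024 * 1024 - 1; // one 4 MiB chunk
console.log(`Uploading chunk of size ${end - start + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`);
// Uploading chunk of size 4194304 bytes at offset 0 with content range: bytes 0-4194303/*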
@@ -4866,7 +4876,13 @@ function getProxyUrl(reqUrl) {
         }
     })();
     if (proxyVar) {
-        return new URL(proxyVar);
+        try {
+            return new URL(proxyVar);
+        }
+        catch (_a) {
+            if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
+                return new URL(`http://${proxyVar}`);
+        }
     }
     else {
         return undefined;
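The try/catch exists because proxy environment variables are often set to bare host:port values, which the WHATWG URL constructor rejects when there is no valid scheme. A small standalone demonstration of the fallback:

function toProxyUrl(proxyVar) {
    try {
        return new URL(proxyVar);
    }
    catch (_a) {
        // "10.0.0.1:8080" has no scheme ("10..." cannot start one), so
        // new URL() throws; retry with http:// prefixed.
        if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://'))
            return new URL(`http://${proxyVar}`);
    }
}

console.log(toProxyUrl('10.0.0.1:8080').href);          // "http://10.0.0.1:8080/"
console.log(toProxyUrl('https://proxy.example:3128').href); // parsed as-is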
@@ -4877,6 +4893,10 @@ function checkBypass(reqUrl) {
     if (!reqUrl.hostname) {
         return false;
     }
+    const reqHost = reqUrl.hostname;
+    if (isLoopbackAddress(reqHost)) {
+        return true;
+    }
     const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
     if (!noProxy) {
         return false;
@@ -4902,13 +4922,24 @@ function checkBypass(reqUrl) {
         .split(',')
         .map(x => x.trim().toUpperCase())
         .filter(x => x)) {
-        if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+        if (upperNoProxyItem === '*' ||
+            upperReqHosts.some(x => x === upperNoProxyItem ||
+                x.endsWith(`.${upperNoProxyItem}`) ||
+                (upperNoProxyItem.startsWith('.') &&
+                    x.endsWith(`${upperNoProxyItem}`)))) {
             return true;
         }
     }
     return false;
 }
 exports.checkBypass = checkBypass;
+function isLoopbackAddress(host) {
+    const hostLower = host.toLowerCase();
+    return (hostLower === 'localhost' ||
+        hostLower.startsWith('127.') ||
+        hostLower.startsWith('[::1]') ||
+        hostLower.startsWith('[0:0:0:0:0:0:0:1]'));
+}
 //# sourceMappingURL=proxy.js.map

 /***/ }),
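Two behavior changes land in checkBypass across the last two hunks: loopback hosts now always bypass the proxy, before NO_PROXY is even consulted, and a NO_PROXY entry now matches subdomains and supports "*" and leading-dot suffix forms. A simplified standalone restatement (the real code also checks host:port variants, omitted here):

function bypassesProxy(host, noProxy) {
    const hostLower = host.toLowerCase();
    // Loopback addresses bypass unconditionally, even with NO_PROXY unset.
    if (hostLower === 'localhost' || hostLower.startsWith('127.') ||
        hostLower.startsWith('[::1]')) {
        return true;
    }
    const upperHost = host.toUpperCase();
    return noProxy
        .split(',')
        .map(x => x.trim().toUpperCase())
        .filter(x => x)
        .some(item => item === '*' ||
            upperHost === item ||
            upperHost.endsWith(`.${item}`) ||
            (item.startsWith('.') && upperHost.endsWith(item)));
}

console.log(bypassesProxy('api.example.com', 'example.com')); // true (new behavior)
console.log(bypassesProxy('localhost', ''));                  // true (new behavior)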
@@ -5557,7 +5588,7 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
     });
 };
 Object.defineProperty(exports, "__esModule", { value: true });
-exports.downloadCacheStorageSDK = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
+exports.downloadCacheStorageSDK = exports.downloadCacheHttpClientConcurrent = exports.downloadCacheHttpClient = exports.DownloadProgress = void 0;
 const core = __importStar(__webpack_require__(470));
 const http_client_1 = __webpack_require__(425);
 const storage_blob_1 = __webpack_require__(373);
@@ -5714,6 +5745,115 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
     });
 }
 exports.downloadCacheHttpClient = downloadCacheHttpClient;
+/**
+ * Download the cache using the Actions toolkit http-client concurrently
+ *
+ * @param archiveLocation the URL for the cache
+ * @param archivePath the local path where the cache is saved
+ */
+function downloadCacheHttpClientConcurrent(archiveLocation, archivePath, options) {
+    var _a;
+    return __awaiter(this, void 0, void 0, function* () {
+        const archiveDescriptor = yield fs.promises.open(archivePath, 'w');
+        const httpClient = new http_client_1.HttpClient('actions/cache', undefined, {
+            socketTimeout: options.timeoutInMs,
+            keepAlive: true
+        });
+        try {
+            const res = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCacheMetadata', () => __awaiter(this, void 0, void 0, function* () { return yield httpClient.request('HEAD', archiveLocation, null, {}); }));
+            const lengthHeader = res.message.headers['content-length'];
+            if (lengthHeader === undefined || lengthHeader === null) {
+                throw new Error('Content-Length not found on blob response');
+            }
+            const length = parseInt(lengthHeader);
+            if (Number.isNaN(length)) {
+                throw new Error(`Could not interpret Content-Length: ${length}`);
+            }
+            const downloads = [];
+            const blockSize = 4 * 1024 * 1024;
+            for (let offset = 0; offset < length; offset += blockSize) {
+                const count = Math.min(blockSize, length - offset);
+                downloads.push({
+                    offset,
+                    promiseGetter: () => __awaiter(this, void 0, void 0, function* () {
+                        return yield downloadSegmentRetry(httpClient, archiveLocation, offset, count);
+                    })
+                });
+            }
+            // reverse to use .pop instead of .shift
+            downloads.reverse();
+            let actives = 0;
+            let bytesDownloaded = 0;
+            const progress = new DownloadProgress(length);
+            progress.startDisplayTimer();
+            const progressFn = progress.onProgress();
+            const activeDownloads = [];
+            let nextDownload;
+            const waitAndWrite = () => __awaiter(this, void 0, void 0, function* () {
+                const segment = yield Promise.race(Object.values(activeDownloads));
+                yield archiveDescriptor.write(segment.buffer, 0, segment.count, segment.offset);
+                actives--;
+                delete activeDownloads[segment.offset];
+                bytesDownloaded += segment.count;
+                progressFn({ loadedBytes: bytesDownloaded });
+            });
+            while ((nextDownload = downloads.pop())) {
+                activeDownloads[nextDownload.offset] = nextDownload.promiseGetter();
+                actives++;
+                if (actives >= ((_a = options.downloadConcurrency) !== null && _a !== void 0 ? _a : 10)) {
+                    yield waitAndWrite();
+                }
+            }
+            while (actives > 0) {
+                yield waitAndWrite();
+            }
+        }
+        finally {
+            httpClient.dispose();
+            yield archiveDescriptor.close();
+        }
+    });
+}
+exports.downloadCacheHttpClientConcurrent = downloadCacheHttpClientConcurrent;
+function downloadSegmentRetry(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const retries = 5;
+        let failures = 0;
+        while (true) {
+            try {
+                const timeout = 30000;
+                const result = yield promiseWithTimeout(timeout, downloadSegment(httpClient, archiveLocation, offset, count));
+                if (typeof result === 'string') {
+                    throw new Error('downloadSegmentRetry failed due to timeout');
+                }
+                return result;
+            }
+            catch (err) {
+                if (failures >= retries) {
+                    throw err;
+                }
+                failures++;
+            }
+        }
+    });
+}
+function downloadSegment(httpClient, archiveLocation, offset, count) {
+    return __awaiter(this, void 0, void 0, function* () {
+        const partRes = yield (0, requestUtils_1.retryHttpClientResponse)('downloadCachePart', () => __awaiter(this, void 0, void 0, function* () {
+            return yield httpClient.get(archiveLocation, {
+                Range: `bytes=${offset}-${offset + count - 1}`
+            });
+        }));
+        if (!partRes.readBodyBuffer) {
+            throw new Error('Expected HttpClientResponse to implement readBodyBuffer');
+        }
+        return {
+            offset,
+            count,
+            buffer: yield partRes.readBodyBuffer()
+        };
+    });
+}
 /**
  * Download the cache using the Azure Storage SDK.  Only call this method if the
  * URL points to an Azure Storage endpoint.
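downloadCacheHttpClientConcurrent above is dense, but its scheduler is a classic bounded worker pool: keep up to downloadConcurrency segment downloads in flight (activeDownloads is a sparse array keyed by byte offset), Promise.race for whichever settles first, write that segment at its offset, then refill. Reduced to the pattern, with illustrative names:

// Bounded-concurrency segment downloader, sketching the scheduling above.
// fetchSegment(seg) must resolve to { offset, buffer } so the racer knows
// which in-flight slot just completed.
async function downloadAll(segments, limit, fetchSegment, writeAt) {
    const inFlight = {}; // keyed by segment offset
    let active = 0;

    const drainOne = async () => {
        const done = await Promise.race(Object.values(inFlight));
        await writeAt(done.offset, done.buffer);
        delete inFlight[done.offset];
        active--;
    };

    for (const seg of segments) {
        inFlight[seg.offset] = fetchSegment(seg);
        active++;
        if (active >= limit) await drainOne();
    }
    while (active > 0) await drainOne();
}

A segment promise that never settles would stall the whole pool, which is one reason downloadSegmentRetry wraps each attempt in promiseWithTimeout before retrying.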
@@ -35740,6 +35880,19 @@ class HttpClientResponse {
             }));
         });
     }
+    readBodyBuffer() {
+        return __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+                const chunks = [];
+                this.message.on('data', (chunk) => {
+                    chunks.push(chunk);
+                });
+                this.message.on('end', () => {
+                    resolve(Buffer.concat(chunks));
+                });
+            }));
+        });
+    }
 }
 exports.HttpClientResponse = HttpClientResponse;
 function isHttps(requestUrl) {
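readBodyBuffer is the counterpart the concurrent path needs: ranged responses are binary, so collecting raw chunks into a Buffer avoids the string decoding that readBody performs. A sketch of how a caller consumes it, mirroring downloadSegment above; httpClient is assumed to be an @actions/http-client HttpClient instance:

// Issue a ranged GET and collect the raw bytes of one segment.
async function fetchRange(httpClient, url, offset, count) {
    const res = await httpClient.get(url, {
        Range: `bytes=${offset}-${offset + count - 1}`
    });
    // On a 206 Partial Content response this is exactly `count` bytes.
    const buffer = await res.readBodyBuffer();
    return { offset, count, buffer };
}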
@@ -40366,7 +40519,8 @@ exports.getUploadOptions = getUploadOptions;
  */
 function getDownloadOptions(copy) {
     const result = {
-        useAzureSdk: true,
+        useAzureSdk: false,
+        concurrentBlobDownloads: true,
         downloadConcurrency: 8,
         timeoutInMs: 30000,
         segmentTimeoutInMs: 600000,
@@ -40376,6 +40530,9 @@ function getDownloadOptions(copy) {
         if (typeof copy.useAzureSdk === 'boolean') {
             result.useAzureSdk = copy.useAzureSdk;
         }
+        if (typeof copy.concurrentBlobDownloads === 'boolean') {
+            result.concurrentBlobDownloads = copy.concurrentBlobDownloads;
+        }
         if (typeof copy.downloadConcurrency === 'number') {
             result.downloadConcurrency = copy.downloadConcurrency;
         }
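The default flip in these last two hunks is the key behavioral change: useAzureSdk now defaults to false and concurrentBlobDownloads to true, so Azure-hosted caches take the new concurrent HttpClient path unless a caller opts back in. A condensed restatement of how getDownloadOptions copies typed overrides (the real code checks each field individually; values are the defaults shown above):

const defaults = {
    useAzureSdk: false,
    concurrentBlobDownloads: true,
    downloadConcurrency: 8,
    timeoutInMs: 30000,
    segmentTimeoutInMs: 600000
};

function getDownloadOptions(copy = {}) {
    const result = { ...defaults };
    for (const key of Object.keys(defaults)) {
        // Accept an override only when it has the expected type.
        if (typeof copy[key] === typeof defaults[key]) {
            result[key] = copy[key];
        }
    }
    return result;
}

console.log(getDownloadOptions({ useAzureSdk: true }).concurrentBlobDownloads); // true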