Mirror of https://kkgithub.com/actions/setup-python.git (synced 2025-10-31 18:51:51 +08:00)
Update @actions/cache version to 1.0.8 (#283)

Changed file: dist/cache-save/index.js (vendored), 67 changed lines
@@ -1041,10 +1041,10 @@ function createTempDirectory() {
     });
 }
 exports.createTempDirectory = createTempDirectory;
-function getArchiveFileSizeIsBytes(filePath) {
+function getArchiveFileSizeInBytes(filePath) {
     return fs.statSync(filePath).size;
 }
-exports.getArchiveFileSizeIsBytes = getArchiveFileSizeIsBytes;
+exports.getArchiveFileSizeInBytes = getArchiveFileSizeInBytes;
 function resolvePaths(patterns) {
     var e_1, _a;
     var _b;
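The hunk above fixes the misspelled helper name: getArchiveFileSizeIsBytes becomes getArchiveFileSizeInBytes, with the export renamed to match. The remaining hunks in this file update its call sites. For reference, a minimal standalone sketch of the helper (same body as shown above):

    // Report a file's size in bytes via a synchronous stat call.
    const fs = require('fs');

    function getArchiveFileSizeInBytes(filePath) {
        return fs.statSync(filePath).size;
    }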
@@ -3852,7 +3852,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) {
 function uploadFile(httpClient, cacheId, archivePath, options) {
     return __awaiter(this, void 0, void 0, function* () {
         // Upload Chunks
-        const fileSize = fs.statSync(archivePath).size;
+        const fileSize = utils.getArchiveFileSizeInBytes(archivePath);
         const resourceUrl = getCacheApiUrl(`caches/${cacheId.toString()}`);
         const fd = fs.openSync(archivePath, 'r');
         const uploadOptions = options_1.getUploadOptions(options);
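This call site previously duplicated the stat logic inline; it now goes through the shared helper. For context, uploadFile reads the archive through an open file descriptor and uploads it in byte-range chunks. A rough sketch of deriving such ranges from the total file size (the 4 MB chunk size and chunkRanges name are illustrative assumptions, not the library's actual values):

    const fs = require('fs');

    // Yield inclusive byte ranges covering a file of the given size.
    function* chunkRanges(fileSize, chunkSize = 4 * 1024 * 1024) {
        for (let start = 0; start < fileSize; start += chunkSize) {
            yield { start, end: Math.min(start + chunkSize, fileSize) - 1 };
        }
    }

    // Hypothetical usage against a local archive:
    const size = fs.statSync('cache.tgz').size;
    for (const { start, end } of chunkRanges(size)) {
        console.log(`would upload bytes ${start}-${end}`);
    }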
@@ -3902,7 +3902,7 @@ function saveCache(cacheId, archivePath, options) {
         yield uploadFile(httpClient, cacheId, archivePath, options);
         // Commit Cache
         core.debug('Commiting cache');
-        const cacheSize = utils.getArchiveFileSizeIsBytes(archivePath);
+        const cacheSize = utils.getArchiveFileSizeInBytes(archivePath);
         core.info(`Cache Size: ~${Math.round(cacheSize / (1024 * 1024))} MB (${cacheSize} B)`);
         const commitCacheResponse = yield commitCache(httpClient, cacheId, cacheSize);
         if (!requestUtils_1.isSuccessStatusCode(commitCacheResponse.statusCode)) {
@@ -5877,7 +5877,7 @@ function downloadCacheHttpClient(archiveLocation, archivePath) {
         const contentLengthHeader = downloadResponse.message.headers['content-length'];
         if (contentLengthHeader) {
             const expectedLength = parseInt(contentLengthHeader);
-            const actualLength = utils.getArchiveFileSizeIsBytes(archivePath);
+            const actualLength = utils.getArchiveFileSizeInBytes(archivePath);
             if (actualLength !== expectedLength) {
                 throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
             }
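Here the renamed helper feeds the post-download integrity check, which compares the server's Content-Length header against the bytes actually written to disk. A self-contained sketch of the same check (verifyDownload is a hypothetical name; response is assumed to be a Node http.IncomingMessage):

    const fs = require('fs');

    // Throw if the on-disk file differs from the server-reported length.
    function verifyDownload(response, archivePath) {
        const contentLengthHeader = response.headers['content-length'];
        if (!contentLengthHeader) {
            return; // no header, nothing to verify against
        }
        const expectedLength = parseInt(contentLengthHeader, 10);
        const actualLength = fs.statSync(archivePath).size;
        if (actualLength !== expectedLength) {
            throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`);
        }
    }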
@@ -34322,7 +34322,7 @@ Object.defineProperty(Response.prototype, Symbol.toStringTag, {
 });
 
 const INTERNALS$2 = Symbol('Request internals');
-const URL = whatwgUrl.URL;
+const URL = Url.URL || whatwgUrl.URL;
 
 // fix an issue where "format", "parse" aren't a named export for node <10
 const parse_url = Url.parse;
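This vendored node-fetch hunk prefers Node's built-in URL class and falls back to the whatwg-url polyfill only when the built-in is missing (very old Node versions). The pattern in isolation, assuming the whatwg-url package is installed:

    const Url = require('url');
    const whatwgUrl = require('whatwg-url');

    // Prefer the native implementation; fall back to the polyfill.
    const URLImpl = Url.URL || whatwgUrl.URL;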
@@ -41451,7 +41451,7 @@ function restoreCache(paths, primaryKey, restoreKeys, options) {
             if (core.isDebug()) {
                 yield tar_1.listTar(archivePath, compressionMethod);
             }
-            const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
+            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
             core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`);
             yield tar_1.extractTar(archivePath, compressionMethod);
             core.info('Cache restored successfully');
@@ -41496,18 +41496,29 @@ function saveCache(paths, key, options) {
         const archiveFolder = yield utils.createTempDirectory();
         const archivePath = path.join(archiveFolder, utils.getCacheFileName(compressionMethod));
         core.debug(`Archive Path: ${archivePath}`);
-        yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
-        if (core.isDebug()) {
-            yield tar_1.listTar(archivePath, compressionMethod);
-        }
-        const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
-        const archiveFileSize = utils.getArchiveFileSizeIsBytes(archivePath);
-        core.debug(`File Size: ${archiveFileSize}`);
-        if (archiveFileSize > fileSizeLimit) {
-            throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`);
-        }
-        core.debug(`Saving Cache (ID: ${cacheId})`);
-        yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+        try {
+            yield tar_1.createTar(archiveFolder, cachePaths, compressionMethod);
+            if (core.isDebug()) {
+                yield tar_1.listTar(archivePath, compressionMethod);
+            }
+            const fileSizeLimit = 10 * 1024 * 1024 * 1024; // 10GB per repo limit
+            const archiveFileSize = utils.getArchiveFileSizeInBytes(archivePath);
+            core.debug(`File Size: ${archiveFileSize}`);
+            if (archiveFileSize > fileSizeLimit) {
+                throw new Error(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 10GB limit, not saving cache.`);
+            }
+            core.debug(`Saving Cache (ID: ${cacheId})`);
+            yield cacheHttpClient.saveCache(cacheId, archivePath, options);
+        }
+        finally {
+            // Try to delete the archive to save space
+            try {
+                yield utils.unlinkFile(archivePath);
+            }
+            catch (error) {
+                core.debug(`Failed to delete archive: ${error}`);
+            }
+        }
         return cacheId;
     });
 }
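This is the main behavioral change of the 1.0.8 bump: saveCache raises the per-repo size limit from 5GB to 10GB and wraps archive creation and upload in try/finally so the temporary tarball is deleted even when the size check throws. A condensed sketch of the cleanup pattern, using async/await in place of the compiled __awaiter generators, with hypothetical createArchive/upload stand-ins for tar_1.createTar and cacheHttpClient.saveCache:

    const fs = require('fs').promises;

    async function saveWithCleanup(archivePath, createArchive, upload) {
        try {
            await createArchive(archivePath);
            const size = (await fs.stat(archivePath)).size;
            if (size > 10 * 1024 * 1024 * 1024) { // 10GB limit, per the hunk above
                throw new Error(`archive of ${size} B is over the 10GB limit`);
            }
            await upload(archivePath);
        }
        finally {
            // Best-effort delete: a cleanup failure is logged, not rethrown,
            // so it cannot mask an error from the try block.
            try {
                await fs.unlink(archivePath);
            }
            catch (error) {
                console.debug(`Failed to delete archive: ${error}`);
            }
        }
    }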
@@ -53218,7 +53229,12 @@ class HttpHeaders {
      * Create a deep clone/copy of this HttpHeaders collection.
      */
     clone() {
-        return new HttpHeaders(this.rawHeaders());
+        const resultPreservingCasing = {};
+        for (const headerKey in this._headersMap) {
+            const header = this._headersMap[headerKey];
+            resultPreservingCasing[header.name] = header.value;
+        }
+        return new HttpHeaders(resultPreservingCasing);
     }
 }
 
@@ -53255,7 +53271,7 @@ const Constants = {
     /**
      * The core-http version
      */
-    coreHttpVersion: "2.2.1",
+    coreHttpVersion: "2.2.2",
     /**
      * Specifies HTTP.
      */
@@ -55568,7 +55584,7 @@ class FetchHttpClient {
                 }
                 let downloadStreamDone = Promise.resolve();
                 if (isReadableStream(operationResponse === null || operationResponse === void 0 ? void 0 : operationResponse.readableStreamBody)) {
-                    downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody);
+                    downloadStreamDone = isStreamComplete(operationResponse.readableStreamBody, abortController$1);
                 }
                 Promise.all([uploadStreamDone, downloadStreamDone])
                     .then(() => {
@@ -55586,11 +55602,14 @@ class FetchHttpClient {
 function isReadableStream(body) {
     return body && typeof body.pipe === "function";
 }
-function isStreamComplete(stream) {
+function isStreamComplete(stream, aborter) {
     return new Promise((resolve) => {
-        stream.on("close", resolve);
-        stream.on("end", resolve);
-        stream.on("error", resolve);
+        stream.once("close", () => {
+            aborter === null || aborter === void 0 ? void 0 : aborter.abort();
+            resolve();
+        });
+        stream.once("end", resolve);
+        stream.once("error", resolve);
     });
 }
 function parseHeaders(headers) {
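Two changes in this last hunk: handlers are registered with once() instead of on(), so each fires at most one time, and an early 'close' now aborts the controller passed in from the hunk above, presumably so signal-aware work on the other side of the connection stops waiting on a stream that will never finish. The helper in isolation, with a small usage example (AbortController is global in Node 15+):

    // Resolve when the stream finishes by any path; on 'close', also
    // abort the linked controller so signal-aware work stops waiting.
    function isStreamComplete(stream, aborter) {
        return new Promise((resolve) => {
            stream.once('close', () => {
                aborter?.abort();
                resolve();
            });
            stream.once('end', resolve);
            stream.once('error', resolve);
        });
    }

    const { Readable } = require('stream');
    const controller = new AbortController();
    const body = Readable.from(['payload']);
    isStreamComplete(body, controller).then(() => {
        console.log(`closed early, aborted: ${controller.signal.aborted}`); // true
    });
    body.destroy(); // emit 'close' without 'end': the early-close path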