mirror of
				https://kkgithub.com/actions/cache.git
				synced 2025-10-25 23:42:19 +08:00 
			
		
		
		
	Change stream event listener from "close" to "end"
This commit is contained in:
		
							
								
								
									
										8
									
								
								dist/restore/index.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										8
									
								
								dist/restore/index.js
									
									
									
									
										vendored
									
									
								
							| @ -1624,8 +1624,8 @@ function saveCache(cacheId, archivePath) { | ||||
|         // Upload Chunks | ||||
|         const stream = fs.createReadStream(archivePath); | ||||
|         let streamIsClosed = false; | ||||
|         stream.on("close", () => { | ||||
|             core.debug("Stream is closed"); | ||||
|         stream.on("end", () => { | ||||
|             core.debug("Stream is ended"); | ||||
|             streamIsClosed = true; | ||||
|         }); | ||||
|         const resourceUrl = getCacheApiUrl() + cacheId.toString(); | ||||
| @ -1634,6 +1634,10 @@ function saveCache(cacheId, archivePath) { | ||||
|         while (!streamIsClosed) { | ||||
|             core.debug(`Offset: ${offset}`); | ||||
|             const chunk = stream.read(MAX_CHUNK_SIZE); | ||||
|             if (chunk == null) { | ||||
|                 core.debug(`Chunk is null, reading is over?`); | ||||
|                 break; | ||||
|             } | ||||
|             uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset)); | ||||
|             offset += MAX_CHUNK_SIZE; | ||||
|         } | ||||
|  | ||||
							
								
								
									
										8
									
								
								dist/save/index.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										8
									
								
								dist/save/index.js
									
									
									
									
										vendored
									
									
								
							| @ -1624,8 +1624,8 @@ function saveCache(cacheId, archivePath) { | ||||
|         // Upload Chunks | ||||
|         const stream = fs.createReadStream(archivePath); | ||||
|         let streamIsClosed = false; | ||||
|         stream.on("close", () => { | ||||
|             core.debug("Stream is closed"); | ||||
|         stream.on("end", () => { | ||||
|             core.debug("Stream is ended"); | ||||
|             streamIsClosed = true; | ||||
|         }); | ||||
|         const resourceUrl = getCacheApiUrl() + cacheId.toString(); | ||||
| @ -1634,6 +1634,10 @@ function saveCache(cacheId, archivePath) { | ||||
|         while (!streamIsClosed) { | ||||
|             core.debug(`Offset: ${offset}`); | ||||
|             const chunk = stream.read(MAX_CHUNK_SIZE); | ||||
|             if (chunk == null) { | ||||
|                 core.debug(`Chunk is null, reading is over?`); | ||||
|                 break; | ||||
|             } | ||||
|             uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset)); | ||||
|             offset += MAX_CHUNK_SIZE; | ||||
|         } | ||||
|  | ||||
| @ -185,8 +185,8 @@ export async function saveCache( | ||||
|     // Upload Chunks | ||||
|     const stream = fs.createReadStream(archivePath); | ||||
|     let streamIsClosed = false; | ||||
|     stream.on("close", () => { | ||||
|         core.debug("Stream is closed"); | ||||
|     stream.on("end", () => { | ||||
|         core.debug("Stream is ended"); | ||||
|         streamIsClosed = true; | ||||
|     }); | ||||
|  | ||||
| @ -196,6 +196,10 @@ export async function saveCache( | ||||
|     while (!streamIsClosed) { | ||||
|         core.debug(`Offset: ${offset}`); | ||||
|         const chunk: Buffer = stream.read(MAX_CHUNK_SIZE); | ||||
|         if (chunk == null) { | ||||
|             core.debug(`Chunk is null, reading is over?`); | ||||
|             break; | ||||
|         } | ||||
|         uploads.push(uploadChunk(restClient, resourceUrl, chunk, offset)); | ||||
|         offset += MAX_CHUNK_SIZE; | ||||
|     } | ||||
|  | ||||
		Reference in New Issue
	
	Block a user
	 Josh Gross
					Josh Gross