Mirror of https://kkgithub.com/actions/cache.git (synced 2025-10-30 18:21:50 +08:00)
Compare commits: timeout-en...v1 (29 commits)
| SHA1 |
|---|
| f5ce41475b |
| 68fa0a8d81 |
| 56ec64e417 |
| efbc4e162b |
| d9747005de |
| 3f662ca624 |
| 0232e3178d |
| ee7a57c615 |
| da9f90cb83 |
| ec7f7ebd08 |
| 2a973a0f4e |
| cbbb8b4d4f |
| 5a0add1806 |
| 9fe7ad8b07 |
| 7c7d003bbb |
| 96e5a46c57 |
| 84e606dfac |
| 70655ec832 |
| fe1055e9d1 |
| a505c2e7a6 |
| 10a14413e7 |
| cf4f44db70 |
| 4c4974aff1 |
| cffae9552b |
| 44543250bd |
| 6491e51b66 |
| 86dff562ab |
| 0f810ad45a |
| 9d8c7b4041 |

.eslintrc.json

@@ -12,12 +12,5 @@
     "plugin:prettier/recommended",
     "prettier/@typescript-eslint"
   ],
-  "plugins": ["@typescript-eslint", "simple-import-sort", "jest"],
-  "rules": {
-    "import/first": "error",
-    "import/newline-after-import": "error",
-    "import/no-duplicates": "error",
-    "simple-import-sort/sort": "error",
-    "sort-imports": "off"
-  }
+  "plugins": ["@typescript-eslint", "jest"]
 }

.github/workflows/codeql.yml (vendored) | 35 changed lines

@@ -1,35 +0,0 @@
-name: "Code Scanning - Action"
-
-on:
-  push:
-  schedule:
-    - cron: '0 0 * * 0'
-
-jobs:
-  CodeQL-Build:
-
-    strategy:
-      fail-fast: false
-
-
-    # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest
-    runs-on: ubuntu-latest
-
-    steps:
-    - name: Checkout repository
-      uses: actions/checkout@v2
-
-    # Initializes the CodeQL tools for scanning.
-    - name: Initialize CodeQL
-      uses: github/codeql-action/init@v1
-      # Override language selection by uncommenting this and choosing your languages
-      # with:
-      #   languages: go, javascript, csharp, python, cpp, java
-
-    # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java).
-    # If this step fails, then you should remove it and run the build manually (see below).
-    - name: Autobuild
-      uses: github/codeql-action/autobuild@v1
-
-    - name: Perform CodeQL Analysis
-      uses: github/codeql-action/analyze@v1

.github/workflows/workflow.yml (vendored) | 32 changed lines

@@ -4,11 +4,13 @@ on:
   pull_request:
     branches:
       - master
+      - releases/**
     paths-ignore:
       - '**.md'
   push:
     branches:
       - master
+      - releases/**
     paths-ignore:
       - '**.md'
 
@@ -56,19 +58,14 @@ jobs:
     steps:
     - name: Checkout
      uses: actions/checkout@v2
-    - name: Generate files in working directory
+    - name: Generate files
       shell: bash
-      run: __tests__/create-cache-files.sh ${{ runner.os }} test-cache
-    - name: Generate files outside working directory
-      shell: bash
-      run: __tests__/create-cache-files.sh ${{ runner.os }} ~/test-cache
+      run: __tests__/create-cache-files.sh ${{ runner.os }}
     - name: Save cache
       uses: ./
       with:
         key: test-${{ runner.os }}-${{ github.run_id }}
-        path: |
-          test-cache
-          ~/test-cache
+        path: test-cache
   test-restore:
     needs: test-save
     strategy:
@@ -83,15 +80,10 @@ jobs:
       uses: ./
       with:
         key: test-${{ runner.os }}-${{ github.run_id }}
-        path: |
-          test-cache
-          ~/test-cache
-    - name: Verify cache files in working directory
+        path: test-cache
+    - name: Verify cache
       shell: bash
-      run: __tests__/verify-cache-files.sh ${{ runner.os }} test-cache
-    - name: Verify cache files outside working directory
-      shell: bash
-      run: __tests__/verify-cache-files.sh ${{ runner.os }} ~/test-cache
+      run: __tests__/verify-cache-files.sh ${{ runner.os }}
 
   # End to end with proxy
   test-proxy-save:
@@ -101,7 +93,7 @@ jobs:
       options: --dns 127.0.0.1
     services:
       squid-proxy:
-        image: datadog/squid:latest
+        image: ubuntu/squid:latest
         ports:
           - 3128:3128
     env:
@@ -110,7 +102,7 @@ jobs:
     - name: Checkout
       uses: actions/checkout@v2
     - name: Generate files
-      run: __tests__/create-cache-files.sh proxy test-cache
+      run: __tests__/create-cache-files.sh proxy
     - name: Save cache
       uses: ./
       with:
@@ -124,7 +116,7 @@ jobs:
       options: --dns 127.0.0.1
     services:
       squid-proxy:
-        image: datadog/squid:latest
+        image: ubuntu/squid:latest
         ports:
           - 3128:3128
     env:
@@ -138,4 +130,4 @@ jobs:
         key: test-proxy-${{ github.run_id }}
         path: test-cache
     - name: Verify cache
-      run: __tests__/verify-cache-files.sh proxy test-cache
+      run: __tests__/verify-cache-files.sh proxy

.gitignore (vendored) | 3 changed lines

@@ -1,5 +1,8 @@
 __tests__/runner/*
 
+# comment out in distribution branches
+dist/
+
 node_modules/
 lib/
 

README.md

@@ -37,7 +37,7 @@ jobs:
     runs-on: ubuntu-latest
 
     steps:
-    - uses: actions/checkout@v2
+    - uses: actions/checkout@v1
 
     - name: Cache Primes
       id: cache-primes
@@ -67,9 +67,7 @@
 - [Java - Gradle](./examples.md#java---gradle)
 - [Java - Maven](./examples.md#java---maven)
 - [Node - npm](./examples.md#node---npm)
-- [Node - Lerna](./examples.md#node---lerna)
 - [Node - Yarn](./examples.md#node---yarn)
-- [OCaml/Reason - esy](./examples.md##ocamlreason---esy)
 - [PHP - Composer](./examples.md#php---composer)
 - [Python - pip](./examples.md#python---pip)
 - [R - renv](./examples.md#r---renv)
@@ -91,7 +89,7 @@
 Example:
 ```yaml
 steps:
-  - uses: actions/checkout@v2
+  - uses: actions/checkout@v1
 
   - uses: actions/cache@v1
     id: cache

__tests__/actionUtils.test.ts

@@ -1,6 +1,4 @@
 import * as core from "@actions/core";
-import * as io from "@actions/io";
-import { promises as fs } from "fs";
 import * as os from "os";
 import * as path from "path";
 
@@ -8,24 +6,13 @@ import { Events, Outputs, State } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import * as actionUtils from "../src/utils/actionUtils";
 
-import uuid = require("uuid");
-
 jest.mock("@actions/core");
 jest.mock("os");
 
-function getTempDir(): string {
-    return path.join(__dirname, "_temp", "actionUtils");
-}
-
 afterEach(() => {
     delete process.env[Events.Key];
 });
 
-afterAll(async () => {
-    delete process.env["GITHUB_WORKSPACE"];
-    await io.rmRF(getTempDir());
-});
-
 test("getArchiveFileSize returns file size", () => {
     const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt");
 
@@ -194,43 +181,17 @@ test("isValidEvent returns false for unknown event", () => {
     expect(isValidEvent).toBe(false);
 });
 
-test("resolvePaths with no ~ in path", async () => {
-    const filePath = ".cache";
-
-    // Create the following layout:
-    //   cwd
-    //   cwd/.cache
-    //   cwd/.cache/file.txt
-
-    const root = path.join(getTempDir(), "no-tilde");
-    // tarball entries will be relative to workspace
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    await fs.mkdir(root, { recursive: true });
-    const cache = path.join(root, ".cache");
-    await fs.mkdir(cache, { recursive: true });
-    await fs.writeFile(path.join(cache, "file.txt"), "cached");
-
-    const originalCwd = process.cwd();
-
-    try {
-        process.chdir(root);
-
-        const resolvedPath = await actionUtils.resolvePaths([filePath]);
-
-        const expectedPath = [filePath];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        process.chdir(originalCwd);
-    }
+test("resolvePath with no ~ in path", () => {
+    const filePath = ".cache/yarn";
+
+    const resolvedPath = actionUtils.resolvePath(filePath);
+
+    const expectedPath = path.resolve(filePath);
+    expect(resolvedPath).toBe(expectedPath);
 });
 
-test("resolvePaths with ~ in path", async () => {
-    const cacheDir = uuid();
-    const filePath = `~/${cacheDir}`;
-    // Create the following layout:
-    //   ~/uuid
-    //   ~/uuid/file.txt
+test("resolvePath with ~ in path", () => {
+    const filePath = "~/.cache/yarn";
 
     const homedir = jest.requireActual("os").homedir();
     const homedirMock = jest.spyOn(os, "homedir");
@@ -238,93 +199,24 @@ test("resolvePaths with ~ in path", async () => {
         return homedir;
     });
 
-    const target = path.join(homedir, cacheDir);
-    await fs.mkdir(target, { recursive: true });
-    await fs.writeFile(path.join(target, "file.txt"), "cached");
-
-    const root = getTempDir();
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    try {
-        const resolvedPath = await actionUtils.resolvePaths([filePath]);
-
-        const expectedPath = [path.relative(root, target)];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        await io.rmRF(target);
-    }
+    const resolvedPath = actionUtils.resolvePath(filePath);
+
+    const expectedPath = path.join(homedir, ".cache/yarn");
+    expect(resolvedPath).toBe(expectedPath);
 });
 
-test("resolvePaths with home not found", async () => {
+test("resolvePath with home not found", () => {
     const filePath = "~/.cache/yarn";
     const homedirMock = jest.spyOn(os, "homedir");
     homedirMock.mockImplementation(() => {
         return "";
     });
 
-    await expect(actionUtils.resolvePaths([filePath])).rejects.toThrow(
-        "Unable to determine HOME directory"
+    expect(() => actionUtils.resolvePath(filePath)).toThrow(
+        "Unable to resolve `~` to HOME"
     );
 });
 
-test("resolvePaths inclusion pattern returns found", async () => {
-    const pattern = "*.ts";
-    // Create the following layout:
-    //   inclusion-patterns
-    //   inclusion-patterns/miss.txt
-    //   inclusion-patterns/test.ts
-
-    const root = path.join(getTempDir(), "inclusion-patterns");
-    // tarball entries will be relative to workspace
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    await fs.mkdir(root, { recursive: true });
-    await fs.writeFile(path.join(root, "miss.txt"), "no match");
-    await fs.writeFile(path.join(root, "test.ts"), "match");
-
-    const originalCwd = process.cwd();
-
-    try {
-        process.chdir(root);
-
-        const resolvedPath = await actionUtils.resolvePaths([pattern]);
-
-        const expectedPath = ["test.ts"];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        process.chdir(originalCwd);
-    }
-});
-
-test("resolvePaths exclusion pattern returns not found", async () => {
-    const patterns = ["*.ts", "!test.ts"];
-    // Create the following layout:
-    //   exclusion-patterns
-    //   exclusion-patterns/miss.txt
-    //   exclusion-patterns/test.ts
-
-    const root = path.join(getTempDir(), "exclusion-patterns");
-    // tarball entries will be relative to workspace
-    process.env["GITHUB_WORKSPACE"] = root;
-
-    await fs.mkdir(root, { recursive: true });
-    await fs.writeFile(path.join(root, "miss.txt"), "no match");
-    await fs.writeFile(path.join(root, "test.ts"), "no match");
-
-    const originalCwd = process.cwd();
-
-    try {
-        process.chdir(root);
-
-        const resolvedPath = await actionUtils.resolvePaths(patterns);
-
-        const expectedPath = [];
-        expect(resolvedPath).toStrictEqual(expectedPath);
-    } finally {
-        process.chdir(originalCwd);
-    }
-});
-
 test("isValidEvent returns true for push event", () => {
     const event = Events.Push;
     process.env[Events.Key] = event;
@@ -342,16 +234,3 @@ test("isValidEvent returns true for pull request event", () => {
 
     expect(isValidEvent).toBe(true);
 });
-
-test("unlinkFile unlinks file", async () => {
-    const testDirectory = await fs.mkdtemp("unlinkFileTest");
-    const testFile = path.join(testDirectory, "test.txt");
-    await fs.writeFile(testFile, "hello world");
-
-    await actionUtils.unlinkFile(testFile);
-
-    // This should throw as testFile should not exist
-    await expect(fs.stat(testFile)).rejects.toThrow();
-
-    await fs.rmdir(testDirectory);
-});
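
The replacement tests above swap the multi-path `resolvePaths` for v1's single-path `resolvePath`, and they pin its contract down completely: a plain relative path resolves against the working directory, a leading `~` expands to the home directory, and an empty home directory is an error. A minimal sketch consistent with that contract (illustrative, not the committed v1 source):

```typescript
import * as os from "os";
import * as path from "path";

// Sketch of the resolvePath behavior the v1 tests assert:
// - "~/..." expands against os.homedir()
// - an empty homedir throws "Unable to resolve `~` to HOME"
// - anything else resolves against the current working directory
export function resolvePath(filePath: string): string {
    if (filePath[0] === "~") {
        const home = os.homedir();
        if (!home) {
            throw new Error("Unable to resolve `~` to HOME");
        }
        return path.join(home, filePath.slice(1));
    }
    return path.resolve(filePath);
}
```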

__tests__/cacheHttpClient.test.ts

@@ -1,39 +1,144 @@
-import { getCacheVersion } from "../src/cacheHttpClient";
-import { CompressionMethod, Inputs } from "../src/constants";
+import { retry } from "../src/cacheHttpClient";
 import * as testUtils from "../src/utils/testUtils";
 
 afterEach(() => {
     testUtils.clearInputs();
 });
 
-test("getCacheVersion with path input and compression method undefined returns version", async () => {
-    testUtils.setInput(Inputs.Path, "node_modules");
-
-    const result = getCacheVersion();
-
-    expect(result).toEqual(
-        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
-    );
-});
-
-test("getCacheVersion with zstd compression returns version", async () => {
-    testUtils.setInput(Inputs.Path, "node_modules");
-    const result = getCacheVersion(CompressionMethod.Zstd);
-
-    expect(result).toEqual(
-        "273877e14fd65d270b87a198edbfa2db5a43de567c9a548d2a2505b408befe24"
-    );
-});
-
-test("getCacheVersion with gzip compression does not change vesion", async () => {
-    testUtils.setInput(Inputs.Path, "node_modules");
-    const result = getCacheVersion(CompressionMethod.Gzip);
-
-    expect(result).toEqual(
-        "b3e0c6cb5ecf32614eeb2997d905b9c297046d7cbf69062698f25b14b4cb0985"
-    );
-});
-
-test("getCacheVersion with no input throws", async () => {
-    expect(() => getCacheVersion()).toThrow();
-});
+interface TestResponse {
+    statusCode: number;
+    result: string | null;
+}
+
+function handleResponse(
+    response: TestResponse | undefined
+): Promise<TestResponse> {
+    if (!response) {
+        fail("Retry method called too many times");
+    }
+
+    if (response.statusCode === 999) {
+        throw Error("Test Error");
+    } else {
+        return Promise.resolve(response);
+    }
+}
+
+async function testRetryExpectingResult(
+    responses: Array<TestResponse>,
+    expectedResult: string | null
+): Promise<void> {
+    responses = responses.reverse(); // Reverse responses since we pop from end
+
+    const actualResult = await retry(
+        "test",
+        () => handleResponse(responses.pop()),
+        (response: TestResponse) => response.statusCode
+    );
+
+    expect(actualResult.result).toEqual(expectedResult);
+}
+
+async function testRetryExpectingError(
+    responses: Array<TestResponse>
+): Promise<void> {
+    responses = responses.reverse(); // Reverse responses since we pop from end
+
+    expect(
+        retry(
+            "test",
+            () => handleResponse(responses.pop()),
+            (response: TestResponse) => response.statusCode
+        )
+    ).rejects.toBeInstanceOf(Error);
+}
+
+test("retry works on successful response", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry works after retryable status code", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 503,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry fails after exhausting retries", async () => {
+    await testRetryExpectingError([
+        {
+            statusCode: 503,
+            result: null
+        },
+        {
+            statusCode: 503,
+            result: null
+        },
+        {
+            statusCode: 200,
+            result: "Ok"
+        }
+    ]);
+});
+
+test("retry fails after non-retryable status code", async () => {
+    await testRetryExpectingError([
+        {
+            statusCode: 500,
+            result: null
+        },
+        {
+            statusCode: 200,
+            result: "Ok"
+        }
+    ]);
+});
+
+test("retry works after error", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 999,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        "Ok"
+    );
+});
+
+test("retry returns after client error", async () => {
+    await testRetryExpectingResult(
+        [
+            {
+                statusCode: 400,
+                result: null
+            },
+            {
+                statusCode: 200,
+                result: "Ok"
+            }
+        ],
+        null
+    );
+});
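
The new tests spell out the `retry` helper's contract: a success or a client error (4xx) is returned to the caller as-is, a 503 or a thrown error earns one more attempt, and a plain 500 or an exhausted attempt budget rejects. A sketch that satisfies exactly those cases, assuming a two-attempt budget and 502/503/504 as the retryable codes (the function body is illustrative, not the committed source):

```typescript
// Sketch of a retry helper matching the test contract above.
// Assumptions: 2 attempts total; status < 500 is returned immediately
// (even 4xx); only 502/503/504 are retryable; thrown errors are retryable.
export async function retry<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (response: T) => number | undefined,
    maxAttempts = 2
): Promise<T> {
    let errorMessage = "";
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        let isRetryable = false;
        try {
            const response = await method();
            const statusCode = getStatusCode(response);
            if (statusCode === undefined || statusCode < 500) {
                return response; // success or client error: hand back to caller
            }
            isRetryable =
                statusCode === 502 || statusCode === 503 || statusCode === 504;
            errorMessage = `Cache service responded with ${statusCode}`;
        } catch (error) {
            isRetryable = true; // network-level failures are worth one more try
            errorMessage = (error as Error).message;
        }
        if (!isRetryable) {
            break; // e.g. a plain 500: fail fast
        }
    }
    throw Error(`${name} failed: ${errorMessage}`);
}
```

Popping from a reversed response list makes each attempt consume the next scripted response, which is why the exhausted-retries case above needs three entries to defeat a two-attempt budget.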

__tests__/create-cache-files.sh

@@ -7,11 +7,5 @@ if [ -z "$prefix" ]; then
   exit 1
 fi
 
-path="$2"
-if [ -z "$path" ]; then
-  echo "Must supply path argument"
-  exit 1
-fi
-
-mkdir -p $path
-echo "$prefix $GITHUB_RUN_ID" > $path/test-file.txt
+mkdir test-cache
+echo "$prefix $GITHUB_RUN_ID" > test-cache/test-file.txt

__tests__/restore.test.ts

@@ -1,13 +1,7 @@
 import * as core from "@actions/core";
 import * as path from "path";
 
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import {
-    CacheFilename,
-    CompressionMethod,
-    Events,
-    Inputs
-} from "../src/constants";
+import { Events, Inputs } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/restore";
 import * as tar from "../src/tar";
@@ -19,6 +13,10 @@ jest.mock("../src/tar");
 jest.mock("../src/utils/actionUtils");
 
 beforeAll(() => {
+    jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
+        return path.resolve(filePath);
+    });
+
     jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation(
         (key, cacheResult) => {
             const actualUtils = jest.requireActual("../src/utils/actionUtils");
@@ -35,11 +33,6 @@ beforeAll(() => {
         const actualUtils = jest.requireActual("../src/utils/actionUtils");
         return actualUtils.getSupportedEvents();
     });
-
-    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
-        const actualUtils = jest.requireActual("../src/utils/actionUtils");
-        return actualUtils.getCacheFileName(cm);
-    });
 });
 
 beforeEach(() => {
@@ -66,8 +59,7 @@ test("restore with invalid event outputs warning", async () => {
 test("restore with no path should fail", async () => {
     const failedMock = jest.spyOn(core, "setFailed");
     await run();
-    // this input isn't necessary for restore b/c tarball contains entries relative to workspace
-    expect(failedMock).not.toHaveBeenCalledWith(
+    expect(failedMock).toHaveBeenCalledWith(
         "Input required and not supplied: path"
     );
 });
@@ -144,7 +136,7 @@ test("restore with no cache found", async () => {
     expect(failedMock).toHaveBeenCalledTimes(0);
 
     expect(infoMock).toHaveBeenCalledWith(
-        `Cache not found for input keys: ${key}`
+        `Cache not found for input keys: ${key}.`
     );
 });
 
@@ -203,12 +195,13 @@ test("restore with restore keys and no cache found", async () => {
     expect(failedMock).toHaveBeenCalledTimes(0);
 
     expect(infoMock).toHaveBeenCalledWith(
-        `Cache not found for input keys: ${key}, ${restoreKey}`
+        `Cache not found for input keys: ${key}, ${restoreKey}.`
     );
 });
 
-test("restore with gzip compressed cache found", async () => {
+test("restore with cache found", async () => {
     const key = "node-test";
+    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key
@@ -237,7 +230,7 @@
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, CacheFilename.Gzip);
+    const archivePath = path.join(tempPath, "cache.tgz");
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -247,20 +240,12 @@
         .mockReturnValue(fileSize);
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
-    const unlinkFileMock = jest.spyOn(actionUtils, "unlinkFile");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
 
-    const compression = CompressionMethod.Gzip;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
-
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key], {
-        compressionMethod: compression
-    });
+    expect(getCacheMock).toHaveBeenCalledWith([key]);
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -270,21 +255,18 @@
     expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
-
-    expect(unlinkFileMock).toHaveBeenCalledTimes(1);
-    expect(unlinkFileMock).toHaveBeenCalledWith(archivePath);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
 
     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
-test("restore with a pull request event and zstd compressed cache found", async () => {
+test("restore with a pull request event and cache found", async () => {
     const key = "node-test";
+    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key
@@ -315,7 +297,7 @@
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, CacheFilename.Zstd);
+    const archivePath = path.join(tempPath, "cache.tgz");
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -326,17 +308,11 @@
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key], {
-        compressionMethod: compression
-    });
+    expect(getCacheMock).toHaveBeenCalledWith([key]);
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -347,19 +323,19 @@
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(true);
 
     expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`);
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("restore with cache found for restore key", async () => {
     const key = "node-test";
     const restoreKey = "node-";
+    const cachePath = path.resolve("node_modules");
     testUtils.setInputs({
         path: "node_modules",
         key,
@@ -389,7 +365,7 @@ test("restore with cache found for restore key", async () => {
         return Promise.resolve(tempPath);
     });
 
-    const archivePath = path.join(tempPath, CacheFilename.Zstd);
+    const archivePath = path.join(tempPath, "cache.tgz");
     const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState");
     const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache");
 
@@ -400,17 +376,11 @@
 
     const extractTarMock = jest.spyOn(tar, "extractTar");
     const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key);
-    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey], {
-        compressionMethod: compression
-    });
+    expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]);
     expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry);
     expect(createTempDirectoryMock).toHaveBeenCalledTimes(1);
     expect(downloadCacheMock).toHaveBeenCalledWith(
@@ -421,7 +391,7 @@
     expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`);
 
     expect(extractTarMock).toHaveBeenCalledTimes(1);
-    expect(extractTarMock).toHaveBeenCalledWith(archivePath, compression);
+    expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1);
     expect(setCacheHitOutputMock).toHaveBeenCalledWith(false);
@@ -430,5 +400,4 @@
         `Cache restored from key: ${restoreKey}`
     );
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
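
Taken together, the mocks describe the v1 restore sequence: look up an entry for the key(s), download the archive into a temp directory as `cache.tgz`, extract it to the resolved path, then set the `cache-hit` output. A condensed, illustrative sketch of that flow; event validation and try/catch reporting are omitted, and the `downloadCache` argument list is an assumption (the expectation above is truncated in the capture):

```typescript
import * as core from "@actions/core";
import * as path from "path";

import * as cacheHttpClient from "../src/cacheHttpClient";
import { Inputs } from "../src/constants";
import * as tar from "../src/tar";
import * as utils from "../src/utils/actionUtils";

// Condensed v1 restore flow as pinned down by the mocks above (a sketch,
// not the committed source).
async function restoreSketch(): Promise<void> {
    const cachePath = utils.resolvePath(
        core.getInput(Inputs.Path, { required: true })
    );
    const primaryKey = core.getInput(Inputs.Key, { required: true });
    core.saveState("CACHE_KEY", primaryKey);

    const restoreKeys = core
        .getInput(Inputs.RestoreKeys)
        .split("\n")
        .filter(x => x !== "");
    const keys = [primaryKey, ...restoreKeys];

    const cacheEntry = await cacheHttpClient.getCacheEntry(keys);
    if (!cacheEntry) {
        core.info(`Cache not found for input keys: ${keys.join(", ")}.`);
        return;
    }
    utils.setCacheState(cacheEntry);

    // Download into <temp>/cache.tgz, then extract to the resolved path.
    const archivePath = path.join(await utils.createTempDirectory(), "cache.tgz");
    await cacheHttpClient.downloadCache(cacheEntry, archivePath); // assumed signature
    await tar.extractTar(archivePath, cachePath);

    utils.setCacheHitOutput(utils.isExactKeyMatch(primaryKey, cacheEntry));
    core.info(`Cache restored from key: ${cacheEntry.cacheKey}`);
}
```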

__tests__/save.test.ts

@@ -1,13 +1,7 @@
 import * as core from "@actions/core";
 import * as path from "path";
 
 import * as cacheHttpClient from "../src/cacheHttpClient";
-import {
-    CacheFilename,
-    CompressionMethod,
-    Events,
-    Inputs
-} from "../src/constants";
+import { Events, Inputs } from "../src/constants";
 import { ArtifactCacheEntry } from "../src/contracts";
 import run from "../src/save";
 import * as tar from "../src/tar";
@@ -46,20 +40,13 @@ beforeAll(() => {
         return actualUtils.getSupportedEvents();
     });
 
-    jest.spyOn(actionUtils, "resolvePaths").mockImplementation(
-        async filePaths => {
-            return filePaths.map(x => path.resolve(x));
-        }
-    );
+    jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => {
+        return path.resolve(filePath);
+    });
 
     jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => {
         return Promise.resolve("/foo/bar");
     });
-
-    jest.spyOn(actionUtils, "getCacheFileName").mockImplementation(cm => {
-        const actualUtils = jest.requireActual("../src/utils/actionUtils");
-        return actualUtils.getCacheFileName(cm);
-    });
 });
 
 beforeEach(() => {
@@ -202,7 +189,7 @@ test("save with large cache outputs warning", async () => {
         });
 
     const inputPath = "node_modules";
-    const cachePaths = [path.resolve(inputPath)];
+    const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
     const createTarMock = jest.spyOn(tar, "createTar");
@@ -211,27 +198,20 @@
     jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => {
         return cacheSize;
     });
-    const compression = CompressionMethod.Gzip;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
-    const archiveFolder = "/foo/bar";
+    const archivePath = path.join("/foo/bar", "cache.tgz");
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(
-        archiveFolder,
-        cachePaths,
-        compression
-    );
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith(
         "Cache size of ~6144 MB (6442450944 B) is over the 5GB limit, not saving cache."
     );
 
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with reserve cache failure outputs warning", async () => {
@@ -267,18 +247,13 @@ test("save with reserve cache failure outputs warning", async () => {
         });
 
     const createTarMock = jest.spyOn(tar, "createTar");
-
     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
-        compressionMethod: compression
-    });
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
 
     expect(infoMock).toHaveBeenCalledWith(
         `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
@@ -288,7 +263,6 @@
     expect(saveCacheMock).toHaveBeenCalledTimes(0);
     expect(logWarningMock).toHaveBeenCalledTimes(0);
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with server error outputs warning", async () => {
@@ -314,7 +288,7 @@ test("save with server error outputs warning", async () => {
         });
 
     const inputPath = "node_modules";
-    const cachePaths = [path.resolve(inputPath)];
+    const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
     const cacheId = 4;
@@ -331,36 +305,24 @@
         .mockImplementationOnce(() => {
            throw new Error("HTTP Error Occurred");
         });
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
-        compressionMethod: compression
-    });
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
 
-    const archiveFolder = "/foo/bar";
-    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
+    const archivePath = path.join("/foo/bar", "cache.tgz");
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(
-        archiveFolder,
-        cachePaths,
-        compression
-    );
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
 
     expect(logWarningMock).toHaveBeenCalledTimes(1);
     expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred");
 
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
 });
 
 test("save with valid inputs uploads a cache", async () => {
@@ -385,7 +347,7 @@ test("save with valid inputs uploads a cache", async () => {
         });
 
     const inputPath = "node_modules";
-    const cachePaths = [path.resolve(inputPath)];
+    const cachePath = path.resolve(inputPath);
     testUtils.setInput(Inputs.Path, inputPath);
 
     const cacheId = 4;
@@ -398,31 +360,19 @@
     const createTarMock = jest.spyOn(tar, "createTar");
 
     const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache");
-    const compression = CompressionMethod.Zstd;
-    const getCompressionMock = jest
-        .spyOn(actionUtils, "getCompressionMethod")
-        .mockReturnValue(Promise.resolve(compression));
 
     await run();
 
     expect(reserveCacheMock).toHaveBeenCalledTimes(1);
-    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey, {
-        compressionMethod: compression
-    });
+    expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey);
 
-    const archiveFolder = "/foo/bar";
-    const archiveFile = path.join(archiveFolder, CacheFilename.Zstd);
+    const archivePath = path.join("/foo/bar", "cache.tgz");
 
     expect(createTarMock).toHaveBeenCalledTimes(1);
-    expect(createTarMock).toHaveBeenCalledWith(
-        archiveFolder,
-        cachePaths,
-        compression
-    );
+    expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath);
 
     expect(saveCacheMock).toHaveBeenCalledTimes(1);
-    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archiveFile);
+    expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath);
 
     expect(failedMock).toHaveBeenCalledTimes(0);
-    expect(getCompressionMock).toHaveBeenCalledTimes(1);
});
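
The save-side expectations mirror the restore flow: reserve a cache id for the primary key, tar the resolved path into `<temp>/cache.tgz`, and upload, bailing out with a warning when the reservation fails or the archive exceeds the 5GB limit. A condensed, illustrative sketch; the reservation-failure sentinel and the exact size check are assumptions:

```typescript
import * as core from "@actions/core";
import * as path from "path";

import * as cacheHttpClient from "../src/cacheHttpClient";
import { Inputs, State } from "../src/constants";
import * as tar from "../src/tar";
import * as utils from "../src/utils/actionUtils";

// Condensed v1 save flow matching the expectations above (a sketch, not
// the committed source).
async function saveSketch(): Promise<void> {
    const primaryKey = core.getState(State.CacheKey);

    // Assumption: a negative cacheId means another job holds the reservation.
    const cacheId = await cacheHttpClient.reserveCache(primaryKey);
    if (cacheId < 0) {
        core.info(
            `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`
        );
        return;
    }

    const cachePath = utils.resolvePath(
        core.getInput(Inputs.Path, { required: true })
    );
    const archivePath = path.join(await utils.createTempDirectory(), "cache.tgz");
    await tar.createTar(archivePath, cachePath);

    const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB
    const archiveFileSize = utils.getArchiveFileSize(archivePath);
    if (archiveFileSize > fileSizeLimit) {
        utils.logWarning(
            `Cache size of ~${Math.round(
                archiveFileSize / (1024 * 1024)
            )} MB (${archiveFileSize} B) is over the 5GB limit, not saving cache.`
        );
        return;
    }

    await cacheHttpClient.saveCache(cacheId, archivePath);
}
```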

__tests__/tar.test.ts

@@ -1,204 +1,86 @@
 import * as exec from "@actions/exec";
 import * as io from "@actions/io";
-import * as path from "path";
 
-import { CacheFilename, CompressionMethod } from "../src/constants";
 import * as tar from "../src/tar";
+import * as utils from "../src/utils/actionUtils";
 
-import fs = require("fs");
-
 jest.mock("@actions/exec");
 jest.mock("@actions/io");
 
-const IS_WINDOWS = process.platform === "win32";
-
-function getTempDir(): string {
-    return path.join(__dirname, "_temp", "tar");
-}
-
-beforeAll(async () => {
+beforeAll(() => {
     jest.spyOn(io, "which").mockImplementation(tool => {
         return Promise.resolve(tool);
     });
-
-    process.env["GITHUB_WORKSPACE"] = process.cwd();
-    await jest.requireActual("@actions/io").rmRF(getTempDir());
-});
-
-afterAll(async () => {
-    delete process.env["GITHUB_WORKSPACE"];
-    await jest.requireActual("@actions/io").rmRF(getTempDir());
 });
 
-test("zstd extract tar", async () => {
+test("extract BSD tar", async () => {
     const mkdirMock = jest.spyOn(io, "mkdirP");
     const execMock = jest.spyOn(exec, "exec");
-
+    const IS_WINDOWS = process.platform === "win32";
     const archivePath = IS_WINDOWS
         ? `${process.env["windir"]}\\fakepath\\cache.tar`
         : "cache.tar";
-    const workspace = process.env["GITHUB_WORKSPACE"];
-
-    await tar.extractTar(archivePath, CompressionMethod.Zstd);
-
-    expect(mkdirMock).toHaveBeenCalledWith(workspace);
+    const targetDirectory = "~/.npm/cache";
+    await tar.extractTar(archivePath, targetDirectory);
+
+    expect(mkdirMock).toHaveBeenCalledWith(targetDirectory);
+
     const tarPath = IS_WINDOWS
         ? `${process.env["windir"]}\\System32\\tar.exe`
         : "tar";
     expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(
-        `"${tarPath}"`,
-        [
-            "--use-compress-program",
-            "zstd -d --long=30",
-            "-xf",
-            IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
-            "-P",
-            "-C",
-            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
-        ],
-        { cwd: undefined }
-    );
-});
-
-test("gzip extract tar", async () => {
-    const mkdirMock = jest.spyOn(io, "mkdirP");
-    const execMock = jest.spyOn(exec, "exec");
-    const archivePath = IS_WINDOWS
-        ? `${process.env["windir"]}\\fakepath\\cache.tar`
-        : "cache.tar";
-    const workspace = process.env["GITHUB_WORKSPACE"];
-
-    await tar.extractTar(archivePath, CompressionMethod.Gzip);
-
-    expect(mkdirMock).toHaveBeenCalledWith(workspace);
-    const tarPath = IS_WINDOWS
-        ? `${process.env["windir"]}\\System32\\tar.exe`
-        : "tar";
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(
-        `"${tarPath}"`,
-        [
-            "-z",
-            "-xf",
-            IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
-            "-P",
-            "-C",
-            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace
-        ],
-        { cwd: undefined }
-    );
+    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
+        "-xz",
+        "-f",
+        IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
+        "-C",
+        IS_WINDOWS ? targetDirectory?.replace(/\\/g, "/") : targetDirectory
+    ]);
 });
 
-test("gzip extract GNU tar on windows", async () => {
+test("extract GNU tar", async () => {
+    const IS_WINDOWS = process.platform === "win32";
     if (IS_WINDOWS) {
-        jest.spyOn(fs, "existsSync").mockReturnValueOnce(false);
-        jest.spyOn(tar, "isGnuTar").mockReturnValue(Promise.resolve(true));
-
+        const isGnuMock = jest
+            .spyOn(utils, "useGnuTar")
+            .mockReturnValue(Promise.resolve(true));
         const execMock = jest.spyOn(exec, "exec");
         const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`;
-        const workspace = process.env["GITHUB_WORKSPACE"];
+        const targetDirectory = "~/.npm/cache";
 
-        await tar.extractTar(archivePath, CompressionMethod.Gzip);
+        await tar.extractTar(archivePath, targetDirectory);
 
+        expect(isGnuMock).toHaveBeenCalledTimes(1);
         expect(execMock).toHaveBeenCalledTimes(1);
-        expect(execMock).toHaveBeenCalledWith(
-            `"tar"`,
-            [
-                "-z",
-                "-xf",
-                archivePath.replace(/\\/g, "/"),
-                "-P",
-                "-C",
-                workspace?.replace(/\\/g, "/"),
-                "--force-local"
-            ],
-            { cwd: undefined }
-        );
+        expect(execMock).toHaveBeenLastCalledWith(`"tar"`, [
+            "-xz",
+            "-f",
+            archivePath.replace(/\\/g, "/"),
+            "-C",
+            targetDirectory?.replace(/\\/g, "/"),
+            "--force-local"
+        ]);
     }
 });
 
-test("zstd create tar", async () => {
+test("create BSD tar", async () => {
     const execMock = jest.spyOn(exec, "exec");
 
-    const archiveFolder = getTempDir();
-    const workspace = process.env["GITHUB_WORKSPACE"];
-    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
-
-    await fs.promises.mkdir(archiveFolder, { recursive: true });
-
-    await tar.createTar(
-        archiveFolder,
-        sourceDirectories,
-        CompressionMethod.Zstd
-    );
+    const archivePath = "cache.tar";
+    const sourceDirectory = "~/.npm/cache";
+    await tar.createTar(archivePath, sourceDirectory);
 
+    const IS_WINDOWS = process.platform === "win32";
     const tarPath = IS_WINDOWS
        ? `${process.env["windir"]}\\System32\\tar.exe`
         : "tar";
-
     expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(
-        `"${tarPath}"`,
-        [
-            "--use-compress-program",
-            "zstd -T0 --long=30",
-            "-cf",
-            IS_WINDOWS
-                ? CacheFilename.Zstd.replace(/\\/g, "/")
-                : CacheFilename.Zstd,
-            "-P",
-            "-C",
-            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
-            "--files-from",
-            "manifest.txt"
-        ],
-        {
-            cwd: archiveFolder
-        }
-    );
-});
-
-test("gzip create tar", async () => {
-    const execMock = jest.spyOn(exec, "exec");
-
-    const archiveFolder = getTempDir();
-    const workspace = process.env["GITHUB_WORKSPACE"];
-    const sourceDirectories = ["~/.npm/cache", `${workspace}/dist`];
-
-    await fs.promises.mkdir(archiveFolder, { recursive: true });
-
-    await tar.createTar(
-        archiveFolder,
-        sourceDirectories,
-        CompressionMethod.Gzip
-    );
-
-    const tarPath = IS_WINDOWS
-        ? `${process.env["windir"]}\\System32\\tar.exe`
-        : "tar";
-
-    expect(execMock).toHaveBeenCalledTimes(1);
-    expect(execMock).toHaveBeenCalledWith(
-        `"${tarPath}"`,
-        [
-            "-z",
-            "-cf",
-            IS_WINDOWS
-                ? CacheFilename.Gzip.replace(/\\/g, "/")
-                : CacheFilename.Gzip,
-            "-P",
-            "-C",
-            IS_WINDOWS ? workspace?.replace(/\\/g, "/") : workspace,
-            "--files-from",
-            "manifest.txt"
-        ],
-        {
-            cwd: archiveFolder
-        }
-    );
+    expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [
+        "-cz",
+        "-f",
+        IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath,
+        "-C",
+        IS_WINDOWS ? sourceDirectory?.replace(/\\/g, "/") : sourceDirectory,
+        "."
+    ]);
 });
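
The expected `exec` calls translate directly into two tar invocations: extraction runs `tar -xz -f <archive> -C <target>` after creating the target directory, and creation runs `tar -cz -f <archive> -C <source> .`; Windows adds forward-slash path normalization, a System32 BSD-tar default, and `--force-local` for GNU tar. A sketch of that pair, assuming a helper split like the one these tests mock (the System32 preference and the single `useGnuTar` probe are assumptions):

```typescript
import * as exec from "@actions/exec";
import * as io from "@actions/io";

import * as utils from "./utils/actionUtils"; // provides useGnuTar(), mocked above

const IS_WINDOWS = process.platform === "win32";

// Assumption: prefer the BSD tar shipped under System32 on Windows;
// the real implementation probes the filesystem for it.
async function getSystemTarPath(): Promise<string> {
    if (IS_WINDOWS) {
        return `${process.env["windir"]}\\System32\\tar.exe`;
    }
    return io.which("tar", true);
}

// tar on Windows wants forward slashes.
function normalize(p: string): string {
    return IS_WINDOWS ? p.replace(/\\/g, "/") : p;
}

export async function extractTar(
    archivePath: string,
    targetDirectory: string
): Promise<void> {
    // Create the destination first, then untar the gzipped archive into it.
    await io.mkdirP(targetDirectory);

    const gnuTarOnWindows = IS_WINDOWS && (await utils.useGnuTar());
    const tarPath = gnuTarOnWindows
        ? await io.which("tar", true) // plain "tar" from the PATH
        : await getSystemTarPath();

    const args = ["-xz", "-f", normalize(archivePath), "-C", normalize(targetDirectory)];
    if (gnuTarOnWindows) {
        args.push("--force-local"); // keep GNU tar from parsing "C:" as a remote host
    }
    await exec.exec(`"${tarPath}"`, args);
}

export async function createTar(
    archivePath: string,
    sourceDirectory: string
): Promise<void> {
    // Archive the *contents* of the source directory ("." relative to -C).
    await exec.exec(`"${await getSystemTarPath()}"`, [
        "-cz",
        "-f",
        normalize(archivePath),
        "-C",
        normalize(sourceDirectory),
        "."
    ]);
}
```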

__tests__/verify-cache-files.sh

@@ -7,12 +7,6 @@ if [ -z "$prefix" ]; then
   exit 1
 fi
 
-path="$2"
-if [ -z "$path" ]; then
-  echo "Must specify path argument"
-  exit 1
-fi
-
 # Sanity check GITHUB_RUN_ID defined
 if [ -z "$GITHUB_RUN_ID" ]; then
   echo "GITHUB_RUN_ID not defined"
@@ -20,7 +14,7 @@ if [ -z "$GITHUB_RUN_ID" ]; then
 fi
 
 # Verify file exists
-file="$path/test-file.txt"
+file="test-cache/test-file.txt"
 echo "Checking for $file"
 if [ ! -e $file ]; then
   echo "File does not exist"

dist/restore/index.js (vendored) | 4547 changed lines (diff suppressed because it is too large)
dist/save/index.js (vendored) | 4541 changed lines (diff suppressed because it is too large)
									
								
								examples.md
									
									
									
									
									
								
							
							
						
						
									
										90
									
								
								examples.md
									
									
									
									
									
								
							| @ -1,6 +1,5 @@ | ||||
| # Examples | ||||
|  | ||||
| - [Examples](#examples) | ||||
| - [C# - NuGet](#c---nuget) | ||||
| - [Elixir - Mix](#elixir---mix) | ||||
| - [Go - Modules](#go---modules) | ||||
| @ -8,21 +7,10 @@ | ||||
| - [Java - Gradle](#java---gradle) | ||||
| - [Java - Maven](#java---maven) | ||||
| - [Node - npm](#node---npm) | ||||
|     - [macOS and Ubuntu](#macos-and-ubuntu) | ||||
|     - [Windows](#windows) | ||||
|     - [Using multiple systems and `npm config`](#using-multiple-systems-and-npm-config) | ||||
|   - [Node - Lerna](#node---lerna) | ||||
| - [Node - Yarn](#node---yarn) | ||||
|   - [OCaml/Reason - esy](#ocamlreason---esy) | ||||
| - [PHP - Composer](#php---composer) | ||||
| - [Python - pip](#python---pip) | ||||
|     - [Simple example](#simple-example) | ||||
|     - [Multiple OS's in a workflow](#multiple-oss-in-a-workflow) | ||||
|     - [Using pip to get cache location](#using-pip-to-get-cache-location) | ||||
|     - [Using a script to get cache location](#using-a-script-to-get-cache-location) | ||||
| - [R - renv](#r---renv) | ||||
|     - [Simple example](#simple-example-1) | ||||
|     - [Multiple OS's in a workflow](#multiple-oss-in-a-workflow-1) | ||||
| - [Ruby - Bundler](#ruby---bundler) | ||||
| - [Rust - Cargo](#rust---cargo) | ||||
| - [Scala - SBT](#scala---sbt) | ||||
| @ -106,7 +94,7 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba | ||||
| - uses: actions/cache@v1 | ||||
|   with: | ||||
|     path: ~/.gradle/caches | ||||
|     key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle*') }} | ||||
|     key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-gradle- | ||||
| ``` | ||||
| @ -142,14 +130,10 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o | ||||
| ### Windows | ||||
|  | ||||
| ```yaml | ||||
| - name: Get npm cache directory | ||||
|   id: npm-cache | ||||
|   run: | | ||||
|     echo "::set-output name=dir::$(npm config get cache)" | ||||
| - uses: actions/cache@v1 | ||||
|   with: | ||||
|     path: ${{ steps.npm-cache.outputs.dir }} | ||||
|     key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} | ||||
|     path: ~\AppData\Roaming\npm-cache | ||||
|     key: ${{ runner.os }}-node-${{ hashFiles('**\package-lock.json') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-node- | ||||
| ``` | ||||
| @ -169,64 +153,22 @@ For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` o | ||||
|       ${{ runner.os }}-node- | ||||
| ``` | ||||
|  | ||||
| ## Node - Lerna | ||||
|  | ||||
| > Note: this example uses the new multi-paths feature and is only available on `master`. | ||||
| ```yaml | ||||
| - name: restore lerna | ||||
|   uses: actions/cache@master | ||||
|   with: | ||||
|     path: | | ||||
|       node_modules | ||||
|       */*/node_modules | ||||
|     key: ${{ runner.os }}-${{ hashFiles('**/yarn.lock') }} | ||||
| ``` | ||||
|  | ||||
| ## Node - Yarn | ||||
| The yarn cache directory will depend on your operating system and version of `yarn`. See https://yarnpkg.com/lang/en/docs/cli/cache/ for more info. | ||||
|  | ||||
| ```yaml | ||||
| - name: Get yarn cache directory path | ||||
|   id: yarn-cache-dir-path | ||||
| - name: Get yarn cache | ||||
|   id: yarn-cache | ||||
|   run: echo "::set-output name=dir::$(yarn cache dir)" | ||||
|  | ||||
| - uses: actions/cache@v1 | ||||
|   id: yarn-cache # use this to check for `cache-hit` (`steps.yarn-cache.outputs.cache-hit != 'true'`) | ||||
|   with: | ||||
|     path: ${{ steps.yarn-cache-dir-path.outputs.dir }} | ||||
|     path: ${{ steps.yarn-cache.outputs.dir }} | ||||
|     key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-yarn- | ||||
| ``` | ||||
|  | ||||
| ## OCaml/Reason - esy | ||||
| Esy allows you to export built dependencies and import pre-built dependencies. | ||||
| ```yaml | ||||
|     - name: Restore Cache | ||||
|       id: restore-cache | ||||
|       uses: actions/cache@v1 | ||||
|       with: | ||||
|         path: _export | ||||
|         key:  ${{ runner.os }}-esy-${{ hashFiles('esy.lock/index.json') }} | ||||
|         restore-keys: | | ||||
|           ${{ runner.os }}-esy- | ||||
|     - name: Esy install | ||||
|       run: 'esy install' | ||||
|     - name: Import Cache | ||||
|       run: | | ||||
|         esy import-dependencies _export | ||||
|         rm -rf _export | ||||
|  | ||||
|     ...(Build job)... | ||||
|  | ||||
|     # Re-export dependencies if anything has changed or if it is the first time | ||||
|     - name: Setting dependency cache  | ||||
|       run: | | ||||
|         esy export-dependencies | ||||
|       if: steps.restore-cache.outputs.cache-hit != 'true' | ||||
| ``` | ||||
|  | ||||
|  | ||||
| ## PHP - Composer | ||||
|  | ||||
| ```yaml   | ||||
| @ -291,29 +233,11 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu. | ||||
|       ${{ runner.os }}-pip- | ||||
| ``` | ||||
|  | ||||
| ### Using pip to get cache location | ||||
|  | ||||
| > Note: This requires pip 20.1+ | ||||
| ```yaml | ||||
| - name: Get pip cache dir | ||||
|   id: pip-cache | ||||
|   run: | | ||||
|     echo "::set-output name=dir::$(pip cache dir)" | ||||
|  | ||||
| - name: pip cache | ||||
|   uses: actions/cache@v1 | ||||
|   with: | ||||
|     path: ${{ steps.pip-cache.outputs.dir }} | ||||
|     key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} | ||||
|     restore-keys: | | ||||
|       ${{ runner.os }}-pip- | ||||
| ``` | ||||
|  | ||||
| ### Using a script to get cache location | ||||
|  | ||||
| > Note: This uses an internal pip API and may not always work | ||||
| ```yaml | ||||
| - name: Get pip cache dir | ||||
| - name: Get pip cache | ||||
|    id: pip-cache | ||||
|    run: | | ||||
|      python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)" | ||||
|  | ||||
package-lock.json (generated): 3960 lines changed
File diff suppressed because it is too large
package.json: 13 lines changed
| { | ||||
|   "name": "cache", | ||||
|   "version": "1.1.2", | ||||
|   "version": "1.2.0", | ||||
|   "private": true, | ||||
|   "description": "Cache dependencies and build outputs", | ||||
|   "main": "dist/restore/index.js", | ||||
|   "scripts": { | ||||
|     "build": "tsc && ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts", | ||||
|     "build": "tsc", | ||||
|     "test": "tsc --noEmit && jest --coverage", | ||||
|     "lint": "eslint **/*.ts --cache", | ||||
|     "format": "prettier --write **/*.ts", | ||||
|     "format-check": "prettier --check **/*.ts" | ||||
|     "format-check": "prettier --check **/*.ts", | ||||
|     "release": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && git add -f dist/" | ||||
|   }, | ||||
|   "repository": { | ||||
|     "type": "git", | ||||
| @ -23,10 +24,9 @@ | ||||
|   "author": "GitHub", | ||||
|   "license": "MIT", | ||||
|   "dependencies": { | ||||
|     "@actions/core": "^1.2.0", | ||||
|     "@actions/core": "^1.10.0", | ||||
|     "@actions/exec": "^1.0.1", | ||||
|     "@actions/glob": "^0.1.0", | ||||
|     "@actions/http-client": "^1.0.8", | ||||
|     "@actions/http-client": "^1.0.6", | ||||
|     "@actions/io": "^1.0.1", | ||||
|     "uuid": "^3.3.3" | ||||
|   }, | ||||
| @ -43,7 +43,6 @@ | ||||
|     "eslint-plugin-import": "^2.18.2", | ||||
|     "eslint-plugin-jest": "^23.0.3", | ||||
|     "eslint-plugin-prettier": "^3.1.1", | ||||
|     "eslint-plugin-simple-import-sort": "^5.0.2", | ||||
|     "jest": "^24.8.0", | ||||
|     "jest-circus": "^24.7.1", | ||||
|     "nock": "^11.7.0", | ||||

src/cacheHttpClient.ts
| @ -6,23 +6,19 @@ import { | ||||
|     IRequestOptions, | ||||
|     ITypedResponse | ||||
| } from "@actions/http-client/interfaces"; | ||||
| import * as crypto from "crypto"; | ||||
| import * as fs from "fs"; | ||||
| import * as stream from "stream"; | ||||
| import * as util from "util"; | ||||
|  | ||||
| import { CompressionMethod, DefaultSocketTimeout, Inputs } from "./constants"; | ||||
| import { SocketTimeout } from "./constants"; | ||||
| import { | ||||
|     ArtifactCacheEntry, | ||||
|     CacheOptions, | ||||
|     CommitCacheRequest, | ||||
|     ReserveCacheRequest, | ||||
|     ReserveCacheResponse | ||||
| } from "./contracts"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
|  | ||||
| const versionSalt = "1.0"; | ||||
|  | ||||
| function isSuccessStatusCode(statusCode?: number): boolean { | ||||
|     if (!statusCode) { | ||||
|         return false; | ||||
| @ -30,6 +26,13 @@ function isSuccessStatusCode(statusCode?: number): boolean { | ||||
|     return statusCode >= 200 && statusCode < 300; | ||||
| } | ||||
|  | ||||
| function isServerErrorStatusCode(statusCode?: number): boolean { | ||||
|     if (!statusCode) { | ||||
|         return true; | ||||
|     } | ||||
|     return statusCode >= 500; | ||||
| } | ||||
|  | ||||
| function isRetryableStatusCode(statusCode?: number): boolean { | ||||
|     if (!statusCode) { | ||||
|         return false; | ||||
| @ -85,40 +88,83 @@ function createHttpClient(): HttpClient { | ||||
|     ); | ||||
| } | ||||
|  | ||||
| function parseEnvNumber(key: string): number | undefined { | ||||
|     const value = Number(process.env[key]); | ||||
|     if (Number.isNaN(value) || value < 0) { | ||||
|         return undefined; | ||||
|     } | ||||
|     return value; | ||||
| export async function retry<T>( | ||||
|     name: string, | ||||
|     method: () => Promise<T>, | ||||
|     getStatusCode: (response: T) => number | undefined, | ||||
|     maxAttempts = 2 | ||||
| ): Promise<T> { | ||||
|     let response: T | undefined = undefined; | ||||
|     let statusCode: number | undefined = undefined; | ||||
|     let isRetryable = false; | ||||
|     let errorMessage = ""; | ||||
|     let attempt = 1; | ||||
|  | ||||
|     while (attempt <= maxAttempts) { | ||||
|         try { | ||||
|             response = await method(); | ||||
|             statusCode = getStatusCode(response); | ||||
|  | ||||
|             if (!isServerErrorStatusCode(statusCode)) { | ||||
|                 return response; | ||||
|             } | ||||
|  | ||||
| export function getCacheVersion(compressionMethod?: CompressionMethod): string { | ||||
|     const components = [core.getInput(Inputs.Path, { required: true })].concat( | ||||
|         compressionMethod == CompressionMethod.Zstd ? [compressionMethod] : [] | ||||
|             isRetryable = isRetryableStatusCode(statusCode); | ||||
|             errorMessage = `Cache service responded with ${statusCode}`; | ||||
|         } catch (error) { | ||||
|             isRetryable = true; | ||||
|             errorMessage = error.message; | ||||
|         } | ||||
|  | ||||
|         core.debug( | ||||
|             `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}` | ||||
|         ); | ||||
|  | ||||
|     // Add salt to cache version to support breaking changes in cache entry | ||||
|     components.push(versionSalt); | ||||
|         if (!isRetryable) { | ||||
|             core.debug(`${name} - Error is not retryable`); | ||||
|             break; | ||||
|         } | ||||
|  | ||||
|     return crypto | ||||
|         .createHash("sha256") | ||||
|         .update(components.join("|")) | ||||
|         .digest("hex"); | ||||
|         attempt++; | ||||
|     } | ||||
|  | ||||
|     throw Error(`${name} failed: ${errorMessage}`); | ||||
| } | ||||
|  | ||||
| export async function retryTypedResponse<T>( | ||||
|     name: string, | ||||
|     method: () => Promise<ITypedResponse<T>>, | ||||
|     maxAttempts = 2 | ||||
| ): Promise<ITypedResponse<T>> { | ||||
|     return await retry( | ||||
|         name, | ||||
|         method, | ||||
|         (response: ITypedResponse<T>) => response.statusCode, | ||||
|         maxAttempts | ||||
|     ); | ||||
| } | ||||
|  | ||||
| export async function retryHttpClientResponse<T>( | ||||
|     name: string, | ||||
|     method: () => Promise<IHttpClientResponse>, | ||||
|     maxAttempts = 2 | ||||
| ): Promise<IHttpClientResponse> { | ||||
|     return await retry( | ||||
|         name, | ||||
|         method, | ||||
|         (response: IHttpClientResponse) => response.message.statusCode, | ||||
|         maxAttempts | ||||
|     ); | ||||
| } | ||||
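Taken together, `retry`, `retryTypedResponse`, and `retryHttpClientResponse` give every cache-service call one automatic second attempt. Below is a minimal, self-contained sketch of the pattern; it is a simplification (the real helper above also consults `isRetryableStatusCode` before retrying a 5xx failure), and the names `retrySketch` and `attempts` are illustrative only:

```typescript
async function retrySketch<T>(
    name: string,
    method: () => Promise<T>,
    getStatusCode: (response: T) => number | undefined,
    maxAttempts = 2
): Promise<T> {
    let errorMessage = "";
    for (let attempt = 1; attempt <= maxAttempts; attempt++) {
        try {
            const response = await method();
            const statusCode = getStatusCode(response);
            // A present, non-5xx status code is a usable answer (including 4xx).
            if (statusCode !== undefined && statusCode < 500) {
                return response;
            }
            errorMessage = `Cache service responded with ${statusCode}`;
        } catch (error) {
            // Thrown errors (network failures, etc.) are always retried.
            errorMessage = (error as Error).message;
        }
    }
    throw new Error(`${name} failed: ${errorMessage}`);
}

// Illustrative usage: fails once with 503, succeeds on the retry.
let attempts = 0;
retrySketch(
    "getCacheEntry (sketch)",
    async () => ({ statusCode: ++attempts < 2 ? 503 : 200 }),
    response => response.statusCode
).then(response => console.log(response.statusCode)); // logs 200
```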
|  | ||||
| export async function getCacheEntry( | ||||
|     keys: string[], | ||||
|     options?: CacheOptions | ||||
|     keys: string[] | ||||
| ): Promise<ArtifactCacheEntry | null> { | ||||
|     const httpClient = createHttpClient(); | ||||
|     const version = getCacheVersion(options?.compressionMethod); | ||||
|     const resource = `cache?keys=${encodeURIComponent( | ||||
|         keys.join(",") | ||||
|     )}&version=${version}`; | ||||
|     const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; | ||||
|  | ||||
|     const response = await httpClient.getJson<ArtifactCacheEntry>( | ||||
|         getCacheApiUrl(resource) | ||||
|     const response = await retryTypedResponse("getCacheEntry", () => | ||||
|         httpClient.getJson<ArtifactCacheEntry>(getCacheApiUrl(resource)) | ||||
|     ); | ||||
|     if (response.statusCode === 204) { | ||||
|         return null; | ||||
| @ -153,15 +199,16 @@ export async function downloadCache( | ||||
| ): Promise<void> { | ||||
|     const stream = fs.createWriteStream(archivePath); | ||||
|     const httpClient = new HttpClient("actions/cache"); | ||||
|     const downloadResponse = await httpClient.get(archiveLocation); | ||||
|     const downloadResponse = await retryHttpClientResponse( | ||||
|         "downloadCache", | ||||
|         () => httpClient.get(archiveLocation) | ||||
|     ); | ||||
|  | ||||
|     // Abort download if no traffic received over the socket. | ||||
|     const socketTimeout = | ||||
|         parseEnvNumber("CACHE_SOCKET_TIMEOUT") ?? DefaultSocketTimeout; | ||||
|     downloadResponse.message.socket.setTimeout(socketTimeout, () => { | ||||
|     downloadResponse.message.socket.setTimeout(SocketTimeout, () => { | ||||
|         downloadResponse.message.destroy(); | ||||
|         core.debug( | ||||
|             `Aborting download, socket timed out after ${socketTimeout} ms` | ||||
|             `Aborting download, socket timed out after ${SocketTimeout} ms` | ||||
|         ); | ||||
|     }); | ||||
|  | ||||
| @ -186,20 +233,17 @@ export async function downloadCache( | ||||
| } | ||||
|  | ||||
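`SocketTimeout` guards against a download that stalls without the server ever closing the connection. The same idle-socket guard in plain Node, with an illustrative URL (the action applies this to the signed archive URL and pipes the body to a file):

```typescript
import * as http from "http";

const SocketTimeout = 5000;

// Destroy the response if no data arrives on the socket for SocketTimeout ms.
http.get("http://example.com/cache.tgz", response => {
    response.socket.setTimeout(SocketTimeout, () => {
        response.destroy();
        console.log(
            `Aborting download, socket timed out after ${SocketTimeout} ms`
        );
    });
    response.resume(); // drain the body; a real consumer would pipe it somewhere
});
```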
| // Reserve Cache | ||||
| export async function reserveCache( | ||||
|     key: string, | ||||
|     options?: CacheOptions | ||||
| ): Promise<number> { | ||||
| export async function reserveCache(key: string): Promise<number> { | ||||
|     const httpClient = createHttpClient(); | ||||
|     const version = getCacheVersion(options?.compressionMethod); | ||||
|  | ||||
|     const reserveCacheRequest: ReserveCacheRequest = { | ||||
|         key, | ||||
|         version | ||||
|         key | ||||
|     }; | ||||
|     const response = await httpClient.postJson<ReserveCacheResponse>( | ||||
|     const response = await retryTypedResponse("reserveCache", () => | ||||
|         httpClient.postJson<ReserveCacheResponse>( | ||||
|             getCacheApiUrl("caches"), | ||||
|             reserveCacheRequest | ||||
|         ) | ||||
|     ); | ||||
|     return response?.result?.cacheId ?? -1; | ||||
| } | ||||
| @ -216,7 +260,7 @@ function getContentRange(start: number, end: number): string { | ||||
| async function uploadChunk( | ||||
|     httpClient: HttpClient, | ||||
|     resourceUrl: string, | ||||
|     data: NodeJS.ReadableStream, | ||||
|     openStream: () => NodeJS.ReadableStream, | ||||
|     start: number, | ||||
|     end: number | ||||
| ): Promise<void> { | ||||
| @ -233,33 +277,24 @@ async function uploadChunk( | ||||
|         "Content-Range": getContentRange(start, end) | ||||
|     }; | ||||
|  | ||||
|     const uploadChunkRequest = async (): Promise<IHttpClientResponse> => { | ||||
|         return await httpClient.sendStream( | ||||
|     await retryHttpClientResponse( | ||||
|         `uploadChunk (start: ${start}, end: ${end})`, | ||||
|         () => | ||||
|             httpClient.sendStream( | ||||
|                 "PATCH", | ||||
|                 resourceUrl, | ||||
|             data, | ||||
|                 openStream(), | ||||
|                 additionalHeaders | ||||
|             ) | ||||
|     ); | ||||
|     }; | ||||
|  | ||||
|     const response = await uploadChunkRequest(); | ||||
|     if (isSuccessStatusCode(response.message.statusCode)) { | ||||
|         return; | ||||
| } | ||||
|  | ||||
|     if (isRetryableStatusCode(response.message.statusCode)) { | ||||
|         core.debug( | ||||
|             `Received ${response.message.statusCode}, retrying chunk at offset ${start}.` | ||||
|         ); | ||||
|         const retryResponse = await uploadChunkRequest(); | ||||
|         if (isSuccessStatusCode(retryResponse.message.statusCode)) { | ||||
|             return; | ||||
| function parseEnvNumber(key: string): number | undefined { | ||||
|     const value = Number(process.env[key]); | ||||
|     if (Number.isNaN(value) || value < 0) { | ||||
|         return undefined; | ||||
|     } | ||||
|     } | ||||
|  | ||||
|     throw new Error( | ||||
|         `Cache service responded with ${response.message.statusCode} during chunk upload.` | ||||
|     ); | ||||
|     return value; | ||||
| } | ||||
|  | ||||
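`uploadChunk` PATCHes each slice of the archive with a `Content-Range` header built by `getContentRange`. A sketch of how offsets map onto headers; the 4 MB chunk size is illustrative only (the action's real `MAX_CHUNK_SIZE` constant is defined elsewhere in this file):

```typescript
// "*" means the total size is unknown at upload time; it is reported
// to the service later via commitCache.
function getContentRange(start: number, end: number): string {
    return `bytes ${start}-${end}/*`;
}

const fileSize = 10 * 1024 * 1024; // 10 MB archive
const chunkSize = 4 * 1024 * 1024; // 4 MB chunks (illustrative)
for (let offset = 0; offset < fileSize; offset += chunkSize) {
    const end = Math.min(offset + chunkSize, fileSize) - 1;
    console.log(getContentRange(offset, end));
}
// bytes 0-4194303/*
// bytes 4194304-8388607/*
// bytes 8388608-10485759/*
```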
| async function uploadFile( | ||||
| @ -292,17 +327,23 @@ async function uploadFile( | ||||
|                     const start = offset; | ||||
|                     const end = offset + chunkSize - 1; | ||||
|                     offset += MAX_CHUNK_SIZE; | ||||
|                     const chunk = fs.createReadStream(archivePath, { | ||||
|                         fd, | ||||
|                         start, | ||||
|                         end, | ||||
|                         autoClose: false | ||||
|                     }); | ||||
|  | ||||
|                     await uploadChunk( | ||||
|                         httpClient, | ||||
|                         resourceUrl, | ||||
|                         chunk, | ||||
|                         () => | ||||
|                             fs | ||||
|                                 .createReadStream(archivePath, { | ||||
|                                     fd, | ||||
|                                     start, | ||||
|                                     end, | ||||
|                                     autoClose: false | ||||
|                                 }) | ||||
|                                 .on("error", error => { | ||||
|                                     throw new Error( | ||||
|                                         `Cache upload failed because file read failed with ${error.message}` | ||||
|                                     ); | ||||
|                                 }), | ||||
|                         start, | ||||
|                         end | ||||
|                     ); | ||||
| @ -321,9 +362,11 @@ async function commitCache( | ||||
|     filesize: number | ||||
| ): Promise<ITypedResponse<null>> { | ||||
|     const commitCacheRequest: CommitCacheRequest = { size: filesize }; | ||||
|     return await httpClient.postJson<null>( | ||||
|     return await retryTypedResponse("commitCache", () => | ||||
|         httpClient.postJson<null>( | ||||
|             getCacheApiUrl(`caches/${cacheId.toString()}`), | ||||
|             commitCacheRequest | ||||
|         ) | ||||
|     ); | ||||
| } | ||||
|  | ||||
src/constants.ts
| @ -19,17 +19,7 @@ export enum Events { | ||||
|     PullRequest = "pull_request" | ||||
| } | ||||
|  | ||||
| export enum CacheFilename { | ||||
|     Gzip = "cache.tgz", | ||||
|     Zstd = "cache.tzst" | ||||
| } | ||||
|  | ||||
| export enum CompressionMethod { | ||||
|     Gzip = "gzip", | ||||
|     Zstd = "zstd" | ||||
| } | ||||
|  | ||||
| // Socket timeout in milliseconds during download.  If no traffic is received | ||||
| // over the socket during this period, the socket is destroyed and the download | ||||
| // is aborted. | ||||
| export const DefaultSocketTimeout = 5000; | ||||
| export const SocketTimeout = 5000; | ||||
|  | ||||
src/contracts.d.ts (vendored): 6 lines changed
							| @ -1,5 +1,3 @@ | ||||
| import { CompressionMethod } from "./constants"; | ||||
|  | ||||
| export interface ArtifactCacheEntry { | ||||
|     cacheKey?: string; | ||||
|     scope?: string; | ||||
| @ -19,7 +17,3 @@ export interface ReserveCacheRequest { | ||||
| export interface ReserveCacheResponse { | ||||
|     cacheId: number; | ||||
| } | ||||
|  | ||||
| export interface CacheOptions { | ||||
|     compressionMethod?: CompressionMethod; | ||||
| } | ||||
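For orientation, a cache-hit response might deserialize into an `ArtifactCacheEntry` (the interface above) like this; every field value is invented for illustration:

```typescript
const entry: ArtifactCacheEntry = {
    cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43",
    scope: "refs/heads/master",
    archiveLocation: "https://example.invalid/signed-cache-url/cache.tgz"
};
```
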
src/restore.ts
| @ -1,6 +1,5 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import * as path from "path"; | ||||
|  | ||||
| import * as cacheHttpClient from "./cacheHttpClient"; | ||||
| import { Events, Inputs, State } from "./constants"; | ||||
| import { extractTar } from "./tar"; | ||||
| @ -20,6 +19,11 @@ async function run(): Promise<void> { | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         const cachePath = utils.resolvePath( | ||||
|             core.getInput(Inputs.Path, { required: true }) | ||||
|         ); | ||||
|         core.debug(`Cache Path: ${cachePath}`); | ||||
|  | ||||
|         const primaryKey = core.getInput(Inputs.Key, { required: true }); | ||||
|         core.saveState(State.CacheKey, primaryKey); | ||||
|  | ||||
| @ -54,27 +58,24 @@ async function run(): Promise<void> { | ||||
|             } | ||||
|         } | ||||
|  | ||||
|         const compressionMethod = await utils.getCompressionMethod(); | ||||
|  | ||||
|         try { | ||||
|             const cacheEntry = await cacheHttpClient.getCacheEntry(keys, { | ||||
|                 compressionMethod: compressionMethod | ||||
|             }); | ||||
|             const cacheEntry = await cacheHttpClient.getCacheEntry(keys); | ||||
|             if (!cacheEntry?.archiveLocation) { | ||||
|                 core.info(`Cache not found for input keys: ${keys.join(", ")}`); | ||||
|                 core.info( | ||||
|                     `Cache not found for input keys: ${keys.join(", ")}.` | ||||
|                 ); | ||||
|                 return; | ||||
|             } | ||||
|  | ||||
|             const archivePath = path.join( | ||||
|                 await utils.createTempDirectory(), | ||||
|                 utils.getCacheFileName(compressionMethod) | ||||
|                 "cache.tgz" | ||||
|             ); | ||||
|             core.debug(`Archive Path: ${archivePath}`); | ||||
|  | ||||
|             // Store the cache result | ||||
|             utils.setCacheState(cacheEntry); | ||||
|  | ||||
|             try { | ||||
|             // Download the cache from the cache entry | ||||
|             await cacheHttpClient.downloadCache( | ||||
|                 cacheEntry.archiveLocation, | ||||
| @ -88,15 +89,7 @@ async function run(): Promise<void> { | ||||
|                 )} MB (${archiveFileSize} B)` | ||||
|             ); | ||||
|  | ||||
|                 await extractTar(archivePath, compressionMethod); | ||||
|             } finally { | ||||
|                 // Try to delete the archive to save space | ||||
|                 try { | ||||
|                     await utils.unlinkFile(archivePath); | ||||
|                 } catch (error) { | ||||
|                     core.debug(`Failed to delete archive: ${error}`); | ||||
|                 } | ||||
|             } | ||||
|             await extractTar(archivePath, cachePath); | ||||
|  | ||||
|             const isExactKeyMatch = utils.isExactKeyMatch( | ||||
|                 primaryKey, | ||||
|  | ||||
src/save.ts: 26 lines changed
| import * as core from "@actions/core"; | ||||
| import * as path from "path"; | ||||
|  | ||||
| import * as cacheHttpClient from "./cacheHttpClient"; | ||||
| import { Events, Inputs, State } from "./constants"; | ||||
| import { createTar } from "./tar"; | ||||
| @ -35,12 +34,8 @@ async function run(): Promise<void> { | ||||
|             return; | ||||
|         } | ||||
|  | ||||
|         const compressionMethod = await utils.getCompressionMethod(); | ||||
|  | ||||
|         core.debug("Reserving Cache"); | ||||
|         const cacheId = await cacheHttpClient.reserveCache(primaryKey, { | ||||
|             compressionMethod: compressionMethod | ||||
|         }); | ||||
|         const cacheId = await cacheHttpClient.reserveCache(primaryKey); | ||||
|         if (cacheId == -1) { | ||||
|             core.info( | ||||
|                 `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` | ||||
| @ -48,25 +43,18 @@ async function run(): Promise<void> { | ||||
|             return; | ||||
|         } | ||||
|         core.debug(`Cache ID: ${cacheId}`); | ||||
|         const cachePaths = await utils.resolvePaths( | ||||
|             core | ||||
|                 .getInput(Inputs.Path, { required: true }) | ||||
|                 .split("\n") | ||||
|                 .filter(x => x !== "") | ||||
|         const cachePath = utils.resolvePath( | ||||
|             core.getInput(Inputs.Path, { required: true }) | ||||
|         ); | ||||
|         core.debug(`Cache Path: ${cachePath}`); | ||||
|  | ||||
|         core.debug("Cache Paths:"); | ||||
|         core.debug(`${JSON.stringify(cachePaths)}`); | ||||
|  | ||||
|         const archiveFolder = await utils.createTempDirectory(); | ||||
|         const archivePath = path.join( | ||||
|             archiveFolder, | ||||
|             utils.getCacheFileName(compressionMethod) | ||||
|             await utils.createTempDirectory(), | ||||
|             "cache.tgz" | ||||
|         ); | ||||
|  | ||||
|         core.debug(`Archive Path: ${archivePath}`); | ||||
|  | ||||
|         await createTar(archiveFolder, cachePaths, compressionMethod); | ||||
|         await createTar(archivePath, cachePath); | ||||
|  | ||||
|         const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit | ||||
|         const archiveFileSize = utils.getArchiveFileSize(archivePath); | ||||
|  | ||||
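The 5 GB check that follows the `createTar` call above reads well in isolation. A standalone sketch (error text approximated; `utils.getArchiveFileSize` in the action is a `statSync`-based equivalent of the call here):

```typescript
import * as fs from "fs";

// Throw before uploading if the archive would exceed the per-repo limit.
function checkArchiveSize(archivePath: string): number {
    const fileSizeLimit = 5 * 1024 * 1024 * 1024; // 5GB per repo limit
    const archiveFileSize = fs.statSync(archivePath).size;
    if (archiveFileSize > fileSizeLimit) {
        throw new Error(
            `Cache size of ${archiveFileSize} B is over the 5GB limit, not saving cache.`
        );
    }
    return archiveFileSize;
}
```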
src/tar.ts: 81 lines changed
| import * as core from "@actions/core"; | ||||
| import { exec } from "@actions/exec"; | ||||
| import * as io from "@actions/io"; | ||||
| import { existsSync, writeFileSync } from "fs"; | ||||
| import { existsSync } from "fs"; | ||||
| import * as path from "path"; | ||||
| import * as tar from "./tar"; | ||||
|  | ||||
| import { CompressionMethod } from "./constants"; | ||||
| import * as utils from "./utils/actionUtils"; | ||||
| export async function isGnuTar(): Promise<boolean> { | ||||
|     core.debug("Checking tar --version"); | ||||
|     let versionOutput = ""; | ||||
|     await exec("tar --version", [], { | ||||
|         ignoreReturnCode: true, | ||||
|         silent: true, | ||||
|         listeners: { | ||||
|             stdout: (data: Buffer): string => | ||||
|                 (versionOutput += data.toString()), | ||||
|             stderr: (data: Buffer): string => (versionOutput += data.toString()) | ||||
|         } | ||||
|     }); | ||||
|  | ||||
|     core.debug(versionOutput.trim()); | ||||
|     return versionOutput.toUpperCase().includes("GNU TAR"); | ||||
| } | ||||
|  | ||||
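`isGnuTar` shells out to `tar --version` and greps the banner. A dependency-free equivalent, assuming `tar` is on `PATH` (it throws if it is not):

```typescript
import { execFileSync } from "child_process";

// Synchronous stand-in for isGnuTar above, without @actions/exec.
function isGnuTarSync(): boolean {
    const versionOutput = execFileSync("tar", ["--version"], {
        encoding: "utf8"
    });
    return versionOutput.toUpperCase().includes("GNU TAR");
}

console.log(isGnuTarSync()); // true for GNU tar, false for BSD tar
```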
| async function getTarPath(args: string[]): Promise<string> { | ||||
|     // Explicitly use BSD Tar on Windows | ||||
| @ -13,75 +29,48 @@ async function getTarPath(args: string[]): Promise<string> { | ||||
|         const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; | ||||
|         if (existsSync(systemTar)) { | ||||
|             return systemTar; | ||||
|         } else if (await utils.useGnuTar()) { | ||||
|         } else if (await tar.isGnuTar()) { | ||||
|             args.push("--force-local"); | ||||
|         } | ||||
|     } | ||||
|     return await io.which("tar", true); | ||||
| } | ||||
|  | ||||
| async function execTar(args: string[], cwd?: string): Promise<void> { | ||||
| async function execTar(args: string[]): Promise<void> { | ||||
|     try { | ||||
|         await exec(`"${await getTarPath(args)}"`, args, { cwd: cwd }); | ||||
|         await exec(`"${await getTarPath(args)}"`, args); | ||||
|     } catch (error) { | ||||
|         throw new Error(`Tar failed with error: ${error?.message}`); | ||||
|     } | ||||
| } | ||||
|  | ||||
| function getWorkingDirectory(): string { | ||||
|     return process.env["GITHUB_WORKSPACE"] ?? process.cwd(); | ||||
| } | ||||
|  | ||||
| export async function extractTar( | ||||
|     archivePath: string, | ||||
|     compressionMethod: CompressionMethod | ||||
|     targetDirectory: string | ||||
| ): Promise<void> { | ||||
|     // Create directory to extract tar into | ||||
|     const workingDirectory = getWorkingDirectory(); | ||||
|     await io.mkdirP(workingDirectory); | ||||
|     // --d: Decompress. | ||||
|     // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. | ||||
|     // Using 30 here because we also support 32-bit self-hosted runners. | ||||
|     await io.mkdirP(targetDirectory); | ||||
|     const args = [ | ||||
|         ...(compressionMethod == CompressionMethod.Zstd | ||||
|             ? ["--use-compress-program", "zstd -d --long=30"] | ||||
|             : ["-z"]), | ||||
|         "-xf", | ||||
|         "-xz", | ||||
|         "-f", | ||||
|         archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), | ||||
|         "-P", | ||||
|         "-C", | ||||
|         workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/") | ||||
|         targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/") | ||||
|     ]; | ||||
|     await execTar(args); | ||||
| } | ||||
|  | ||||
| export async function createTar( | ||||
|     archiveFolder: string, | ||||
|     sourceDirectories: string[], | ||||
|     compressionMethod: CompressionMethod | ||||
|     archivePath: string, | ||||
|     sourceDirectory: string | ||||
| ): Promise<void> { | ||||
|     // Write source directories to manifest.txt to avoid command length limits | ||||
|     const manifestFilename = "manifest.txt"; | ||||
|     const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|     writeFileSync( | ||||
|         path.join(archiveFolder, manifestFilename), | ||||
|         sourceDirectories.join("\n") | ||||
|     ); | ||||
|     // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores. | ||||
|     // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit. | ||||
|     // Using 30 here because we also support 32-bit self-hosted runners. | ||||
|     const workingDirectory = getWorkingDirectory(); | ||||
|     const args = [ | ||||
|         ...(compressionMethod == CompressionMethod.Zstd | ||||
|             ? ["--use-compress-program", "zstd -T0 --long=30"] | ||||
|             : ["-z"]), | ||||
|         "-cf", | ||||
|         cacheFileName.replace(new RegExp("\\" + path.sep, "g"), "/"), | ||||
|         "-P", | ||||
|         "-cz", | ||||
|         "-f", | ||||
|         archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), | ||||
|         "-C", | ||||
|         workingDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"), | ||||
|         "--files-from", | ||||
|         manifestFilename | ||||
|         sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"), | ||||
|         "." | ||||
|     ]; | ||||
|     await execTar(args, archiveFolder); | ||||
|     await execTar(args); | ||||
| } | ||||
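The `path.sep` replacement exists for Windows, where the tar binaries in play expect forward slashes. A sketch of the argument vector `createTar` above would build on a Windows runner (both paths are invented; on POSIX `path.sep` is already `/`, so the replacement is a no-op):

```typescript
import * as path from "path";

const archivePath = "C:\\Users\\runner\\temp\\cache.tgz";
const sourceDirectory = "C:\\actions-runner\\_work\\repo";

// Same regex trick as createTar: turn every path.sep into "/".
const toForwardSlashes = (p: string): string =>
    p.replace(new RegExp("\\" + path.sep, "g"), "/");

const args = [
    "-cz",
    "-f",
    toForwardSlashes(archivePath),     // C:/Users/runner/temp/cache.tgz
    "-C",
    toForwardSlashes(sourceDirectory), // C:/actions-runner/_work/repo
    "."
];
console.log(args.join(" "));
```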
src/utils/actionUtils.ts
| @ -1,19 +1,11 @@ | ||||
| import * as core from "@actions/core"; | ||||
| import * as exec from "@actions/exec"; | ||||
| import * as glob from "@actions/glob"; | ||||
| import * as io from "@actions/io"; | ||||
| import * as fs from "fs"; | ||||
| import * as os from "os"; | ||||
| import * as path from "path"; | ||||
| import * as util from "util"; | ||||
| import * as uuidV4 from "uuid/v4"; | ||||
|  | ||||
| import { | ||||
|     CacheFilename, | ||||
|     CompressionMethod, | ||||
|     Events, | ||||
|     Outputs, | ||||
|     State | ||||
| } from "../constants"; | ||||
| import { Events, Outputs, State } from "../constants"; | ||||
| import { ArtifactCacheEntry } from "../contracts"; | ||||
|  | ||||
| // From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23 | ||||
| @ -36,7 +28,6 @@ export async function createTempDirectory(): Promise<string> { | ||||
|         } | ||||
|         tempDirectory = path.join(baseLocation, "actions", "temp"); | ||||
|     } | ||||
|  | ||||
|     const dest = path.join(tempDirectory, uuidV4.default()); | ||||
|     await io.mkdirP(dest); | ||||
|     return dest; | ||||
| @ -91,21 +82,16 @@ export function logWarning(message: string): void { | ||||
|     core.info(`${warningPrefix}${message}`); | ||||
| } | ||||
|  | ||||
| export async function resolvePaths(patterns: string[]): Promise<string[]> { | ||||
|     const paths: string[] = []; | ||||
|     const workspace = process.env["GITHUB_WORKSPACE"] ?? process.cwd(); | ||||
|     const globber = await glob.create(patterns.join("\n"), { | ||||
|         implicitDescendants: false | ||||
|     }); | ||||
|  | ||||
|     for await (const file of globber.globGenerator()) { | ||||
|         const relativeFile = path.relative(workspace, file); | ||||
|         core.debug(`Matched: ${relativeFile}`); | ||||
|         // Paths are made relative so the tar entries are all relative to the root of the workspace. | ||||
|         paths.push(`${relativeFile}`); | ||||
| export function resolvePath(filePath: string): string { | ||||
|     if (filePath[0] === "~") { | ||||
|         const home = os.homedir(); | ||||
|         if (!home) { | ||||
|             throw new Error("Unable to resolve `~` to HOME"); | ||||
|         } | ||||
|         return path.join(home, filePath.slice(1)); | ||||
|     } | ||||
|  | ||||
|     return paths; | ||||
|     return path.resolve(filePath); | ||||
| } | ||||
|  | ||||
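`resolvePath` is what gives the v1 action its `~` support for the single `path` input. A quick illustration (outputs depend on the runner's `HOME` and working directory):

```typescript
import * as os from "os";
import * as path from "path";

// Mirror of resolvePath above, runnable standalone
// (the missing-HOME guard from the original is omitted for brevity).
function resolvePath(filePath: string): string {
    if (filePath[0] === "~") {
        return path.join(os.homedir(), filePath.slice(1));
    }
    return path.resolve(filePath);
}

console.log(resolvePath("~/.npm"));       // e.g. /home/runner/.npm
console.log(resolvePath("node_modules")); // e.g. <cwd>/node_modules
```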
| export function getSupportedEvents(): string[] { | ||||
| @ -119,48 +105,3 @@ export function isValidEvent(): boolean { | ||||
|     const githubEvent = process.env[Events.Key] || ""; | ||||
|     return getSupportedEvents().includes(githubEvent); | ||||
| } | ||||
|  | ||||
| export function unlinkFile(path: fs.PathLike): Promise<void> { | ||||
|     return util.promisify(fs.unlink)(path); | ||||
| } | ||||
|  | ||||
| async function getVersion(app: string): Promise<string> { | ||||
|     core.debug(`Checking ${app} --version`); | ||||
|     let versionOutput = ""; | ||||
|     try { | ||||
|         await exec.exec(`${app} --version`, [], { | ||||
|             ignoreReturnCode: true, | ||||
|             silent: true, | ||||
|             listeners: { | ||||
|                 stdout: (data: Buffer): string => | ||||
|                     (versionOutput += data.toString()), | ||||
|                 stderr: (data: Buffer): string => | ||||
|                     (versionOutput += data.toString()) | ||||
|             } | ||||
|         }); | ||||
|     } catch (err) { | ||||
|         core.debug(err.message); | ||||
|     } | ||||
|  | ||||
|     versionOutput = versionOutput.trim(); | ||||
|     core.debug(versionOutput); | ||||
|     return versionOutput; | ||||
| } | ||||
|  | ||||
| export async function getCompressionMethod(): Promise<CompressionMethod> { | ||||
|     const versionOutput = await getVersion("zstd"); | ||||
|     return versionOutput.toLowerCase().includes("zstd command line interface") | ||||
|         ? CompressionMethod.Zstd | ||||
|         : CompressionMethod.Gzip; | ||||
| } | ||||
|  | ||||
| export function getCacheFileName(compressionMethod: CompressionMethod): string { | ||||
|     return compressionMethod == CompressionMethod.Zstd | ||||
|         ? CacheFilename.Zstd | ||||
|         : CacheFilename.Gzip; | ||||
| } | ||||
|  | ||||
| export async function useGnuTar(): Promise<boolean> { | ||||
|     const versionOutput = await getVersion("tar"); | ||||
|     return versionOutput.toLowerCase().includes("gnu tar"); | ||||
| } | ||||
|  | ||||