Compare commits
	
		
			99 Commits
		
	
	
		
			phantsure/
			...
			v3.2.0-bet
		
	
	| Author | SHA1 | Date | |
|---|---|---|---|
|   | ed5e94a5f5 | ||
|   | 6277f55919 | ||
|   | c30e6dcb11 | ||
|   | 05c9b49ea4 | ||
|   | da311f75a6 | ||
|   | 7a139a9cec | ||
|   | 930f080bad | ||
|   | 56e956426f | ||
|   | 766d8255cd | ||
|   | 686bf424a8 | ||
|   | fb5b333162 | ||
|   | c11ac6c2fe | ||
|   | d5c949690c | ||
|   | 44df5ab77e | ||
|   | 65057ce6fe | ||
|   | 8031e403b4 | ||
|   | c8d01facfc | ||
|   | 81aaae062b | ||
|   | 9d445b2565 | ||
|   | adecab4b4a | ||
|   | 075ad790b0 | ||
|   | b275c83bd6 | ||
|   | 1ddc49105d | ||
|   | 407044787b | ||
|   | 87a7d01109 | ||
|   | 29d6c7aa7f | ||
|   | df53d3c04b | ||
|   | dd740c87de | ||
|   | 0a6e5b052a | ||
|   | a2137c625c | ||
|   | 5a2b5e5714 | ||
|   | 9e9a19bf5f | ||
|   | 84ea3e177d | ||
|   | 00b72c7e02 | ||
|   | 0cc9c1d4e8 | ||
|   | ccf90c02ed | ||
|   | 4f42dc56c1 | ||
|   | b6604364ae | ||
|   | c0cc8dd60a | ||
|   | 91d7bd61be | ||
|   | 5e7f2c1182 | ||
|   | 3d4af52c52 | ||
|   | d91f5bd2fd | ||
|   | 61aa90bfc3 | ||
|   | 0c2d18e609 | ||
|   | 804322aab0 | ||
|   | f426a0deb2 | ||
|   | be72d0432d | ||
|   | 8ecd00a4be | ||
|   | 12a1a354bd | ||
|   | 782b0bd3df | ||
|   | f33ca902b8 | ||
|   | d48d03435b | ||
|   | 6f77edac15 | ||
|   | 6e12d27152 | ||
|   | c346bf01e0 | ||
|   | 593e91a38b | ||
|   | e4c2242eff | ||
|   | 66ef8a0951 | ||
|   | 657c52f11e | ||
|   | 34e917cb7d | ||
|   | ac8fc97c06 | ||
|   | 86712a0733 | ||
|   | d6e98d9302 | ||
|   | a76826ef46 | ||
|   | e02e5113ed | ||
|   | 85ae5bbcea | ||
|   | cce93fb2c7 | ||
|   | e3d8fb0b34 | ||
|   | 020a412c27 | ||
|   | d95c048983 | ||
|   | 706c369cf1 | ||
|   | 11ab7ccfa2 | ||
|   | 4b5f33df54 | ||
|   | 56a0133650 | ||
|   | 19446b165a | ||
|   | 8a88690a20 | ||
|   | 6e2c6a5916 | ||
|   | 2c9fb32186 | ||
|   | 01d96636a0 | ||
|   | 9c5a42a7c9 | ||
|   | a172494938 | ||
|   | f8717682fb | ||
|   | af1210e2a3 | ||
|   | ab0e7714ce | ||
|   | fb4a5dce60 | ||
|   | 71334c58b2 | ||
|   | 888d454557 | ||
|   | dddd7ce07c | ||
|   | abddc4dd44 | ||
|   | 921c58ee44 | ||
|   | 7f45813c72 | ||
|   | 0769f2e443 | ||
|   | 5fe0b944ef | ||
|   | 69b8227b27 | ||
|   | 515d10b4fd | ||
|   | 669e7536d9 | ||
|   | 29dbbce762 | ||
|   | ea5981db97 | 
| @@ -6,7 +6,7 @@ | ||||
| 	// Use 'forwardPorts' to make a list of ports inside the container available locally. | ||||
| 	// "forwardPorts": [], | ||||
| 	// Use 'postCreateCommand' to run commands after the container is created. | ||||
| 	"postCreateCommand": "npm install" | ||||
| 	"postCreateCommand": "npm install && npm run build" | ||||
| 	// Configure tool-specific properties. | ||||
| 	// "customizations": {}, | ||||
| 	// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root. | ||||
|   | ||||
							
								
								
									
										40
									
								
								.github/workflows/codeql.yml
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										40
									
								
								.github/workflows/codeql.yml
									
									
									
									
										vendored
									
									
								
							| @@ -8,39 +8,45 @@ on: | ||||
|  | ||||
| jobs: | ||||
|   CodeQL-Build: | ||||
|     # CodeQL runs on ubuntu-latest, windows-latest, and macos-latest | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     permissions: | ||||
|       # required for all workflows | ||||
|       security-events: write | ||||
|     # CodeQL runs on ubuntu-latest and windows-latest | ||||
|     runs-on: ubuntu-latest | ||||
|  | ||||
|     steps: | ||||
|     - name: Checkout repository | ||||
|       uses: actions/checkout@v3 | ||||
|       with: | ||||
|         # We must fetch at least the immediate parents so that if this is | ||||
|         # a pull request then we can checkout the head. | ||||
|         fetch-depth: 2 | ||||
|  | ||||
|     # If this run was triggered by a pull request event, then checkout | ||||
|     # the head of the pull request instead of the merge commit. | ||||
|     - run: git checkout HEAD^2 | ||||
|       if: ${{ github.event_name == 'pull_request' }} | ||||
|  | ||||
|     # Initializes the CodeQL tools for scanning. | ||||
|     - name: Initialize CodeQL | ||||
|       uses: github/codeql-action/init@v2 | ||||
|       uses: github/codeql-action/init@v1 | ||||
|       # Override language selection by uncommenting this and choosing your languages | ||||
|       # with: | ||||
|       #   languages: go, javascript, csharp, python, cpp, java, ruby | ||||
|       #   languages: go, javascript, csharp, python, cpp, java | ||||
|  | ||||
|     # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java). | ||||
|     # If this step fails, then you should remove it and run the build manually (see below). | ||||
|     # Autobuild attempts to build any compiled languages  (C/C++, C#, or Java). | ||||
|     # If this step fails, then you should remove it and run the build manually (see below) | ||||
|     - name: Autobuild | ||||
|       uses: github/codeql-action/autobuild@v2 | ||||
|       uses: github/codeql-action/autobuild@v1 | ||||
|  | ||||
|     # ℹ️ Command-line programs to run using the OS shell. | ||||
|     # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun | ||||
|     # 📚 https://git.io/JvXDl | ||||
|  | ||||
|     # ✏️ If the Autobuild fails above, remove it and uncomment the following | ||||
|     #    three lines and modify them (or add more) to build your code if your | ||||
|     #    project uses a compiled language | ||||
|     # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines | ||||
|     #    and modify them (or add more) to build your code if your project | ||||
|     #    uses a compiled language | ||||
|  | ||||
|     #- run: | | ||||
|     #     make bootstrap | ||||
|     #     make release | ||||
|     #   make bootstrap | ||||
|     #   make release | ||||
|  | ||||
|     - name: Perform CodeQL Analysis | ||||
|       uses: github/codeql-action/analyze@v2 | ||||
|       uses: github/codeql-action/analyze@v1 | ||||
|   | ||||
							
								
								
									
										2
									
								
								.licenses/npm/@actions/cache.dep.yml
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										2
									
								
								.licenses/npm/@actions/cache.dep.yml
									
									
									
										generated
									
									
									
								
							| @@ -1,6 +1,6 @@ | ||||
| --- | ||||
| name: "@actions/cache" | ||||
| version: 3.1.1 | ||||
| version: 3.0.5 | ||||
| type: npm | ||||
| summary: | ||||
| homepage: | ||||
|   | ||||
| @@ -27,7 +27,6 @@ See ["Caching dependencies to speed up workflows"](https://docs.github.com/en/ac | ||||
| * Fixed the download stuck problem by introducing a timeout of 1 hour for cache downloads. | ||||
| * Fix zstd not working for windows on gnu tar in issues. | ||||
| * Allowing users to provide a custom timeout as input for aborting download of a cache segment using an environment variable `SEGMENT_DOWNLOAD_TIMEOUT_MINS`. Default is 60 minutes. | ||||
| * Two new actions available for granular control over caches - [restore](restore/action.yml) and [save](save/action.yml) | ||||
|  | ||||
| Refer [here](https://github.com/actions/cache/blob/v2/README.md) for previous versions | ||||
|  | ||||
|   | ||||
							
								
								
									
										11
									
								
								RELEASES.md
									
									
									
									
									
								
							
							
						
						
									
										11
									
								
								RELEASES.md
									
									
									
									
									
								
							| @@ -52,14 +52,3 @@ | ||||
|  | ||||
| ### 3.2.0-beta.1 | ||||
| - Added two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache. | ||||
|  | ||||
| ### 3.2.0 | ||||
| - Released the two new actions - [restore](restore/action.yml) and [save](save/action.yml) for granular control on cache | ||||
|  | ||||
| ### 3.2.1 | ||||
| - Update `@actions/cache` on windows to use gnu tar and zstd by default and fallback to bsdtar and zstd if gnu tar is not available. ([issue](https://github.com/actions/cache/issues/984)) | ||||
| - Added support for fallback to gzip to restore old caches on windows. | ||||
| - Added logs for cache version in case of a cache miss. | ||||
|  | ||||
| ### 3.2.2 | ||||
| - Reverted the changes made in 3.2.1 to use gnu tar and zstd by default on windows. | ||||
| @@ -91,31 +91,3 @@ test("save with valid inputs uploads a cache", async () => { | ||||
|  | ||||
|     expect(failedMock).toHaveBeenCalledTimes(0); | ||||
| }); | ||||
|  | ||||
| test("save failing logs the warning message", async () => { | ||||
|     const warningMock = jest.spyOn(core, "warning"); | ||||
|  | ||||
|     const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; | ||||
|  | ||||
|     const inputPath = "node_modules"; | ||||
|     testUtils.setInput(Inputs.Key, primaryKey); | ||||
|     testUtils.setInput(Inputs.Path, inputPath); | ||||
|     testUtils.setInput(Inputs.UploadChunkSize, "4000000"); | ||||
|  | ||||
|     const cacheId = -1; | ||||
|     const saveCacheMock = jest | ||||
|         .spyOn(cache, "saveCache") | ||||
|         .mockImplementationOnce(() => { | ||||
|             return Promise.resolve(cacheId); | ||||
|         }); | ||||
|  | ||||
|     await run(); | ||||
|  | ||||
|     expect(saveCacheMock).toHaveBeenCalledTimes(1); | ||||
|     expect(saveCacheMock).toHaveBeenCalledWith([inputPath], primaryKey, { | ||||
|         uploadChunkSize: 4000000 | ||||
|     }); | ||||
|  | ||||
|     expect(warningMock).toHaveBeenCalledTimes(1); | ||||
|     expect(warningMock).toHaveBeenCalledWith("Cache save failed."); | ||||
| }); | ||||
|   | ||||
| @@ -25,15 +25,16 @@ afterEach(() => { | ||||
| }); | ||||
|  | ||||
| test("StateProvider saves states", async () => { | ||||
|     const states = new Map<string, string>(); | ||||
|     const getStateMock = jest | ||||
|         .spyOn(core, "getState") | ||||
|         .mockImplementation(key => states.get(key) || ""); | ||||
|         .mockImplementation(name => | ||||
|             jest.requireActual("@actions/core").getState(name) | ||||
|         ); | ||||
|  | ||||
|     const saveStateMock = jest | ||||
|         .spyOn(core, "saveState") | ||||
|         .mockImplementation((key, value) => { | ||||
|             states.set(key, value); | ||||
|             return jest.requireActual("@actions/core").saveState(key, value); | ||||
|         }); | ||||
|  | ||||
|     const setOutputMock = jest | ||||
| @@ -47,11 +48,9 @@ test("StateProvider saves states", async () => { | ||||
|     const stateProvider: IStateProvider = new StateProvider(); | ||||
|     stateProvider.setState("stateKey", "stateValue"); | ||||
|     stateProvider.setState(State.CacheMatchedKey, cacheMatchedKey); | ||||
|     const stateValue = stateProvider.getState("stateKey"); | ||||
|     const cacheStateValue = stateProvider.getCacheState(); | ||||
|     stateProvider.getState("stateKey"); | ||||
|     stateProvider.getCacheState(); | ||||
|  | ||||
|     expect(stateValue).toBe("stateValue"); | ||||
|     expect(cacheStateValue).toBe(cacheMatchedKey); | ||||
|     expect(getStateMock).toHaveBeenCalledTimes(2); | ||||
|     expect(saveStateMock).toHaveBeenCalledTimes(2); | ||||
|     expect(setOutputMock).toHaveBeenCalledTimes(0); | ||||
|   | ||||
							
								
								
									
										334
									
								
								dist/restore-only/index.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										334
									
								
								dist/restore-only/index.js
									
									
									
									
										vendored
									
									
								
							| @@ -1177,10 +1177,6 @@ function getVersion(app) { | ||||
| // Use zstandard if possible to maximize cache performance
 | ||||
| function getCompressionMethod() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { | ||||
|             // Disable zstd due to bug https://github.com/actions/cache/issues/301
 | ||||
|             return constants_1.CompressionMethod.Gzip; | ||||
|         } | ||||
|         const versionOutput = yield getVersion('zstd'); | ||||
|         const version = semver.clean(versionOutput); | ||||
|         if (!versionOutput.toLowerCase().includes('zstd command line interface')) { | ||||
| @@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) { | ||||
|         : constants_1.CacheFilename.Zstd; | ||||
| } | ||||
| exports.getCacheFileName = getCacheFileName; | ||||
| function isGnuTarInstalled() { | ||||
| function getGnuTarPathOnWindows() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { | ||||
|             return constants_1.GnuTarPathOnWindows; | ||||
|         } | ||||
|         const versionOutput = yield getVersion('tar'); | ||||
|         return versionOutput.toLowerCase().includes('gnu tar'); | ||||
|         return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; | ||||
|     }); | ||||
| } | ||||
| exports.isGnuTarInstalled = isGnuTarInstalled; | ||||
| exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; | ||||
| function assertDefined(name, value) { | ||||
|     if (value === undefined) { | ||||
|         throw Error(`Expected ${name} but value was undefiend`); | ||||
| @@ -3433,10 +3432,7 @@ function getCacheEntry(keys, paths, options) { | ||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; | ||||
|         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||
|         if (response.statusCode === 204) { | ||||
|             // List cache for primary key only if cache miss occurs
 | ||||
|             if (core.isDebug()) { | ||||
|                 yield printCachesListForDiagnostics(keys[0], httpClient, version); | ||||
|             } | ||||
|             // Cache not found
 | ||||
|             return null; | ||||
|         } | ||||
|         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { | ||||
| @@ -3445,6 +3441,7 @@ function getCacheEntry(keys, paths, options) { | ||||
|         const cacheResult = response.result; | ||||
|         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; | ||||
|         if (!cacheDownloadUrl) { | ||||
|             // Cache achiveLocation not found. This should never happen, and hence bail out.
 | ||||
|             throw new Error('Cache not found.'); | ||||
|         } | ||||
|         core.setSecret(cacheDownloadUrl); | ||||
| @@ -3454,22 +3451,6 @@ function getCacheEntry(keys, paths, options) { | ||||
|     }); | ||||
| } | ||||
| exports.getCacheEntry = getCacheEntry; | ||||
| function printCachesListForDiagnostics(key, httpClient, version) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const resource = `caches?key=${encodeURIComponent(key)}`; | ||||
|         const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||
|         if (response.statusCode === 200) { | ||||
|             const cacheListResult = response.result; | ||||
|             const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; | ||||
|             if (totalCount && totalCount > 0) { | ||||
|                 core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); | ||||
|                 for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { | ||||
|                     core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| function downloadCache(archiveLocation, archivePath, options) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const archiveUrl = new url_1.URL(archiveLocation); | ||||
| @@ -38222,21 +38203,19 @@ const path = __importStar(__webpack_require__(622)); | ||||
| const utils = __importStar(__webpack_require__(15)); | ||||
| const constants_1 = __webpack_require__(931); | ||||
| const IS_WINDOWS = process.platform === 'win32'; | ||||
| function getTarPath(args, compressionMethod) { | ||||
| // Returns tar path and type: BSD or GNU
 | ||||
| function getTarPath() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         switch (process.platform) { | ||||
|             case 'win32': { | ||||
|                 const systemTar = `${process.env['windir']}\\System32\\tar.exe`; | ||||
|                 if (compressionMethod !== constants_1.CompressionMethod.Gzip) { | ||||
|                     // We only use zstandard compression on windows when gnu tar is installed due to
 | ||||
|                     // a bug with compressing large files with bsdtar + zstd
 | ||||
|                     args.push('--force-local'); | ||||
|                 const gnuTar = yield utils.getGnuTarPathOnWindows(); | ||||
|                 const systemTar = constants_1.SystemTarPathOnWindows; | ||||
|                 if (gnuTar) { | ||||
|                     // Use GNUtar as default on windows
 | ||||
|                     return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; | ||||
|                 } | ||||
|                 else if (fs_1.existsSync(systemTar)) { | ||||
|                     return systemTar; | ||||
|                 } | ||||
|                 else if (yield utils.isGnuTarInstalled()) { | ||||
|                     args.push('--force-local'); | ||||
|                     return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
| @@ -38244,25 +38223,92 @@ function getTarPath(args, compressionMethod) { | ||||
|                 const gnuTar = yield io.which('gtar', false); | ||||
|                 if (gnuTar) { | ||||
|                     // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
 | ||||
|                     args.push('--delay-directory-restore'); | ||||
|                     return gnuTar; | ||||
|                     return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; | ||||
|                 } | ||||
|                 else { | ||||
|                     return { | ||||
|                         path: yield io.which('tar', true), | ||||
|                         type: constants_1.ArchiveToolType.BSD | ||||
|                     }; | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
|             default: | ||||
|                 break; | ||||
|         } | ||||
|         return yield io.which('tar', true); | ||||
|         // Default assumption is GNU tar is present in path
 | ||||
|         return { | ||||
|             path: yield io.which('tar', true), | ||||
|             type: constants_1.ArchiveToolType.GNU | ||||
|         }; | ||||
|     }); | ||||
| } | ||||
| function execTar(args, compressionMethod, cwd) { | ||||
| // Return arguments for tar as per tarPath, compressionMethod, method type and os
 | ||||
| function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         try { | ||||
|             yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); | ||||
|         const args = [`"${tarPath.path}"`]; | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         const tarFile = 'cache.tar'; | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         // Speficic args for BSD tar on windows for workaround
 | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         // Method specific args
 | ||||
|         switch (type) { | ||||
|             case 'create': | ||||
|                 args.push('--posix', '-cf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); | ||||
|                 break; | ||||
|             case 'extract': | ||||
|                 args.push('-xf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); | ||||
|                 break; | ||||
|             case 'list': | ||||
|                 args.push('-tf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); | ||||
|                 break; | ||||
|         } | ||||
|         catch (error) { | ||||
|             throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); | ||||
|         // Platform specific args
 | ||||
|         if (tarPath.type === constants_1.ArchiveToolType.GNU) { | ||||
|             switch (process.platform) { | ||||
|                 case 'win32': | ||||
|                     args.push('--force-local'); | ||||
|                     break; | ||||
|                 case 'darwin': | ||||
|                     args.push('--delay-directory-restore'); | ||||
|                     break; | ||||
|             } | ||||
|         } | ||||
|         return args; | ||||
|     }); | ||||
| } | ||||
| // Returns commands to run tar and compression program
 | ||||
| function getCommands(compressionMethod, type, archivePath = '') { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         let args; | ||||
|         const tarPath = yield getTarPath(); | ||||
|         const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); | ||||
|         const compressionArgs = type !== 'create' | ||||
|             ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) | ||||
|             : yield getCompressionProgram(tarPath, compressionMethod); | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         if (BSD_TAR_ZSTD && type !== 'create') { | ||||
|             args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; | ||||
|         } | ||||
|         else { | ||||
|             args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; | ||||
|         } | ||||
|         if (BSD_TAR_ZSTD) { | ||||
|             return args; | ||||
|         } | ||||
|         return [args.join(' ')]; | ||||
|     }); | ||||
| } | ||||
| function getWorkingDirectory() { | ||||
| @@ -38270,91 +38316,116 @@ function getWorkingDirectory() { | ||||
|     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); | ||||
| } | ||||
| // Common function for extractTar and listTar to get the compression method
 | ||||
| function getCompressionProgram(compressionMethod) { | ||||
|     // -d: Decompress.
 | ||||
|     // unzstd is equivalent to 'zstd -d'
 | ||||
|     // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|     // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|     switch (compressionMethod) { | ||||
|         case constants_1.CompressionMethod.Zstd: | ||||
|             return [ | ||||
|                 '--use-compress-program', | ||||
|                 IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' | ||||
|             ]; | ||||
|         case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|             return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; | ||||
|         default: | ||||
|             return ['-z']; | ||||
|     } | ||||
| function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // -d: Decompress.
 | ||||
|         // unzstd is equivalent to 'zstd -d'
 | ||||
|         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|         // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         switch (compressionMethod) { | ||||
|             case constants_1.CompressionMethod.Zstd: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -d --long=30 -o', | ||||
|                         constants_1.TarFilename, | ||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|                     ] | ||||
|                     : [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' | ||||
|                     ]; | ||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -d -o', | ||||
|                         constants_1.TarFilename, | ||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|                     ] | ||||
|                     : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; | ||||
|             default: | ||||
|                 return ['-z']; | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // Used for creating the archive
 | ||||
| // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 | ||||
| // zstdmt is equivalent to 'zstd -T0'
 | ||||
| // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
| // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
| // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
 | ||||
| function getCompressionProgram(tarPath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         switch (compressionMethod) { | ||||
|             case constants_1.CompressionMethod.Zstd: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -T0 --long=30 -o', | ||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|                         constants_1.TarFilename | ||||
|                     ] | ||||
|                     : [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' | ||||
|                     ]; | ||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -T0 -o', | ||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|                         constants_1.TarFilename | ||||
|                     ] | ||||
|                     : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; | ||||
|             default: | ||||
|                 return ['-z']; | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // Executes all commands as separate processes
 | ||||
| function execCommands(commands, cwd) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         for (const command of commands) { | ||||
|             try { | ||||
|                 yield exec_1.exec(command, undefined, { cwd }); | ||||
|             } | ||||
|             catch (error) { | ||||
|                 throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // List the contents of a tar
 | ||||
| function listTar(archivePath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const args = [ | ||||
|             ...getCompressionProgram(compressionMethod), | ||||
|             '-tf', | ||||
|             archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P' | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod); | ||||
|         const commands = yield getCommands(compressionMethod, 'list', archivePath); | ||||
|         yield execCommands(commands); | ||||
|     }); | ||||
| } | ||||
| exports.listTar = listTar; | ||||
| // Extract a tar
 | ||||
| function extractTar(archivePath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Create directory to extract tar into
 | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         yield io.mkdirP(workingDirectory); | ||||
|         const args = [ | ||||
|             ...getCompressionProgram(compressionMethod), | ||||
|             '-xf', | ||||
|             archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P', | ||||
|             '-C', | ||||
|             workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod); | ||||
|         const commands = yield getCommands(compressionMethod, 'extract', archivePath); | ||||
|         yield execCommands(commands); | ||||
|     }); | ||||
| } | ||||
| exports.extractTar = extractTar; | ||||
| // Create a tar
 | ||||
| function createTar(archiveFolder, sourceDirectories, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Write source directories to manifest.txt to avoid command length limits
 | ||||
|         const manifestFilename = 'manifest.txt'; | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 | ||||
|         // zstdmt is equivalent to 'zstd -T0'
 | ||||
|         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|         // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|         // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
 | ||||
|         function getCompressionProgram() { | ||||
|             switch (compressionMethod) { | ||||
|                 case constants_1.CompressionMethod.Zstd: | ||||
|                     return [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' | ||||
|                     ]; | ||||
|                 case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                     return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt']; | ||||
|                 default: | ||||
|                     return ['-z']; | ||||
|             } | ||||
|         } | ||||
|         const args = [ | ||||
|             '--posix', | ||||
|             ...getCompressionProgram(), | ||||
|             '-cf', | ||||
|             cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '--exclude', | ||||
|             cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P', | ||||
|             '-C', | ||||
|             workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '--files-from', | ||||
|             manifestFilename | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod, archiveFolder); | ||||
|         fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); | ||||
|         const commands = yield getCommands(compressionMethod, 'create'); | ||||
|         yield execCommands(commands, archiveFolder); | ||||
|     }); | ||||
| } | ||||
| exports.createTar = createTar; | ||||
| @@ -47104,6 +47175,7 @@ const path = __importStar(__webpack_require__(622)); | ||||
| const utils = __importStar(__webpack_require__(15)); | ||||
| const cacheHttpClient = __importStar(__webpack_require__(114)); | ||||
| const tar_1 = __webpack_require__(434); | ||||
| const constants_1 = __webpack_require__(931); | ||||
| class ValidationError extends Error { | ||||
|     constructor(message) { | ||||
|         super(message); | ||||
| @@ -47165,16 +47237,31 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { | ||||
|         for (const key of keys) { | ||||
|             checkKey(key); | ||||
|         } | ||||
|         const compressionMethod = yield utils.getCompressionMethod(); | ||||
|         let cacheEntry; | ||||
|         let compressionMethod = yield utils.getCompressionMethod(); | ||||
|         let archivePath = ''; | ||||
|         try { | ||||
|             // path are needed to compute version
 | ||||
|             const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|             cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|                 compressionMethod | ||||
|             }); | ||||
|             if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||
|                 // Cache not found
 | ||||
|                 return undefined; | ||||
|                 // This is to support the old cache entry created by gzip on windows.
 | ||||
|                 if (process.platform === 'win32' && | ||||
|                     compressionMethod !== constants_1.CompressionMethod.Gzip) { | ||||
|                     compressionMethod = constants_1.CompressionMethod.Gzip; | ||||
|                     cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|                         compressionMethod | ||||
|                     }); | ||||
|                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||
|                         return undefined; | ||||
|                     } | ||||
|                     core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); | ||||
|                 } | ||||
|                 else { | ||||
|                     // Cache not found
 | ||||
|                     return undefined; | ||||
|                 } | ||||
|             } | ||||
|             archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); | ||||
|             core.debug(`Archive Path: ${archivePath}`); | ||||
| @@ -53255,6 +53342,11 @@ var CompressionMethod; | ||||
|     CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; | ||||
|     CompressionMethod["Zstd"] = "zstd"; | ||||
| })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); | ||||
| var ArchiveToolType; | ||||
| (function (ArchiveToolType) { | ||||
|     ArchiveToolType["GNU"] = "gnu"; | ||||
|     ArchiveToolType["BSD"] = "bsd"; | ||||
| })(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); | ||||
| // The default number of retry attempts.
 | ||||
| exports.DefaultRetryAttempts = 2; | ||||
| // The default delay in milliseconds between retry attempts.
 | ||||
| @@ -53263,6 +53355,12 @@ exports.DefaultRetryDelay = 5000; | ||||
| // over the socket during this period, the socket is destroyed and the download
 | ||||
| // is aborted.
 | ||||
| exports.SocketTimeout = 5000; | ||||
| // The default path of GNUtar on hosted Windows runners
 | ||||
| exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; | ||||
| // The default path of BSDtar on hosted Windows runners
 | ||||
| exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; | ||||
| exports.TarFilename = 'cache.tar'; | ||||
| exports.ManifestFilename = 'manifest.txt'; | ||||
| //# sourceMappingURL=constants.js.map
 | ||||
| 
 | ||||
| /***/ }), | ||||
|   | ||||
							
								
								
									
										334
									
								
								dist/restore/index.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										334
									
								
								dist/restore/index.js
									
									
									
									
										vendored
									
									
								
							| @@ -1177,10 +1177,6 @@ function getVersion(app) { | ||||
| // Use zstandard if possible to maximize cache performance
 | ||||
| function getCompressionMethod() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { | ||||
|             // Disable zstd due to bug https://github.com/actions/cache/issues/301
 | ||||
|             return constants_1.CompressionMethod.Gzip; | ||||
|         } | ||||
|         const versionOutput = yield getVersion('zstd'); | ||||
|         const version = semver.clean(versionOutput); | ||||
|         if (!versionOutput.toLowerCase().includes('zstd command line interface')) { | ||||
| @@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) { | ||||
|         : constants_1.CacheFilename.Zstd; | ||||
| } | ||||
| exports.getCacheFileName = getCacheFileName; | ||||
| function isGnuTarInstalled() { | ||||
| function getGnuTarPathOnWindows() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { | ||||
|             return constants_1.GnuTarPathOnWindows; | ||||
|         } | ||||
|         const versionOutput = yield getVersion('tar'); | ||||
|         return versionOutput.toLowerCase().includes('gnu tar'); | ||||
|         return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; | ||||
|     }); | ||||
| } | ||||
| exports.isGnuTarInstalled = isGnuTarInstalled; | ||||
| exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; | ||||
| function assertDefined(name, value) { | ||||
|     if (value === undefined) { | ||||
|         throw Error(`Expected ${name} but value was undefiend`); | ||||
| @@ -3433,10 +3432,7 @@ function getCacheEntry(keys, paths, options) { | ||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; | ||||
|         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||
|         if (response.statusCode === 204) { | ||||
|             // List cache for primary key only if cache miss occurs
 | ||||
|             if (core.isDebug()) { | ||||
|                 yield printCachesListForDiagnostics(keys[0], httpClient, version); | ||||
|             } | ||||
|             // Cache not found
 | ||||
|             return null; | ||||
|         } | ||||
|         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { | ||||
| @@ -3445,6 +3441,7 @@ function getCacheEntry(keys, paths, options) { | ||||
|         const cacheResult = response.result; | ||||
|         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; | ||||
|         if (!cacheDownloadUrl) { | ||||
|             // Cache achiveLocation not found. This should never happen, and hence bail out.
 | ||||
|             throw new Error('Cache not found.'); | ||||
|         } | ||||
|         core.setSecret(cacheDownloadUrl); | ||||
| @@ -3454,22 +3451,6 @@ function getCacheEntry(keys, paths, options) { | ||||
|     }); | ||||
| } | ||||
| exports.getCacheEntry = getCacheEntry; | ||||
| function printCachesListForDiagnostics(key, httpClient, version) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const resource = `caches?key=${encodeURIComponent(key)}`; | ||||
|         const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||
|         if (response.statusCode === 200) { | ||||
|             const cacheListResult = response.result; | ||||
|             const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; | ||||
|             if (totalCount && totalCount > 0) { | ||||
|                 core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); | ||||
|                 for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { | ||||
|                     core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| function downloadCache(archiveLocation, archivePath, options) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const archiveUrl = new url_1.URL(archiveLocation); | ||||
| @@ -38135,21 +38116,19 @@ const path = __importStar(__webpack_require__(622)); | ||||
| const utils = __importStar(__webpack_require__(15)); | ||||
| const constants_1 = __webpack_require__(931); | ||||
| const IS_WINDOWS = process.platform === 'win32'; | ||||
| function getTarPath(args, compressionMethod) { | ||||
| // Returns tar path and type: BSD or GNU
 | ||||
| function getTarPath() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         switch (process.platform) { | ||||
|             case 'win32': { | ||||
|                 const systemTar = `${process.env['windir']}\\System32\\tar.exe`; | ||||
|                 if (compressionMethod !== constants_1.CompressionMethod.Gzip) { | ||||
|                     // We only use zstandard compression on windows when gnu tar is installed due to
 | ||||
|                     // a bug with compressing large files with bsdtar + zstd
 | ||||
|                     args.push('--force-local'); | ||||
|                 const gnuTar = yield utils.getGnuTarPathOnWindows(); | ||||
|                 const systemTar = constants_1.SystemTarPathOnWindows; | ||||
|                 if (gnuTar) { | ||||
|                     // Use GNUtar as default on windows
 | ||||
|                     return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; | ||||
|                 } | ||||
|                 else if (fs_1.existsSync(systemTar)) { | ||||
|                     return systemTar; | ||||
|                 } | ||||
|                 else if (yield utils.isGnuTarInstalled()) { | ||||
|                     args.push('--force-local'); | ||||
|                     return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
| @@ -38157,25 +38136,92 @@ function getTarPath(args, compressionMethod) { | ||||
|                 const gnuTar = yield io.which('gtar', false); | ||||
|                 if (gnuTar) { | ||||
|                     // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
 | ||||
|                     args.push('--delay-directory-restore'); | ||||
|                     return gnuTar; | ||||
|                     return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; | ||||
|                 } | ||||
|                 else { | ||||
|                     return { | ||||
|                         path: yield io.which('tar', true), | ||||
|                         type: constants_1.ArchiveToolType.BSD | ||||
|                     }; | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
|             default: | ||||
|                 break; | ||||
|         } | ||||
|         return yield io.which('tar', true); | ||||
|         // Default assumption is GNU tar is present in path
 | ||||
|         return { | ||||
|             path: yield io.which('tar', true), | ||||
|             type: constants_1.ArchiveToolType.GNU | ||||
|         }; | ||||
|     }); | ||||
| } | ||||
| function execTar(args, compressionMethod, cwd) { | ||||
| // Return arguments for tar as per tarPath, compressionMethod, method type and os
 | ||||
| function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         try { | ||||
|             yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); | ||||
|         const args = [`"${tarPath.path}"`]; | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         const tarFile = 'cache.tar'; | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         // Speficic args for BSD tar on windows for workaround
 | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         // Method specific args
 | ||||
|         switch (type) { | ||||
|             case 'create': | ||||
|                 args.push('--posix', '-cf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); | ||||
|                 break; | ||||
|             case 'extract': | ||||
|                 args.push('-xf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); | ||||
|                 break; | ||||
|             case 'list': | ||||
|                 args.push('-tf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); | ||||
|                 break; | ||||
|         } | ||||
|         catch (error) { | ||||
|             throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); | ||||
|         // Platform specific args
 | ||||
|         if (tarPath.type === constants_1.ArchiveToolType.GNU) { | ||||
|             switch (process.platform) { | ||||
|                 case 'win32': | ||||
|                     args.push('--force-local'); | ||||
|                     break; | ||||
|                 case 'darwin': | ||||
|                     args.push('--delay-directory-restore'); | ||||
|                     break; | ||||
|             } | ||||
|         } | ||||
|         return args; | ||||
|     }); | ||||
| } | ||||
| // Returns commands to run tar and compression program
 | ||||
| function getCommands(compressionMethod, type, archivePath = '') { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         let args; | ||||
|         const tarPath = yield getTarPath(); | ||||
|         const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); | ||||
|         const compressionArgs = type !== 'create' | ||||
|             ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) | ||||
|             : yield getCompressionProgram(tarPath, compressionMethod); | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         if (BSD_TAR_ZSTD && type !== 'create') { | ||||
|             args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; | ||||
|         } | ||||
|         else { | ||||
|             args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; | ||||
|         } | ||||
|         if (BSD_TAR_ZSTD) { | ||||
|             return args; | ||||
|         } | ||||
|         return [args.join(' ')]; | ||||
|     }); | ||||
| } | ||||
| function getWorkingDirectory() { | ||||
| @@ -38183,91 +38229,116 @@ function getWorkingDirectory() { | ||||
|     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); | ||||
| } | ||||
| // Common function for extractTar and listTar to get the compression method
 | ||||
| function getCompressionProgram(compressionMethod) { | ||||
|     // -d: Decompress.
 | ||||
|     // unzstd is equivalent to 'zstd -d'
 | ||||
|     // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|     // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|     switch (compressionMethod) { | ||||
|         case constants_1.CompressionMethod.Zstd: | ||||
|             return [ | ||||
|                 '--use-compress-program', | ||||
|                 IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' | ||||
|             ]; | ||||
|         case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|             return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; | ||||
|         default: | ||||
|             return ['-z']; | ||||
|     } | ||||
| function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // -d: Decompress.
 | ||||
|         // unzstd is equivalent to 'zstd -d'
 | ||||
|         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|         // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         switch (compressionMethod) { | ||||
|             case constants_1.CompressionMethod.Zstd: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -d --long=30 -o', | ||||
|                         constants_1.TarFilename, | ||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|                     ] | ||||
|                     : [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' | ||||
|                     ]; | ||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -d -o', | ||||
|                         constants_1.TarFilename, | ||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|                     ] | ||||
|                     : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; | ||||
|             default: | ||||
|                 return ['-z']; | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // Used for creating the archive
 | ||||
| // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 | ||||
| // zstdmt is equivalent to 'zstd -T0'
 | ||||
| // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
| // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
| // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
 | ||||
| function getCompressionProgram(tarPath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         switch (compressionMethod) { | ||||
|             case constants_1.CompressionMethod.Zstd: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -T0 --long=30 -o', | ||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|                         constants_1.TarFilename | ||||
|                     ] | ||||
|                     : [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' | ||||
|                     ]; | ||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -T0 -o', | ||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|                         constants_1.TarFilename | ||||
|                     ] | ||||
|                     : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; | ||||
|             default: | ||||
|                 return ['-z']; | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // Executes all commands as separate processes
 | ||||
| function execCommands(commands, cwd) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         for (const command of commands) { | ||||
|             try { | ||||
|                 yield exec_1.exec(command, undefined, { cwd }); | ||||
|             } | ||||
|             catch (error) { | ||||
|                 throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // List the contents of a tar
 | ||||
| function listTar(archivePath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const args = [ | ||||
|             ...getCompressionProgram(compressionMethod), | ||||
|             '-tf', | ||||
|             archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P' | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod); | ||||
|         const commands = yield getCommands(compressionMethod, 'list', archivePath); | ||||
|         yield execCommands(commands); | ||||
|     }); | ||||
| } | ||||
| exports.listTar = listTar; | ||||
| // Extract a tar
 | ||||
| function extractTar(archivePath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Create directory to extract tar into
 | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         yield io.mkdirP(workingDirectory); | ||||
|         const args = [ | ||||
|             ...getCompressionProgram(compressionMethod), | ||||
|             '-xf', | ||||
|             archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P', | ||||
|             '-C', | ||||
|             workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod); | ||||
|         const commands = yield getCommands(compressionMethod, 'extract', archivePath); | ||||
|         yield execCommands(commands); | ||||
|     }); | ||||
| } | ||||
| exports.extractTar = extractTar; | ||||
| // Create a tar
 | ||||
| function createTar(archiveFolder, sourceDirectories, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Write source directories to manifest.txt to avoid command length limits
 | ||||
|         const manifestFilename = 'manifest.txt'; | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 | ||||
|         // zstdmt is equivalent to 'zstd -T0'
 | ||||
|         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|         // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|         // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
 | ||||
|         function getCompressionProgram() { | ||||
|             switch (compressionMethod) { | ||||
|                 case constants_1.CompressionMethod.Zstd: | ||||
|                     return [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' | ||||
|                     ]; | ||||
|                 case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                     return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt']; | ||||
|                 default: | ||||
|                     return ['-z']; | ||||
|             } | ||||
|         } | ||||
|         const args = [ | ||||
|             '--posix', | ||||
|             ...getCompressionProgram(), | ||||
|             '-cf', | ||||
|             cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '--exclude', | ||||
|             cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P', | ||||
|             '-C', | ||||
|             workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '--files-from', | ||||
|             manifestFilename | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod, archiveFolder); | ||||
|         fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); | ||||
|         const commands = yield getCommands(compressionMethod, 'create'); | ||||
|         yield execCommands(commands, archiveFolder); | ||||
|     }); | ||||
| } | ||||
| exports.createTar = createTar; | ||||
| @@ -47075,6 +47146,7 @@ const path = __importStar(__webpack_require__(622)); | ||||
| const utils = __importStar(__webpack_require__(15)); | ||||
| const cacheHttpClient = __importStar(__webpack_require__(114)); | ||||
| const tar_1 = __webpack_require__(434); | ||||
| const constants_1 = __webpack_require__(931); | ||||
| class ValidationError extends Error { | ||||
|     constructor(message) { | ||||
|         super(message); | ||||
| @@ -47136,16 +47208,31 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { | ||||
|         for (const key of keys) { | ||||
|             checkKey(key); | ||||
|         } | ||||
|         const compressionMethod = yield utils.getCompressionMethod(); | ||||
|         let cacheEntry; | ||||
|         let compressionMethod = yield utils.getCompressionMethod(); | ||||
|         let archivePath = ''; | ||||
|         try { | ||||
|             // path are needed to compute version
 | ||||
|             const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|             cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|                 compressionMethod | ||||
|             }); | ||||
|             if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||
|                 // Cache not found
 | ||||
|                 return undefined; | ||||
|                 // This is to support the old cache entry created by gzip on windows.
 | ||||
|                 if (process.platform === 'win32' && | ||||
|                     compressionMethod !== constants_1.CompressionMethod.Gzip) { | ||||
|                     compressionMethod = constants_1.CompressionMethod.Gzip; | ||||
|                     cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|                         compressionMethod | ||||
|                     }); | ||||
|                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||
|                         return undefined; | ||||
|                     } | ||||
|                     core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); | ||||
|                 } | ||||
|                 else { | ||||
|                     // Cache not found
 | ||||
|                     return undefined; | ||||
|                 } | ||||
|             } | ||||
|             archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); | ||||
|             core.debug(`Archive Path: ${archivePath}`); | ||||
| @@ -53255,6 +53342,11 @@ var CompressionMethod; | ||||
|     CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; | ||||
|     CompressionMethod["Zstd"] = "zstd"; | ||||
| })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); | ||||
| var ArchiveToolType; | ||||
| (function (ArchiveToolType) { | ||||
|     ArchiveToolType["GNU"] = "gnu"; | ||||
|     ArchiveToolType["BSD"] = "bsd"; | ||||
| })(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); | ||||
| // The default number of retry attempts.
 | ||||
| exports.DefaultRetryAttempts = 2; | ||||
| // The default delay in milliseconds between retry attempts.
 | ||||
| @@ -53263,6 +53355,12 @@ exports.DefaultRetryDelay = 5000; | ||||
| // over the socket during this period, the socket is destroyed and the download
 | ||||
| // is aborted.
 | ||||
| exports.SocketTimeout = 5000; | ||||
| // The default path of GNUtar on hosted Windows runners
 | ||||
| exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; | ||||
| // The default path of BSDtar on hosted Windows runners
 | ||||
| exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; | ||||
| exports.TarFilename = 'cache.tar'; | ||||
| exports.ManifestFilename = 'manifest.txt'; | ||||
| //# sourceMappingURL=constants.js.map
 | ||||
| 
 | ||||
| /***/ }), | ||||
|   | ||||
							
								
								
									
										367
									
								
								dist/save-only/index.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										367
									
								
								dist/save-only/index.js
									
									
									
									
										vendored
									
									
								
							| @@ -1043,29 +1043,6 @@ class ExecState extends events.EventEmitter { | ||||
| 
 | ||||
| "use strict"; | ||||
| 
 | ||||
| var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { | ||||
|     if (k2 === undefined) k2 = k; | ||||
|     var desc = Object.getOwnPropertyDescriptor(m, k); | ||||
|     if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { | ||||
|       desc = { enumerable: true, get: function() { return m[k]; } }; | ||||
|     } | ||||
|     Object.defineProperty(o, k2, desc); | ||||
| }) : (function(o, m, k, k2) { | ||||
|     if (k2 === undefined) k2 = k; | ||||
|     o[k2] = m[k]; | ||||
| })); | ||||
| var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { | ||||
|     Object.defineProperty(o, "default", { enumerable: true, value: v }); | ||||
| }) : function(o, v) { | ||||
|     o["default"] = v; | ||||
| }); | ||||
| var __importStar = (this && this.__importStar) || function (mod) { | ||||
|     if (mod && mod.__esModule) return mod; | ||||
|     var result = {}; | ||||
|     if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); | ||||
|     __setModuleDefault(result, mod); | ||||
|     return result; | ||||
| }; | ||||
| var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { | ||||
|     function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } | ||||
|     return new (P || (P = Promise))(function (resolve, reject) { | ||||
| @@ -1079,15 +1056,11 @@ var __importDefault = (this && this.__importDefault) || function (mod) { | ||||
|     return (mod && mod.__esModule) ? mod : { "default": mod }; | ||||
| }; | ||||
| Object.defineProperty(exports, "__esModule", { value: true }); | ||||
| const core = __importStar(__webpack_require__(470)); | ||||
| const saveImpl_1 = __importDefault(__webpack_require__(471)); | ||||
| const stateProvider_1 = __webpack_require__(309); | ||||
| function run() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const cacheId = yield (0, saveImpl_1.default)(new stateProvider_1.NullStateProvider()); | ||||
|         if (cacheId === -1) { | ||||
|             core.warning(`Cache save failed.`); | ||||
|         } | ||||
|         yield (0, saveImpl_1.default)(new stateProvider_1.NullStateProvider()); | ||||
|     }); | ||||
| } | ||||
| run(); | ||||
| @@ -1233,10 +1206,6 @@ function getVersion(app) { | ||||
| // Use zstandard if possible to maximize cache performance
 | ||||
| function getCompressionMethod() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { | ||||
|             // Disable zstd due to bug https://github.com/actions/cache/issues/301
 | ||||
|             return constants_1.CompressionMethod.Gzip; | ||||
|         } | ||||
|         const versionOutput = yield getVersion('zstd'); | ||||
|         const version = semver.clean(versionOutput); | ||||
|         if (!versionOutput.toLowerCase().includes('zstd command line interface')) { | ||||
| @@ -1260,13 +1229,16 @@ function getCacheFileName(compressionMethod) { | ||||
|         : constants_1.CacheFilename.Zstd; | ||||
| } | ||||
| exports.getCacheFileName = getCacheFileName; | ||||
| function isGnuTarInstalled() { | ||||
| function getGnuTarPathOnWindows() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { | ||||
|             return constants_1.GnuTarPathOnWindows; | ||||
|         } | ||||
|         const versionOutput = yield getVersion('tar'); | ||||
|         return versionOutput.toLowerCase().includes('gnu tar'); | ||||
|         return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; | ||||
|     }); | ||||
| } | ||||
| exports.isGnuTarInstalled = isGnuTarInstalled; | ||||
| exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; | ||||
| function assertDefined(name, value) { | ||||
|     if (value === undefined) { | ||||
|         throw Error(`Expected ${name} but value was undefiend`); | ||||
| @@ -3489,10 +3461,7 @@ function getCacheEntry(keys, paths, options) { | ||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; | ||||
|         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||
|         if (response.statusCode === 204) { | ||||
|             // List cache for primary key only if cache miss occurs
 | ||||
|             if (core.isDebug()) { | ||||
|                 yield printCachesListForDiagnostics(keys[0], httpClient, version); | ||||
|             } | ||||
|             // Cache not found
 | ||||
|             return null; | ||||
|         } | ||||
|         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { | ||||
| @@ -3501,6 +3470,7 @@ function getCacheEntry(keys, paths, options) { | ||||
|         const cacheResult = response.result; | ||||
|         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; | ||||
|         if (!cacheDownloadUrl) { | ||||
|             // Cache achiveLocation not found. This should never happen, and hence bail out.
 | ||||
|             throw new Error('Cache not found.'); | ||||
|         } | ||||
|         core.setSecret(cacheDownloadUrl); | ||||
| @@ -3510,22 +3480,6 @@ function getCacheEntry(keys, paths, options) { | ||||
|     }); | ||||
| } | ||||
| exports.getCacheEntry = getCacheEntry; | ||||
| function printCachesListForDiagnostics(key, httpClient, version) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const resource = `caches?key=${encodeURIComponent(key)}`; | ||||
|         const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||
|         if (response.statusCode === 200) { | ||||
|             const cacheListResult = response.result; | ||||
|             const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; | ||||
|             if (totalCount && totalCount > 0) { | ||||
|                 core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); | ||||
|                 for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { | ||||
|                     core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| function downloadCache(archiveLocation, archivePath, options) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const archiveUrl = new url_1.URL(archiveLocation); | ||||
| @@ -38186,21 +38140,19 @@ const path = __importStar(__webpack_require__(622)); | ||||
| const utils = __importStar(__webpack_require__(15)); | ||||
| const constants_1 = __webpack_require__(931); | ||||
| const IS_WINDOWS = process.platform === 'win32'; | ||||
| function getTarPath(args, compressionMethod) { | ||||
| // Returns tar path and type: BSD or GNU
 | ||||
| function getTarPath() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         switch (process.platform) { | ||||
|             case 'win32': { | ||||
|                 const systemTar = `${process.env['windir']}\\System32\\tar.exe`; | ||||
|                 if (compressionMethod !== constants_1.CompressionMethod.Gzip) { | ||||
|                     // We only use zstandard compression on windows when gnu tar is installed due to
 | ||||
|                     // a bug with compressing large files with bsdtar + zstd
 | ||||
|                     args.push('--force-local'); | ||||
|                 const gnuTar = yield utils.getGnuTarPathOnWindows(); | ||||
|                 const systemTar = constants_1.SystemTarPathOnWindows; | ||||
|                 if (gnuTar) { | ||||
|                     // Use GNUtar as default on windows
 | ||||
|                     return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; | ||||
|                 } | ||||
|                 else if (fs_1.existsSync(systemTar)) { | ||||
|                     return systemTar; | ||||
|                 } | ||||
|                 else if (yield utils.isGnuTarInstalled()) { | ||||
|                     args.push('--force-local'); | ||||
|                     return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
| @@ -38208,25 +38160,92 @@ function getTarPath(args, compressionMethod) { | ||||
|                 const gnuTar = yield io.which('gtar', false); | ||||
|                 if (gnuTar) { | ||||
|                     // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
 | ||||
|                     args.push('--delay-directory-restore'); | ||||
|                     return gnuTar; | ||||
|                     return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; | ||||
|                 } | ||||
|                 else { | ||||
|                     return { | ||||
|                         path: yield io.which('tar', true), | ||||
|                         type: constants_1.ArchiveToolType.BSD | ||||
|                     }; | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
|             default: | ||||
|                 break; | ||||
|         } | ||||
|         return yield io.which('tar', true); | ||||
|         // Default assumption is GNU tar is present in path
 | ||||
|         return { | ||||
|             path: yield io.which('tar', true), | ||||
|             type: constants_1.ArchiveToolType.GNU | ||||
|         }; | ||||
|     }); | ||||
| } | ||||
| function execTar(args, compressionMethod, cwd) { | ||||
| // Return arguments for tar as per tarPath, compressionMethod, method type and os
 | ||||
| function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         try { | ||||
|             yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); | ||||
|         const args = [`"${tarPath.path}"`]; | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         const tarFile = 'cache.tar'; | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         // Speficic args for BSD tar on windows for workaround
 | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         // Method specific args
 | ||||
|         switch (type) { | ||||
|             case 'create': | ||||
|                 args.push('--posix', '-cf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); | ||||
|                 break; | ||||
|             case 'extract': | ||||
|                 args.push('-xf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); | ||||
|                 break; | ||||
|             case 'list': | ||||
|                 args.push('-tf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); | ||||
|                 break; | ||||
|         } | ||||
|         catch (error) { | ||||
|             throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); | ||||
|         // Platform specific args
 | ||||
|         if (tarPath.type === constants_1.ArchiveToolType.GNU) { | ||||
|             switch (process.platform) { | ||||
|                 case 'win32': | ||||
|                     args.push('--force-local'); | ||||
|                     break; | ||||
|                 case 'darwin': | ||||
|                     args.push('--delay-directory-restore'); | ||||
|                     break; | ||||
|             } | ||||
|         } | ||||
|         return args; | ||||
|     }); | ||||
| } | ||||
| // Returns commands to run tar and compression program
 | ||||
| function getCommands(compressionMethod, type, archivePath = '') { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         let args; | ||||
|         const tarPath = yield getTarPath(); | ||||
|         const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); | ||||
|         const compressionArgs = type !== 'create' | ||||
|             ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) | ||||
|             : yield getCompressionProgram(tarPath, compressionMethod); | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         if (BSD_TAR_ZSTD && type !== 'create') { | ||||
|             args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; | ||||
|         } | ||||
|         else { | ||||
|             args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; | ||||
|         } | ||||
|         if (BSD_TAR_ZSTD) { | ||||
|             return args; | ||||
|         } | ||||
|         return [args.join(' ')]; | ||||
|     }); | ||||
| } | ||||
| function getWorkingDirectory() { | ||||
| @@ -38234,91 +38253,116 @@ function getWorkingDirectory() { | ||||
|     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); | ||||
| } | ||||
| // Common function for extractTar and listTar to get the compression method
 | ||||
| function getCompressionProgram(compressionMethod) { | ||||
|     // -d: Decompress.
 | ||||
|     // unzstd is equivalent to 'zstd -d'
 | ||||
|     // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|     // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|     switch (compressionMethod) { | ||||
|         case constants_1.CompressionMethod.Zstd: | ||||
|             return [ | ||||
|                 '--use-compress-program', | ||||
|                 IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' | ||||
|             ]; | ||||
|         case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|             return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; | ||||
|         default: | ||||
|             return ['-z']; | ||||
|     } | ||||
| function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // -d: Decompress.
 | ||||
|         // unzstd is equivalent to 'zstd -d'
 | ||||
|         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|         // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         switch (compressionMethod) { | ||||
|             case constants_1.CompressionMethod.Zstd: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -d --long=30 -o', | ||||
|                         constants_1.TarFilename, | ||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|                     ] | ||||
|                     : [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' | ||||
|                     ]; | ||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -d -o', | ||||
|                         constants_1.TarFilename, | ||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|                     ] | ||||
|                     : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; | ||||
|             default: | ||||
|                 return ['-z']; | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // Used for creating the archive
 | ||||
| // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 | ||||
| // zstdmt is equivalent to 'zstd -T0'
 | ||||
| // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
| // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
| // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
 | ||||
| function getCompressionProgram(tarPath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         switch (compressionMethod) { | ||||
|             case constants_1.CompressionMethod.Zstd: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -T0 --long=30 -o', | ||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|                         constants_1.TarFilename | ||||
|                     ] | ||||
|                     : [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' | ||||
|                     ]; | ||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -T0 -o', | ||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|                         constants_1.TarFilename | ||||
|                     ] | ||||
|                     : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; | ||||
|             default: | ||||
|                 return ['-z']; | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // Executes all commands as separate processes
 | ||||
| function execCommands(commands, cwd) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         for (const command of commands) { | ||||
|             try { | ||||
|                 yield exec_1.exec(command, undefined, { cwd }); | ||||
|             } | ||||
|             catch (error) { | ||||
|                 throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // List the contents of a tar
 | ||||
| function listTar(archivePath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const args = [ | ||||
|             ...getCompressionProgram(compressionMethod), | ||||
|             '-tf', | ||||
|             archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P' | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod); | ||||
|         const commands = yield getCommands(compressionMethod, 'list', archivePath); | ||||
|         yield execCommands(commands); | ||||
|     }); | ||||
| } | ||||
| exports.listTar = listTar; | ||||
| // Extract a tar
 | ||||
| function extractTar(archivePath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Create directory to extract tar into
 | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         yield io.mkdirP(workingDirectory); | ||||
|         const args = [ | ||||
|             ...getCompressionProgram(compressionMethod), | ||||
|             '-xf', | ||||
|             archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P', | ||||
|             '-C', | ||||
|             workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod); | ||||
|         const commands = yield getCommands(compressionMethod, 'extract', archivePath); | ||||
|         yield execCommands(commands); | ||||
|     }); | ||||
| } | ||||
| exports.extractTar = extractTar; | ||||
| // Create a tar
 | ||||
| function createTar(archiveFolder, sourceDirectories, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Write source directories to manifest.txt to avoid command length limits
 | ||||
|         const manifestFilename = 'manifest.txt'; | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 | ||||
|         // zstdmt is equivalent to 'zstd -T0'
 | ||||
|         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|         // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|         // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
 | ||||
|         function getCompressionProgram() { | ||||
|             switch (compressionMethod) { | ||||
|                 case constants_1.CompressionMethod.Zstd: | ||||
|                     return [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' | ||||
|                     ]; | ||||
|                 case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                     return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt']; | ||||
|                 default: | ||||
|                     return ['-z']; | ||||
|             } | ||||
|         } | ||||
|         const args = [ | ||||
|             '--posix', | ||||
|             ...getCompressionProgram(), | ||||
|             '-cf', | ||||
|             cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '--exclude', | ||||
|             cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P', | ||||
|             '-C', | ||||
|             workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '--files-from', | ||||
|             manifestFilename | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod, archiveFolder); | ||||
|         fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); | ||||
|         const commands = yield getCommands(compressionMethod, 'create'); | ||||
|         yield execCommands(commands, archiveFolder); | ||||
|     }); | ||||
| } | ||||
| exports.createTar = createTar; | ||||
| @@ -41048,7 +41092,6 @@ const utils = __importStar(__webpack_require__(443)); | ||||
| process.on("uncaughtException", e => utils.logWarning(e.message)); | ||||
| function saveImpl(stateProvider) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         let cacheId = -1; | ||||
|         try { | ||||
|             if (!utils.isCacheFeatureAvailable()) { | ||||
|                 return; | ||||
| @@ -41075,7 +41118,7 @@ function saveImpl(stateProvider) { | ||||
|             const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, { | ||||
|                 required: true | ||||
|             }); | ||||
|             cacheId = yield cache.saveCache(cachePaths, primaryKey, { | ||||
|             const cacheId = yield cache.saveCache(cachePaths, primaryKey, { | ||||
|                 uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize) | ||||
|             }); | ||||
|             if (cacheId != -1) { | ||||
| @@ -41085,7 +41128,6 @@ function saveImpl(stateProvider) { | ||||
|         catch (error) { | ||||
|             utils.logWarning(error.message); | ||||
|         } | ||||
|         return cacheId; | ||||
|     }); | ||||
| } | ||||
| exports.default = saveImpl; | ||||
| @@ -47217,6 +47259,7 @@ const path = __importStar(__webpack_require__(622)); | ||||
| const utils = __importStar(__webpack_require__(15)); | ||||
| const cacheHttpClient = __importStar(__webpack_require__(114)); | ||||
| const tar_1 = __webpack_require__(434); | ||||
| const constants_1 = __webpack_require__(931); | ||||
| class ValidationError extends Error { | ||||
|     constructor(message) { | ||||
|         super(message); | ||||
| @@ -47278,16 +47321,31 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { | ||||
|         for (const key of keys) { | ||||
|             checkKey(key); | ||||
|         } | ||||
|         const compressionMethod = yield utils.getCompressionMethod(); | ||||
|         let cacheEntry; | ||||
|         let compressionMethod = yield utils.getCompressionMethod(); | ||||
|         let archivePath = ''; | ||||
|         try { | ||||
|             // path are needed to compute version
 | ||||
|             const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|             cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|                 compressionMethod | ||||
|             }); | ||||
|             if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||
|                 // Cache not found
 | ||||
|                 return undefined; | ||||
|                 // This is to support the old cache entry created by gzip on windows.
 | ||||
|                 if (process.platform === 'win32' && | ||||
|                     compressionMethod !== constants_1.CompressionMethod.Gzip) { | ||||
|                     compressionMethod = constants_1.CompressionMethod.Gzip; | ||||
|                     cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|                         compressionMethod | ||||
|                     }); | ||||
|                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||
|                         return undefined; | ||||
|                     } | ||||
|                     core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); | ||||
|                 } | ||||
|                 else { | ||||
|                     // Cache not found
 | ||||
|                     return undefined; | ||||
|                 } | ||||
|             } | ||||
|             archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); | ||||
|             core.debug(`Archive Path: ${archivePath}`); | ||||
| @@ -53290,6 +53348,11 @@ var CompressionMethod; | ||||
|     CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; | ||||
|     CompressionMethod["Zstd"] = "zstd"; | ||||
| })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); | ||||
| var ArchiveToolType; | ||||
| (function (ArchiveToolType) { | ||||
|     ArchiveToolType["GNU"] = "gnu"; | ||||
|     ArchiveToolType["BSD"] = "bsd"; | ||||
| })(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); | ||||
| // The default number of retry attempts.
 | ||||
| exports.DefaultRetryAttempts = 2; | ||||
| // The default delay in milliseconds between retry attempts.
 | ||||
| @@ -53298,6 +53361,12 @@ exports.DefaultRetryDelay = 5000; | ||||
| // over the socket during this period, the socket is destroyed and the download
 | ||||
| // is aborted.
 | ||||
| exports.SocketTimeout = 5000; | ||||
| // The default path of GNUtar on hosted Windows runners
 | ||||
| exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; | ||||
| // The default path of BSDtar on hosted Windows runners
 | ||||
| exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; | ||||
| exports.TarFilename = 'cache.tar'; | ||||
| exports.ManifestFilename = 'manifest.txt'; | ||||
| //# sourceMappingURL=constants.js.map
 | ||||
| 
 | ||||
| /***/ }), | ||||
|   | ||||
							
								
								
									
										338
									
								
								dist/save/index.js
									
									
									
									
										vendored
									
									
								
							
							
						
						
									
										338
									
								
								dist/save/index.js
									
									
									
									
										vendored
									
									
								
							| @@ -1177,10 +1177,6 @@ function getVersion(app) { | ||||
| // Use zstandard if possible to maximize cache performance
 | ||||
| function getCompressionMethod() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         if (process.platform === 'win32' && !(yield isGnuTarInstalled())) { | ||||
|             // Disable zstd due to bug https://github.com/actions/cache/issues/301
 | ||||
|             return constants_1.CompressionMethod.Gzip; | ||||
|         } | ||||
|         const versionOutput = yield getVersion('zstd'); | ||||
|         const version = semver.clean(versionOutput); | ||||
|         if (!versionOutput.toLowerCase().includes('zstd command line interface')) { | ||||
| @@ -1204,13 +1200,16 @@ function getCacheFileName(compressionMethod) { | ||||
|         : constants_1.CacheFilename.Zstd; | ||||
| } | ||||
| exports.getCacheFileName = getCacheFileName; | ||||
| function isGnuTarInstalled() { | ||||
| function getGnuTarPathOnWindows() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         if (fs.existsSync(constants_1.GnuTarPathOnWindows)) { | ||||
|             return constants_1.GnuTarPathOnWindows; | ||||
|         } | ||||
|         const versionOutput = yield getVersion('tar'); | ||||
|         return versionOutput.toLowerCase().includes('gnu tar'); | ||||
|         return versionOutput.toLowerCase().includes('gnu tar') ? io.which('tar') : ''; | ||||
|     }); | ||||
| } | ||||
| exports.isGnuTarInstalled = isGnuTarInstalled; | ||||
| exports.getGnuTarPathOnWindows = getGnuTarPathOnWindows; | ||||
| function assertDefined(name, value) { | ||||
|     if (value === undefined) { | ||||
|         throw Error(`Expected ${name} but value was undefiend`); | ||||
| @@ -3433,10 +3432,7 @@ function getCacheEntry(keys, paths, options) { | ||||
|         const resource = `cache?keys=${encodeURIComponent(keys.join(','))}&version=${version}`; | ||||
|         const response = yield requestUtils_1.retryTypedResponse('getCacheEntry', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||
|         if (response.statusCode === 204) { | ||||
|             // List cache for primary key only if cache miss occurs
 | ||||
|             if (core.isDebug()) { | ||||
|                 yield printCachesListForDiagnostics(keys[0], httpClient, version); | ||||
|             } | ||||
|             // Cache not found
 | ||||
|             return null; | ||||
|         } | ||||
|         if (!requestUtils_1.isSuccessStatusCode(response.statusCode)) { | ||||
| @@ -3445,6 +3441,7 @@ function getCacheEntry(keys, paths, options) { | ||||
|         const cacheResult = response.result; | ||||
|         const cacheDownloadUrl = cacheResult === null || cacheResult === void 0 ? void 0 : cacheResult.archiveLocation; | ||||
|         if (!cacheDownloadUrl) { | ||||
|             // Cache archiveLocation not found. This should never happen, and hence bail out.
 | ||||
|             throw new Error('Cache not found.'); | ||||
|         } | ||||
|         core.setSecret(cacheDownloadUrl); | ||||
| @@ -3454,22 +3451,6 @@ function getCacheEntry(keys, paths, options) { | ||||
|     }); | ||||
| } | ||||
| exports.getCacheEntry = getCacheEntry; | ||||
| function printCachesListForDiagnostics(key, httpClient, version) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const resource = `caches?key=${encodeURIComponent(key)}`; | ||||
|         const response = yield requestUtils_1.retryTypedResponse('listCache', () => __awaiter(this, void 0, void 0, function* () { return httpClient.getJson(getCacheApiUrl(resource)); })); | ||||
|         if (response.statusCode === 200) { | ||||
|             const cacheListResult = response.result; | ||||
|             const totalCount = cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.totalCount; | ||||
|             if (totalCount && totalCount > 0) { | ||||
|                 core.debug(`No matching cache found for cache key '${key}', version '${version} and scope ${process.env['GITHUB_REF']}. There exist one or more cache(s) with similar key but they have different version or scope. See more info on cache matching here: https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#matching-a-cache-key \nOther caches with similar key:`); | ||||
|                 for (const cacheEntry of (cacheListResult === null || cacheListResult === void 0 ? void 0 : cacheListResult.artifactCaches) || []) { | ||||
|                     core.debug(`Cache Key: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheKey}, Cache Version: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.cacheVersion}, Cache Scope: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.scope}, Cache Created: ${cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.creationTime}`); | ||||
|                 } | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| function downloadCache(archiveLocation, archivePath, options) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const archiveUrl = new url_1.URL(archiveLocation); | ||||
| @@ -38130,21 +38111,19 @@ const path = __importStar(__webpack_require__(622)); | ||||
| const utils = __importStar(__webpack_require__(15)); | ||||
| const constants_1 = __webpack_require__(931); | ||||
| const IS_WINDOWS = process.platform === 'win32'; | ||||
| function getTarPath(args, compressionMethod) { | ||||
| // Returns tar path and type: BSD or GNU
 | ||||
| function getTarPath() { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         switch (process.platform) { | ||||
|             case 'win32': { | ||||
|                 const systemTar = `${process.env['windir']}\\System32\\tar.exe`; | ||||
|                 if (compressionMethod !== constants_1.CompressionMethod.Gzip) { | ||||
|                     // We only use zstandard compression on windows when gnu tar is installed due to
 | ||||
|                     // a bug with compressing large files with bsdtar + zstd
 | ||||
|                     args.push('--force-local'); | ||||
|                 const gnuTar = yield utils.getGnuTarPathOnWindows(); | ||||
|                 const systemTar = constants_1.SystemTarPathOnWindows; | ||||
|                 if (gnuTar) { | ||||
|                     // Use GNUtar as default on windows
 | ||||
|                     return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; | ||||
|                 } | ||||
|                 else if (fs_1.existsSync(systemTar)) { | ||||
|                     return systemTar; | ||||
|                 } | ||||
|                 else if (yield utils.isGnuTarInstalled()) { | ||||
|                     args.push('--force-local'); | ||||
|                     return { path: systemTar, type: constants_1.ArchiveToolType.BSD }; | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
| @@ -38152,25 +38131,92 @@ function getTarPath(args, compressionMethod) { | ||||
|                 const gnuTar = yield io.which('gtar', false); | ||||
|                 if (gnuTar) { | ||||
|                     // fix permission denied errors when extracting BSD tar archive with GNU tar - https://github.com/actions/cache/issues/527
 | ||||
|                     args.push('--delay-directory-restore'); | ||||
|                     return gnuTar; | ||||
|                     return { path: gnuTar, type: constants_1.ArchiveToolType.GNU }; | ||||
|                 } | ||||
|                 else { | ||||
|                     return { | ||||
|                         path: yield io.which('tar', true), | ||||
|                         type: constants_1.ArchiveToolType.BSD | ||||
|                     }; | ||||
|                 } | ||||
|                 break; | ||||
|             } | ||||
|             default: | ||||
|                 break; | ||||
|         } | ||||
|         return yield io.which('tar', true); | ||||
|         // Default assumption is GNU tar is present in path
 | ||||
|         return { | ||||
|             path: yield io.which('tar', true), | ||||
|             type: constants_1.ArchiveToolType.GNU | ||||
|         }; | ||||
|     }); | ||||
| } | ||||
| function execTar(args, compressionMethod, cwd) { | ||||
| // Return arguments for tar as per tarPath, compressionMethod, method type and os
 | ||||
| function getTarArgs(tarPath, compressionMethod, type, archivePath = '') { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         try { | ||||
|             yield exec_1.exec(`"${yield getTarPath(args, compressionMethod)}"`, args, { cwd }); | ||||
|         const args = [`"${tarPath.path}"`]; | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         const tarFile = 'cache.tar'; | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         // Specific args for BSD tar on windows for workaround
 | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         // Method specific args
 | ||||
|         switch (type) { | ||||
|             case 'create': | ||||
|                 args.push('--posix', '-cf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--exclude', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '--files-from', constants_1.ManifestFilename); | ||||
|                 break; | ||||
|             case 'extract': | ||||
|                 args.push('-xf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P', '-C', workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/')); | ||||
|                 break; | ||||
|             case 'list': | ||||
|                 args.push('-tf', BSD_TAR_ZSTD | ||||
|                     ? tarFile | ||||
|                     : archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), '-P'); | ||||
|                 break; | ||||
|         } | ||||
|         catch (error) { | ||||
|             throw new Error(`Tar failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); | ||||
|         // Platform specific args
 | ||||
|         if (tarPath.type === constants_1.ArchiveToolType.GNU) { | ||||
|             switch (process.platform) { | ||||
|                 case 'win32': | ||||
|                     args.push('--force-local'); | ||||
|                     break; | ||||
|                 case 'darwin': | ||||
|                     args.push('--delay-directory-restore'); | ||||
|                     break; | ||||
|             } | ||||
|         } | ||||
|         return args; | ||||
|     }); | ||||
| } | ||||
| // Returns commands to run tar and compression program
 | ||||
| function getCommands(compressionMethod, type, archivePath = '') { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         let args; | ||||
|         const tarPath = yield getTarPath(); | ||||
|         const tarArgs = yield getTarArgs(tarPath, compressionMethod, type, archivePath); | ||||
|         const compressionArgs = type !== 'create' | ||||
|             ? yield getDecompressionProgram(tarPath, compressionMethod, archivePath) | ||||
|             : yield getCompressionProgram(tarPath, compressionMethod); | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         if (BSD_TAR_ZSTD && type !== 'create') { | ||||
|             args = [[...compressionArgs].join(' '), [...tarArgs].join(' ')]; | ||||
|         } | ||||
|         else { | ||||
|             args = [[...tarArgs].join(' '), [...compressionArgs].join(' ')]; | ||||
|         } | ||||
|         if (BSD_TAR_ZSTD) { | ||||
|             return args; | ||||
|         } | ||||
|         return [args.join(' ')]; | ||||
|     }); | ||||
| } | ||||
| function getWorkingDirectory() { | ||||
| @@ -38178,91 +38224,116 @@ function getWorkingDirectory() { | ||||
|     return (_a = process.env['GITHUB_WORKSPACE']) !== null && _a !== void 0 ? _a : process.cwd(); | ||||
| } | ||||
| // Common function for extractTar and listTar to get the compression method
 | ||||
| function getCompressionProgram(compressionMethod) { | ||||
|     // -d: Decompress.
 | ||||
|     // unzstd is equivalent to 'zstd -d'
 | ||||
|     // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|     // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|     switch (compressionMethod) { | ||||
|         case constants_1.CompressionMethod.Zstd: | ||||
|             return [ | ||||
|                 '--use-compress-program', | ||||
|                 IS_WINDOWS ? 'zstd -d --long=30' : 'unzstd --long=30' | ||||
|             ]; | ||||
|         case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|             return ['--use-compress-program', IS_WINDOWS ? 'zstd -d' : 'unzstd']; | ||||
|         default: | ||||
|             return ['-z']; | ||||
|     } | ||||
| function getDecompressionProgram(tarPath, compressionMethod, archivePath) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // -d: Decompress.
 | ||||
|         // unzstd is equivalent to 'zstd -d'
 | ||||
|         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|         // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         switch (compressionMethod) { | ||||
|             case constants_1.CompressionMethod.Zstd: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -d --long=30 -o', | ||||
|                         constants_1.TarFilename, | ||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|                     ] | ||||
|                     : [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? '"zstd -d --long=30"' : 'unzstd --long=30' | ||||
|                     ]; | ||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -d -o', | ||||
|                         constants_1.TarFilename, | ||||
|                         archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|                     ] | ||||
|                     : ['--use-compress-program', IS_WINDOWS ? '"zstd -d"' : 'unzstd']; | ||||
|             default: | ||||
|                 return ['-z']; | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // Used for creating the archive
 | ||||
| // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 | ||||
| // zstdmt is equivalent to 'zstd -T0'
 | ||||
| // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
| // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
| // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
 | ||||
| function getCompressionProgram(tarPath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         const BSD_TAR_ZSTD = tarPath.type === constants_1.ArchiveToolType.BSD && | ||||
|             compressionMethod !== constants_1.CompressionMethod.Gzip && | ||||
|             IS_WINDOWS; | ||||
|         switch (compressionMethod) { | ||||
|             case constants_1.CompressionMethod.Zstd: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -T0 --long=30 -o', | ||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|                         constants_1.TarFilename | ||||
|                     ] | ||||
|                     : [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? '"zstd -T0 --long=30"' : 'zstdmt --long=30' | ||||
|                     ]; | ||||
|             case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                 return BSD_TAR_ZSTD | ||||
|                     ? [ | ||||
|                         'zstd -T0 -o', | ||||
|                         cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|                         constants_1.TarFilename | ||||
|                     ] | ||||
|                     : ['--use-compress-program', IS_WINDOWS ? '"zstd -T0"' : 'zstdmt']; | ||||
|             default: | ||||
|                 return ['-z']; | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // Executes all commands as separate processes
 | ||||
| function execCommands(commands, cwd) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         for (const command of commands) { | ||||
|             try { | ||||
|                 yield exec_1.exec(command, undefined, { cwd }); | ||||
|             } | ||||
|             catch (error) { | ||||
|                 throw new Error(`${command.split(' ')[0]} failed with error: ${error === null || error === void 0 ? void 0 : error.message}`); | ||||
|             } | ||||
|         } | ||||
|     }); | ||||
| } | ||||
| // List the contents of a tar
 | ||||
| function listTar(archivePath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         const args = [ | ||||
|             ...getCompressionProgram(compressionMethod), | ||||
|             '-tf', | ||||
|             archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P' | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod); | ||||
|         const commands = yield getCommands(compressionMethod, 'list', archivePath); | ||||
|         yield execCommands(commands); | ||||
|     }); | ||||
| } | ||||
| exports.listTar = listTar; | ||||
| // Extract a tar
 | ||||
| function extractTar(archivePath, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Create directory to extract tar into
 | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         yield io.mkdirP(workingDirectory); | ||||
|         const args = [ | ||||
|             ...getCompressionProgram(compressionMethod), | ||||
|             '-xf', | ||||
|             archivePath.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P', | ||||
|             '-C', | ||||
|             workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/') | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod); | ||||
|         const commands = yield getCommands(compressionMethod, 'extract', archivePath); | ||||
|         yield execCommands(commands); | ||||
|     }); | ||||
| } | ||||
| exports.extractTar = extractTar; | ||||
| // Create a tar
 | ||||
| function createTar(archiveFolder, sourceDirectories, compressionMethod) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         // Write source directories to manifest.txt to avoid command length limits
 | ||||
|         const manifestFilename = 'manifest.txt'; | ||||
|         const cacheFileName = utils.getCacheFileName(compressionMethod); | ||||
|         fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join('\n')); | ||||
|         const workingDirectory = getWorkingDirectory(); | ||||
|         // -T#: Compress using # working thread. If # is 0, attempt to detect and use the number of physical CPU cores.
 | ||||
|         // zstdmt is equivalent to 'zstd -T0'
 | ||||
|         // --long=#: Enables long distance matching with # bits. Maximum is 30 (1GB) on 32-bit OS and 31 (2GB) on 64-bit.
 | ||||
|         // Using 30 here because we also support 32-bit self-hosted runners.
 | ||||
|         // Long range mode is added to zstd in v1.3.2 release, so we will not use --long in older version of zstd.
 | ||||
|         function getCompressionProgram() { | ||||
|             switch (compressionMethod) { | ||||
|                 case constants_1.CompressionMethod.Zstd: | ||||
|                     return [ | ||||
|                         '--use-compress-program', | ||||
|                         IS_WINDOWS ? 'zstd -T0 --long=30' : 'zstdmt --long=30' | ||||
|                     ]; | ||||
|                 case constants_1.CompressionMethod.ZstdWithoutLong: | ||||
|                     return ['--use-compress-program', IS_WINDOWS ? 'zstd -T0' : 'zstdmt']; | ||||
|                 default: | ||||
|                     return ['-z']; | ||||
|             } | ||||
|         } | ||||
|         const args = [ | ||||
|             '--posix', | ||||
|             ...getCompressionProgram(), | ||||
|             '-cf', | ||||
|             cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '--exclude', | ||||
|             cacheFileName.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '-P', | ||||
|             '-C', | ||||
|             workingDirectory.replace(new RegExp(`\\${path.sep}`, 'g'), '/'), | ||||
|             '--files-from', | ||||
|             manifestFilename | ||||
|         ]; | ||||
|         yield execTar(args, compressionMethod, archiveFolder); | ||||
|         fs_1.writeFileSync(path.join(archiveFolder, constants_1.ManifestFilename), sourceDirectories.join('\n')); | ||||
|         const commands = yield getCommands(compressionMethod, 'create'); | ||||
|         yield execCommands(commands, archiveFolder); | ||||
|     }); | ||||
| } | ||||
| exports.createTar = createTar; | ||||
| @@ -40992,7 +41063,6 @@ const utils = __importStar(__webpack_require__(443)); | ||||
| process.on("uncaughtException", e => utils.logWarning(e.message)); | ||||
| function saveImpl(stateProvider) { | ||||
|     return __awaiter(this, void 0, void 0, function* () { | ||||
|         let cacheId = -1; | ||||
|         try { | ||||
|             if (!utils.isCacheFeatureAvailable()) { | ||||
|                 return; | ||||
| @@ -41019,7 +41089,7 @@ function saveImpl(stateProvider) { | ||||
|             const cachePaths = utils.getInputAsArray(constants_1.Inputs.Path, { | ||||
|                 required: true | ||||
|             }); | ||||
|             cacheId = yield cache.saveCache(cachePaths, primaryKey, { | ||||
|             const cacheId = yield cache.saveCache(cachePaths, primaryKey, { | ||||
|                 uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize) | ||||
|             }); | ||||
|             if (cacheId != -1) { | ||||
| @@ -41029,7 +41099,6 @@ function saveImpl(stateProvider) { | ||||
|         catch (error) { | ||||
|             utils.logWarning(error.message); | ||||
|         } | ||||
|         return cacheId; | ||||
|     }); | ||||
| } | ||||
| exports.default = saveImpl; | ||||
| @@ -47190,6 +47259,7 @@ const path = __importStar(__webpack_require__(622)); | ||||
| const utils = __importStar(__webpack_require__(15)); | ||||
| const cacheHttpClient = __importStar(__webpack_require__(114)); | ||||
| const tar_1 = __webpack_require__(434); | ||||
| const constants_1 = __webpack_require__(931); | ||||
| class ValidationError extends Error { | ||||
|     constructor(message) { | ||||
|         super(message); | ||||
| @@ -47251,16 +47321,31 @@ function restoreCache(paths, primaryKey, restoreKeys, options) { | ||||
|         for (const key of keys) { | ||||
|             checkKey(key); | ||||
|         } | ||||
|         const compressionMethod = yield utils.getCompressionMethod(); | ||||
|         let cacheEntry; | ||||
|         let compressionMethod = yield utils.getCompressionMethod(); | ||||
|         let archivePath = ''; | ||||
|         try { | ||||
|             // path are needed to compute version
 | ||||
|             const cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|             cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|                 compressionMethod | ||||
|             }); | ||||
|             if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||
|                 // Cache not found
 | ||||
|                 return undefined; | ||||
|                 // This is to support the old cache entry created by gzip on windows.
 | ||||
|                 if (process.platform === 'win32' && | ||||
|                     compressionMethod !== constants_1.CompressionMethod.Gzip) { | ||||
|                     compressionMethod = constants_1.CompressionMethod.Gzip; | ||||
|                     cacheEntry = yield cacheHttpClient.getCacheEntry(keys, paths, { | ||||
|                         compressionMethod | ||||
|                     }); | ||||
|                     if (!(cacheEntry === null || cacheEntry === void 0 ? void 0 : cacheEntry.archiveLocation)) { | ||||
|                         return undefined; | ||||
|                     } | ||||
|                     core.debug("Couldn't find cache entry with zstd compression, falling back to gzip compression."); | ||||
|                 } | ||||
|                 else { | ||||
|                     // Cache not found
 | ||||
|                     return undefined; | ||||
|                 } | ||||
|             } | ||||
|             archivePath = path.join(yield utils.createTempDirectory(), utils.getCacheFileName(compressionMethod)); | ||||
|             core.debug(`Archive Path: ${archivePath}`); | ||||
| @@ -53263,6 +53348,11 @@ var CompressionMethod; | ||||
|     CompressionMethod["ZstdWithoutLong"] = "zstd-without-long"; | ||||
|     CompressionMethod["Zstd"] = "zstd"; | ||||
| })(CompressionMethod = exports.CompressionMethod || (exports.CompressionMethod = {})); | ||||
| var ArchiveToolType; | ||||
| (function (ArchiveToolType) { | ||||
|     ArchiveToolType["GNU"] = "gnu"; | ||||
|     ArchiveToolType["BSD"] = "bsd"; | ||||
| })(ArchiveToolType = exports.ArchiveToolType || (exports.ArchiveToolType = {})); | ||||
| // The default number of retry attempts.
 | ||||
| exports.DefaultRetryAttempts = 2; | ||||
| // The default delay in milliseconds between retry attempts.
 | ||||
| @@ -53271,6 +53361,12 @@ exports.DefaultRetryDelay = 5000; | ||||
| // over the socket during this period, the socket is destroyed and the download
 | ||||
| // is aborted.
 | ||||
| exports.SocketTimeout = 5000; | ||||
| // The default path of GNUtar on hosted Windows runners
 | ||||
| exports.GnuTarPathOnWindows = `${process.env['PROGRAMFILES']}\\Git\\usr\\bin\\tar.exe`; | ||||
| // The default path of BSDtar on hosted Windows runners
 | ||||
| exports.SystemTarPathOnWindows = `${process.env['SYSTEMDRIVE']}\\Windows\\System32\\tar.exe`; | ||||
| exports.TarFilename = 'cache.tar'; | ||||
| exports.ManifestFilename = 'manifest.txt'; | ||||
| //# sourceMappingURL=constants.js.map
 | ||||
| 
 | ||||
| /***/ }), | ||||
|   | ||||
							
								
								
									
										18
									
								
								package-lock.json
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										18
									
								
								package-lock.json
									
									
									
										generated
									
									
									
								
							| @@ -1,15 +1,15 @@ | ||||
| { | ||||
|   "name": "cache", | ||||
|   "version": "3.2.2", | ||||
|   "version": "3.2.0-beta.1", | ||||
|   "lockfileVersion": 2, | ||||
|   "requires": true, | ||||
|   "packages": { | ||||
|     "": { | ||||
|       "name": "cache", | ||||
|       "version": "3.2.2", | ||||
|       "version": "3.2.0-beta.1", | ||||
|       "license": "MIT", | ||||
|       "dependencies": { | ||||
|         "@actions/cache": "^3.1.1", | ||||
|         "@actions/cache": "3.1.0-beta.3", | ||||
|         "@actions/core": "^1.10.0", | ||||
|         "@actions/exec": "^1.1.1", | ||||
|         "@actions/io": "^1.1.2" | ||||
| @@ -36,9 +36,9 @@ | ||||
|       } | ||||
|     }, | ||||
|     "node_modules/@actions/cache": { | ||||
|       "version": "3.1.1", | ||||
|       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.1.tgz", | ||||
|       "integrity": "sha512-gOUdNap8FvlpoQAMYWiNPi9Ltt7jKWv9RuUVKg9cp/vQA9qTXoKiBkTioUAgIejh/qf7jrojYn3lCyIRIsoSeQ==", | ||||
|       "version": "3.1.0-beta.3", | ||||
|       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz", | ||||
|       "integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==", | ||||
|       "dependencies": { | ||||
|         "@actions/core": "^1.10.0", | ||||
|         "@actions/exec": "^1.0.1", | ||||
| @@ -9722,9 +9722,9 @@ | ||||
|   }, | ||||
|   "dependencies": { | ||||
|     "@actions/cache": { | ||||
|       "version": "3.1.1", | ||||
|       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.1.tgz", | ||||
|       "integrity": "sha512-gOUdNap8FvlpoQAMYWiNPi9Ltt7jKWv9RuUVKg9cp/vQA9qTXoKiBkTioUAgIejh/qf7jrojYn3lCyIRIsoSeQ==", | ||||
|       "version": "3.1.0-beta.3", | ||||
|       "resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.1.0-beta.3.tgz", | ||||
|       "integrity": "sha512-71S1vd0WKLbC2lAe04pCYqTLBjSa8gURtiqnVBCYAt8QVBjOfwa2D3ESf2m8K2xjUxman/Yimdp7CPJDyFnxZg==", | ||||
|       "requires": { | ||||
|         "@actions/core": "^1.10.0", | ||||
|         "@actions/exec": "^1.0.1", | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| { | ||||
|   "name": "cache", | ||||
|   "version": "3.2.2", | ||||
|   "version": "3.2.0-beta.1", | ||||
|   "private": true, | ||||
|   "description": "Cache dependencies and build outputs", | ||||
|   "main": "dist/restore/index.js", | ||||
| @@ -23,7 +23,7 @@ | ||||
|   "author": "GitHub", | ||||
|   "license": "MIT", | ||||
|   "dependencies": { | ||||
|     "@actions/cache": "^3.1.1", | ||||
|     "@actions/cache": "3.1.0-beta.3", | ||||
|     "@actions/core": "^1.10.0", | ||||
|     "@actions/exec": "^1.1.1", | ||||
|     "@actions/io": "^1.1.2" | ||||
|   | ||||
| @@ -120,7 +120,7 @@ steps: | ||||
|  | ||||
| #### Reusing primary key and restored key in the save action | ||||
|  | ||||
| Usually you may want to use same `key` in both `actions/cache/restore` and `actions/cache/save` action. To achieve this, use `outputs` from the restore action to reuse the same primary key (or the key of the cache that was restored). | ||||
| Usually you may want to use same `key` in both `actions/cache/restore` and `actions/cache/save` action. To achieve this, use `outputs` from the restore action to reuse the same primary key (or the key of the cache that was restored). | ||||
|  | ||||
| #### Using restore action outputs to make save action behave just like the cache action | ||||
|  | ||||
|   | ||||
| @@ -54,7 +54,7 @@ Case 1: Where a user would want to reuse the key as it is | ||||
| ```yaml | ||||
| uses: actions/cache/save@v3 | ||||
| with: | ||||
|     key: ${{ steps.restore-cache.outputs.key }} | ||||
|     key: ${{ steps.restore-cache.outputs.key }} | ||||
| ``` | ||||
|  | ||||
| Case 2: Where the user would want to re-evaluate the key | ||||
|   | ||||
| @@ -10,8 +10,7 @@ import * as utils from "./utils/actionUtils"; | ||||
| // throw an uncaught exception.  Instead of failing this action, just warn. | ||||
| process.on("uncaughtException", e => utils.logWarning(e.message)); | ||||
|  | ||||
| async function saveImpl(stateProvider: IStateProvider): Promise<number | void> { | ||||
|     let cacheId = -1; | ||||
| async function saveImpl(stateProvider: IStateProvider): Promise<void> { | ||||
|     try { | ||||
|         if (!utils.isCacheFeatureAvailable()) { | ||||
|             return; | ||||
| @@ -52,7 +51,7 @@ async function saveImpl(stateProvider: IStateProvider): Promise<number | void> { | ||||
|             required: true | ||||
|         }); | ||||
|  | ||||
|         cacheId = await cache.saveCache(cachePaths, primaryKey, { | ||||
|         const cacheId = await cache.saveCache(cachePaths, primaryKey, { | ||||
|             uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize) | ||||
|         }); | ||||
|  | ||||
| @@ -62,7 +61,6 @@ async function saveImpl(stateProvider: IStateProvider): Promise<number | void> { | ||||
|     } catch (error: unknown) { | ||||
|         utils.logWarning((error as Error).message); | ||||
|     } | ||||
|     return cacheId; | ||||
| } | ||||
|  | ||||
| export default saveImpl; | ||||
|   | ||||
| @@ -1,13 +1,8 @@ | ||||
| import * as core from "@actions/core"; | ||||
|  | ||||
| import saveImpl from "./saveImpl"; | ||||
| import { NullStateProvider } from "./stateProvider"; | ||||
|  | ||||
| async function run(): Promise<void> { | ||||
|     const cacheId = await saveImpl(new NullStateProvider()); | ||||
|     if (cacheId === -1) { | ||||
|         core.warning(`Cache save failed.`); | ||||
|     } | ||||
|     await saveImpl(new NullStateProvider()); | ||||
| } | ||||
|  | ||||
| run(); | ||||
|   | ||||
		Reference in New Issue
	
	Block a user