Compare commits


12 Commits

Author SHA1 Message Date
Shubham Tiwari
402b512df8 initial hacky 2022-07-31 15:45:57 +05:30
Vipul
71e3ee5cce Add phantsure to PR reviewers 2022-07-28 00:10:09 +05:30
Vipul
c316eb7911 Add phantsure to issue assignment 2022-07-28 00:09:39 +05:30
Shubham Tiwari
0865c47f36 new release (#855) 2022-07-13 10:27:51 +05:30
Shubham Tiwari
354a2ae15e Consuming 3.0 actions/cache (#834)
* Consuming 3.0 actions/cache

* formatting and error

* updated package version

* resolve package

* dist

* review comment

* dist

* dist
2022-07-07 21:56:17 +05:30
Vipul
baed3516c3 Merge pull request #530 from axelson/document-where-to-cache
Make it more obvious that the cache call does double duty
2022-06-27 10:56:37 +05:30
Vipul
8829e97be1 Update README.md
Co-authored-by: Lucas Costi <lucascosti@users.noreply.github.com>
2022-06-27 10:48:52 +05:30
Vipul
eec8cd3f5f Merge pull request #836 from actions/vsvipul/fix-auto-assign
Add kotewar and remove phantsure from auto-assignees lists
2022-06-26 20:03:54 +05:30
Vipul
5cc84c0123 Add kotewar and remove phantsure from auto-assignees lists 2022-06-26 05:26:49 +00:00
Sampark Sharma
afc669e7fc Merge pull request #819 from mpilgrem/haskell-stack-example
Adapt existing Haskell Stack example for Windows
2022-06-23 16:13:06 +05:30
Mike Pilgrem
d25c51bbfd Adapt existing Haskell Stack example for Windows
The default `STACK_ROOT` is `~/.stack` only on Unix-like operating systems. On Windows, the default is `%APPDATA%/stack` (usually `%HOME%\AppData\Roaming\stack`).

On Unix-like OSs, Stack stores GHC and other tools in a `programs` directory in the `STACK_ROOT`. On Windows, Stack stores those tools and MSYS2 in `%LOCALAPPDATA%\Programs\stack` (usually `%HOME%\AppData\Local\Programs\stack`).
2022-06-21 13:15:08 +01:00
Jason Axelson
2086306d9c Make it more obvious that the cache call does double duty 2021-02-06 13:36:42 -10:00
11 changed files with 103 additions and 44 deletions

View File

@@ -7,6 +7,7 @@ addAssignees: false
 # A list of reviewers to be added to pull requests (GitHub user name)
 reviewers:
   - phantsure
+  - kotewar
   - aparna-ravindra
   - tiwarishub
   - vsvipul

View File

@@ -11,5 +11,5 @@ jobs:
       - name: 'Auto-assign issue'
         uses: pozil/auto-assign-issue@v1.4.0
         with:
-          assignees: phantsure,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
+          assignees: phantsure,kotewar,tiwarishub,aparna-ravindra,vsvipul,bishal-pdmsft
           numOfAssignee: 1

View File

@@ -72,6 +72,8 @@ jobs:
run: /primes.sh -d prime-numbers
```
+> Note: You must use the `cache` action in your workflow before you need to use the files that might be restored from the cache. If the provided `key` doesn't match an existing cache, a new cache is automatically created if the job completes successfully.
## Implementation Examples
Every programming language and framework has its own way of caching.

View File

@@ -14,4 +14,7 @@
 - Fixed avoiding empty cache save when no files are available for caching. ([issue](https://github.com/actions/cache/issues/624))
 ### 3.0.4
-- Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))
+- Fixed tar creation error while trying to create tar with path as `~/` home folder on `ubuntu-latest`. ([issue](https://github.com/actions/cache/issues/689))
+### 3.0.5
+- Removed error handling by consuming actions/cache 3.0 toolkit, Now cache server error handling will be done by toolkit. ([PR](https://github.com/actions/cache/pull/834))

BIN
actions-cache-3.0.1.tgz Normal file

Binary file not shown.

40
dist/restore/index.js vendored
View File

@@ -1113,7 +1113,13 @@ function resolvePaths(patterns) {
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
-paths.push(`${relativeFile}`);
+if (relativeFile === '') {
+    // path.relative returns empty string if workspace and file are equal
+    paths.push('.');
+}
+else {
+    paths.push(`${relativeFile}`);
+}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
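
The hunk above handles the edge case where the cached path is the workspace root itself: `path.relative` then returns an empty string, which would put an empty entry into the tar manifest. A standalone sketch of the same logic (assuming plain Node, not the action's bundled module):

```ts
import * as path from "path";

// path.relative(from, to) returns '' when both resolve to the same directory.
// An empty tar entry is useless, so the fix substitutes '.', the directory
// itself, keeping every entry relative to the workspace root.
function toTarEntry(workspace: string, file: string): string {
  const relativeFile = path
    .relative(workspace, file)
    .replace(new RegExp(`\\${path.sep}`, "g"), "/");
  return relativeFile === "" ? "." : relativeFile;
}

console.log(toTarEntry("/repo", "/repo"));           // "."
console.log(toTarEntry("/repo", "/repo/dist/a.js")); // "dist/a.js"
```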
@@ -5464,6 +5470,7 @@ const buffer = __importStar(__webpack_require__(293));
const fs = __importStar(__webpack_require__(747));
const stream = __importStar(__webpack_require__(794));
const util = __importStar(__webpack_require__(669));
+const timer = __importStar(__webpack_require__(581));
const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const requestUtils_1 = __webpack_require__(899);
@@ -5654,10 +5661,14 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
-const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
-    concurrency: options.downloadConcurrency,
-    onProgress: downloadProgress.onProgress()
-});
+const result = yield Promise.race([client.downloadToBuffer(segmentStart, segmentSize, {
+    concurrency: options.downloadConcurrency,
+    onProgress: downloadProgress.onProgress()
+}),
+    timer.setTimeout(60 * 60 * 1000, 'timeout')]);
+if (result === 'timeout') {
+    throw new Error("Segment download timed out");
+}
fs.writeFileSync(fd, result);
}
}
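
The download hunk is the standard race-against-a-timer pattern: `timers/promises` (wired in as webpack module 581 below) exposes a promise-returning `setTimeout` whose resolution value doubles as a sentinel. A reduced sketch; `withTimeout` is a hypothetical helper name, not something in the bundle:

```ts
import { setTimeout as sleep } from "timers/promises";

// Race the work against a sentinel that resolves to "timeout" after ms
// milliseconds. If the sentinel wins, fail fast. Note that Promise.race does
// not cancel the loser; the download keeps running until it settles on its own.
async function withTimeout<T>(work: Promise<T>, ms: number): Promise<T> {
  const result = await Promise.race([work, sleep(ms, "timeout" as const)]);
  if (result === "timeout") {
    throw new Error("Segment download timed out");
  }
  return result as T;
}
```

In the bundle the delay is a fixed hour (`60 * 60 * 1000` ms), so a stalled segment now surfaces as a thrown error for the caller's existing error handling instead of hanging the job indefinitely.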
@@ -37272,9 +37283,9 @@ function extractTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
-return ['--use-compress-program', 'zstd -d --long=30'];
+return ['--use-compress-program', 'unzstd --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
-return ['--use-compress-program', 'zstd -d'];
+return ['--use-compress-program', 'unzstd'];
default:
return ['-z'];
}
@@ -37305,9 +37316,9 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
-return ['--use-compress-program', 'zstd -T0 --long=30'];
+return ['--use-compress-program', 'zstdmt --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
-return ['--use-compress-program', 'zstd -T0'];
+return ['--use-compress-program', 'zstdmt'];
default:
return ['-z'];
}
@@ -37338,9 +37349,9 @@ function listTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
-return ['--use-compress-program', 'zstd -d --long=30'];
+return ['--use-compress-program', 'unzstd --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
-return ['--use-compress-program', 'zstd -d'];
+return ['--use-compress-program', 'unzstd'];
default:
return ['-z'];
}
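
All three tar helpers swap the spelled-out zstd invocations for zstd's single-word aliases: `unzstd` is shipped by zstd as an alias for `zstd -d`, and `zstdmt` for the multithreaded `zstd -T0`. A plausible motivation, not stated in this diff, is that some tar builds mishandle a `--use-compress-program` value containing spaces; the `--long=30` window flag still rides along as an argument. A sketch of the decompression selector, with `CompressionMethod` standing in for the bundle's `constants_1.CompressionMethod`:

```ts
// Stand-in for the bundle's CompressionMethod constants.
enum CompressionMethod {
  Zstd = "zstd",
  ZstdWithoutLong = "zstd-without-long",
  Gzip = "gzip",
}

// unzstd == zstd -d; tar runs this program to decompress the archive stream.
function getDecompressionProgram(method: CompressionMethod): string[] {
  switch (method) {
    case CompressionMethod.Zstd:
      return ["--use-compress-program", "unzstd --long=30"];
    case CompressionMethod.ZstdWithoutLong:
      return ["--use-compress-program", "unzstd"];
    default:
      return ["-z"]; // fall back to tar's built-in gzip
  }
}
```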
@@ -42343,7 +42354,12 @@ function clean(key)
/* 578 */,
/* 579 */,
/* 580 */,
-/* 581 */,
+/* 581 */
+/***/ (function(module) {
+module.exports = require("timers/promises");
+/***/ }),
/* 582 */
/***/ (function(module) {
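
Slot 581 in the bundle's module table, previously empty, now maps to the Node built-in: webpack treats `timers/promises` as an external and defers to `require` at runtime instead of inlining it. A simplified sketch of that runtime shape (assuming Node with `@types/node`, not the actual webpack 4 runtime):

```ts
// Each numbered slot is a factory that populates module.exports; an external
// module simply forwards to Node's require instead of shipping bundled code.
type ModuleFactory = (module: { exports: unknown }) => void;

const modules: Record<number, ModuleFactory> = {
  581: (module) => {
    module.exports = require("timers/promises");
  },
};

function webpackRequire(id: number): unknown {
  const module = { exports: {} };
  modules[id](module);
  return module.exports;
}

const timer = webpackRequire(581) as typeof import("timers/promises");
timer.setTimeout(10, "ok").then(console.log); // prints "ok" after 10 ms
```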

45
dist/save/index.js vendored
View File

@@ -1113,7 +1113,13 @@ function resolvePaths(patterns) {
.replace(new RegExp(`\\${path.sep}`, 'g'), '/');
core.debug(`Matched: ${relativeFile}`);
// Paths are made relative so the tar entries are all relative to the root of the workspace.
-paths.push(`${relativeFile}`);
+if (relativeFile === '') {
+    // path.relative returns empty string if workspace and file are equal
+    paths.push('.');
+}
+else {
+    paths.push(`${relativeFile}`);
+}
}
}
catch (e_1_1) { e_1 = { error: e_1_1 }; }
@@ -5464,6 +5470,7 @@ const buffer = __importStar(__webpack_require__(293));
const fs = __importStar(__webpack_require__(747));
const stream = __importStar(__webpack_require__(794));
const util = __importStar(__webpack_require__(669));
+const timer = __importStar(__webpack_require__(581));
const utils = __importStar(__webpack_require__(15));
const constants_1 = __webpack_require__(931);
const requestUtils_1 = __webpack_require__(899);
@@ -5654,10 +5661,14 @@ function downloadCacheStorageSDK(archiveLocation, archivePath, options) {
const segmentStart = downloadProgress.segmentOffset + downloadProgress.segmentSize;
const segmentSize = Math.min(maxSegmentSize, contentLength - segmentStart);
downloadProgress.nextSegment(segmentSize);
-const result = yield client.downloadToBuffer(segmentStart, segmentSize, {
-    concurrency: options.downloadConcurrency,
-    onProgress: downloadProgress.onProgress()
-});
+const result = yield Promise.race([client.downloadToBuffer(segmentStart, segmentSize, {
+    concurrency: options.downloadConcurrency,
+    onProgress: downloadProgress.onProgress()
+}),
+    timer.setTimeout(60 * 60 * 1000, 'timeout')]);
+if (result === 'timeout') {
+    throw new Error("Segment download timed out");
+}
fs.writeFileSync(fd, result);
}
}
@@ -37272,9 +37283,9 @@ function extractTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
-return ['--use-compress-program', 'zstd -d --long=30'];
+return ['--use-compress-program', 'unzstd --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
-return ['--use-compress-program', 'zstd -d'];
+return ['--use-compress-program', 'unzstd'];
default:
return ['-z'];
}
@@ -37305,9 +37316,9 @@ function createTar(archiveFolder, sourceDirectories, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
-return ['--use-compress-program', 'zstd -T0 --long=30'];
+return ['--use-compress-program', 'zstdmt --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
-return ['--use-compress-program', 'zstd -T0'];
+return ['--use-compress-program', 'zstdmt'];
default:
return ['-z'];
}
@@ -37338,9 +37349,9 @@ function listTar(archivePath, compressionMethod) {
function getCompressionProgram() {
switch (compressionMethod) {
case constants_1.CompressionMethod.Zstd:
-return ['--use-compress-program', 'zstd -d --long=30'];
+return ['--use-compress-program', 'unzstd --long=30'];
case constants_1.CompressionMethod.ZstdWithoutLong:
-return ['--use-compress-program', 'zstd -d'];
+return ['--use-compress-program', 'unzstd'];
default:
return ['-z'];
}
@@ -42343,7 +42354,12 @@ function clean(key)
/* 578 */,
/* 579 */,
/* 580 */,
-/* 581 */,
+/* 581 */
+/***/ (function(module) {
+module.exports = require("timers/promises");
+/***/ }),
/* 582 */
/***/ (function(module) {
@@ -46795,10 +46811,9 @@ function run() {
const cacheId = yield cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(constants_1.Inputs.UploadChunkSize)
});
-if (cacheId == -1) {
-    return;
+if (cacheId != -1) {
+    core.info(`Cache saved with key: ${primaryKey}`);
}
-core.info(`Cache saved with key: ${primaryKey}`);
}
catch (error) {
utils.logWarning(error.message);

View File

@@ -223,6 +223,8 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
 ## Haskell - Stack
+### Linux or macOS
 ```yaml
 - uses: actions/cache@v3
   name: Cache ~/.stack
@@ -240,6 +242,27 @@ We cache the elements of the Cabal store separately, as the entirety of `~/.caba
     ${{ runner.os }}-stack-work-
 ```
+### Windows
+```yaml
+- uses: actions/cache@v3
+  name: Cache %APPDATA%\stack %LOCALAPPDATA%\Programs\stack
+  with:
+    path: |
+      ~\AppData\Roaming\stack
+      ~\AppData\Local\Programs\stack
+    key: ${{ runner.os }}-stack-global-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}
+    restore-keys: |
+      ${{ runner.os }}-stack-global-
+- uses: actions/cache@v3
+  name: Cache .stack-work
+  with:
+    path: .stack-work
+    key: ${{ runner.os }}-stack-work-${{ hashFiles('stack.yaml') }}-${{ hashFiles('package.yaml') }}-${{ hashFiles('**/*.hs') }}
+    restore-keys: |
+      ${{ runner.os }}-stack-work-
+```
## Java - Gradle
>Note: Ensure no Gradle daemons are running anymore when your workflow completes. Creating the cache package might fail due to locks being held by Gradle. Refer to the [Gradle Daemon documentation](https://docs.gradle.org/current/userguide/gradle_daemon.html) on how to disable or stop the Gradle Daemons.

18
package-lock.json generated
View File

@@ -1,15 +1,15 @@
{
"name": "cache",
"version": "3.0.4",
"version": "3.0.5",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "cache",
"version": "3.0.4",
"version": "3.0.5",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.0.0",
"@actions/cache": "file:actions-cache-3.0.1.tgz",
"@actions/core": "^1.7.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"
@@ -36,9 +36,10 @@
}
},
"node_modules/@actions/cache": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
"integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
"version": "3.0.1",
"resolved": "file:actions-cache-3.0.1.tgz",
"integrity": "sha512-ucvw0xvFpe0/vfNQ/rc11ste0nidCdBAJ5j5F01BxBqjxmGH2doVzfPlqSIGhcN7wKI074x2ATb9+7HSrTqGHg==",
"license": "MIT",
"dependencies": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",
@@ -9533,9 +9534,8 @@
},
"dependencies": {
"@actions/cache": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/@actions/cache/-/cache-3.0.0.tgz",
"integrity": "sha512-GL9CT1Fnu+pqs8TTB621q8Xa8Cilw2n9MwvbgMedetH7L1q2n6jY61gzbwGbKgtVbp3gVJ12aNMi4osSGXx3KQ==",
"version": "file:actions-cache-3.0.1.tgz",
"integrity": "sha512-ucvw0xvFpe0/vfNQ/rc11ste0nidCdBAJ5j5F01BxBqjxmGH2doVzfPlqSIGhcN7wKI074x2ATb9+7HSrTqGHg==",
"requires": {
"@actions/core": "^1.2.6",
"@actions/exec": "^1.0.1",

View File

@@ -1,6 +1,6 @@
{
"name": "cache",
"version": "3.0.4",
"version": "3.0.5",
"private": true,
"description": "Cache dependencies and build outputs",
"main": "dist/restore/index.js",
@@ -23,7 +23,7 @@
"author": "GitHub",
"license": "MIT",
"dependencies": {
"@actions/cache": "^3.0.0",
"@actions/cache": "file:actions-cache-3.0.1.tgz",
"@actions/core": "^1.7.0",
"@actions/exec": "^1.1.1",
"@actions/io": "^1.1.2"

View File

@@ -47,11 +47,10 @@ async function run(): Promise<void> {
const cacheId = await cache.saveCache(cachePaths, primaryKey, {
uploadChunkSize: utils.getInputAsInt(Inputs.UploadChunkSize)
});
-if (cacheId == -1) {
-    return;
-}
-core.info(`Cache saved with key: ${primaryKey}`);
+if (cacheId != -1) {
+    core.info(`Cache saved with key: ${primaryKey}`);
+}
} catch (error: unknown) {
utils.logWarning((error as Error).message);
}
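
The change inverts the old early return: `saveCache` resolves to `-1` when the save is skipped rather than failed (for example, when another job is already creating a cache with the same key), so the success message is now logged only for a real cache ID, while genuine failures still surface as thrown errors and are downgraded to warnings. The whole handler, reduced to a sketch around a hypothetical `trySave` wrapper:

```ts
import * as cache from "@actions/cache";
import * as core from "@actions/core";

// saveCache returns -1 for a skipped save; real failures throw and are
// reported as workflow warnings rather than hard errors.
async function trySave(paths: string[], key: string): Promise<void> {
  try {
    const cacheId = await cache.saveCache(paths, key);
    if (cacheId !== -1) {
      core.info(`Cache saved with key: ${key}`);
    }
  } catch (error: unknown) {
    core.warning((error as Error).message);
  }
}
```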