Skip to content

Commit

Permalink
feat: add action
Browse files Browse the repository at this point in the history
  • Loading branch information
Platane committed Jan 10, 2024
1 parent f3b4ce5 commit 2a9ea32
Show file tree
Hide file tree
Showing 13 changed files with 788 additions and 0 deletions.
106 changes: 106 additions & 0 deletions .github/workflows/test.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,106 @@
name: main

on:
  push:

jobs:
  # Seed the cache: create files, then save them under a run-unique key.
  put-to-cache:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - run: |
          mkdir -p a/b/c
          echo "1234" >> a/b/c/file.txt
          echo "000" >> file.txt

      - uses: ./
        with:
          path: |
            a/b
            file.txt
          key: cache-${{ github.run_id }}-${{ github.sha }}
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_ACCESS_SECRET }}
          aws-region: ${{ secrets.AWS_REGION }}
          aws-cache-bucket: ${{ secrets.AWS_CACHE_BUCKET }}

  # Restore every cached path and check the files exist.
  read-cache:
    needs: [put-to-cache]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: ./
        with:
          path: |
            a/b
            file.txt
          key: cache-${{ github.run_id }}-${{ github.sha }}
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_ACCESS_SECRET }}
          aws-region: ${{ secrets.AWS_REGION }}
          aws-cache-bucket: ${{ secrets.AWS_CACHE_BUCKET }}

      - run: |
          test -f a/b/c/file.txt
          test -f file.txt

  # Restore only a subset of the cached paths.
  read-partial-cache:
    needs: [put-to-cache]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: ./
        with:
          path: |
            a/b
          key: cache-${{ github.run_id }}-${{ github.sha }}
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_ACCESS_SECRET }}
          aws-region: ${{ secrets.AWS_REGION }}
          aws-cache-bucket: ${{ secrets.AWS_CACHE_BUCKET }}

      - run: |
          test -f a/b/c/file.txt

  # Restore over pre-existing files: cached content must win, and files
  # not present in the cache must survive.
  read-cache-and-overwrite:
    needs: [put-to-cache]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - run: |
          mkdir -p a/b/c
          echo "-" >> a/b/c/file.txt
          echo "-" >> a/b/c/file2.txt

      - uses: ./
        with:
          path: |
            a/b
          key: cache-${{ github.run_id }}-${{ github.sha }}
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_ACCESS_SECRET }}
          aws-region: ${{ secrets.AWS_REGION }}
          aws-cache-bucket: ${{ secrets.AWS_CACHE_BUCKET }}

      - run: |
          test -f a/b/c/file.txt
          test -f a/b/c/file2.txt

  # Check cache existence without downloading (lookup-only mode).
  lookup-cache:
    needs: [put-to-cache]
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4

      - uses: ./restore
        id: restore
        with:
          lookup-only: true
          key: cache-${{ github.run_id }}-${{ github.sha }}
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_ACCESS_SECRET }}
          aws-region: ${{ secrets.AWS_REGION }}
          aws-cache-bucket: ${{ secrets.AWS_CACHE_BUCKET }}

      - run: echo ${{ steps.restore.outputs.cache-hit }}
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
.env
31 changes: 31 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
@@ -1 +1,32 @@
# s3-cache

> Drop-in replacement for [actions/cache](https://github.com/actions/cache); caches artifacts on s3
# Usage

```yaml
steps:
- uses: rayonapp/s3-cache
with:
path: |
path/to/file/a
another/file
a/directory
key: cache-${{ hashFiles('**/**.rs') }}
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_ACCESS_SECRET }}
aws-region: ${{ secrets.AWS_REGION }}
aws-cache-bucket: ${{ secrets.AWS_CACHE_BUCKET }}
```
# Motivation
Handle cache expiration ourselves.
No restrictions based on branches, unlike GitHub's built-in cache ([restrictions for accessing a cache](https://docs.github.com/en/actions/using-workflows/caching-dependencies-to-speed-up-workflows#restrictions-for-accessing-a-cache)).
# Implementation
- zip every file (or directory recursively) in path
- make a zip of all of them
- upload the resulting zip with cache key as name
30 changes: 30 additions & 0 deletions action.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
name: s3-cache
description: |
  Cache using s3
runs:
  using: node20
  main: main.js
  post: post.js
  # save the cache after the job's main steps succeed
  post-if: success()
inputs:
  key:
    description: cache key
    required: true
  path:
    description: path to files to cache, use multiline for multiple files

  aws-access-key-id:
    description: aws-access-key-id
    required: true
  aws-secret-access-key:
    description: aws-secret-access-key
    required: true
  aws-region:
    description: aws-region
    required: true
  aws-cache-bucket:
    description: aws-cache-bucket
    required: true
outputs:
  cache-hit:
    description: true if a cache is found for this key
92 changes: 92 additions & 0 deletions cache.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,92 @@
const { execSync, execFileSync } = require("child_process");
const fs = require("fs");
const os = require("os");
const { join: path_join } = require("path");

const s3 = require("./s3");

/**
 * set the cache-hit output
 * cache-hit=true if a cache if found for this key
 *
 * @param {string} key - cache key to probe on s3
 */
const lookUp = async (key) => {
  console.log("look up", { key });

  const cacheHit = await s3.exist(key);

  console.log("cacheHit=", cacheHit.toString());

  // Append to the GITHUB_OUTPUT file directly instead of shelling out to
  // `echo`: the shell round-trip breaks (or worse, executes) keys containing
  // quotes or other metacharacters.
  fs.appendFileSync(process.env.GITHUB_OUTPUT, `cache-hit=${cacheHit}\n`);
};

/**
 * push files to the cache
 *
 * for each path, create a zip file in a tmp dir
 * then zip all of them
 * and push the resulting file to s3 with the cache key as name
 *
 * @param {string} key - cache key used as the s3 object name
 * @param {string[]} paths - workspace-relative files/directories to cache
 * @throws {Error} if one of the paths does not exist
 */
const put = async (key, paths) => {
  const a = Date.now();
  console.log("put", { key, paths });

  // Stage archives in the OS temp dir — a relative mkdtemp prefix would
  // create the staging dir inside the workspace, next to (or inside) the
  // very paths being zipped.
  const tmpDir = fs.mkdtempSync(path_join(os.tmpdir(), "s3-cache"));

  try {
    for (const path of paths) {
      if (!fs.existsSync(path)) throw new Error(`file don't exist: ${path}`);

      // flatten the path into a unique, filesystem-safe archive name
      const pathKey = path.replace(/\//g, "_") + ".zip";

      execFileSync("zip", [path_join(tmpDir, pathKey), "-r", path]);
    }

    // bundle all the per-path archives into a single payload
    execFileSync("zip", ["__payload.zip", "-r", "."], { cwd: tmpDir });

    const payload = fs.readFileSync(path_join(tmpDir, "__payload.zip"));

    await s3.put(key, payload);

    console.log("uploaded in", Date.now() - a, "ms");
  } finally {
    fs.rmSync(tmpDir, { recursive: true });
  }
};

/**
 * get files from the cache
 *
 * downloads the payload for this key (if any), extracts each requested
 * path's inner archive into the working directory (overwriting existing
 * files), and sets the cache-hit=true output. Does nothing on a miss.
 *
 * @param {string} key - cache key used as the s3 object name
 * @param {string[]} paths - workspace-relative files/directories to restore
 * @throws {Error} if a requested path is missing from the payload
 */
const get = async (key, paths) => {
  const a = Date.now();
  console.log("get", { key, paths });

  const payload = await s3.get(key);

  if (payload) {
    // extract in the OS temp dir, not the workspace (see put())
    const tmpDir = fs.mkdtempSync(path_join(os.tmpdir(), "s3-cache"));

    try {
      fs.writeFileSync(
        path_join(tmpDir, "__payload.zip"),
        Buffer.from(payload)
      );

      execFileSync("unzip", ["__payload.zip"], { cwd: tmpDir });

      for (const filename of paths) {
        // must mirror the name-flattening scheme used by put()
        const pathKey = filename.replace(/\//g, "_") + ".zip";

        if (!fs.existsSync(path_join(tmpDir, pathKey)))
          // interpolate the offending filename (the original message
          // printed a literal placeholder instead of the value)
          throw new Error(`file don't exist in the cache: ${filename}`);

        execFileSync("unzip", ["-o", path_join(tmpDir, pathKey)]);
      }

      // write the step output directly; avoids shell interpolation issues
      fs.appendFileSync(process.env.GITHUB_OUTPUT, "cache-hit=true\n");

      console.log("downloaded in", Date.now() - a, "ms");
    } finally {
      fs.rmSync(tmpDir, { recursive: true });
    }
  }
};

// public API: get (restore), put (save), lookUp (existence check only)
module.exports = { get, put, lookUp };
Loading

0 comments on commit 2a9ea32

Please sign in to comment.