diff --git a/.github/workflows/build-image.yml b/.github/workflows/build-image.yml index c56c141..5c314f2 100644 --- a/.github/workflows/build-image.yml +++ b/.github/workflows/build-image.yml @@ -49,7 +49,7 @@ jobs: runs-on: ${{ inputs.os }} steps: - name: 📦 Check out the codebase - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + uses: actions/checkout@v4 - name: 🛠️ Install goss and dgoss uses: e1himself/goss-installation-action@v1.2.1 @@ -93,7 +93,7 @@ jobs: - name: 🚀 Build image and push to docker-hub for "${{ inputs.image-platform }}" uses: docker/build-push-action@v5 with: - context: ${{ inputs.image-template-path }}/${{ inputs.image-template }} + context: '${{ inputs.image-template-path }}/${{ inputs.image-template }}' platforms: ${{ inputs.image-platform }} push: ${{ inputs.push-to-hub == true }} tags: ${{ steps.meta.outputs.tags }} @@ -107,3 +107,5 @@ jobs: export IMAGE_TEMPLATE=${{ inputs.image-template }} export IMAGE_TAG=${{ inputs.image-namespace }}:${{ inputs.image-template }}-${{ inputs.image-version }} make test + +... 
diff --git a/.github/workflows/shellcheck.yml b/.github/workflows/shellcheck.yml index 028504b..e9b669b 100644 --- a/.github/workflows/shellcheck.yml +++ b/.github/workflows/shellcheck.yml @@ -25,7 +25,7 @@ jobs: runs-on: ${{ inputs.os }} steps: - name: 📦 Check out the codebase - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 # v4 + uses: actions/checkout@v4 with: fetch-depth: 0 diff --git a/.github/workflows/triage.yml b/.github/workflows/triage.yml index 40e625f..1d12ae3 100644 --- a/.github/workflows/triage.yml +++ b/.github/workflows/triage.yml @@ -16,7 +16,7 @@ name: 🏷️ Add labels jobs: label: - uses: wayofdev/gh-actions/.github/workflows/apply-labels.yml@master + uses: wayofdev/gh-actions/.github/workflows/apply-labels.yml@v2.0.0 with: os: ubuntu-latest secrets: diff --git a/README.md b/README.md index 2a593d3..ca39e3f 100644 --- a/README.md +++ b/README.md @@ -70,6 +70,7 @@ Check each action's README file for detailed instructions on how to use it. | [`actions/phive/install`](./actions/phive/install/README.md) | Install dependencies with [Phive](https://phar.io). | | [`actions/playwright/install`](./actions/playwright/install/README.md) | Installs [Playwright](https://playwright.dev/) along with its dependencies. | | [`actions/pnpm/install`](./actions/pnpm/install/README.md) | Installs mono-repository dependencies using [PNPM](https://pnpm.io/). | +| [`actions/s3/cache`](./actions/s3/cache/README.md) | Cache artifacts, or restore them using S3. |
diff --git a/actions/s3-cache/README.md b/actions/s3-cache/README.md deleted file mode 100644 index 24ac04d..0000000 --- a/actions/s3-cache/README.md +++ /dev/null @@ -1,151 +0,0 @@ -# Custom S3 cache - -This action allows caching dependencies and saving them in an AWS S3 bucket to reuse in other jobs and workflows to improve workflow execution time. - - -## Inputs - -* `cache_action` - - Specify what to do with the cache: save to an s3 bucket or restore from the s3 bucket into `cache_path`. - - - Type: string - - Required - - Possible values: save, restore - -* `cache_path` - - Absolute or relative path to a folder with cache. When cache_action is **save** the path itself will not be saved, only the contents of the directory (including all subdirectories). When cache_action is **restore** all folders in `cache_path` will be created first and cache will be restored from the S3 bucket into this folder. - - - Type: string - - Required - - Default: . - -* `s3_bucket_name` - - AWS S3 bucket name which will be used to save cache to and restore it from. - - - Type: string - - Required - -* `cache_key` - - A cache key which is used only to save cache to S3 bucket - - - Type: string - - Required only when `cache_action` is **save** - -* `restore_keys` - - An ordered list of keys to use for restoring cache from the s3 bucket - - - Type: list of strings - - Required only when `cache_action` is **restore** - - You can specify multiple keys by putting each key on its own line: - ```yaml - restore_keys: |- - ${{ runner.os }}-cache-${{ hashfiles('**/.package-lock.json') }} - ${{ runner.os }}-cache - ``` - The first matching key will be restored. - -## Environment Variables - -- `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY`(Required) - credential with access to provided AWS S3 bucket name -- `AWS_REGION`(Required) - AWS region. 
- -## Example Cache Workflow - -### Save Cache - -```yaml -name: Create cache - -on: push - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - - name: Create cache - uses: wayofdev/gh-actions/actions/s3-cache@v1 - with: - cache_action: save - cache_path: ${GITHUB_WORKSPACE}/.cache - s3_bucket_name: my_s3_bucket - cache_key: ${{ runner.os }}-cache-${{ hashfiles('**/.package-lock.json') }} - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_REGION: ${{ secrets.AWS_REGION }} - -``` - -### Restore Cache - -```yaml -name: Restore cache - -on: push - -jobs: - build: - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v3 - - - name: Create cache - uses: wayofdev/gh-actions/actions/s3-cache@v1 - with: - cache_action: restore - cache_path: ${GITHUB_WORKSPACE}/.cache - s3_bucket_name: my_s3_bucket - restore_keys: | - ${{ runner.os }}-cache-${{ hashfiles('**/.package-lock.json') }} - ${{ runner.os }}-cache - env: - AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} - AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} - AWS_REGION: ${{ secrets.AWS_REGION }} -``` - -### Creating a Cache Key -A cache key can include any of the contexts, functions, literals, and operators supported by GitHub Actions. - -For example, using the `hashFiles` function allows you to create a new cache when dependencies change. The `hashFiles` function is specific to GitHub Actions. 
- -```yaml - cache_key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }} -``` - -Additionally, you can use arbitrary command output in a cache key, such as a date or software version: - - -```yaml -# http://man7.org/linux/man-pages/man1/date.1.html - - name: Get Date - id: get-date - run: | - echo "date=$(/bin/date -u "+%Y%m%d")" >> $GITHUB_OUTPUT - shell: bash - - - uses: wayofdev/gh-actions/actions/s3-cache@v1 - with: - cache_action: save - cache_path: ${GITHUB_WORKSPACE}/.cache - s3_bucket_name: my_s3_bucket - cache_key: ${{ runner.os }}-${{ steps.get-date.outputs.date }}-${{ hashFiles('**/lockfiles') }} - -``` - -See [GitHub Contexts and Expressions](https://docs.github.com/en/actions/learn-github-actions/contexts#github-context) for more cache key examples. - -## Limitations - -This action has not been tested on self-hosted runners or when running inside a container. - diff --git a/actions/s3-cache/action.yaml b/actions/s3-cache/action.yaml deleted file mode 100644 index af96513..0000000 --- a/actions/s3-cache/action.yaml +++ /dev/null @@ -1,38 +0,0 @@ ---- - -name: 'Custom S3 cache' -description: 'Save and restore cache artifacts from AWS s3 bucket' -author: Alina Freydina - -inputs: - cache_action: - description: "An action to do with cache: save or restore" - required: true - cache_path: - description: Absolute or relative path where cache will be restored to or saved from - required: true - default: . 
- s3_bucket_name: - description: AWS S3 bucket name to save cache to or restore cache from - required: true - cache_key: - description: A cache key which used only to save cache to s3 bucket - required: false - restore_keys: - description: 'An ordered list of keys to use for restoring cache from s3 bucket' - required: false - -runs: - using: "composite" - steps: - - name: Run action script - run: $GITHUB_ACTION_PATH/cache.sh - shell: bash - env: - INPUT_CACHE_ACTION: "${{ inputs.cache_action }}" - INPUT_CACHE_PATH: "${{ inputs.cache_path }}" - INPUT_S3_BUCKET_NAME: "${{ inputs.s3_bucket_name }}" - INPUT_CACHE_KEY: "${{ inputs.cache_key }}" - INPUT_RESTORE_KEYS: "${{ inputs.restore_keys }}" - -... diff --git a/actions/s3/cache/README.md b/actions/s3/cache/README.md new file mode 100644 index 0000000..006cd3f --- /dev/null +++ b/actions/s3/cache/README.md @@ -0,0 +1,212 @@ +
+ WayOfDev Logo for light theme + WayOfDev Logo for dark theme +
+

+ +
+ Build Status + Software License + Commits since latest release + Codecov + +
+
+ +# S3 / Cache + +This GitHub Action allows you to save and restore cache artifacts from an S3 bucket. It provides a convenient way to cache dependencies, build artifacts, or any other files that can be reused across multiple workflow runs, helping to speed up your CI/CD pipeline. + +
+ +## 📋 Features + +- Save cache to an S3 bucket +- Restore cache from an S3 bucket +- Specify custom cache keys and restore keys +- Compress cache files using tar and gzip for efficient storage and transfer + +
+ +## 📥 Inputs + +* `cache-action` + + Specify what to do with the cache: save to an s3 bucket or restore from the s3 bucket into `cache-path`. + + - Type: string + - Required + - Possible values: save, restore + +* `cache-path` + + Absolute or relative path to a folder with cache. When `cache-action` is `save` the path itself will not be saved, only the contents of the directory (including all subdirectories). When `cache-action` is `restore` all folders in `cache-path` will be created first and cache will be restored from the S3 bucket into this folder. + + - Type: string + - Required + - Default: . + +* `s3-bucket-name` + + AWS S3 bucket name which will be used to save cache to and restore it from. + + - Type: string + - Required + +* `cache-key` + + A cache key which is used only to save cache to S3 bucket + + - Type: string + - Required only when `cache-action` is `save` + +* `restore-keys` + + An ordered list of keys to use for restoring cache from the s3 bucket + + - Type: list of strings + - Required only when `cache-action` is `restore` + + You can specify multiple keys by putting each key on its own line: + ```yaml + restore-keys: |- + ${{ runner.os }}-cache-${{ hashfiles('**/.package-lock.json') }} + ${{ runner.os }}-cache + ``` + The first matching key will be restored. + 
+ +## 🌎 Environment Variables + +The action requires the following environment variables to be set: + +- `AWS_ACCESS_KEY_ID`: The AWS access key ID with permissions to access the S3 bucket. +- `AWS_SECRET_ACCESS_KEY`: The AWS secret access key associated with the access key ID. +- `AWS_REGION`: The AWS region where the S3 bucket is located. + +
+ +## ⚙️ Usage Examples + +### → Saving Cache + +```yaml +--- + +name: 🔍 Continuous integration + +on: # yamllint disable-line rule:truthy + push: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: 📦 Check out the codebase + uses: actions/checkout@v4 + + - name: 📤 Save cache + uses: wayofdev/gh-actions/actions/s3/cache@v3.0.0 + with: + cache-action: save + cache-path: ./node_modules + s3-bucket-name: my-cache-bucket + cache-key: ${{ runner.os }}-node-modules-${{ hashFiles('**/package-lock.json') }} + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_REGION: ${{ secrets.AWS_REGION }} + +... +``` + +### → Restoring Cache + +```yaml +--- + +name: 🔍 Continuous integration + +on: # yamllint disable-line rule:truthy + push: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: 📦 Check out the codebase + uses: actions/checkout@v4 + + - name: ♻️ Restore cache + uses: wayofdev/gh-actions/actions/s3/cache@v3.0.0 + with: + cache-action: restore + cache-path: ${GITHUB_WORKSPACE}/.cache + s3-bucket-name: my_s3_bucket + restore-keys: | + ${{ runner.os }}-cache-${{ hashfiles('**/.package-lock.json') }} + ${{ runner.os }}-cache + env: + AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }} + AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + AWS_REGION: ${{ secrets.AWS_REGION }} + +... +``` + +In the restore example, the action will attempt to restore the cache using the provided `restore-keys` in the specified order. If a cache hit occurs for a key, the restoration process will stop, and the cached files will be extracted to the `cache-path`. +### → Error Handling + +The action will fail if any of the required inputs or environment variables are missing or if an invalid `cache-action` is provided. Appropriate error messages will be logged to help troubleshoot the issue. 
### → Creating a Cache Key +A cache key can include any of the contexts, functions, literals, and operators supported by GitHub Actions. + +For example, using the `hashFiles` function allows you to create a new cache when dependencies change. The `hashFiles` function is specific to GitHub Actions. + +```yaml +cache-key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }} +``` + +Additionally, you can use arbitrary command output in a cache key, such as a date or software version: + + +```yaml +--- + +name: 🔍 Continuous integration + +on: # yamllint disable-line rule:truthy + push: + +jobs: + build: + runs-on: ubuntu-latest + steps: + - name: Get current timestamp + id: get-date + run: | + echo "date=$(/bin/date -u "+%Y%m%d")" >> $GITHUB_OUTPUT + shell: bash + + - name: 📤 Save cache + uses: wayofdev/gh-actions/actions/s3/cache@v3.0.0 + with: + cache-action: save + cache-path: ${GITHUB_WORKSPACE}/.cache + s3-bucket-name: my-s3-bucket + cache-key: ${{ runner.os }}-${{ steps.get-date.outputs.date }}-${{ hashFiles('**/lockfiles') }} + +... +``` + +See [GitHub Contexts and Expressions](https://docs.github.com/en/actions/learn-github-actions/contexts#github-context) for more cache key examples. + 
+ +## ☝️ Limitations + +This action has not been tested on self-hosted runners, when running inside a container, or with S3-compatible storage providers other than AWS. + 
diff --git a/actions/s3/cache/action.yml b/actions/s3/cache/action.yml new file mode 100644 index 0000000..af72f65 --- /dev/null +++ b/actions/s3/cache/action.yml @@ -0,0 +1,38 @@ +--- + +name: 'Custom S3 cache' +description: 'Save and restore cache artifacts from AWS s3 bucket' +author: Alina Freydina + +inputs: + cache-action: + description: 'An action to do with cache example: `save` or `restore`' + required: true + cache-path: + description: Absolute or relative path where cache will be restored to, or saved from + required: true + default: . + s3-bucket-name: + description: AWS S3 bucket name to save cache to, or restore cache from + required: true + cache-key: + description: A cache key which used only to save cache to s3 bucket + required: false + restore-keys: + description: 'An ordered list of keys to use for restoring cache from s3 bucket' + required: false + +runs: + using: 'composite' + steps: + - name: Run action script + run: ${{ github.action_path }}/cache.sh + shell: bash + env: + INPUT_CACHE_ACTION: "${{ inputs.cache-action }}" + INPUT_CACHE_PATH: "${{ inputs.cache-path }}" + INPUT_S3_BUCKET_NAME: "${{ inputs.s3-bucket-name }}" + INPUT_CACHE_KEY: "${{ inputs.cache-key }}" + INPUT_RESTORE_KEYS: "${{ inputs.restore-keys }}" + +... 
diff --git a/actions/s3-cache/cache.sh b/actions/s3/cache/cache.sh similarity index 73% rename from actions/s3-cache/cache.sh rename to actions/s3/cache/cache.sh index 77f0273..2488364 100755 --- a/actions/s3-cache/cache.sh +++ b/actions/s3/cache/cache.sh @@ -3,18 +3,18 @@ set -euo pipefail function save_cache() { - - if [[ $(aws s3 ls s3://${S3_BUCKET}/${CACHE_KEY}/ --region $AWS_REGION | head) ]]; then + + if [[ $(aws s3 ls s3://"${S3_BUCKET}"/"${CACHE_KEY}"/ --region "$AWS_REGION" | head) ]]; then echo "Cache is already existed for key: ${CACHE_KEY}" else echo "Saving cache for key ${CACHE_KEY}" - + tmp_dir="$(mktemp -d)" - (cd $CACHE_PATH && tar czf "${tmp_dir}/archive.tgz" ./*) - size="$(ls -lh "${tmp_dir}/archive.tgz" | cut -d ' ' -f 5 )" + (cd "$CACHE_PATH" && tar czf "${tmp_dir}/archive.tgz" ./*) + local size="$(ls -lh "${tmp_dir}/archive.tgz" | cut -d ' ' -f 5 )" - aws s3 cp "${tmp_dir}/archive.tgz" "s3://${S3_BUCKET}/${CACHE_KEY}/archive.tgz" --region $AWS_REGION > /dev/null - copy_exit_code=$? + aws s3 cp "${tmp_dir}/archive.tgz" "s3://${S3_BUCKET}/${CACHE_KEY}/archive.tgz" --region "$AWS_REGION" > /dev/null + local copy_exit_code=$? 
rm -rf "${tmp_dir}" echo "Cache size: ${size}" @@ -27,16 +27,16 @@ function save_cache() { function restore_cache() { for key in ${RESTORE_KEYS}; do - if [[ $(aws s3 ls s3://${S3_BUCKET}/ --region $AWS_REGION | grep $key | head) ]]; then - k=$(aws s3 ls s3://${S3_BUCKET}/ --region $AWS_REGION | grep $key | head -n 1 | awk '{print $2}') + if [[ $(aws s3 ls s3://"${S3_BUCKET}"/ --region "$AWS_REGION" | grep "$key" | head) ]]; then + local k=$(aws s3 ls s3://"${S3_BUCKET}"/ --region "$AWS_REGION" | grep "$key" | head -n 1 | awk '{print $2}') tmp_dir="$(mktemp -d)" - mkdir -p $CACHE_PATH + mkdir -p "$CACHE_PATH" + + aws s3 cp s3://"${S3_BUCKET}"/"${k//\//}"/archive.tgz "$tmp_dir"/archive.tgz --region "$AWS_REGION" > /dev/null + tar xzf "${tmp_dir}/archive.tgz" -C "$CACHE_PATH" - aws s3 cp s3://${S3_BUCKET}/${k//\//}/archive.tgz $tmp_dir/archive.tgz --region $AWS_REGION > /dev/null - tar xzf "${tmp_dir}/archive.tgz" -C $CACHE_PATH - echo "Restoring cache for key ${key}" - du -sm ${CACHE_PATH}/* + du -sm "${CACHE_PATH}"/* exit 0 else echo "Cache with key $key not found."