# PREREQUISITES:
#
# The following secrets must be stored in your repository where this Action runs:
#
# AWS_ACCESS_KEY_ID
# AWS_CLOUDFRONT_DISTRO_ID
# AWS_S3_BUCKET_NAME
# AWS_SECRET_ACCESS_KEY
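#
# One possible way to add them is the GitHub CLI, run from a clone of this repository
# (a sketch, assuming `gh` is installed and authenticated; the values are placeholders):
#
#   gh secret set AWS_ACCESS_KEY_ID --body "<access key id>"
#   gh secret set AWS_CLOUDFRONT_DISTRO_ID --body "<distribution id>"
#   gh secret set AWS_S3_BUCKET_NAME --body "<bucket name>"
#   gh secret set AWS_SECRET_ACCESS_KEY --body "<secret access key>"
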
name: AWS DEPLOY CI

off: # change to `on:` to turn on
  workflow_dispatch:
    branches:
      - main
  push:
    paths:
      - content/**/*
      - hugo.toml
  # pull_request:
  #   branches:
  #     - production

env:
  # Default AWS region where S3 pushes and CloudFront invalidations will occur.
  AWS_DEFAULT_REGION: us-east-2
  # Name of the branch in your repository which will store your generated site.
  SITE_BRANCH: site
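
# NOTE: the branch named in SITE_BRANCH must already exist before the publish job can
# check it out. A minimal sketch of creating it as an empty orphan branch (an assumption
# about how it was set up, not something this workflow does):
#
#   git checkout --orphan site
#   git rm -rf .
#   git commit --allow-empty -m "Initialize site branch."
#   git push origin site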

jobs:
  build:
    # In this phase, the code is pulled from main and the site is rendered with Hugo.
    # The built site is stored as an artifact for the later stages.
    runs-on: ubuntu-20.04
    concurrency:
      group: ${{ github.workflow }}-${{ github.ref }}
    steps:
      - uses: actions/checkout@v3
        with:
          submodules: true  # Fetch Hugo themes (true OR recursive)
          fetch-depth: 0    # Fetch all history for .GitInfo and .Lastmod

      - name: Setup Hugo
        uses: peaceiris/actions-hugo@v2
        with:
          hugo-version: 'latest'
          extended: true

      - name: Build
        run: hugo -e "production" -d "dist" --minify
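        # Not part of this workflow: for a rough local preview of the same build, something
        # like `hugo server -e production --minify` can be run (assumes the extended Hugo
        # binary is installed locally).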

      # If the build succeeds, store the dist/ dir as an artifact to be used in subsequent phases.
      - name: Upload output dist dir as artifact
        uses: actions/upload-artifact@v1
        with:
          name: dist
          path: dist/

  publish:
    # In the publish phase, the site is pushed to a separate branch that stores only the
    # dist/ folder (the "site" branch) and is also delta-synchronized to the S3 bucket.
    # CloudFront invalidation happens last.
    runs-on: ubuntu-20.04
    needs: build
    steps:
      # Check out the site branch this time, since the changes ultimately have to be committed there.
      - name: Checkout site branch
        uses: actions/checkout@v3
        with:
          submodules: true
          fetch-depth: 0
          ref: ${{ env.SITE_BRANCH }}

      # Download the artifact containing the newly built site. This overwrites the dist/ dir
      # from the checkout above.
      - name: Download artifact from build stage
        uses: actions/download-artifact@v1
        with:
          name: dist

      # Add all the files/changes in dist/ that were pulled down from the build stage, then commit them.
      # The final line sets a GitHub Actions output value that can be read by other steps.
      # Outputs cannot store multi-line values, so newline chars must be stripped.
      - name: Commit files
        id: can_commit
        run: |
          git config --local user.email "action@github.com"
          git config --local user.name "GitHub Action"
          git add -A dist/
          commit_message=$(git commit -m "Publish generated Hugo site." -a | tr -d '\n' || true)
          echo "commit_message=$commit_message" >> "$GITHUB_OUTPUT"

      # Checks whether the previous step produced an actual commit.
      - name: Nothing to commit
        id: nothing_committed
        if: contains(steps.can_commit.outputs.commit_message, 'nothing to commit')
        run: echo "Saw that no changes were made to Hugo site."
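
      # Each remaining step runs only when the "Nothing to commit" step was skipped,
      # i.e. only when the commit above actually recorded changes.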

      # Push those changes back to the site branch.
      - name: Push to site branch
        if: steps.nothing_committed.conclusion == 'skipped'
        uses: ad-m/github-push-action@master
        with:
          github_token: ${{ secrets.GITHUB_TOKEN }}
          branch: ${{ env.SITE_BRANCH }}

      # Store the AWS credentials on the runner.
      - name: Configure AWS credentials
        if: steps.nothing_committed.conclusion == 'skipped'
        uses: aws-actions/configure-aws-credentials@v1-node16
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ env.AWS_DEFAULT_REGION }}

      - name: Delta sync site to S3 with aws cli
        if: steps.nothing_committed.conclusion == 'skipped'
        run: aws s3 sync --size-only --delete --exclude "/authors/*/page/*" --cache-control max-age=2592000 dist/ s3://${{ secrets.AWS_S3_BUCKET_NAME }}
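        # Not part of this workflow: appending --dryrun to the same `aws s3 sync` command
        # shows what would be uploaded or deleted without touching the bucket, which can be
        # handy when tuning the --exclude patterns.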

      # Alternative: use s5cmd to perform only a delta sync to the destination S3 bucket.
      # This minimizes transfer traffic since it only uploads changed files.
      # - name: Delta sync site to S3 bucket
      #   if: steps.nothing_committed.conclusion == 'skipped'
      #   run: |
      #     curl -sLO https://github.com/peak/s5cmd/releases/download/v1.0.0/s5cmd_1.0.0_Linux-64bit.tar.gz
      #     tar -xzf s5cmd_1.0.0_Linux-64bit.tar.gz
      #     chmod +x s5cmd
      #     sudo mv s5cmd /usr/local/bin/
      #     echo "****Showing working dir and listing files.****"
      #     pwd && ls -lah
      #     echo "****Running delta sync against S3.****"
      #     s5cmd cp -s -n -u dist/ s3://${{ secrets.AWS_S3_BUCKET_NAME }}

      # Use the aws cli tool to perform a glob invalidation of the entire site against CloudFront.
      - name: Invalidate cache on CloudFront
        if: steps.nothing_committed.conclusion == 'skipped'
        run: aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CLOUDFRONT_DISTRO_ID }} --paths "/*"
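        # Not part of this workflow: the invalidation's progress can be checked afterwards with
        # `aws cloudfront get-invalidation --distribution-id <distro id> --id <invalidation id>`,
        # where the invalidation id is returned by the create-invalidation call above.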