#!/bin/bash

# Documentation Building Script
#
# Builds the lightningstream HTML documentation (mkdocs inside a docker
# image), publishes it to S3 and regenerates the versions.json index.
#
# Environment Variables Required:
# - AWS_ACCESS_KEY_ID: The AWS access key ID
# - AWS_SECRET_ACCESS_KEY: The AWS secret access key
# - AWS_REGION: The AWS region where resources are located
# - AWS_S3_BUCKET_DOCS: The name of the S3 bucket for documentation
# - BUILD_PATH: The root of the lightningstream directory
#
# Usage:
#   ./mkdocs.sh MKDOCS_FILE VERSION IMAGE SUBDIR

set -euo pipefail  # exit on errors, unset variables and pipeline failures

# Fail early with a clear message when required environment is missing.
: "${AWS_S3_BUCKET_DOCS:?AWS_S3_BUCKET_DOCS must be set}"
: "${BUILD_PATH:?BUILD_PATH must be set}"

# Main script execution.  All four arguments are used below, so require them.
if [ "$#" -ne 4 ]; then
    echo "Usage: $0 MKDOCS_FILE VERSION IMAGE SUBDIR" >&2
    exit 1
fi

mkdocs_file="$1"
version="$2"
image="$3"
subdir="$4"

publish_script="${BUILD_PATH}/.github/scripts/publish_to_s3.sh"
output_dir="${PWD}/output"

# Prep temporary output location
mkdir -p "${output_dir}/${version}"

# Build the docs inside the supplied image so we do not depend on the
# runner's local python environment.
docker run -v "${PWD}:${PWD}" "$image" sh -c \
    "pip install mkdocs-swagger-ui-tag && mkdocs build -f $mkdocs_file -d ${output_dir}/${version}"

# Highest version directory already published for this subdir, excluding the
# 'latest' alias.  sort -V orders semantic versions correctly.
latestVersion=$(aws s3 ls "s3://${AWS_S3_BUCKET_DOCS}/docs.powerdns.com/${subdir}/" \
    | awk '{print $2}' \
    | grep -v latest \
    | awk -F '/' '/\// {print $1}' \
    | sort -V | tail -1)

if [ -z "$latestVersion" ]; then
    latestVersion="0"
fi

echo "Publishing version ${version}.
Latest version already in S3 is ${latestVersion}"

"$publish_script" "${output_dir}/${version}" "${subdir}/${version}"

# Publish to 'latest' only when this version sorts strictly higher than the
# newest one already in S3.  Use version-aware sort(1) rather than an awk
# string comparison, which would order e.g. 0.10.0 before 0.9.0.
highest=$(printf '%s\n%s\n' "$latestVersion" "$version" | sort -V | tail -1)
if [ "$highest" = "$version" ] && [ "$version" != "$latestVersion" ]; then
    echo "This version is newer than the latest version in S3, publishing this version to latest"
    "$publish_script" "${output_dir}/${version}" "${subdir}/latest"
    latestVersion="$version"
fi

# Build versions.json: one entry per published version directory; the latest
# version additionally carries the 'latest' alias.
versionsData="[]"

while read -r docsVersion; do
    if [ -n "$docsVersion" ] && [ "$docsVersion" != "latest" ]; then
        if [ "$docsVersion" = "$latestVersion" ]; then
            aliases='["latest"]'
        else
            aliases='[]'
        fi
        # --arg/--argjson keep the values safely quoted inside the JSON,
        # instead of interpolating shell variables into the jq program.
        versionsData=$(jq --arg v "$docsVersion" --argjson a "$aliases" \
            '. += [{"title": $v, "version": $v, "aliases": $a}]' <<<"$versionsData")
    fi
done < <(aws s3 ls "s3://${AWS_S3_BUCKET_DOCS}/docs.powerdns.com/${subdir}/" \
    | awk '{print $2}' \
    | awk -F '/' '/\// {print $1}')

printf '%s\n' "$versionsData" > "${output_dir}/versions.json"

"$publish_script" "${output_dir}/versions.json" "$subdir"

"$publish_script" "${PWD}/doc/html/index.html" "$subdir"

exit 0
#!/bin/bash

# PowerDNS Documentation Publishing Script
#
# Uploads documentation to an S3 bucket and invalidates the CloudFront cache.
# Uses the AWS CLI.
#
# Environment Variables Required:
# - AWS_ACCESS_KEY_ID: The AWS access key ID
# - AWS_SECRET_ACCESS_KEY: The AWS secret access key
# - AWS_REGION: The AWS region where resources are located
# - AWS_S3_BUCKET_DOCS: The name of the S3 bucket for documentation
# - AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: The CloudFront distribution ID
#
# Usage:
#   ./publish_to_s3.sh SOURCE_PATH [TARGET_DIR]

set -euo pipefail  # exit on errors, unset variables and pipeline failures

# Check if AWS CLI is installed
if ! command -v aws &> /dev/null; then
    echo "AWS CLI is not installed. Please install it and try again." >&2
    exit 1
fi

# Map a file extension to the MIME type S3 should serve the file with.
# $1 - file name; prints the content type on stdout.
get_content_type() {
    case "${1##*.}" in
        html) echo "text/html" ;;
        css) echo "text/css" ;;
        js) echo "application/javascript" ;;
        json) echo "application/json" ;;
        png) echo "image/png" ;;
        jpg|jpeg) echo "image/jpeg" ;;
        *) echo "application/octet-stream" ;;
    esac
}

# Recursively upload a file or directory tree to S3 below dest_dir.
# $1 - local file or directory; $2 - destination key prefix in the bucket.
upload_to_s3() {
    local source_path="$1"
    local dest_dir="$2"
    local entry

    if [ -d "$source_path" ]; then
        for entry in "$source_path"/*; do
            # An empty directory leaves the glob unexpanded; skip it.
            [ -e "$entry" ] || continue
            if [ -d "$entry" ]; then
                upload_to_s3 "$entry" "${dest_dir}/$(basename "$entry")"
            else
                upload_file_to_s3 "$entry" "$dest_dir"
            fi
        done
    else
        upload_file_to_s3 "$source_path" "$dest_dir"
    fi
}

# Upload a single file to S3 with an explicit content type.
# $1 - local file; $2 - destination key prefix in the bucket.
upload_file_to_s3() {
    local file="$1"
    local dest_dir="$2"
    local content_type
    # Assign separately from 'local' so a failure of the command
    # substitution is not masked by local's own exit status.
    content_type=$(get_content_type "$file")
    aws s3 cp "$file" "s3://${AWS_S3_BUCKET_DOCS}/${dest_dir}/$(basename "$file")" \
        --content-type "$content_type" || {
        echo "Failed to upload $file to S3" >&2
        exit 1
    }
}

# Create a CloudFront invalidation for the given path pattern.
invalidate_cloudfront() {
    local invalidation_path="$1"
    aws cloudfront create-invalidation \
        --distribution-id "${AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS}" \
        --paths "${invalidation_path}" || {
        echo "Failed to create CloudFront invalidation for ${invalidation_path}" >&2
        exit 1
    }
}

# Main function: upload below docs.powerdns.com/TARGET_DIR and invalidate
# the matching CloudFront paths.
publish_to_site() {
    local source_path="$1"
    local target_dir="${2:-}"
    local site_dir="docs.powerdns.com"

    local full_target_dir="${site_dir}/${target_dir}"
    upload_to_s3 "$source_path" "$full_target_dir"

    # NOTE(review): an empty TARGET_DIR makes this "/*", invalidating the
    # whole distribution — presumably intentional; confirm with the callers.
    local invalidation_path="/${target_dir}*"
    invalidate_cloudfront "$invalidation_path"

    echo "Published from ${source_path} to docs.powerdns.com${target_dir:+/}${target_dir}"
    echo "Invalidated CloudFront cache for ${invalidation_path}"
}

# Main script execution
if [ "$#" -lt 1 ] || [ "$#" -gt 2 ]; then
    echo "Usage: $0 SOURCE_PATH [TARGET_DIR]" >&2
    exit 1
fi

source_path="$1"
target_dir="${2:-}"

publish_to_site "$source_path" "$target_dir"

exit 0
name: 'Documentation'

on:
  push:
    branches: [ "main" ]
    # The release check below keys off refs/tags/v*; without a tag trigger
    # the publish step could never run, because branch pushes and PRs only
    # ever produce refs/heads/* or refs/pull/* refs.
    tags: [ "v*.*.*" ]
  pull_request:
    branches: [ "main" ]

permissions:
  contents: read

jobs:
  upload_docs:
    # on a ubuntu VM
    runs-on: ubuntu-22.04
    env:
      # github.ref_name is wrong for pull requests - have to use head ref for them
      MYREF: ${{ github.event.pull_request && github.head_ref || github.ref_name }}
      AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
      AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
      AWS_REGION: ${{ vars.AWS_REGION }}
      AWS_S3_BUCKET_DOCS: ${{ vars.AWS_S3_BUCKET_DOCS }}
      AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS: ${{ vars.AWS_CLOUDFRONT_DISTRIBUTION_ID_DOCS }}
      BUILD_PATH: ${{ github.workspace }}
      MKDOCS_IMAGE: ${{ vars.MKDOCS_IMAGE }}
      HARBOR_HOST: ${{ vars.HARBOR_HOST }}
      HARBOR_USER: ${{ vars.HARBOR_USER }}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: 0
          submodules: recursive
      # We need to login to the OX registry because we use an image to build the docs
      - name: Login to OX Registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.HARBOR_HOST }}
          username: ${{ env.HARBOR_USER }}
          password: ${{ secrets.HARBOR_PASSWORD }}
      - name: Check for release tag
        id: release_check
        run: |
          # Quote the interpolated ref so the shell cannot word-split it.
          if [[ "${{ github.ref }}" =~ ^refs/tags/v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
            echo "release=YES" >> "$GITHUB_OUTPUT"
          fi
      - if: ${{ steps.release_check.outputs.release == 'YES' }}
        run: |
          "${{ env.BUILD_PATH }}/.github/scripts/mkdocs.sh" "${{ env.BUILD_PATH }}/mkdocs.yml" "${{ env.MYREF }}" "${{ env.MKDOCS_IMAGE }}" lightningstream