name: Build and deploy Docusaurus site to Azure Blob Storage

on:
  push:
    branches:
      - main
      - dev
  workflow_dispatch:
    inputs:
      environment:
        description: 'Environment to deploy to'
        required: true
        default: 'development'
        type: choice
        options:
          - development
          - production
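
# Trigger summary: pushes to main or dev deploy automatically; workflow_dispatch
# runs on demand with an explicitly chosen target environment.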
jobs:
  determine-environment:
    runs-on: ubuntu-latest
    outputs:
      environment: ${{ steps.set-env.outputs.environment }}
    steps:
      - name: Determine environment
        id: set-env
        run: |
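          # Precedence: a manual dispatch input wins; otherwise pushes to main
          # deploy to production and all other branches to development.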
if [[ "${{ github.event_name }}" == "workflow_dispatch" ]]; then
echo "environment=${{ github.event.inputs.environment }}" >> $GITHUB_OUTPUT
elif [[ "${{ github.ref }}" == "refs/heads/main" ]]; then
echo "environment=production" >> $GITHUB_OUTPUT
else
echo "environment=development" >> $GITHUB_OUTPUT
fi

  build:
    runs-on: self-hosted
    needs: determine-environment
    environment: ${{ needs.determine-environment.outputs.environment }}
    permissions:
      contents: read
    steps:
      - name: Check out source code
        uses: actions/checkout@v4
      - name: Set up Node.js
        uses: actions/setup-node@v3
        with:
          node-version: '22.x'
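      # The three caches below are keyed on a hash of their inputs; the
      # restore-keys prefixes let a run fall back to the most recent
      # partial match when the exact key misses.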
      - name: Cache Node.js dependencies
        uses: actions/cache@v3
        with:
          path: ~/.npm
          key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-node-
      - name: Cache Docusaurus build
        uses: actions/cache@v3
        id: cache-build
        with:
          path: |
            .docusaurus
            build
          key: ${{ runner.os }}-docusaurus-build-${{ hashFiles('src/**', 'docs/**', 'blog/**', 'docusaurus.config.js', 'sidebars.js', 'package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-docusaurus-build-
      - name: Cache webpack
        uses: actions/cache@v3
        with:
          path: node_modules/.cache
          key: ${{ runner.os }}-webpack-${{ hashFiles('**/package-lock.json') }}
          restore-keys: |
            ${{ runner.os }}-webpack-
      - name: Install dependencies and build site
        run: |
          npm ci
          # Check if we have a valid cached build
          if [[ "${{ steps.cache-build.outputs.cache-hit }}" == "true" ]]; then
            echo "Build cache found, checking if rebuild needed..."
            # Docusaurus will intelligently rebuild only what's changed
          else
            echo "No build cache found, performing full build..."
          fi
          # Run build - Docusaurus will use its internal caching
          npm run build
        env:
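          # 16384 MB V8 heap; large Docusaurus sites can exhaust Node's default limit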
          NODE_OPTIONS: "--max-old-space-size=16384"
          DOCUSAURUS_URL: "https://${{ secrets.STORAGE_ACCOUNT_NAME }}.z13.web.core.windows.net"
          # Add any other environment-specific build variables here
          NODE_ENV: ${{ needs.determine-environment.outputs.environment }}
      - name: Upload artifact for deployment
        uses: actions/upload-artifact@v4
        with:
          name: build-output
          path: build/

  deploy:
    runs-on: ubuntu-latest
    needs: [build, determine-environment]
    environment: ${{ needs.determine-environment.outputs.environment }}
    steps:
      - name: Download build artifact
        uses: actions/download-artifact@v4
        with:
          name: build-output
          path: build/
      - name: Install azcopy
        run: |
          # Download and install azcopy for faster uploads
          wget -O azcopy.tar.gz https://aka.ms/downloadazcopy-v10-linux
          tar -xf azcopy.tar.gz --strip-components=1
          sudo mv azcopy /usr/local/bin/
          azcopy --version
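      # azcopy handles the bulk transfer below; the az CLI preinstalled on
      # ubuntu-latest runners is only used to mint the short-lived SAS token.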
      - name: Upload to Azure Blob Storage with AzCopy
        run: |
          echo "Deploying to ${{ needs.determine-environment.outputs.environment }} environment"
          echo "Starting high-performance sync of changed files..."
          # Generate a short-lived SAS token for azcopy (more secure than
          # handing azcopy the account key directly)
          end_date=$(date -u -d "30 minutes" '+%Y-%m-%dT%H:%MZ')
          sas_token=$(az storage container generate-sas \
            --account-name ${{ secrets.STORAGE_ACCOUNT_NAME }} \
            --account-key ${{ secrets.STORAGE_ACCOUNT_KEY }} \
            --name '$web' \
            --permissions dlrw \
            --expiry $end_date \
            --output tsv)
          # Use azcopy sync for incremental updates (only changed files are
          # uploaded), which is much faster on subsequent deployments.
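          # Flag notes: --delete-destination removes blobs that no longer exist
          # in ./build, --compare-hash=MD5 compares content hashes instead of
          # modification times, and --cap-mbps=0 leaves throughput uncapped.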
          azcopy sync "./build/" \
            "https://${{ secrets.STORAGE_ACCOUNT_NAME }}.blob.core.windows.net/\$web?$sas_token" \
            --delete-destination=true \
            --compare-hash=MD5 \
            --log-level=INFO \
            --cap-mbps=0 \
            --block-size-mb=4
          echo "Sync completed! Only changed files were uploaded."
      - name: Set blob content types efficiently
        run: |
          echo "Setting content types for web files..."
          # With 60,000+ files, per-blob content type updates would take far
          # too long. azcopy already infers each blob's Content-Type from its
          # file extension during upload, and Azure Static Websites serves
          # whatever Content-Type is stored on the blob.
          # If custom content types are ever required, consider:
          # 1. Setting them during the build process
          # 2. Using Azure CDN rules to rewrite content types
          # 3. Running a separate, less frequent fix-up script
          echo "Content type handling delegated to upload-time defaults."
      - name: Purge CDN endpoint (if configured)
        run: |
          if [[ -n "${{ secrets.CDN_ENDPOINT_NAME }}" ]] && [[ -n "${{ secrets.CDN_PROFILE_NAME }}" ]] && [[ -n "${{ secrets.CDN_RESOURCE_GROUP }}" ]]; then
            echo "Note: CDN purge requires Azure login. Skipping CDN purge when using storage key authentication."
            echo "To use CDN purge, you'll need to use Azure AD authentication or purge CDN manually."
          else
            echo "CDN configuration not found, skipping CDN purge."
          fi
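      # A sketch of what the purge could look like with Azure AD authentication
      # instead of a storage key (assumes an AZURE_CREDENTIALS secret for
      # azure/login, which this repo may not define):
      #
      # - name: Log in to Azure
      #   uses: azure/login@v2
      #   with:
      #     creds: ${{ secrets.AZURE_CREDENTIALS }}
      # - name: Purge CDN endpoint
      #   run: |
      #     az cdn endpoint purge \
      #       --resource-group "${{ secrets.CDN_RESOURCE_GROUP }}" \
      #       --profile-name "${{ secrets.CDN_PROFILE_NAME }}" \
      #       --name "${{ secrets.CDN_ENDPOINT_NAME }}" \
      #       --content-paths '/*'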
      - name: Display deployment URL
        run: |
          echo "🚀 Deployment complete!"
          echo "Environment: ${{ needs.determine-environment.outputs.environment }}"
          echo "URL: https://${{ secrets.STORAGE_ACCOUNT_NAME }}.z13.web.core.windows.net"
          if [[ -n "${{ secrets.CUSTOM_DOMAIN }}" ]]; then
            echo "Custom Domain: ${{ secrets.CUSTOM_DOMAIN }}"
          fi
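
# Secrets referenced by this workflow: STORAGE_ACCOUNT_NAME and
# STORAGE_ACCOUNT_KEY (required); CDN_ENDPOINT_NAME, CDN_PROFILE_NAME,
# CDN_RESOURCE_GROUP, and CUSTOM_DOMAIN (optional).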