#!/bin/bash
# Deploy the homepage: build the Hugo site from git and synchronize the
# result to the stage and/or production web servers.
#
# Intended to be run as an SSH forced command; arguments are taken from
# $SSH_ORIGINAL_COMMAND:
#   stage            publish to the stage server instead of production
#   --branch NAME    build the given branch (default: develop)
#   --debug          enable shell command tracing
#
# Strict mode lives here instead of the shebang so it also applies when
# the script is started as 'bash script.sh' (shebang options are ignored
# in that case).
set -euo pipefail

# Serialize concurrent deployments with an exclusive lock on fd 100.
exec 100>"$HOME/homepage.lock"
flock 100

readonly src=/srv/data/tsc-cloud/homepage/hugo-page   # git checkout / build dir
readonly dst=/srv/http/tsc/hugo                       # stage web root
readonly prod=/srv/data/tsc-cloud/homepage/production # intermediate prod cache

# The SSH forced command passes the client's command line verbatim.
# Default to empty so 'set -u' does not abort manual/interactive runs.
cmd="${SSH_ORIGINAL_COMMAND:-}"
# echo "$cmd"

publishToStage=
publishToProduction=y
branch=develop

# Parse the already word-split command arguments (see header for options).
# Globals written: publishToStage, publishToProduction, branch.
parseCMD() {
  while [ $# -gt 0 ]; do
    part="$1"
    shift
    case "$part" in
      stage)
        # 'stage' switches the target: stage only, no production push.
        publishToStage=y
        publishToProduction=
        ;;
      --branch)
        # Fail with a clear message instead of tripping 'shift' on an
        # empty argument list.
        if [ $# -eq 0 ]; then
          echo "--branch requires an argument" >&2
          exit 1
        fi
        branch="$1"
        shift
        ;;
      --debug)
        set -x
        ;;
      *)
        echo "Unknown command $part" >&2
        exit 1
        ;;
    esac
  done
}

# Intentionally unquoted: the SSH command string must be word-split into
# individual arguments for parseCMD.
parseCMD $cmd

# Fast path: local rsync of the build output onto the stage web root.
doPublishToStage() {
  rsync -ah --delete --delete-delay public/ "$dst/"
}

doPublishToProduction() {
  # This complicated two step publication is required to speed up the transfer.
  # The sftp-based production server does not allow to use checksums.
  # Hugo recreates all files with the time stamp at building.
  # Thus, all files would be transmitted as the time has changed.
  # To overcome this, the intermediate cache should have the same times as
  # the upstream server.
  echo "Syncing to intermediate stage"
  rsync -rlpcv --delete --delete-delay public/ "$prod/"
  echo "Syncing from intermediate stage to production web server"
  rclone sync --stats 3s "$prod/" ionos:/
  echo "Cleaning up empty directories"
  rclone rmdirs ionos:/
}

# exit 1
cd "$src"
echo "Fetching the latest git commits"
git fetch
echo "Switching to the latest commit on branch $branch"
git reset --hard "origin/$branch"
echo "Updating NPM packages"
npm ci
echo "Dropping old public folder"
rm -rf public
echo "Building the page"
npm run build
echo "Synchronizing files to web server"
if [ -n "$publishToStage" ]; then
  echo "Pushing to stage"
  time doPublishToStage
fi
if [ -n "$publishToProduction" ]; then
  echo "Publishing to production server"
  time doPublishToProduction
fi
echo "Deployment done"