#!/usr/bin/env bash
getBlogSlug(){
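# Maps a source path to its published path, e.g. content/blog/042-example.md -> /blog/example.html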
echo "$1" | sed 's/^content\//\//' | sed 's/.md$/.html/' | sed -E 's/[0-9]+-//'
}
blog(){
echo "Generating Blog pages"
[ ! -f "tmp/templates/tagList.html" ] && echo "No taglist file" | tee "tmp/templates/tagList.html" > /dev/stderr
local newer=""
[ -f tmp/lastBlogUpdate ] && newer="-newer tmp/lastBlogUpdate"
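# Only rebuild posts changed since the last run; $newer is deliberately left unquoted so find sees -newer and its argument as two words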
find content/ -type f -name '*.md' $newer | while read file; do
newFileName=$(getBlogSlug "$file")
mkdir -p "public_html/${newFileName%/*}"
pandoc --template=templates/blog.html -f markdown -t html5 "$file" > "public_html/$newFileName"
done
touch tmp/lastBlogUpdate
}
makeIntro(){
local file="$1"
local output="tmp/intros/$file"
local rssoutput="tmp/rss/$file"
# We delete tmp each time the build script runs, so if the file exists, we have already generated one this time
[ -f "$output" ] && return 1
mkdir -p "${output%/*}"
mkdir -p "${rssoutput%/*}"
local info="$(sed -n '/---/,/---/p' "$file" | sed '/^$/,$d' | sed -n '1,/---/p' | sed '/^---$/d')"
local slug=$(getBlogSlug "$file")
local date="$(echo "$info" | yq -r .date)"
local rfc822="$(date --utc -d "$date" "+%a, %d %b %Y %H:%M:%S") UT"
local tags="$(echo "$info" | yq -r 'if ( .tags | type ) == "array" then .tags else [ .tags ] end | join("\n")' | awk '{print "
" $0 "
"}' )"
local title="$(echo "$info" | yq -r .title)"
local description="$(echo "$info" | yq -r .description)"
echo "
"
else
echo "Need to generate the taglist" > /dev/stderr
fi
}
tags(){
echo "Generating Tags"
# Loops through each blog and puts it in tag lists, although only blogs that have changed
local newer=""
[ -f tmp/lastTagUpdate ] && newer="-newer tmp/lastTagUpdate"
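# Posts prefixed xxx- are presumably unpublished drafts, so they are kept out of the tag lists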
find content/blog/ -type f -name '*.md' -not -name 'xxx-*' $newer | while read file; do
sed -n '/---/,/---/p' "$file" | sed -n '1,/---/p' | sed '/^---$/d' | sed '/^$/,$d' | yq -r 'if ( .tags | type ) == "array" then .tags else [ .tags ] end | join("\n")' | while read tag; do
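# Frontmatter tags can be a single string or a YAML list; the yq filter normalises either form to one tag per line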
tag=$(echo "$tag" | tr ' ' '_')
# Adds the file to the tags list if it's not already in there
grep -q "$file" tmp/tag/"$tag" 2> /dev/null || echo "$file" >> tmp/tag/"$tag"
done
done
echo "Generating Taglist HTML"
html_tag_list > tmp/templates/tagList.html
# We should now have a folder with a text file for each tag containing each blog
echo "Generating Tag Index Pages"
find tmp/tag/ -type f $newer | while read tag; do
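# tagIndex (not shown here) presumably renders a tag's post list; e.g. tmp/tag/Some_Tag ends up at public_html/tag/some_tag.html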
filename="$(echo $tag | sed 's/^tmp//' | tr '[A-Z]' '[a-z]').html"
tagIndex "$tag" > "public_html/$filename"
done
touch tmp/lastTagUpdate
}
card(){
echo "Generating Card"
content/card.curl > public_html/card
}
rss(){
echo "Generating RSS Feed"
lastUpdate="$(date --utc "+%a, %d %b %Y %H:%M:%S") UT"
# First loop makes all the intros needed and stores them in tmp/intros
# Run the loop in the current shell (not a pipeline subshell) so that wait actually blocks on the makeIntro jobs
while read file; do
makeIntro "$file" &
done < <(find content/blog/ -type f -name '*.md' | grep -v 'xxx' | sort -r | head -n 20)
wait
(
echo "
https://jonathanh.co.uk
$lastUpdate"
find content/blog/ -type f -name '*.md' | grep -v 'xxx' | sort -r | head -n 20 | while read file; do
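# Drafts are skipped; getIntro (not shown here) presumably prints the cached tmp/rss entry for the post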
grep -Eq '^draft: true' "$file" || getIntro "$file" rss
done
echo "";
) > public_html/feed.rss
}
clean(){
rm -rf tmp > /dev/null 2> /dev/null
# Don't remove the public_html folder or docker won't re-attach it
rm -rf public_html/*
}
# Make sure the folders we will need exist
mkdir -p public_html/{blog,tag}
mkdir -p tmp/{tag,templates,intros}
case "$1" in
clean) clean ;;
index) index ;;
blog) blog ;;
tags) tags ;;
card) card ;;
rss) rss ;;
all) tags && blog && index && rss && card ;;
esac