This site has continuously evolved since I made the first commit while procrastinating on my undergrad dissertation,
commit 632cb1f0c97c07fb99b48192444397e56ea5310f
Author: Ryan Gibb <redacted>
Date: Fri Jan 22 11:27:55 2021 +0000
Initial commit
diff --git a/index.html b/index.html
new file mode 100644
index 0000000..557db03
--- /dev/null
+++ b/index.html
@@ -0,0 +1 @@
+Hello World

I started off writing plain HTML, then switched to writing in markdown and using pandoc to convert to HTML, and gradually accumulated bash scripts and makefiles to add more functionality, such as generating an Atom feed. This became unmaintainable, and at the start of 2025 I overhauled it to use the Hakyll static site generator. There are a few drafts in the git repository which I don’t want to make public yet, so I include the source code used to generate this website below. It’s quite particular to my needs – Hakyll gives you a big bag of tools which you can compose in your own way – but it may be useful as a reference.
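For orientation before the full listing: a Hakyll program is just a set of rules, each matching some files, giving them a route, and saying how to compile them; everything else is built by composing compilers and contexts. A minimal sketch of that shape (not part of this site's source) looks like:

{-# LANGUAGE OverloadedStrings #-}
import Hakyll

main :: IO ()
main = hakyll $ do
  -- copy static assets through unchanged
  match "images/*" $ do
    route idRoute
    compile copyFileCompiler
  -- render markdown posts with pandoc and wrap them in a template
  match "posts/*.md" $ do
    route $ setExtension "html"
    compile $
      pandocCompiler
        >>= loadAndApplyTemplate "templates/default.html" defaultContext
        >>= relativizeUrls
  -- templates are compiled once and reused by the rules above
  match "templates/*" $ compile templateBodyCompiler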
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
import Bib
import BibHakyll
import Control.Applicative ((<|>))
import Control.Monad (filterM, forM, liftM, (>=>), forM_)
import Control.Monad.IO.Class (liftIO)
import Data.Aeson
import Data.Aeson.Types (Parser)
import qualified Data.ByteString.Lazy as BSL
import Data.Char (isAlphaNum)
import qualified Data.Char as C
import Data.Either (fromRight)
import qualified Data.HashMap.Strict as HM
import qualified Data.List as L
import qualified Data.Map as M
import Data.Maybe (catMaybes, fromMaybe, isJust, listToMaybe, mapMaybe)
import Data.Monoid (mappend)
import Data.Text (Text, intercalate, isInfixOf, pack, unpack)
import qualified Data.Text as T
import Data.Time (UTCTime (UTCTime))
import Data.Time.Format (formatTime, parseTimeM)
import Data.Time.Locale.Compat (defaultTimeLocale)
import Graphics.HsExif
import Hakyll
import Numeric (showFFloat)
import System.Directory (doesFileExist)
import System.FilePath (takeBaseName, takeFileName)
import Text.Blaze.Html (toHtml, toValue, (!))
import qualified Text.Blaze.Html as ExifTag
import Text.Blaze.Html.Renderer.String (renderHtml)
import qualified Text.Blaze.Html5 as H
import qualified Text.Blaze.Html5.Attributes as A
import Text.Pandoc
import Text.Pandoc.Highlighting (pygments)
import Text.Pandoc.Lua (applyFilter)
import Data.Ord (comparing)
import Data.Time (UTCTime(UTCTime), parseTimeOrError, defaultTimeLocale) --, parseTimeM, parseTime)
indexFiles =
"static/home.org"
.||. "static/logs.org"
.||. "static/news.org"
.||. "static/index.org"
.||. "static/photos.org"
.||. "static/papers.org"
tagFiles =
"static/projects.org"
.||. "static/research.org"
.||. "static/technology.org"
.||. "static/self-hosting.org"
htmlFiles = "static/**.md" .||. "static/**.org"
postFiles = htmlFiles .&&. complement indexFiles .&&. complement tagFiles
photoFiles = "static/photos/*"
logFiles = fromRegex "static/[0-9][0-9][0-9][0-9]-[0-9][0-9]-[0-9][0-9].*"
articleFiles = postFiles .&&. complement logFiles
dateFormat :: String
dateFormat = "%e %b %Y"
feedConfiguration :: FeedConfiguration
feedConfiguration =
FeedConfiguration
{ feedTitle = "ryan.freumh.org",
feedDescription = "ryan.freumh.org",
feedAuthorName = "Ryan Gibb",
feedAuthorEmail = "ryan@freumh.org",
feedRoot = "https://ryan.freumh.org"
}
main :: IO ()
main = hakyll $ do
tags <- buildTags postFiles (fromCapture "*.html")
match tagFiles $ do
route idRoute
compile tagCompiler
tagsRules tags $ \tag pattern -> do
route idRoute
compile $ do
let title = titleCase tag
let file = "static/" ++ tag ++ ".org"
posts <- recentFirst =<< filterM isPublished =<< loadAll pattern
let ctx =
constField "title" title
`mappend` listField "posts" (postContext dateFormat dateFormat tags) (return posts)
`mappend` defaultContext
exists <- unsafeCompiler $ doesFileExist file
if exists
then do
body <- load $ fromFilePath file
makeItem (itemBody body)
>>= applyAsTemplate (indexContext posts (postContext dateFormat dateFormat tags))
>>= loadAndApplyTemplate "templates/default.html" ctx
>>= relativizeUrls
else
makeItem ""
>>= loadAndApplyTemplate "templates/tag.html" ctx
>>= loadAndApplyTemplate "templates/default.html" ctx
>>= relativizeUrls
match "static/home.org" $ do
route $ staticRoute `composeRoutes` setExtension "html"
compile $ do
posts <- recentFirst =<< filterM isPublished =<< loadAll articleFiles
indexCompiler posts (postContext dateFormat dateFormat tags)
match "static/logs.org" $ do
route $ staticRoute `composeRoutes` setExtension "html"
compile $ do
-- so that we pick up published from the title in postContext
posts <- reverse <$> loadAllSnapshots logFiles "feed"
indexCompiler posts (postContext dateFormat dateFormat tags)
match "static/news.org" $ do
route $ staticRoute `composeRoutes` setExtension "html"
compile $ do
posts <- recentFirst =<< filterM isPublished =<< loadAll postFiles
indexCompiler posts (postContext dateFormat dateFormat tags)
match "static/index.org" $ do
route $ staticRoute `composeRoutes` setExtension "html"
compile $ do
posts <- filterM isNotDraft =<< loadAll (htmlFiles .&&. complement "static/index.org")
indexCompiler posts (postContext dateFormat dateFormat tags)
match "static/photos.org" $ do
route $ staticRoute `composeRoutes` setExtension "html"
compile $ do
photos <- recentFirst =<< (loadAll (photoFiles .&&. hasNoVersion) :: Compiler [Item CopyFile])
photosCompiler photos
match "papers.bib" $ do
route idRoute
compile bibFileCompiler
match "static/papers.org" $ do
route $ staticRoute `composeRoutes` setExtension "html"
compile $ do
(Bibs bibFile) <- loadBody "papers.bib" :: Compiler Bibs
let sortedBibs = reverse $ fmap fst $ L.sortBy (comparing snd) $ fmap (\b -> (b, bibDate b)) bibFile
let bibsCtx = listField "papers" (bibContext dateFormat) (mapM makeItem sortedBibs)
getResourceBody
>>= renderPandoc
>>= applyAsTemplate bibsCtx
>>= loadAndApplyTemplate "templates/default.html" defaultContext
>>= relativizeUrls
(Bibs bibs) <- preprocess $ do
parseBibFile <$> readFile "papers.bib"
forM_ bibs $ \b ->
create [fromCapture "papers/*.bib" $ name b] $ do
route idRoute
compile $ do
bibFile <- loadBody "papers.bib" :: Compiler Bibs
makeItem b
>>= loadAndApplyTemplate "templates/bib" (bibContext dateFormat)
matchMetadata articleFiles isNotDraftMeta $ do
route $ staticRoute `composeRoutes` setExtension "html"
compile $ postCompiler tags "templates/post.html"
matchMetadata logFiles isNotDraftMeta $ do
route $ staticRoute `composeRoutes` setExtension "html"
compile $ postCompiler tags "templates/log.html"
create ["atom.xml"] $ do
route idRoute
compile $ do
let feedContext = postContext dateFormat "%Y-%m-%dT%H:%M:%S%Q%Ez" tags `mappend` bodyField "content"
posts <- recentFirst =<< filterM isPublished =<< loadAllSnapshots postFiles "feed"
atomTemplate <- loadBody "templates/atom.xml"
atomItemTemplate <- loadBody "templates/atom-item.xml"
renderAtomWithTemplates atomTemplate atomItemTemplate feedConfiguration feedContext posts
create ["sitemap.xml"] $ do
route idRoute
compile $ do
posts <- loadAll htmlFiles
let sitemapCtx =
listField "posts" (urlField "loc" `mappend` postContext dateFormat dateFormat tags) (return posts)
`mappend` constField "root" "https://ryan.freumh.org"
`mappend` defaultContext
makeItem ""
>>= loadAndApplyTemplate "templates/sitemap.xml" sitemapCtx
match "404.md" $ do
route $ setExtension "html"
compile $ do
getResourceBody
>>= loadAndApplyTemplate "templates/default.html" defaultContext
matchMetadata "static/**" isNotDraftMeta $ do
route staticRoute
compile copyFileCompiler
match "static/*.css" $ do
route staticRoute
compile compressCssCompiler
match "ieee-with-url.csl" $
compile cslCompiler
match "references.bib" $
compile biblioCompiler
match "templates/*" $
compile templateBodyCompiler
staticRoute :: Routes
staticRoute = gsubRoute "static/" (const "")
indexCompiler :: [Item a] -> Context a -> Compiler (Item String)
indexCompiler posts context = do
getResourceBody
>>= transformRender
>>= applyAsTemplate (indexContext posts context)
>>= linkCompiler
>>= loadAndApplyTemplate "templates/default.html" defaultContext
>>= relativizeUrls
tagCompiler :: Compiler (Item String)
tagCompiler = do
getResourceBody
>>= bibRender "ieee-with-url.csl" "references.bib"
>>= linkCompiler
>>= relativizeUrls
postCompiler :: Tags -> Identifier -> Compiler (Item String)
postCompiler tags template = do
getResourceBody
>>= saveSnapshot "body"
>>= bibRenderFeed "ieee-with-url.csl" "references.bib"
>>= loadAndApplyTemplate template (postContext dateFormat dateFormat tags)
>>= linkCompiler
>>= saveSnapshot "feed"
getResourceBody
>>= saveSnapshot "body"
>>= bibRender "ieee-with-url.csl" "references.bib"
>>= loadAndApplyTemplate template (postContext dateFormat dateFormat tags)
>>= linkCompiler
>>= loadAndApplyTemplate "templates/default.html" (postContext dateFormat dateFormat tags)
>>= relativizeUrls
linkCompiler :: Item String -> Compiler (Item String)
linkCompiler = pure . fmap (withUrls rewriteLinks)
photosCompiler :: [Item a] -> Compiler (Item String)
photosCompiler photos = do
getResourceBody
>>= renderPandoc
>>= applyAsTemplate (photosContext photos)
>>= loadAndApplyTemplate "templates/default.html" defaultContext
>>= relativizeUrls
readerOptions :: ReaderOptions
readerOptions =
def
{ readerExtensions = foldr enableExtension pandocExtensions [Ext_citations, Ext_smart]
}
writerOptions :: WriterOptions
writerOptions =
def
{ writerExtensions = enableExtension Ext_smart pandocExtensions,
writerHighlightStyle = Just pygments,
writerCiteMethod = Citeproc
}
transformRender :: Item String -> Compiler (Item String)
transformRender =
renderPandocWithTransformM defaultHakyllReaderOptions defaultHakyllWriterOptions pandocTransform
bibRender :: String -> String -> Item String -> Compiler (Item String)
bibRender cslFileName bibFileName pandoc = do
csl <- load $ fromFilePath cslFileName
bib <- load $ fromFilePath bibFileName
let transform =
withItemBody
( \(Pandoc (Meta meta) bs) ->
pure $
Pandoc
(Meta $ M.insert "link-citations" (MetaBool True) meta)
bs
)
>=> processPandocBiblios csl [bib]
>=> withItemBody pandocTransform
renderPandocItemWithTransformM readerOptions writerOptions transform pandoc
bibRenderFeed :: String -> String -> Item String -> Compiler (Item String)
bibRenderFeed cslFileName bibFileName pandoc = do
csl <- load $ fromFilePath cslFileName
bib <- load $ fromFilePath bibFileName
let transform =
withItemBody
( \(Pandoc (Meta meta) bs) ->
pure $
Pandoc
(Meta $ M.insert "link-citations" (MetaBool True) meta)
bs
)
>=> processPandocBiblios csl [bib]
>=> withItemBody pandocTransformFeed
renderPandocItemWithTransformM readerOptions writerOptions transform pandoc
pandocTransform :: Pandoc -> Compiler Pandoc
pandocTransform =
unsafeCompiler
. runIOorExplode
. ( applyFilter def [] "scripts/org-keywords.lua"
>=> applyFilter def [] "scripts/elem-ids.lua"
>=> applyFilter def [] "scripts/footnote-commas.lua"
>=> applyFilter def [] "scripts/anchor-links.lua"
)
pandocTransformFeed :: Pandoc -> Compiler Pandoc
pandocTransformFeed =
unsafeCompiler
. runIOorExplode
. ( applyFilter def [] "scripts/org-keywords.lua"
>=> applyFilter def [] "scripts/elem-ids.lua"
>=> applyFilter def [] "scripts/footnote-commas.lua"
)
indexContext :: [Item a] -> Context a -> Context String
indexContext posts itemContext =
listField "posts" itemContext (return posts)
`mappend` defaultContext
photosContext :: [Item a] -> Context String
photosContext photos =
listField "photos" photoContext (return photos)
`mappend` defaultContext
postContext :: String -> String -> Tags -> Context String
postContext titleDateFormat dateFormat tags =
field "prev" (adjacentLogField (-1) dateFormat)
`mappend` field "next" (adjacentLogField 1 dateFormat)
`mappend` dateFieldFromTitle "title" titleDateFormat
`mappend` dateField "published" dateFormat
`mappend` myDateField "updated" dateFormat
`mappend` myTagsField "tags" tags
`mappend` defaultContext
-- https://github.com/emmanueltouzery/hsexif/issues/23#issuecomment-2835135828
formatNumeric f (ExifRational num den) = f num den ""
formatNumeric f (ExifRationalList values) = go values ""
where
go [] = id
go [(n, d)] = f n d
go ((n, d) : ns) = f n d . showString ", " . go ns
formatNumeric _ value = show value
formatAsNumber :: Int -> ExifValue -> String
formatAsNumber n = formatNumeric fmt
where
fmt num den s = trim0 (fltString num den) ++ s
trim0 = reverse . dropWhile ('.' ==) . dropWhile ('0' ==) . reverse
fltString num den = showFFloat (Just n) (fromIntegral num / fromIntegral den :: Double) ""
ppExposureTime :: ExifValue -> String
ppExposureTime v@(ExifRational num den) =
let seconds = fromIntegral num / (fromIntegral den :: Double)
value
| seconds <= 0.25 && seconds > 0 = "1/" ++ show (round (1 / seconds) :: Int)
| otherwise = formatAsNumber 1 v
in T.unpack $ T.append (T.pack value) " sec."
ppExposureTime v = show v
photoContext :: Context a
photoContext =
dateField "published" dateFormat
`mappend` urlField "url"
`mappend` pathField "path"
`mappend` titleField "title"
`mappend` thumbnailField "thumb"
`mappend` videoField "video"
`mappend` exifDateField "published" dateFormat
`mappend` exifLatField "lat"
`mappend` exifLongField "lon"
`mappend` exifField "make" make show
`mappend` exifField "model" model show
`mappend` exifField "focallength" focalLength (formatAsFloatingPoint 2)
`mappend` exifField "aperture" apertureValue (formatAsFloatingPoint 2)
`mappend` exifField "exposure" exposureTime ppExposureTime
`mappend` exifField "iso" isoSpeedRatings show
`mappend` locationField "loc"
exifField :: String -> ExifTag -> (ExifValue -> String) -> Context a
exifField key tag print =
field key $ \item -> do
metadata <- exifMetadata item
case M.lookup tag metadata of
Nothing -> noResult ""
Just value -> return $ print value
exifLatField :: String -> Context a
exifLatField key =
field key $ \item -> do
metadata <- exifMetadata item
case getGpsLatitudeLongitude metadata of
Nothing -> noResult ""
Just (lat, _) -> return $ show lat
exifLongField :: String -> Context a
exifLongField key =
field key $ \item -> do
metadata <- exifMetadata item
case getGpsLatitudeLongitude metadata of
Nothing -> noResult ""
Just (_, lon) -> return $ show lon
exifDateField :: String -> String -> Context a
exifDateField key format =
field key $ \item -> do
metadata <- exifMetadata item
case getDateTimeOriginal metadata of
Nothing -> noResult ""
Just date -> return $ formatTime defaultTimeLocale format date
-- TODO don't load metadata individually for each field
exifMetadata :: Item a -> Compiler (M.Map ExifTag ExifValue)
exifMetadata item = do
let identifier = itemIdentifier item
exifData <- unsafeCompiler (parseFileExif (toFilePath identifier))
return $ fromRight M.empty exifData
data PhotoLocation = PhotoLocation
{ displayName :: T.Text,
addressMap :: HM.HashMap T.Text T.Text
}
deriving (Show)
instance FromJSON PhotoLocation where
parseJSON = withObject "PhotoLocation" $ \v ->
PhotoLocation
<$> v .: "display_name"
<*> v .: "address"
readCachedLocation :: FilePath -> IO (Either String PhotoLocation)
readCachedLocation photoPath = do
let cacheFile = "reverse-geocoding/" ++ takeFileName photoPath ++ ".json"
exists <- doesFileExist cacheFile
if not exists
then return $ Left "Cache file not found"
else eitherDecode <$> BSL.readFile cacheFile
formatLocation :: HM.HashMap T.Text T.Text -> T.Text
formatLocation m =
let country = HM.lookup "country" m
city = HM.lookup "city" m
state_district = HM.lookup "state_district" m
heirarchy
| country == Just "United States" && city == Just "New York" =
[ ["borough"],
["state"],
["country"]
]
| country == Just "United States" =
[ ["city", "town", "village", "road"],
["state"],
["country"]
]
| country == Just "United Kingdom" && city == Just "London" =
[ ["suburb"],
["city"],
["country"]
]
| country == Just "United Kingdom" && state_district == Just "Greater London" =
[ ["city"],
["state_district"],
["country"]
]
| country == Just "United Kingdom" =
[ ["city", "town", "village"],
["country"]
]
| country == Just "France" && city == Just "Paris" =
[ ["suburb"],
["city"],
["country"]
]
| country == Just "Italy" =
[ ["quarter"],
["city", "town", "village"],
["state"],
["country"]
]
| otherwise =
[ ["historic"],
["city", "state", "region", "town"],
["country"]
]
lookupFirst ks = listToMaybe $ mapMaybe (`HM.lookup` m) ks
fields = map lookupFirst heirarchy
in T.intercalate ", " (catMaybes fields)
locationField :: String -> Context a
locationField key = field key $ \item -> do
let fp = toFilePath (itemIdentifier item)
eLoc <- unsafeCompiler $ readCachedLocation fp
case eLoc of
Left _ -> noResult ""
Right loc ->
let txt = formatLocation (addressMap loc)
in if T.null txt then noResult "" else return (T.unpack txt)
myDateField :: String -> String -> Context String
myDateField name format =
field name $ \item -> do
metadata <- getMetadata (itemIdentifier item)
let date :: Maybe UTCTime
date = lookupString name metadata >>= parseTimeM True defaultTimeLocale "%Y-%m-%d"
case date of
Nothing -> noResult ""
Just date -> return $ formatTime defaultTimeLocale format date
dateFieldFromTitle :: String -> String -> Context String
dateFieldFromTitle key format =
field key $ \item ->
case dateFromTitle item of
Nothing -> noResult ""
Just date ->
return $ formatTime defaultTimeLocale format date
thumbnailField :: String -> Context a
thumbnailField key = field key $ \item -> do
mRoute <- getRoute (itemIdentifier item)
case mRoute of
Nothing -> noResult ""
Just url ->
if ".mp4" `L.isSuffixOf` url
then noResult ""
else
return $
T.unpack $
T.replace "photos/" "photos/thumb/" (T.pack url)
videoField :: String -> Context a
videoField key = field key $ \item -> do
mRoute <- getRoute (itemIdentifier item)
case mRoute of
Nothing -> noResult ""
Just url ->
if ".mp4" `L.isSuffixOf` url
then
return $
T.unpack $
T.replace "static/photos/" "photos/" (T.pack url)
else noResult ""
myTagsField :: String -> Tags -> Context String
myTagsField key tags = field key $ \item -> do
tags' <- getTags $ itemIdentifier item
if null tags'
then noResult ""
else do
links <- forM tags' $ \tag -> do
route' <- getRoute $ tagsMakeId tags tag
return $ renderTag tag route'
return $ renderHtml $ mconcat . L.intersperse ", " $ catMaybes links
renderTag :: String -> Maybe FilePath -> Maybe H.Html
renderTag _ Nothing = Nothing
renderTag tag (Just filePath) =
Just $
H.a ! A.href (toValue $ toUrl filePath) $
toHtml tag
isPublished :: Item a -> Compiler Bool
isPublished item = do
metadata <- getMetadata (itemIdentifier item)
case lookupString "published" metadata of
Just value -> return (value /= "false")
Nothing -> return (isJust (dateFromTitle item))
isNotDraft :: Item a -> Compiler Bool
isNotDraft item = do
metadata <- getMetadata (itemIdentifier item)
return $ isNotDraftMeta metadata
isNotDraftMeta :: Metadata -> Bool
isNotDraftMeta metadata = do
case lookupString "published" metadata of
Just value -> value /= "false"
Nothing -> True
dateFromTitle :: Item a -> Maybe UTCTime
dateFromTitle item =
let filePath = toFilePath (itemIdentifier item)
title = takeBaseName filePath
in parseTimeM True defaultTimeLocale "%Y-%m-%d" title
rewriteLinks :: String -> String
rewriteLinks url
| "://" `T.isInfixOf` turl = url
-- workaround https://github.com/jgm/pandoc/issues/6916
| "::" `T.isInfixOf` turl =
let (basePart, rest) = T.breakOn "::" turl
cleanedBase = replaceExts basePart
headingPart = T.drop 2 rest -- Remove the "::"
generatedId = generateId headingPart
in T.unpack $ cleanedBase <> "#" <> generatedId
| otherwise =
let (base, fragment) = T.breakOn "#" turl
processedBase = replaceExts base
in T.unpack $ processedBase <> fragment
where
turl = T.pack url
replaceExts = replaceExt ".md" ".html" . replaceExt ".org" ".html"
replaceExt :: T.Text -> T.Text -> T.Text -> T.Text
replaceExt oldExt newExt url =
let (base, fragment) = T.breakOn "#" url
cleanedBase = if "::" `T.isSuffixOf` base then T.dropEnd 2 base else base
processedBase =
if oldExt `T.isSuffixOf` cleanedBase
then T.replace oldExt newExt cleanedBase
else cleanedBase
in processedBase <> fragment
generateId :: T.Text -> T.Text
generateId heading =
let lower = T.toLower heading
spaced = T.replace (T.pack " ") (T.pack "-") lower
filtered = T.filter (\c -> isAlphaNum c || c == '-' || c == '_' || c == '.') spaced
parts = T.split (== '-') filtered
nonEmptyParts = filter (not . T.null) parts
cleaned = if null nonEmptyParts then T.pack "section" else T.intercalate (T.pack "-") nonEmptyParts
in cleaned
adjacentLogField :: Int -> String -> Item String -> Compiler String
adjacentLogField offset format item = do
posts <- loadAllSnapshots logFiles "body" :: Compiler [Item String]
let adjacent = getAdjacentLog posts item offset
case adjacent of
Nothing -> noResult ""
Just a -> do
mroute <- getRoute (itemIdentifier a)
let filePath = toFilePath (itemIdentifier item)
title = takeBaseName filePath
date = fmap (formatTime defaultTimeLocale format) (dateFromTitle a)
label = fromMaybe title date
return $ maybe "" (\r -> "<a href=\"" ++ r ++ "\">" ++ label ++ "</a>") mroute
getAdjacentLog :: [Item a] -> Item b -> Int -> Maybe (Item a)
getAdjacentLog posts current offset =
case L.elemIndex (itemIdentifier current) (map itemIdentifier posts) of
Nothing -> Nothing
Just idx ->
let newIndex = idx + offset
in if newIndex >= 0 && newIndex < length posts
then Just (posts !! newIndex)
else Nothing
titleCase :: String -> String
titleCase [] = []
titleCase (x : xs) = C.toUpper x : map C.toLower xs
bibDate :: Bib -> UTCTime
bibDate b = let
latexifyPlain' = fromRight (error $ "bibDate for entry " <> Bib.name b) . latexifyPlain
date = latexifyPlain' $ fromMaybe (error $ "bibDate: no date in entry " <> Bib.name b) $ bibIndex b "date"
parsed = parseTimeOrError True defaultTimeLocale "%Y-%m-%d" date :: UTCTime
in parsed

The directory tree looks something like,
./ieee-with-url.csl
./references.bib
./scripts/anchor-links.lua
./scripts/elem-ids.lua
./scripts/footnote-commas.lua
./static/about.org
./static/articles.org
./static/home.org
./static/index.org
./static/logs.org
./static/news.org
./static/papers.org
./static/photos.org
./static/research.org
./static/keys
./static/code.css
./static/style.css
./static/favicon.ico
./static/rss.svg
./static/2023-10-09.md
./static/2023-10-16.md
./static/2023-10-23.md
./static/...
./static/fonts/...
./static/images/...
./static/papers/...
./static/photos/...
./static/resources/...
./templates/atom-item.xml
./templates/atom.xml
./templates/default.html
./templates/log.html
./templates/post-list.html
./templates/post.html
./templates/sitemap.xml
./templates/tag.html
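The templates consume the fields that the contexts above define, using Hakyll's $field$ syntax. The real templates aren't reproduced here, but a hypothetical templates/post.html along these lines would pick up the title, dates, tags, and body supplied by postContext:

<article>
  <h1>$title$</h1>
  <p>$published$$if(updated)$, updated $updated$$endif$</p>
  $if(tags)$<p>Tags: $tags$</p>$endif$
  $body$
</article>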
NB this also depends on BibHakyll.hs and Bib.hs, which aren't included here.
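The listing compiles to a single executable which is then driven with Hakyll's standard commands – typically ./site build to generate everything into _site/, ./site watch to preview locally, and ./site rebuild after changing the generator itself – though the exact build setup (cabal, stack, or nix) isn't shown here.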