feat: standalone

This commit is contained in:
Aaron Yarborough 2025-05-02 18:13:07 +01:00
parent 92788a9c69
commit f2f3bcb190
67 changed files with 8066 additions and 1312 deletions

2
.env.development Normal file
View file

@ -0,0 +1,2 @@
# NEXT_PUBLIC_CONTENT_API_BASE_URL=http://localhost:8055
NEXT_PUBLIC_CONTENT_API_BASE_URL=https://cms.aaronjy.me

4
.gitignore vendored
View file

@ -38,3 +38,7 @@ next-env.d.ts
deploy-vars.sh deploy-vars.sh
node_modules node_modules
.env .env
.env.production
tmp/*
!.gitkeep

View file

@ -1,3 +1,3 @@
echo Formatting... echo Formatting...
npm run format # npm run format
echo Successfully formatted. echo Successfully formatted.

View file

@ -38,5 +38,8 @@
"liveServer.settings.multiRootWorkspaceName": "www-aaronjy-2024", "liveServer.settings.multiRootWorkspaceName": "www-aaronjy-2024",
"[css]": { "[css]": {
"editor.defaultFormatter": "vscode.css-language-features" "editor.defaultFormatter": "vscode.css-language-features"
},
"[json]": {
"editor.defaultFormatter": "vscode.json-language-features"
} }
} }

View file

@ -1,10 +0,0 @@
---
title: 'One Thousand and One Nights: A Retelling'
author: Hanan Al-Shaykh
stars: 4
readDate: 2025-04-23T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/636bfc4c-4911-4dc0-98c1-7f6ca8c3f14f'
thumbnailUrl: 'https://cdn.thestorygraph.com/2z3i3jpvmh2vc31i6ndpplbc29dy'
tags: 'fiction, fantasy, short stories'
---

View file

@ -1,10 +0,0 @@
---
title: '1984'
author: George Orwell
stars: 3
readDate: 2023-07-31T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/6ff3f487-8d37-4ac5-8190-6622d6562639'
thumbnailUrl: 'https://cdn.thestorygraph.com/v43bj24inkwioiogb5uz8nqmpnpw'
tags: 'fiction, dystopian, classics'
---

View file

@ -1,10 +0,0 @@
---
title: A Monster Calls
author: Patrick Ness
stars: 4
readDate: 2024-05-31T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/170c1204-1410-4246-babb-c80e31aebea9'
thumbnailUrl: 'https://cdn.thestorygraph.com/50qkuazqx2lidfd0hk74ltifz9nf'
tags: 'fiction, young adult'
---

View file

@ -1,11 +0,0 @@
---
title: Childhood's End
author: Arthur C. Clarke
stars: 4.5
readDate: 2025-04-08T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/3806010b-e254-472d-bd4d-fb26239c7c1a'
thumbnailUrl: 'https://cdn.thestorygraph.com/wavl2rh0he0xkxizwf1cf6oc7src'
tags: 'science fiction, classics'
---
I loved every minute of this! I especially loved the invasion trope turned on its head, and the way Clarke writes is incredible. Without giving too much away, I do think he rushed the ending parts, though - it felt like he was wrapping the story up fairly quickly towards the end. I'll probably end up coming back to this.

View file

@ -1,10 +0,0 @@
---
title: Diary of An Oxygen Thief
author: Anonymous
stars: 4
readDate: 2024-03-01T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/9ce581f2-d9ac-4be1-abf7-4597684bab7f'
thumbnailUrl: 'https://cdn.thestorygraph.com/l5zkpt8v2wj76ri6nkq7ismqsskl'
tags: 'romance, fiction'
---

View file

@ -1,11 +0,0 @@
---
title: 'Eleven Kinds of Loneliness '
author: Richard Yates
stars: 1.5
readDate: 2025-04-20T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/b41da262-35d9-4177-a8dc-1294d43ec7c7'
thumbnailUrl: 'https://cdn.thestorygraph.com/zvjnz56igytbt4d7qxptxqc3sh3f'
tags: 'fiction, literary, short stories'
---
I went into this hoping for an evocative collection of short stories, but the only emotion they evoked in me was boredom. I had to put this down.

View file

@ -1,10 +0,0 @@
---
title: No God But God
author: Reza Aslan
stars: 4
readDate: 2023-01-01T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/27cb3f11-77c6-4eca-9344-d64885e6f1c4'
thumbnailUrl: 'https://cdn.thestorygraph.com/gf3c92roqrgdbdsgphns5n111t93'
tags: 'non-fiction, religion, history'
---

View file

@ -1,10 +0,0 @@
---
title: On Tyranny
author: Timothy Snyder
stars: 4
readDate: 2025-02-28T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/e36e1a7c-90d5-4fca-92cc-20b225228db9'
thumbnailUrl: 'https://cdn.thestorygraph.com/dejgj7kypzdfvcszr2uwqrrcm8uh'
tags: 'non-fiction, politics, history'
---

View file

@ -1,10 +0,0 @@
---
title: Sex & Punishment
author: Eric Berkowitz
stars: 3
readDate: 2024-05-31T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/79ffd129-2325-45d0-826d-c6969a09e239'
thumbnailUrl: 'https://cdn.thestorygraph.com/lqtuj9d7fdj1qw1lei01yby42h9z'
tags: 'non-fiction, history'
---

View file

@ -1,11 +0,0 @@
---
title: The Alchemist
author: Paulo Coelho
stars: 2.5
readDate: 2025-04-06T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/bdbb04f8-8b6b-40e4-bf0f-32487b0289e9'
thumbnailUrl: 'https://cdn.thestorygraph.com/nwl3h8xcqglu9f3tq2bwwvq9stfa'
tags: 'fiction, classics'
---
I really wanted to like this book, given it's a classic and all, but I found it way too fluffy. The whole Personal Legend thing I just found really superficial, especially when the boy decided to drop what seemed, to me at least, to be a near-perfect life (along with the one person who helped him when he arrived) for a chance at gold in Egypt. I didn't finish it, so this whole parable could have worked itself out in a "the gold is the friends we made along the way" sort of way, but I couldn't bring myself to find out. Shame, really, because the story got me half-way there. It tailed off in the desert for me.

View file

@ -1,10 +0,0 @@
---
title: 'The Dangers of Smoking in Bed '
author: 'Mariana Enríquez '
stars: 3.5
readDate: 2024-07-31T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/0934ee12-7ca4-4137-bd44-22aa3acee43c'
thumbnailUrl: 'https://cdn.thestorygraph.com/x8q91nifuth4g021iao1mw4y6ptf'
tags: 'fiction, horror, short stories'
---

View file

@ -1,11 +0,0 @@
---
title: The Invisible Man
author: H.G. Wells
stars: 3.25
readDate: 2025-04-01T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/e50f3335-135e-404f-adb2-aeb83149f3a6'
thumbnailUrl: 'https://cdn.thestorygraph.com/wcjmylopzow042nlh921gullvg5n'
tags: 'science fiction, classics'
---
I read this straight off the back of The Time Machine (which I loved, mind you) so I forgive Wells for disappointing me a little with this one! I found myself rushing to the end, and tried desperately to find a more profound meaning than the one it paints on the surface. I did find it explored an interesting question, though: if you could protect against any external consequences, would your morals hold up, or would they fall away, morphing you into something no longer human? It's essentially an exploration of the maxim 'power corrupts', but still an interesting read.

View file

@ -1,10 +0,0 @@
---
title: The Nature of Alexander
author: Mary Renault
stars: 4.5
readDate: 2023-03-31T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/bc111e8b-1b3f-466a-9efd-c3316ae4b533'
thumbnailUrl: 'https://cdn.thestorygraph.com/68zjkhp67u2bi6qh3melbyqaly06'
tags: 'non-fiction, history, biography'
---

View file

@ -1,11 +0,0 @@
---
title: The Time Machine
author: H.G. Wells
stars: 4.5
readDate: 2025-03-30T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/77fcb338-d56d-4fee-9c47-51b490cf4167'
thumbnailUrl: 'https://cdn.thestorygraph.com/ldpiatx7jwvm6t7mnrob580ytlzx'
tags: 'fiction, classics, science fiction'
---
I couldn't put this one down. I was a little apprehensive coming into it, as I didn't think I much cared for H.G. Wells; I ended up putting The War of the Worlds down half-way through, but this was totally different. Part sci-fi, part mystery, part social criticism, this book checked a lot of boxes for me. I also really like his writing style! He gives just enough to paint a vivid picture without being caught in the usual trap of describing everything in unnecessary detail. Can definitely recommend!

View file

@ -1,10 +0,0 @@
---
title: 'Wintering: The Power of Rest and Retreat in Difficult Times'
author: Katherine May
stars: 3.75
readDate: 2025-03-30T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/449d6a0c-5704-4000-a196-57f1272f70f9'
thumbnailUrl: 'https://cdn.thestorygraph.com/dbopdfc5lnrqlb8tlkqymuo5d0ak'
tags: 'non-fiction, memoir'
---

View file

@ -1,10 +0,0 @@
---
tags: 'non-fiction, classics, history'
title: A Night To Remember
author: Walter Lord
stars: 3.5
readDate: 2024-06-30T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/5192ee9e-ffb2-4c72-a7c3-8051d950d66f'
thumbnailUrl: 'https://cdn.thestorygraph.com/pdwshwni6jyc7ivp55j6tsxzngfl'
---

View file

@ -1,10 +0,0 @@
---
title: Alice's Adventures in Wonderland
author: Lewis Carroll
stars: 3
readDate: 2023-04-30T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/83b0e44a-06fe-4042-9d2d-e4f41244fb9c'
thumbnailUrl: 'https://cdn.thestorygraph.com/l83t3e6wh6tq7dqxbvrba34ee2nb'
tags: 'fiction, classics, fantasy'
---

View file

@ -1,10 +0,0 @@
---
title: Animal Farm
author: George Orwell
stars: 4
readDate: 2023-01-01T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/f4b706d9-4ed9-4b15-85e0-2493581de818'
thumbnailUrl: 'https://cdn.thestorygraph.com/jztibk5xvnynw7mh7hfw0orhbzuh'
tags: 'fiction, classics, dystopian'
---

View file

@ -1,10 +0,0 @@
---
title: Cities That Shaped The Ancient World
author: John Julius Norwich
stars: 3.5
readDate: 2023-02-01T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/20bc1ff4-56bb-4e88-a403-bc150d45f9d2'
thumbnailUrl: 'https://cdn.thestorygraph.com/i8znw2yrd6p4m76dx6rvyuyd48h2'
tags: 'non-fiction, history'
---

View file

@ -1,11 +0,0 @@
---
title: The Song of Achilles
author: Madeline Miller
stars: 2.5
readDate: 2025-03-22T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/9202845d-26cd-4a85-b15f-408116117028'
thumbnailUrl: 'https://cdn.thestorygraph.com/m8cw3kb3qx4h2jl8kg0u4m3txhie'
tags: 'fiction, fantasy, romance'
---
This one really tailed off half-way through. I found the characters very one-dimensional (Patroclus pines after Achilles, Achilles has muscles and can swing a sword fast), but the story took me at least up until the half-way mark. Weirdly, I wasn't too interested when the actual Iliad storyline started to get going - maybe because I've heard it a million times before - and their essentially non-existent romance (more accurately an inch-deep obsession, I'd argue) didn't exactly inspire me to carry on reading.

View file

@ -1,10 +0,0 @@
---
title: Star Maker
author: Olaf Stapledon
stars: 4.5
readDate: 2023-03-01T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/c1d60727-8e24-4a1f-9f0e-974d68a934d2'
thumbnailUrl: 'https://cdn.thestorygraph.com/bln6q5k1v7ealnwss4msv0jixlhk'
tags: 'fiction, classics, science fiction'
---

View file

@ -1,10 +0,0 @@
---
title: 'Stasiland: Stories from Behind the Berlin Wall'
author: Anna Funder
stars: 4
readDate: 2023-02-01T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/9202845d-26cd-4a85-b15f-408116117028'
thumbnailUrl: 'https://cdn.thestorygraph.com/gphzjvwbhr8d5agrieobx0yy2xhb'
tags: 'non-fiction, history, politics'
---

View file

@ -1,10 +0,0 @@
---
title: Stray Reflections
author: Muhammad Iqbal
stars: 5
readDate: 2023-03-01T00:00:00.000Z
url: null
thumbnailUrl: null
tags: 'non-fiction, politics, philosophy'
---

View file

@ -1,10 +0,0 @@
---
title: The Marmalade Diaries
author: Ben Aitken
stars: 4.5
readDate: 2023-01-01T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/406fe719-07de-44ba-b4e7-86714f638cf9'
thumbnailUrl: 'https://cdn.thestorygraph.com/ou589mae0cxxup4cqq1mlnqdaj62'
tags: non-fiction
---

View file

@ -1,10 +0,0 @@
---
title: The Midnight Library
author: Matt Haig
stars: 4
readDate: 2024-06-30T23:00:00.000Z
url: 'https://app.thestorygraph.com/books/d9c7ed04-6148-4e01-a118-d96cba16f507'
thumbnailUrl: 'https://cdn.thestorygraph.com/b9g1h8bhrqz7qs2fagzhsixbp6xy'
tags: 'fiction, literary, science fiction'
---

View file

@ -1,11 +0,0 @@
---
title: 'To Be Taught, If Fortunate'
author: Becky Chambers
stars: 4.5
readDate: 2025-03-22T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/fca2631f-b3e2-4b9d-a2fd-4c1ec5e4d3f7'
thumbnailUrl: 'https://cdn.thestorygraph.com/8ep9zjc581zefkzfyhtizqjnz8u5'
tags: 'fiction, science fiction'
---
Really enjoyed this! It was reflective yet lighthearted. I also love this specific flavour of sci-fi, where vastly different species of aliens and worlds are thought up and articulated beautifully by the author.

View file

@ -1,11 +0,0 @@
---
title: When the Moon Hits Your Eye
author: John Scalzi
stars: 2.5
readDate: 2025-03-26T00:00:00.000Z
url: 'https://app.thestorygraph.com/books/4f3c3b02-3d2b-4765-93ac-4656133bbec5'
thumbnailUrl: 'https://cdn.thestorygraph.com/718cb49yqfu02zekeq22yl8lgm8v'
tags: 'fiction, science fiction'
---
Some interesting chapters in here exploring how governments and various public/private institutions could deal with the moon randomly turning to cheese, but a lot of fluff in between them. Can't really recommend, unless you want to skip to the good bits like I did.

View file

@ -1,14 +0,0 @@
---
title: Awesome kitties
date: 2019-03-17T19:31:20.591Z
cats:
- description: 'Maru is a Scottish Fold from Japan, and he loves boxes.'
name: Maru (まる)
- description: Lil Bub is an American celebrity cat known for her unique appearance.
name: Lil Bub
- description: 'Grumpy cat is an American celebrity cat known for her grumpy appearance.'
name: Grumpy cat (Tardar Sauce)
---
Welcome to my awesome page about cats of the internet.
This page is built with NextJS, and content is managed in Decap CMS

View file

@ -1,150 +0,0 @@
competencies:
- Software Development
- Software Architecture
- UI/UX Design
- Full-Stack Development
- Team Leadership
- Recruitment and Onboarding
- Web Application Development
- Cloud Hosting (AWS, GCP)
- Database Management
- User Interface Design
- Project Management
education:
- 10 GCSEs @ Duchess Community High School
certifications:
- Sitecore Professional Developer Certification (Sitecore 8.2) - Aug. 2017
languages:
- name: English
proficiency: Native
- name: German
proficiency: Professional Working Proficiency
- name: Arabic (Levantine)
proficiency: Elementary
experience:
- position: Lead Consultant
employer: Hippo
start: Feb. 2024
end: Present
desc: >-
* Directly line manage 3 other engineers
* Worked as the Technical Lead on a UK public sector project for the Department for Education (DfE)
* Directly managed a 7-man, cross-functional technical team including engineers, testers and technical architects
* Planned and implemented a migration from a 10+ repo microservice architecture to a monolithic monorepo, making it easier to develop and deploy changes
* Cut deployment times from 1-2 days to 20 minutes on average
* Designed and documented architectural changes in line with GDS architectural and technology standards
skills: "ASP.NET Core · Microsoft Azure · Azure Data Factory · Git · C# · Full-Stack Development · Umbraco · Web Development · Microsoft SQL Server · Cloud Development · Microservices · Technical Requirements · Agile Methodologies"
- position: Software Development Tutor
employer: Yarbz Digital Ltd
start: Sep. 2023
end: Mar. 2024
desc: I taught students of all levels modern software development, including
coding fundamentals, computer science theory and modern software
technologies.
- position: Freelance Software Consultant
employer: Yarbz Digital Ltd
start: Aug. 2021
end: Mar. 2024
desc: >-
* Designed the architecture for and developed the MVP for a recruitment platform and accompanying browser extension, which was used to pull in and process 1,000+ user profiles, proving its feasibility
* Developed Fifty Five and Five's flagship website (fiftyfiveandfive.com), improving their Google PageSpeed Insights score to near-100s across the board (Performance, Accessibility, Best Practices and SEO)
* Built front-end of sportank.com, a social network for American Football enthusiasts and athletes
* Designed and built a proof-of-concept gRPC web client, proving real-time voice input streaming from web browsers to medical speech recognition software.
* Maintained and improved the Integra Planner event management platform used by thousands of people across multi-day events
skills: "ASP.NET · Amazon Elastic Container Registry (ECR) · Angular · TypeScript · Amazon ECS · Python (Programming Language) · Node.js · ASP.NET MVC · Content Management Systems (CMS) · Amazon Web Services (AWS) · Next.js · Microsoft Azure · Git · React.js · C# · Full-Stack Development · Umbraco · WordPress · Web Development · JavaScript · Front-End Development · MySQL · Microsoft SQL Server · Cloud Development · PHP · NoSQL · AWS Lambda · HTML · Microservices · Technical Requirements · Firebase · ASP.NET Core · Agile Methodologies · Google Cloud Platform (GCP) · MongoDB · User Interface Programming"
- position: Contract Software Engineer
employer: The Data Shed
start: Jan. 2023
end: Aug. 2023
desc: >-
* Designed and built the front-end of a bespoke fund recovery system, which allowed more than 100,000 affected customers to recover funds lost to loan mis-selling
skills: "Amazon Elastic Container Registry (ECR) · TypeScript · Amazon ECS · Tailwind CSS · Node.js · Amazon Web Services (AWS) · Next.js · React.js · docker · Front-End Development · NoSQL · AWS Lambda · HTML · Agile Methodologies · User Interface Programming"
- position: Software Architect
employer: T101
start: Feb. 2020
end: Jul. 2021
desc: >-
* Led the complete platform re-architecture and development for Recon, a UK-based dating app with 200,000 monthly active users
* Directly managed a team of 5 software engineers
* Architected and led the implementation of key features, including authentication, authorisation, instant messaging, user profiles, galleries, and geocoding
skills: "ASP.NET · Angular · TypeScript · Amazon ECS · ASP.NET MVC · Kubernetes · Amazon Web Services (AWS) · Git · C# · Full-Stack Development · Web Development · JavaScript · Front-End Development · MySQL · Microsoft SQL Server · Cloud Development · NoSQL · HTML · .NET Core · Microservices · Technical Requirements · ASP.NET Core · Agile Methodologies · Google Cloud Platform (GCP) · User Interface Programming"
- position: Senior Software Developer
employer: Datatrial
start: Apr. 2019
end: Feb. 2020
desc: >-
* Worked on developing new/improving existing functional modules for
Datatrial's Nucleus offering, which aims to provide a web platform for
facilitating clinical trials.
skills: "ASP.NET · TypeScript · ASP.NET MVC · Git · C# · Full-Stack Development · Web Development · JavaScript · Front-End Development · MySQL · Microsoft SQL Server · Aurelia · HTML · Agile Methodologies · User Interface Programming"
- position: Software Engineer
employer: pipdig
start: Aug 2018
end: Apr 2019
desc: >-
* Developing a mix of commercial sites and bespoke blogs, I was
responsible for the entire product life-cycle. This included requirements
gathering, development, management of the project and ultimately
delivering and maintaining the product. Responsibilities also included
improving internal software development practices and working to increase
efficiency across a wide range of small, fast-paced projects.
skills: "TypeScript · Content Management Systems (CMS) · Git · Full-Stack Development · WordPress · Web Development · JavaScript · Front-End Development · MySQL · Microsoft SQL Server · Cloud Development · PHP · HTML · Agile Methodologies · User Interface Programming"
- position: Senior Software Developer
employer: The Works
start: Apr. 2018
end: Aug. 2018
desc: >-
* I was the sole developer for an event management platform at Newcastle
University, enhancing it by developing key features.
skills: "ASP.NET · ASP.NET MVC · Content Management Systems (CMS) · Git · C# · Full-Stack Development · Web Development · JavaScript · Front-End Development · Microsoft SQL Server · HTML · Agile Methodologies · User Interface Programming · Umbraco"
- position: Software Developer
employer: Orangebus
start: Jan. 2017
end: Apr. 2018
desc: >-
* My responsibilities involved developing and managing a variety of
different projects across different industries.
skills: "ASP.NET MVC · Git · C# · Full-Stack Development · JavaScript · Front-End Development · Microsoft SQL Server · HTML · Agile Methodologies · User Interface Programming"
- position: Junior Software Developer
employer: True Potential LLP
start: Oct. 2015
end: Dec. 2016
desc: >-
* Primarily a web developer, my responsibilities included developing
interactivity on the front-end, back-end services and designing database
structures for large-scale web applications that are in use by over 2
million clients as of November 2016.
skills: "ASP.NET MVC · Git · C# · Full-Stack Development · Web Development · JavaScript · Visual Basic .NET (VB.NET) · Front-End Development · Microsoft SQL Server · HTML · Agile Methodologies · User Interface Programming"
- position: IT Support / Trainee Software Developer
employer: Innovation Property (UK)
start: Jan. 2013
end: Sep. 2015
desc: >-
* I worked as an IT Support Technician and Developer Trainee. My
responsibilities included dealing with IT issues via an IT help desk
system. I also worked on improvements to internally-developed software
that was used by our Arboricultural staff. I also provided updates to an
internal MVC application used by office staff to log data, arrange
appointments for external staff and contact clients.
skills: "ASP.NET MVC · Git · C# · Full-Stack Development · JavaScript · Front-End Development · Microsoft SQL Server · HTML · Agile Methodologies · User Interface Programming"

View file

@ -1,17 +0,0 @@
---
title: Attitudes to reading, and how mine have changed
pubdate: 2025-03-18T00:00:00.000Z
desc: I was discussing reading habits with my good friend Beth, specifically around reading multiple books at once vs. reading a single book at a time...
tags:
- reading
- books
---
I was discussing reading habits with my good friend Beth, specifically around reading multiple books at once vs. reading a single book at a time (we're both very much members of the former group), and it got me thinking about why there seems to be this split in reading habits, and where it might come from.
My own reading habits have changed throughout the course of my life. As a child, I used to read and re-read space and dinosaur encyclopaedias, magazines, manga, and all sorts of written media. This tailed off, however, in my teenage years until it seemed the only time I had a book in my hands was in a classroom. For me, I believe this tail-off was largely due to the kinds of literature we were made to read in school: Twilight by Stephenie Meyer, a book I wouldn't extinguish were I to find it alight today; assortments of poetry of varying qualities, though specifically to study literary devices (Iambic Pentameter be damned); An Inspector Calls, which arguably isn't the worst introduction to screenplays, though a character's Suicide By Bleach was sadly my only takeaway (that, and the definition of metrosexual) as I remember precious little else.
Suffice to say, the material the English education system put in front of me sadly did more to dissuade than it did to inspire me to read. I didn't start reading again regularly until I picked up a copy of Mary Renault's 'The Nature of Alexander' off the back of a podcast recommendation, and I haven't stopped since. History books were my way of reconnecting with the joy of reading that died in me in my adolescence, and now - happily - I read anything and everything I can. From history to philosophy, science fiction to theology, I maintain a modest yet eclectic collection of books (both physical and digital), and prefer to read a handful of them at a time!
School offers us a very specific and uninspiring idea of what reading should be: books are a means to an end, must be scrutinised in painful detail, and must be read from cover-to-cover. My 27-year-old post-educational view stands in complete contrast: I read to enjoy and pass the time, scrutinise or not (depending on many factors: my mood, the type of book, the subject matter, ...), and I rarely read a book from cover to cover.
All of this isn't to say that the sole reason for England's [falling literacy levels](https://literacytrust.org.uk/parents-and-families/adult-literacy/what-do-adult-literacy-levels-mean/) (16.4% of adults were functionally illiterate in 2016, rising to 18% by 2024) is purely down to what schools are offering up to read, but I do wonder whether it would've taken me so long to pick a book up again (or whether I'd have stopped reading at all) had I been tasked with reading something I really wanted to read, rather than some dire, overhyped Wattpad cringefest (Stephenie Meyer, get to fuck).

View file

@ -1,125 +0,0 @@
---
title: Migrating from GitHub to Forgejo
pubdate: 2025-03-16T00:00:00.000Z
desc: I recently moved all of my repos from GitHub to a self-hosted Forgejo instance running on my own servers.
tags:
- tech
- hosting
---
I recently moved all of my repos (public and private) from GitHub to a self-hosted [Forgejo](https://forgejo.org/) instance running on my own servers.
There were a few reasons for this:
- I believe it's important to own and control my own data
- I do not want my private repositories to be used to train LLMs
- In protest of the current (2025) USA administration, I am reducing my usage and reliance on US products and services
Hopefully this post can serve as a guide for anybody who wants to do the same!
## The setup
My overall setup looks like this:
- Ubuntu VPS in Germany, hosted with [Hetzner](https://www.hetzner.com/) (a German VPS provider)
- A Forgejo instance running on Docker
- [Caddy](https://caddyserver.com/) as a reverse proxy routing `git.aaronjy.me` to the Forgejo Docker container
## The process
The overall process was pretty simple:
1. Get a list of all of my public and private repos
2. Use a script to call Forgejo's `/api/repos/migrate` endpoint to copy the repo from GitHub to Forgejo
3. Delete the repo on GitHub
### Step 1 - Get a list of all my repos
I used the [GitHub CLI](https://cli.github.com/) for this, using the `gh repo list` command. I wanted to move my private repos across first, so I wrote two commands: one for private repos, and one for public ones. Both commands write the JSON output to a respective JSON file.
```sh
# Get all private repos
gh repo list --visibility=private --json id,name,owner,sshUrl,url --limit 200 > gh-private-repos
# Get all public repos
gh repo list --visibility=public --json id,name,owner,sshUrl,url --limit 200 > gh-public-repos
```
The output looks like this:
```json
[
{
"id": "R_kgDOOCEBIw",
"name": "kielder-commerce",
"owner": {
"id": "MDQ6VXNlcjM4NTU4MTk=",
"login": "AaronJY"
},
"sshUrl": "git@github.com:AaronJY/kielder-commerce.git",
"url": "https://github.com/AaronJY/kielder-commerce"
},
...
]
```
## Step 2 - Use Forgejo's API to migrate from GitHub
Usefully, Forgejo has a built-in endpoint for migrating GitHub repos: `/api/repos/migrate`
All I had to do was write a script that sent each repo from the JSON to the endpoint to start the migration process, and Forgejo handles the rest.
My (nodejs) script ended up looking like this:
```js
require('dotenv').config()
const fetch = require("node-fetch");
const repos = require("./github-private-repos.json"); // <- migrate your public or private repos
const forgejoAccessToken = process.env.FORGEJO_ACCESS_TOKEN; // <- You need to generate an access token on Forgejo
if (!forgejoAccessToken) {
console.error("Forgejo access token not set.");
process.exit(1);
}
for (const repo of repos) {
const reqBody = {
clone_addr: `${repo.url}.git`,
repo_name: repo.name,
private: true, // <- set to `false` if migrating public repos
service: "github",
auth_token: process.env.GITHUB_TOKEN, // <- You need to generate a GitHub access token
repo_owner: "aaron"// <- the name of your Forgejo user
};
console.log(`Migrating ${repo.name}...`);
console.log(reqBody);
fetch("https://git.aaronjy.me/api/v1/repos/migrate", {
method: "POST",
body: JSON.stringify(reqBody),
headers: {
"Authorization": "token " + forgejoAccessToken,
"Content-Type": "application/json"
}
}).then(response => {
response.json().then(data => {
console.log(data);
})
if (!response.ok) {
throw "Failed with status code: " + response.status;
}
console.log("Successfully migrated " + repo.url);
}).catch(error => {
console.error("Migrate API request failed: " + error);
});
}
```
## Step 3 - Delete repos from GitHub
Once everything was moved across (and a quick sanity check was done), I looped through all of my repos in the JSON files and called the `gh repo delete` command on the GitHub CLI.
```sh
gh repo delete https://github.com/AaronJY/kielder-commerce --yes
```
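As a rough illustration, the loop I mention could look something like the sketch below. This is a hypothetical Node script (not the exact one I used): it assumes the repo lists were saved as `github-private-repos.json` / `github-public-repos.json`, matching the migration script above, and that the `gh` CLI is installed and authenticated.

```js
// Hypothetical sketch: delete every repo listed in the step-1 JSON dumps via the GitHub CLI.
const { execFileSync } = require('child_process')

const repos = [
  ...require('./github-private-repos.json'),
  ...require('./github-public-repos.json')
]

for (const repo of repos) {
  console.log(`Deleting ${repo.url}...`)
  // --yes skips the interactive confirmation prompt, as in the command above
  execFileSync('gh', ['repo', 'delete', repo.url, '--yes'], { stdio: 'inherit' })
}
```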
## Still to do...
I still need to route SSH traffic to Forgejo's internal SSH server to allow SSH operations with repos, rather than relying on HTTPS interactions. Caddy can't be used for this as it's an HTTP proxy only, and therefore doesn't understand the SSH protocol. It might be possible to use an experimental add-on for Caddy called [caddy-l4](https://github.com/mholt/caddy-l4) that enables layer 4 proxying (on the TCP/UDP level), though it might be easier to tweak my server's iptables rules to forward traffic from a custom port to the SSH port on Forgejo's Docker container.

View file

@ -1,105 +0,0 @@
---
title: Performance considerations when writing a TCP game server in dotnet
pubdate: 2025-02-23T21:12:37.864Z
moddate: 2025-03-21T21:12:47.864Z
desc: While writing a TCP game server in dotnet for a hobby project, I learned a few ways to improve the efficiency and scalability of the server while running into some performance issues. Here's what I learned!
tags:
- tech
- programming
- dotnet
---
While writing a TCP game server in dotnet for a hobby project (check it out [here](https://github.com/AaronJY/GServer)), I learned a few ways to improve the efficiency and scalability of the server while running into some performance issues.
Here's what I learned!
## 1. Use ConcurrentDictionary to maintain a thread-safe record of connected clients
The [ConcurrentDictionary<TKey, TValue>](https://learn.microsoft.com/en-us/dotnet/api/system.collections.concurrent.concurrentdictionary-2?view=net-9.0) class is a thread-safe dictionary class provided by dotnet. It differs from the standard `Dictionary<TKey, TValue>` class in that it supports thread-safe read and write access by multiple threads concurrently. As my game server utilises the dotnet thread pool (through async/await), it's vital to use a thread-safe dictionary implementation to keep track of connected clients: as more and more clients connect, many threads from the pool will be reading from and writing to the dictionary at the same time.
I define my dictionary, which creates a pairing between a connected TcpClient and a `ClientState` instance I use to track client-specific state, such as the player's username, last heartbeat, etc.
```csharp
private readonly ConcurrentDictionary<TcpClient, ClientState> _clients = new();
```
When a client connects, I add them to the dictionary.
```csharp
TcpClient client = await _tcpListener.AcceptTcpClientAsync();
ClientState clientState = new(client);
_clients.TryAdd(client, clientState);
// Handle client asynchronously using the thread pool
_ = Task.Run(() => HandleClientAsync(client, clientState));
```
## 2. Use async/await to utilise the thread pool to handle connections at scale
My first iteration of the game server relied on manually creating a worker thread for each connected client. While this may be fine for handling a small handful of clients (100-500 perhaps), I want my game server to be as performant and scalable as possible.
The bottlenecks introduced by this approach are memory usage and CPU load:
**Memory usage**
When creating a new thread in dotnet, the OS assigns it its own memory region called the 'stack', which is used for holding thread-specific memory such as variables, execution state, and other bits. The default stack size (as configured by the OS) is usually 1MB. Using a thread per connection means allocating 1MB for each thread's stack, which in practice means 1000 connections * 1MB per stack = 1GB of memory. This puts a massive bottleneck on the number of connections my server can handle!
**CPU load**
Spawning thousands of threads also introduced a CPU load bottleneck in the form of 'context switching'. The CPU can only handle so many threads simultaneously, roughly equal to the number of logical cores (e.g. 4 CPU cores = 4 threads, or 8 with hyper-threading; 8 cores = 16 threads, etc.). When the number of threads exceeds the number of cores available, the CPU starts to 'context switch', which essentially means it flicks through all of the running threads, giving each a chance to run. This switching is work for the CPU, increasing CPU load that would be better spent processing game server requests (rather than switching between thousands of running threads!)
```csharp
TcpClient client = await _tcpListener.AcceptTcpClientAsync(); // <-- Asynchronously accept TCP client
ClientState clientState = new(client);
_ = _clients.TryAdd(client, clientState);
_ = HandleClientAsync(client, clientState).ContinueWith(t =>
{
if (t.Exception != null)
{
Console.WriteLine($"Error handling client: {t.Exception.InnerException?.Message}");
}
}, TaskContinuationOptions.OnlyOnFaulted);
```
## 3. Use ArrayPool for memory-efficient storage of buffers
When reading data sent by a TCP client to the server (in my case, the game client has sent a load of data to the server that I want to read), the standard approach is to create a 'buffer', which is essentially a place in-memory we reserve to store our client's data for processing.
My initial approach was to create a new buffer of type `byte[]` and store the data in there. While this may not be a problem for lower-traffic game servers, I want my game server to be as performant as possible! The downside of this approach is that we allocate a new place in memory every time we process data sent by the client, which means:
1. We have to reserve a new chunk of memory every time a client sends us data, and...
2. The garbage collector has to dispose of each buffer (i.e. free up that memory) every time it finishes processing said data
A great way to optimise this approach is by using dotnet's `ArrayPool<T>`, which is a dotnet-managed pool of arrays of any given type. This way, we ask dotnet for one of its arrays every time we want to store client data in a buffer for processing, and we simply release it (i.e. give it back to the pool) when we're done. Because dotnet manages this pool (the memory for its arrays is already allocated and managed by dotnet), we don't have to reserve and release memory for every buffer, relieving pressure on both our server's memory and CPU, as the garbage collector has nothing to clean up!
```csharp
// Get a new array for the buffer from the pool
byte[] buffer = ArrayPool<byte>.Shared.Rent(client.ReceiveBufferSize);
try
{
while (client.Connected)
{
int bytesRead = await stream.ReadAsync(buffer, 0, buffer.Length);
if (bytesRead == 0)
break;
await messageHandler.HandleMessageAsync(stream.Socket, new MessageMemoryStream(buffer), state);
}
}
catch (Exception e)
{
Console.WriteLine($"Error occurred reading buffer: {e.Message}");
}
finally
{
// Give the array back to the pool!
ArrayPool<byte>.Shared.Return(buffer);
}
```
## Source code
You can find the source code for the game server on GitHub: https://github.com/AaronJY/GServer

View file

@ -1,10 +0,0 @@
---
title: "Quickwrite: A reflection on Wintering by Katherine May"
pubdate: 2025-03-09T00:00:00.000Z
desc: "Katherine May draws a unique parallel in her book Wintering between the coming of and living through the winter months, and those liminal times in your life when a gap opens up underfoot and swallows us whole...."
tags:
- books
- reading
---
Katherine May draws a unique parallel in her book Wintering between the coming of and living through the winter months, and those liminal times in your life when a gap opens up underfoot and swallows us whole. It's never clear when such a gap appears to steal us away, nor how long exactly we'll spend in the liminality we fall into, and most of us claw at the walls in an attempt to scale up and out, back into the warm light of day, to how things used to be. Katherine's book is an account of how she learned to see it more as a wave to ride than a locked room to break free from. Her ultimate message is an argument to reframe it, or to see it for what it really is: not as a bleak, timeless realm devoid of hope and light, but as a necessary state we all find ourselves in at various points in our lives, one that offers its own medicines for those who care to look. These seasonal and spiritual changes, which Katherine refers to as 'wintering' (aptly a verb, to articulate their ephemeral nature), serve much in the same way a good night's sleep does: while a deep and peaceful sleep purges metabolic waste and toxins from our brains, a wintering can offer a more spiritual cleanse; a hibernation after a hot and unrelenting summer; a mirror held up in front of us to break our blind and frenzied sprint towards a goal we've long forgotten; a beloved teacher from our school days with a soft voice, reassuring smile and placations to calm our nerves.

View file

@ -1,136 +0,0 @@
---
title: Deploying aaronjy.me on a Google Storage bucket
pubdate: 2024-05-01T00:00:00.000Z
desc: "Google Cloud Storage is an effective solution for hosting static sites, offering a simple and scalable way to manage web assets. A manual deployment strategy involves four key steps: backing up existing files to a backup bucket, removing sensitive files for security, uploading the latest site files from the build directory, and invalidating Google's global cache to ensure users access updated content."
tags:
- tech
- hosting
---
Google actually has [documentation](https://cloud.google.com/storage/docs/hosting-static-website) on how to deploy a static site to a storage bucket, but I wanted to talk about how I handle deployments, as Google doesn't cover that!
## Networking
This site is just a collection of static assets (HTML, JS, CSS and images) that live inside a Google Cloud Storage bucket. When you load the site, the below route is taken once your request reaches GCP.
![Route diagram showing networking path from user to destination on GCP](/img/screenshot-2024-03-13-at-11.58.55.png "Route diagram showing networking path from user to destination on GCP")
As you can see, you:
1. Hit a load balancer, which then
2. Directs you to a backend service, which then
3. Decides either to a) serve content directly from the storage bucket, or
b) serve it from the cache (if available)
The setup is pretty simple, and doesn't really deviate from Google's suggested setup configuration for static sites hosted from a bucket.
## Deploying
Setting up a seamless deployment strategy gets a little trickier, however. I opted to set up a manual deployment strategy, which involves calling `npm run deploy` to kick off the deployment. This in turn calls a bash script that handles the deployment.
The script consists of 4 deployment steps:
1. Backup existing bucket files to a backup bucket
2. Remove sensitive files before deploying (e.g. `admin/index.html` for Decap CMS)
3. Upload the latest files to the hosting bucket
4. Invalidate Google's cache, so users receive the latest version of the site
### Step 1 - Backing up existing files
Before we do anything, we need to back up what we have already. I created a storage bucket specifically for holding backup files for this purpose, and use the gcloud CLI to copy the live files across to the backup bucket.
```sh
BUCKET_URL="gs://aaronjy-www"
BACKUP_BUCKET_URL="gs://aaronjy-www-backup"
echo "------------------------------"
echo "BACKUP CURRENT SITE FILES"
echo "------------------------------"
TIMESTAMP=$(date +%Y-%m-%d_%H:%M:%S)
gcloud transfer jobs create $BUCKET_URL $BACKUP_BUCKET_URL/$TIMESTAMP/ --no-async --delete-from=source-after-transfer;
```
The backed-up files are copied into a dated folder, and the `--delete-from` flag ensures the live website's files are deleted from the hosting bucket once they've been backed up.
### Step 2 - Removing sensitive files
Because I'm using Decap CMS for content management locally, I need to manually remove the `admin/` folder where Decap lives, as I don't want that to be available on the live site.
```sh
echo "------------------------------"
echo "REMOVE SENSITIVE FILES"
echo "------------------------------"
rm -rfv ./out/admin/
```
### Step 3 - Upload files to hosting bucket
Now we come to actually uploading the new files to the live site. I take everything from the `/out` directory (where Next.js throws its build output) and upload them directly to the hosting bucket.
```sh
echo "------------------------------"
echo "UPLOADING NEW SITE FILES"
echo "------------------------------"
gcloud storage cp --recursive ./out/* $BUCKET_URL --gzip-in-flight-all
```
The `--gzip-in-flight-all` flag is a handy addition, as the CLI applies gzip compression locally, and Google uncompresses the files before dumping them in the bucket on the other end, resulting in a smaller upload size and quicker deployment time.
### Step 4 - Invalidate the global cache
As Google uses a global cache for bucket files, we must invalidate it to ensure users get the latest website version.
```sh
echo "------------------------------"
echo "INVALIDATING GLOBAL CACHE"
echo "------------------------------"
echo "WARNING: This is an async operation that can take upwards of 10 minutes depending on how fast Google Cloud CDN invalidates its cache. It does take around 10 minutes on average."
gcloud compute url-maps invalidate-cdn-cache lb-aaronjy-www --path "/*" --async
```
This can take anywhere between 7-10 minutes, so the `--async` flag has been applied because we don't need to sit and wait for it.
### Full deployment script
Here's the deployment script in full:
```sh
BUCKET_URL="gs://aaronjy-www"
BACKUP_BUCKET_URL="gs://aaronjy-www-backup"
echo "------------------------------"
echo "BACKUP CURRENT SITE FILES"
echo "------------------------------"
TIMESTAMP=$(date +%Y-%m-%d_%H:%M:%S)
gcloud transfer jobs create $BUCKET_URL $BACKUP_BUCKET_URL/$TIMESTAMP/ --no-async --delete-from=source-after-transfer;
echo "------------------------------"
echo "REMOVE SENSITIVE FILES"
echo "------------------------------"
rm -rfv ./out/admin/
echo "Removed all sensitive files."
echo "------------------------------"
echo "UPLOADING NEW SITE FILES"
echo "------------------------------"
gcloud storage cp --recursive ./out/* $BUCKET_URL --gzip-in-flight-all
echo "------------------------------"
echo "INVALIDATING GLOBAL CACHE"
echo "------------------------------"
echo "WARNING: This is an async operation that can take upwards of 10 minutes depending on how fast Google Cloud CDN invalidates its cache. It does take around 10 minutes on average."
gcloud compute url-maps invalidate-cdn-cache lb-aaronjy-www --path "/*" --async
echo "------------------------------"
echo "DONE!"
echo "------------------------------"
```

View file

@ -1,68 +0,0 @@
---
title: Supporting content file structure changes on a static site
pubdate: 2024-03-18T16:47:32.150Z
desc: Static site generators (SSGs) convert complex site sources into HTML, CSS, and JS, allowing flexible hosting options. While they offer benefits like speed and low costs, updating content can be challenging for non-technical users. A solution involves assigning unique identifiers to articles and creating a URL mapping file to simplify restructuring and managing content links.
tags:
- tech
---
Static site generators (SSGs) are great. They take your complex site source and distil it down to the web's native language: HTML, CSS and JS. You can host your files anywhere: in cloud-native storage buckets; on low-cost CPanel hosting; on global CDNs; your old Lenovo ThinkPad in your cupboard running an Apache server that hasn't been patched since 2008; the list goes on. Wanna go further and throw away your CMS? Cool, you can use markdown files and a text editor as your CMS.
Simplicity is great, and SSGs bring loads of benefits (speed, a great developer experience, low cost overheads), but they're not a silver bullet, and can present a bit of a learning curve to the non-technical tasked with updating your site's content.
## The problem
Say you have an SSG that uses MD files for the site's content. In order to edit the site's content, you simply edit the MD files, build the site, and upload your new HTML/JS/CSS files.
Your folder structure could look like this:
```text
site/
├─ content/
│ ├─ index.md <-- homepage
│ ├─ recipes/
│ │ ├─ pizza.md
│ │ ├─ chilli.md
├─ src/ <-- site source code
├─ public/ <-- static assets (images, videos, PDFs, etc.)
```
And your `content/index.md` looks like this:
```md
# Welcome!
This is my site! Want some fire recipes? Check [these bad boys out](/recipes)
```
Your site homepage would contain a link to the recipe listing page at `/recipes` using Markdown to generate the anchor tag.
Assuming your `content/` folder informs your site's URL structure, there must be a file at `content/recipes/index.md`, or the link would send you to a 404.
Now, imagine you want to restructure your site's link hierarchy. Maybe you want to house all of your recipes under a new URL: `www.site.com/yummy/recipes`. With a small site like ours, the time required to do this would be trivial. We'd lift and shift everything under `content/recipes/` to `content/yummy/recipes`, and we'd also have to manually update the link in `content/index.md` to `/yummy/recipes`; no big deal.
But what if we had hundreds of files, and we wanted to restructure our file system in the same way? We'd potentially have hundreds of MD files that all need to be manually updated to have their links point to a new URL. For a developer, we could probably write some funky regex or do a mass find & replace to find and update the links en masse, but what if a non-technical editor wants to make the same change? They would potentially have to manually work through each MD file and update the links by hand.
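For the developer route, that "funky regex" could be as simple as a one-off Node script like the hypothetical sketch below; the `content/` directory and the `/recipes` to `/yummy/recipes` rename are illustrative, not prescriptive.

```js
// Hypothetical sketch: rewrite '/recipes' link targets to '/yummy/recipes' in every markdown file.
const fs = require('fs')
const path = require('path')

function walk (dir) {
  return fs.readdirSync(dir, { withFileTypes: true }).flatMap((entry) =>
    entry.isDirectory() ? walk(path.join(dir, entry.name)) : [path.join(dir, entry.name)]
  )
}

for (const file of walk('./content').filter((f) => f.endsWith('.md'))) {
  const src = fs.readFileSync(file, 'utf-8')
  // Only touch markdown link targets, e.g. [text](/recipes/pizza)
  const out = src.replace(/\]\(\/recipes(\/[^)]*)?\)/g, '](/yummy/recipes$1)')
  if (out !== src) fs.writeFileSync(file, out)
}
```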
## Map those URLs!
Essentially, the problem is that there's no way to uniquely identify a particular piece of content/article *other than* its link, and the link changes based on the content's position in the file system.
The solution is to give each article a unique identifier, and keep track of any previous links. Then all we need to do is tell our SSG to generate static files for both the current link and *all previous links*.
Here's the gist of it:
* **Generate unique IDs:** Instead of relying on file paths, assign a unique identifier (an "id" property) to the [frontmatter](https://dpericich.medium.com/what-is-front-matter-and-how-is-it-used-to-create-dynamic-webpages-9d8dc053b457) of each markdown file.
* **URL mapping for flexibility:** Create a central mapping file that acts like a translator, mapping each content ID to all the different URLs (slugs/paths) that can access it.
* **Say goodbye to path dependence:** When a visitor requests a URL, the system checks the `url-map.json` file. If it finds a matching ID, it grabs the "canonical path" (the preferred URL) and uses that to locate the actual file. This lets you access the same content through multiple URLs! (See the lookup sketch after the example below.)
Carrying on from our previous example, this is what our mapping file might look like:
```text
{
"892c5a5c-1f77-43ce-a13a-b9d8bd02971c": [
"yummy/recipes", <-- The canonical/latest path
"recipes" <-- The previous path
]
}
```
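To make the lookup concrete, here is a minimal sketch of how a build step might resolve a requested slug back to its canonical content file. It's a hypothetical helper, not part of the original setup; the file layout (`content/<canonical path>/index.md`) is assumed.

```js
// Hypothetical sketch: resolve any known slug (current or previous) to its canonical path.
// url-map.json maps each content ID to [canonicalPath, ...previousPaths], as in the example above.
const urlMap = require('./url-map.json')

function resolveSlug (requestedSlug) {
  for (const [id, slugs] of Object.entries(urlMap)) {
    if (slugs.includes(requestedSlug)) {
      // The first entry is the canonical/latest path; use it to locate the markdown file.
      return { id, canonicalPath: slugs[0], file: `content/${slugs[0]}/index.md` }
    }
  }
  return null // unknown slug -> 404
}

// e.g. resolveSlug('recipes') and resolveSlug('yummy/recipes') both resolve to
// content/yummy/recipes/index.md, so the SSG can emit a page for every slug in the map.
```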
There's some work in keeping the mapping file up-to-date, but depending on your workflow, you could automate this using Git hooks or a CI pipeline like GitHub Actions or Drone.

View file

@ -1,8 +1,7 @@
/** @type {import('next').NextConfig} */ /** @type {import('next').NextConfig} */
const nextConfig = { const nextConfig = {
reactStrictMode: true, reactStrictMode: true,
output: 'export', output: 'standalone',
trailingSlash: true, // ensure pages get their own directory in output
images: { images: {
unoptimized: true unoptimized: true
} }

7495
package-lock.json generated

File diff suppressed because it is too large

View file

@ -1,10 +1,10 @@
{ {
"name": "www-aaronjy-me", "name": "www-aaronjy-me",
"version": "1.7.4.5", "version": "2.0.0.0",
"private": true, "private": true,
"type": "module", "type": "module",
"scripts": { "scripts": {
"dev": "TINA_PUBLIC_IS_LOCAL=true tinacms dev -c \"next dev\"", "dev": "next dev",
"build": "next build", "build": "next build",
"postbuild": "next-sitemap --config next-sitemap.config.cjs", "postbuild": "next-sitemap --config next-sitemap.config.cjs",
"start": "next start", "start": "next start",
@ -12,7 +12,9 @@
"format": "npx standard --fix", "format": "npx standard --fix",
"prepare": "husky", "prepare": "husky",
"test": "jest --verbose --passWithNoTests", "test": "jest --verbose --passWithNoTests",
"lint": "next lint" "lint": "next lint",
"export:books": "node ./util/books-as-json.js > ./tmp/books.json",
"export:writing": "node ./util/writing-as-json.js > ./tmp/writing.json"
}, },
"standard": { "standard": {
"ignore": [ "ignore": [
@ -21,12 +23,21 @@
}, },
"dependencies": { "dependencies": {
"@highlightjs/cdn-assets": "^11.11.1", "@highlightjs/cdn-assets": "^11.11.1",
"@mdx-js/mdx": "^3.1.0",
"@mdx-js/react": "^3.1.0",
"date-fns": "^4.1.0", "date-fns": "^4.1.0",
"highlight.js": "^11.11.0", "highlight.js": "^11.11.0",
"i": "^0.3.7",
"next": "^14.2.6", "next": "^14.2.6",
"next-mdx-remote-client": "^1.1.0",
"next-seo": "^6.5.0", "next-seo": "^6.5.0",
"node-html-parser": "^7.0.1",
"npm": "^11.3.0",
"react": "^18", "react": "^18",
"react-dom": "^18", "react-dom": "^18",
"rehype-code-titles": "^1.2.0",
"rehype-prism-plus": "^2.0.1",
"remark-gfm": "^4.0.1",
"tinacms": "^2.7.3" "tinacms": "^2.7.3"
}, },
"devDependencies": { "devDependencies": {

View file

@ -6,31 +6,31 @@ import React, { useEffect } from 'react'
import 'highlight.js/styles/atom-one-dark.css' import 'highlight.js/styles/atom-one-dark.css'
import hljs from 'highlight.js' import hljs from 'highlight.js'
function Article ({ attributes, html }) { function Article ({ title, excerpt, date_published, tags, html }) {
useEffect(() => { useEffect(() => {
hljs.highlightAll() hljs.highlightAll()
}, [attributes, html]) }, [html])
return ( return (
<> <>
<h1>{attributes.title}</h1> <h1>{title}</h1>
<article> <article>
<NextSeo <NextSeo
title={attributes.title} description={attributes.desc} openGraph={ title={title} description={excerpt} openGraph={
{ {
title: attributes.title, title,
description: attributes.desc, description: excerpt,
type: 'article', type: 'article',
article: { article: {
publishedTime: attributes.pubdate ?? null publishedTime: date_published ?? null
} }
} }
} }
/> />
<div> <div>
<Link href='./'>Back...</Link> <Link href='./'>Back...</Link>
{attributes.pubdate && <p>{formatDate(attributes.pubdate)}</p>} {date_published && <p>{formatDate(date_published)}</p>}
<div data-test='content' dangerouslySetInnerHTML={{ __html: html }} /> <div data-test='content' dangerouslySetInnerHTML={{ __html: html }} />
{attributes.tags && <p>Tags: {attributes.tags.join(', ')}</p>} {tags && <p>Tags: {tags.join(', ')}</p>}
</div> </div>
</article> </article>
</> </>

View file

@ -1,50 +0,0 @@
import { formatDate } from '@/lib/helpers'
import { NextSeo } from 'next-seo'
import Image from 'next/image'
import Link from 'next/link'
import React from 'react'
import style from './Book.module.css'
import ExternalLink from '../ExternalLink/ExternalLink'
function Book ({ attributes, html }) {
return (
<>
<h1>{attributes.title}<br /><small>by {attributes.author}</small></h1>
<Link href='./'>Back...</Link>
<article>
<NextSeo
title={attributes.title} description={attributes.desc} openGraph={
{
title: attributes.title,
description: attributes.desc,
type: 'article',
article: {
publishedTime: attributes.pubdate ?? null
}
}
}
/>
<div>
<div className={style.layout}>
{attributes.thumbnailUrl &&
<Image src={attributes.thumbnailUrl} width={250} height={580} alt='' className={style.thumbnail} />}
<div>
<div data-test='content' dangerouslySetInnerHTML={{ __html: html || '<p>(no review)</p>' }} />
<p>
<span className='bold'>Genres:</span>&nbsp;{attributes.tags}<br />
<span className='bold'>Rating:</span>&nbsp;{attributes.stars}/5<br />
<span className='bold'>Read on:</span>&nbsp;{formatDate(attributes.readDate)}
</p>
<p><ExternalLink href={attributes.url}>View on The StoryGraph</ExternalLink></p>
</div>
</div>
</div>
</article>
</>
)
}
export default Book

View file

@ -0,0 +1,56 @@
import { formatDate } from '@/lib/helpers'
import { NextSeo } from 'next-seo'
import Image from 'next/image'
import Link from 'next/link'
import React from 'react'
import style from './BookReview.module.css'
import ExternalLink from '../ExternalLink/ExternalLink'
function BookReview ({ review, html }) {
const { title, image, author, description, url, tags, rating, read_date } = review
const imageUrl = image ? `${process.env.NEXT_PUBLIC_CONTENT_API_BASE_URL}/assets/${image}` : undefined
return (
<>
<h1>{title}<br /><small>by {author}</small></h1>
<Link href='./'>Back...</Link>
<article>
<NextSeo
title={title} description={description} openGraph={
{
title,
description,
type: 'article',
article: {
publishedTime: read_date ?? null
}
}
}
/>
<div>
<div className={style.layout}>
{imageUrl &&
<Image src={imageUrl} width={250} height={580} alt='' className={style.thumbnail} />}
<div>
<div data-test='content' dangerouslySetInnerHTML={{ __html: html || '<p>(no review)</p>' }} />
<p>
{!!tags?.length && <>
<span className='bold'>Genres:</span>&nbsp;{tags.join(',')}<br />
</>}
<span className='bold'>Rating:</span>&nbsp;{rating}/5<br />
<span className='bold'>Read on:</span>&nbsp;{formatDate(read_date)}
</p>
<p><ExternalLink href={url}>View on The StoryGraph</ExternalLink></p>
</div>
</div>
</div>
</article>
</>
)
}
export default BookReview

View file

@ -1,15 +1,17 @@
import style from './BookListItem.module.css' import style from './BookReviewItem.module.css'
import Link from 'next/link' import Link from 'next/link'
export default function BookListItem ({ href, title, author, stars, readDate, url, thumbnailUrl, tags, review }) { export default function BookReviewItem ({ href, title, author, rating, image, tags }) {
const imageUrl = image ? `${process.env.NEXT_PUBLIC_CONTENT_API_BASE_URL}/assets/${image}` : undefined
return ( return (
<div className={style.item}> <div className={style.item}>
<Link href={href}> <Link href={href}>
<div <div
className={style.thumb} style={{ className={style.thumb} style={{
backgroundImage: `url(${thumbnailUrl ?? '/img/book-placeholder.jpg'})` backgroundImage: `url(${imageUrl ?? '/img/book-placeholder.jpg'})`
}} }}
><div className={style.rating}><Star />&nbsp;{stars}</div> ><div className={style.rating}><Star />&nbsp;{rating}</div>
</div> </div>
</Link> </Link>
<div> <div>
@ -17,7 +19,7 @@ export default function BookListItem ({ href, title, author, stars, readDate, ur
<Link href={href}>{title}</Link> <Link href={href}>{title}</Link>
</h2> </h2>
<p className={style.author}>{author}</p> <p className={style.author}>{author}</p>
<p>{tags}</p> {!!tags?.length && <p>{tags.join(', ')}</p>}
</div> </div>
</div> </div>
) )

View file

@ -1,6 +1,7 @@
import React from 'react' import React from 'react'
import style from './Resume.module.css' import style from './Resume.module.css'
import { markdownToHtml } from '@/services/content-service'
function Resume ({ function Resume ({
competencies, competencies,
@ -21,7 +22,7 @@ function Resume ({
<div> <div>
<h2 id='experience'>Professional experience</h2> <h2 id='experience'>Professional experience</h2>
{experience.map((exp, i) => ( {experience?.map((exp, i) => (
<div key={i}> <div key={i}>
<WorkExperience <WorkExperience
employer={exp.employer} employer={exp.employer}
@ -29,12 +30,12 @@ function Resume ({
start={exp.start} start={exp.start}
end={exp.end} end={exp.end}
> >
{exp.desc} {markdownToHtml(exp.description)}
</WorkExperience> </WorkExperience>
<details> {!!exp.skills?.length && <details>
<summary>Competencies</summary> <summary>Competencies</summary>
<>{exp.skills}</> <>{exp.skills.sort().join(', ')}</>
</details> </details>}
</div> </div>
))} ))}
@ -42,21 +43,21 @@ function Resume ({
<div className='sidebar'> <div className='sidebar'>
<h2 id='competencies'>Competencies</h2> <h2 id='competencies'>Competencies</h2>
<ul> <ul>
{competencies.sort().map((c, i) => ( {competencies?.sort((a, b) => a.name.localeCompare(b.name)).map((c, i) => (
<li key={i}>{c}</li> <li key={i}>{c.name}</li>
))} ))}
</ul> </ul>
<h2 id='certifications'>Certifications</h2> <h2 id='certifications'>Certifications</h2>
<ul> <ul>
{certifications.sort().map((c, i) => ( {certifications?.sort((a, b) => a.name.localeCompare(b.name)).map((c, i) => (
<li key={i}>{c}</li> <li key={i}>{c.name}</li>
))} ))}
</ul> </ul>
<h2 className='languages'>Languages</h2> <h2 className='languages'>Languages</h2>
<ul> <ul>
{languages.sort().map((c, i) => ( {languages?.sort().map((c, i) => (
<li key={i}> <li key={i}>
{c.name} - {c.proficiency} {c.name} - {c.proficiency}
</li> </li>
@ -64,7 +65,7 @@ function Resume ({
</ul> </ul>
<h2 className='education'>Education</h2> <h2 className='education'>Education</h2>
<p>{education}</p> <p>{education.name}</p>
</div> </div>
</div> </div>


@@ -7,9 +7,9 @@ export default function StaticContentList ({ entries, urlPrefix, max = 0 }) {
       <tbody>
         {entries.map((e) => (
           <tr key={e.slug}>
-            <td>{!!e.attributes.pubdate && <span>{formatDate(e.attributes.pubdate)}</span>}</td>
+            <td>{!!e.date_published && <span>{formatDate(e.date_published)}</span>}</td>
             <td>
-              <Link href={`${urlPrefix}${e.slug}`}>{e.attributes.title}</Link>
+              <Link href={`${urlPrefix}${e.slug}`}>{e.title}</Link>
             </td>
           </tr>
         )).slice(0, max > 0 ? max : entries.length)}

src/errors.js (new file, 56 lines)

@@ -0,0 +1,56 @@
// Posts
export class FailedFetchPostsError extends Error {
constructor (msg) {
super(`Failed to fetch posts: ${msg}`)
this.name = 'FailedFetchPostsError'
}
}
export class FailedFetchPostError extends Error {
constructor (slug, msg) {
super(`Failed to fetch post '${slug}': ${msg}`)
this.name = 'FailedFetchPostError'
}
}
// Book reviews
export class FailedFetchBookReviewsError extends Error {
constructor (msg) {
super(`Failed to fetch book reviews: ${msg}`)
this.name = 'FailedFetchBookReviewsError'
}
}
export class FailedFetchBookReviewError extends Error {
constructor (slug, msg) {
super(`Failed to fetch book review '${slug}': ${msg}`)
this.name = 'FailedFetchBookReviewError'
}
}
// Basic pages
export class FailedFetchBasicPagesError extends Error {
constructor (msg) {
super(`Failed to fetch basic pages: ${msg}`)
this.name = 'FailedFetchBasicPagesError'
}
}
export class FailedFetchBasicPageError extends Error {
constructor (slug, msg) {
super(`Failed to fetch basic page '${slug}': ${msg}`)
this.name = 'FailedFetchBasicPageError'
}
}
// CV
export class FailedFetchCVError extends Error {
constructor (msg) {
super(`Failed to fetch CV: ${msg}`)
this.name = 'FailedFetchCVError'
}
}

@@ -1,76 +0,0 @@
import fs from 'fs'
import fm from 'front-matter'
import showdown from 'showdown'
import { toSlug } from './helpers'
export function getMarkdownEntry (path) {
const fileContents = fs.readFileSync(path, {
encoding: 'utf-8'
})
const { attributes, body } = fm(fileContents)
const converter = new showdown.Converter({
tables: true,
tablesHeaderId: true
})
const html = converter.makeHtml(body)
const slug = toSlug(path.substring(path.lastIndexOf('/') + 1))
return {
attributes: {
...attributes,
pubdate: attributes.pubdate?.toUTCString() ?? null,
moddate: attributes.moddate?.toUTCString() ?? null
},
html,
slug
}
}
export function getStaticEntryPaths (contentPath) {
const entries = fs.readdirSync(contentPath, { withFileTypes: true })
const paths = entries.map((dirent) => ({
params: {
slug: toSlug(dirent.name)
}
}))
return {
fallback: false,
paths
}
}
export function getStaticEntryProps (contentPath, { params }) {
const path = `${contentPath}/${params.slug}.md`
const entry = getMarkdownEntry(path)
const { attributes } = entry
return { props: { ...entry, attributes } }
}
export function getStaticEntries (contentPath) {
const directoryItems = fs.readdirSync(contentPath, { withFileTypes: true })
return directoryItems.map((dirent) =>
getMarkdownEntry(`${dirent.path}/${dirent.name}`)
)
}
export function getContentTags (contentPath) {
const allTags = {}
const entries = getStaticEntries(contentPath)
for (const entry of entries) {
if (!entry.attributes.tags) { continue }
const tags = entry.attributes.tags
for (const tag of tags) {
allTags[tag] = !allTags[tag] ? 1 : allTags[tag] + 1
}
}
return allTags
}


@@ -1,9 +1,17 @@
 import * as dateFns from 'date-fns'

-export function toSlug (input) {
-  return input.substring(0, input.indexOf('.')).trim()
+export function filenameToSlug (input) {
+  return stringToSlug(input.substring(0, input.indexOf('.')))
 }

+export function stringToSlug (str) {
+  return str
+    .trim()
+    .toLowerCase()
+    .replace(/[\W_]+/g, '-')
+    .replace(/^-+|-+$/g, '')
+};
+
 export function formatDate (date) {
   return dateFns.format(Date.parse(date), 'PPP')
 }
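For reference, the new slug helpers normalise arbitrary strings and filenames; the inputs below are hypothetical, and the results follow directly from the regexes above.

// Illustrative only (hypothetical inputs, not part of the commit):
stringToSlug('  Hello, World! ')    // -> 'hello-world'
filenameToSlug('My First Post.md')  // -> 'my-first-post'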

src/lib/mdx-components.js (new file, 34 lines)

@@ -0,0 +1,34 @@
import StaticContentList from '@/components/StaticContentList/StaticContentList'
import { fetchPosts } from '@/services/content-service'
import { useEffect, useState, useTransition } from 'react'
export const mdxComponents = {
StaticContentList: ({ type, urlPrefix, max = 0 }) => {
const [items, setItems] = useState([])
const [isLoading, setLoading] = useState(true)
useEffect(function () {
switch (type) {
case 'posts':
(async function () {
const res = await fetchPosts([])
const json = await res.json()
setItems(json.data.sort((a, b) => a.date_published < b.date_published) ?? [])
setLoading(false)
})()
break
default:
throw new Error(`Could not render StaticContentList: content type ${type} not supported.`)
}
}, [])
return (
<>
{isLoading && <p>Loading...</p>}
{!isLoading && <StaticContentList entries={items} urlPrefix={urlPrefix} max={max} />}
</>
)
}
}
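For context, this mapping lets CMS-managed MDX pages embed React components. A hypothetical fragment of basic-page content (not part of this diff) that MDXClient would render with these components:

{/* Hypothetical MDX content stored in the CMS: */}
## Recent posts

<StaticContentList type='posts' urlPrefix='writing/' max={5} />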

src/pages/[[...path]].jsx (new file, 79 lines)

@@ -0,0 +1,79 @@
import { FailedFetchBasicPageError, FailedFetchBasicPagesError } from '@/errors'
import DefaultLayout from '@/layouts/DefaultLayout/DefaultLayout'
import { mdxComponents } from '@/lib/mdx-components'
import { fetchBasicPages } from '@/services/content-service'
import { MDXClient } from 'next-mdx-remote-client'
import {
serialize
} from 'next-mdx-remote-client/serialize'
import { NextSeo } from 'next-seo'
export async function getStaticPaths () {
const res = await fetchBasicPages(['path'])
if (!res.ok) {
throw new FailedFetchBasicPagesError(await res.text())
}
const pages = (await res.json()).data
const paths = {
paths: pages.map(page => ({
params: {
path: page.path?.split('/') // about/page -> [about, page]
.filter(p => !!p) ?? [] // deal with paths starting with '/'
}
})),
fallback: true // false or "blocking"
}
return paths
}
export async function getStaticProps ({ params }) {
const { path } = params
const res = await fetchBasicPages([], {
path: {
_eq: path?.join('/') ?? null
}
})
if (!res.ok) {
throw new FailedFetchBasicPageError(path, await res.text())
}
const page = (await res.json()).data.at(0)
if (!page) {
return {
notFound: true
}
}
const { content, title } = page
const mdxSource = await serialize({ source: content })
return {
props: {
title,
mdxSource
}
}
}
export default function BasicPage ({ title, mdxSource }) {
return (
<DefaultLayout>
<NextSeo
title={title} description={undefined} openGraph={
{
title,
description: undefined
}
}
/>
<div>
<MDXClient {...mdxSource} components={mdxComponents} />
</div>
</DefaultLayout>
)
}
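To make the optional catch-all concrete, here is a worked example of how a request path flows through getStaticProps above (the page path itself is hypothetical):

// Hypothetical request for /about/contact:
//   params.path === ['about', 'contact']
//   filter sent to the CMS: { path: { _eq: 'about/contact' } }
// Request for the site root '/':
//   params.path is undefined, so the filter falls back to { path: { _eq: null } }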

@@ -1,83 +0,0 @@
import ExternalLink from '@/components/ExternalLink/ExternalLink'
import DefaultLayout from '@/layouts/DefaultLayout/DefaultLayout'
import { NextSeo } from 'next-seo'
const Title = 'About me'
export default function About () {
return (
<DefaultLayout>
<NextSeo
title={Title} openGraph={
{
title: Title
}
}
/>
<h1>{Title}</h1>
<h2>Where to find me</h2>
<section>
<ul>
<li>
<strong>
<ExternalLink href='https://letterboxd.com/aaronyarbz/'>
Letterboxd
</ExternalLink>
</strong>{' '}
is a social platform for film lovers to rate, review, and discover
movies, akin to &quot;Goodreads for film.&quot;
</li>
<li>
<strong>
<ExternalLink href='https://github.com/AaronJY'>
GitHub
</ExternalLink>
</strong>{' '}
is a web-based platform for version control and collaboration on
software development projects. Find out what I&apos;ve been working
on here!
</li>
<li>
<strong>
<ExternalLink href='https://www.linkedin.com/in/aaronjyarborough/'>
LinkedIn
</ExternalLink>
</strong>
, unfortunately. A social network for professionals.
</li>
</ul>
</section>
<h2>Tech I Like</h2>
<section>
<ul>
<li>
<strong>Web Development:</strong> I primarily use Node.js with TypeScript
(or JavaScript for smaller projects) alongside Next.js to build websites
and applications.
</li>
<li>
<strong>Scripting:</strong> My preferred scripting languages are Python
and JavaScript, as I&apos;m well-versed in them and they offer extensive
libraries that typically cover my needs.
</li>
<li>
<strong>API and Backend Development:</strong> For more robust API or backend
architecture, I often choose .NET Core with C# and ASP.NET. The strongly-typed
nature of C# and the structured framework of ASP.NET help maintain clean and
organised code.
</li>
<li>
<strong>Cloud Hosting:</strong> When possible, I opt for hosting on a
DigitalOcean droplet. If more extensive cloud services are required, I usually
opt for Google Cloud Platform (GCP), which I find more user-friendly than Azure
or AWS. I also self-host services on shared server hosting running Ubuntu Server, typically with Hetzner.
</li>
</ul>
</section>
</DefaultLayout>
)
}


@@ -1,14 +1,40 @@
 import DefaultLayout from '@/layouts/DefaultLayout/DefaultLayout'
 import React from 'react'
-import yaml from 'js-yaml'
-import fs from 'fs'
-import showdown from 'showdown'
 import { NextSeo } from 'next-seo'
 import Resume from '@/components/Resume/Resume'
+import { fetchCV } from '@/services/content-service'
+import { FailedFetchCVError } from '@/errors'

 export const Title = 'CV'

-function ResumePage ({
+export async function getServerSideProps () {
+  const res = await fetchCV([])
+
+  if (!res.ok) {
+    throw new FailedFetchCVError(await res.text())
+  }
+
+  const cv = (await res.json()).data
+
+  if (!cv) {
+    return {
+      notFound: true
+    }
+  }
+
+  const { competencies, education, languages, certifications, experience } = cv
+
+  return {
+    props: {
+      competencies,
+      education,
+      languages,
+      certifications,
+      experience
+    }
+  }
+}
+
+export default function ResumePage ({
   competencies,
   education,
   certifications,
@@ -37,26 +63,3 @@ function ResumePage ({
     </DefaultLayout>
   )
 }
-
-export function getStaticProps () {
-  const content = fs.readFileSync('./content/pages/cv.yml', {
-    encoding: 'utf-8'
-  })
-
-  const data = yaml.load(content)
-
-  const MDConverter = new showdown.Converter()
-  // @ts-ignore
-  data.experience = data.experience.map((exp) => ({
-    ...exp,
-    desc: MDConverter.makeHtml(exp.desc)
-  }))
-
-  return {
-    // @ts-ignore
-    props: { ...data }
-  }
-}
-
-export default ResumePage

@@ -1,41 +0,0 @@
import Head from 'next/head'
import DefaultLayout from '@/layouts/DefaultLayout/DefaultLayout'
import Link from 'next/link'
import StaticContentList from '@/components/StaticContentList/StaticContentList'
import { getStaticEntries } from '@/lib/content'
export const getStaticProps = () => ({
props: {
postEntries: getStaticEntries('content/writing')
}
})
export default function Home ({ postEntries }) {
return (
<DefaultLayout>
<Head>
<meta name='viewport' content='width=device-width, initial-scale=1' />
<link rel='icon' href='/favicon.ico' />
</Head>
<h1>Hello!</h1>
<section>
<p>
I&apos;m Aaron, a Brit living in Newcastle-upon-Tyne, UK. I
work professionally as a Software Engineer, and study
languages, history and philosophy in my spare time.
</p>
<p>
I currently work as a Lead Consultant at Hippo Digital, working on a public sector project for the Department of Education. You can find out more about my work history <Link href='/cv'>on my CV</Link>.
</p>
</section>
<section>
<h2>Recent posts</h2>
<StaticContentList entries={postEntries} urlPrefix='writing/' max={5} />
</section>
</DefaultLayout>
)
}


@@ -1,17 +1,62 @@
 import React from 'react'
 import DefaultLayout from '@/layouts/DefaultLayout/DefaultLayout'
-import { getStaticEntryPaths, getStaticEntryProps } from '@/lib/content'
-import Book from '@/components/Book/Book'
-import { stringifyAndParse } from '@/lib/helpers'
+import BookReview from '@/components/Book/BookReview'
+import { fetchBookReviews, markdownToHtml } from '@/services/content-service'
+import { FailedFetchBookReviewError, FailedFetchBookReviewsError } from '@/errors'

-export const getStaticPaths = () => getStaticEntryPaths('./content/books')
-export const getStaticProps = (ctx) =>
-  stringifyAndParse(getStaticEntryProps('./content/books', ctx))
+export async function getStaticPaths () {
+  const res = await fetchBookReviews(['slug'], {
+    status: 'published'
+  })
+
+  if (!res.ok) {
+    throw new FailedFetchBookReviewsError(await res.text())
+  }
+
+  const reviews = (await res.json()).data
+
+  return {
+    paths: reviews.map(post => ({
+      params: {
+        slug: post.slug
+      }
+    })),
+    fallback: true // false or "blocking"
+  }
+}
+
+export const getStaticProps = async ({ params }) => {
+  const { slug } = params
+
+  const res = await fetchBookReviews([], {
+    slug,
+    status: 'published'
+  })
+
+  if (!res.ok) {
+    throw new FailedFetchBookReviewError(slug, await res.text())
+  }
+
+  const review = (await res.json()).data.at(0)
+
+  if (!review) {
+    return {
+      notFound: true
+    }
+  }
+
+  const content = review.review
+  const html = markdownToHtml(content)
+
+  return {
+    props: {
+      review,
+      html
+    }
+  }
+}

-export default function LibrarySingle ({ attributes, html }) {
+export default function LibrarySingle ({ review, html }) {
   return (
     <DefaultLayout>
-      <Book attributes={attributes} html={html} />
+      <BookReview review={review} html={html} />
     </DefaultLayout>
   )
 }


@@ -1,26 +1,31 @@
-import BookListItem from '@/components/BookListItem/BookListItem'
+import BookReviewItem from '@/components/BookReviewItem/BookReviewItem'
 import Grid from '@/components/Grid/Grid'
+import { FailedFetchBookReviewsError } from '@/errors'
 import DefaultLayout from '@/layouts/DefaultLayout/DefaultLayout'
-import { getStaticEntries } from '@/lib/content'
 import { stringifyAndParse } from '@/lib/helpers'
+import { fetchBookReviews } from '@/services/content-service'
 import { NextSeo } from 'next-seo'

 export const Title = 'Library'

-export const getStaticProps = () => {
-  const bookEntries = getStaticEntries('./content/books')
-    .sort((a, b) => {
-      return b.attributes.readDate - a.attributes.readDate
-    })
+export async function getStaticProps () {
+  const res = await fetchBookReviews()
+
+  if (!res.ok) {
+    throw new FailedFetchBookReviewsError(await res.text())
+  }
+
+  const reviews = (await res.json()).data
+    .sort((a, b) => new Date(b.read_date).getTime() - new Date(a.read_date).getTime())

   return {
     props: {
-      bookEntries: stringifyAndParse(bookEntries)
+      reviews: stringifyAndParse(reviews)
     }
   }
 }

-export default function Library ({ bookEntries }) {
+export default function Library ({ reviews }) {
   return (
     <DefaultLayout>
       <NextSeo
@@ -36,8 +41,8 @@ export default function Library ({ bookEntries }) {
       <section>
         <Grid columns={5}>
-          {bookEntries.map((entry, _) => (
-            <BookListItem key={entry.attributes.title} {...entry.attributes} href={`/library/${entry.slug}`} />
+          {reviews.map((review, _) => (
+            <BookReviewItem key={review.title} {...review} href={`/library/${review.slug}`} />
           ))}
         </Grid>
       </section>


@@ -1,19 +1,33 @@
+import { FailedFetchPostsError } from '@/errors'
 import DefaultLayout from '@/layouts/DefaultLayout/DefaultLayout'
-import { getContentTags, getStaticEntries } from '@/lib/content'
+import { fetchItems, fetchPosts, getTagsFromPosts } from '@/services/content-service'
 import { NextSeo } from 'next-seo'
 import Link from 'next/link'
 import React from 'react'

-export const getStaticProps = () => ({
-  props: {
-    tags: getContentTags('./content/writing'),
-    postEntries: getStaticEntries('./content/writing')
-  }
-})
+export async function getServerSideProps () {
+  const res = await fetchPosts(['title', 'date_published', 'tags', 'slug'], {
+    status: 'published'
+  })
+
+  if (!res.ok) {
+    throw new FailedFetchPostsError(await res.text())
+  }
+
+  const posts = (await res.json()).data
+  const tags = getTagsFromPosts(posts)
+
+  return {
+    props: {
+      tags,
+      posts
+    }
+  }
+}

 export const Title = 'Tags'

-export default function Tags ({ tags, postEntries }) {
+export default function Tags ({ tags, posts }) {
   return (
     <DefaultLayout>
       <NextSeo
@@ -29,18 +43,18 @@ export default function Tags ({ tags, postEntries }) {
       <section>
         {Object.keys(tags).sort().map(tag => {
-          const posts = postEntries
-            .filter(p => p.attributes.tags.includes(tag))
-            .sort((a, b) => b.attributes.title > -a.attributes.title)
+          const tagPosts = posts
+            .filter(p => p.tags.includes(tag))
+            .sort((a, b) => b.title > -a.title)

           return (
             <React.Fragment key={tag}>
               <h2>{tag}</h2>
               <ul>
-                {posts.map((post, _) => {
+                {tagPosts.map((post, _) => {
                   return (
                     <li key={post.slug}>
-                      <Link href={`/writing/${post.slug}`}>{post.attributes.title}</Link>
+                      <Link href={`/writing/${post.slug}`}>{post.title}</Link>
                     </li>
                   )
                 })}


@@ -1,15 +1,62 @@
 import React from 'react'
 import DefaultLayout from '@/layouts/DefaultLayout/DefaultLayout'
-import { getStaticEntryPaths, getStaticEntryProps } from '@/lib/content'
 import Article from '@/components/Article/Article'
+import { fetchPosts, markdownToHtml } from '@/services/content-service'
+import { FailedFetchPostError, FailedFetchPostsError } from '@/errors'

-export const getStaticPaths = () => getStaticEntryPaths('./content/writing')
-export const getStaticProps = (ctx) => getStaticEntryProps('./content/writing', ctx)
+export async function getStaticPaths () {
+  const res = await fetchPosts(['slug'], {
+    status: 'published'
+  })
+
+  if (!res.ok) {
+    throw new FailedFetchPostsError(await res.text())
+  }
+
+  const posts = (await res.json()).data
+
+  return {
+    paths: posts.map(post => ({
+      params: {
+        slug: post.slug
+      }
+    })),
+    fallback: true // false or "blocking"
+  }
+}
+
+export const getStaticProps = async ({ params }) => {
+  const { slug } = params
+
+  const res = await fetchPosts([], {
+    slug,
+    status: 'published'
+  })
+
+  if (!res.ok) {
+    throw new FailedFetchPostError(slug, await res.text())
+  }
+
+  const post = (await res.json()).data.at(0)
+
+  if (!post) {
+    return {
+      notFound: true
+    }
+  }
+
+  const { content } = post
+  const html = markdownToHtml(content)
+
+  return {
+    props: {
+      post,
+      html
+    }
+  }
+}

-export default function WritingSingle ({ attributes, html }) {
+export default function WritingSingle ({ post, html }) {
   return (
     <DefaultLayout>
-      <Article attributes={attributes} html={html} />
+      <Article {...post} html={html} />
     </DefaultLayout>
   )
 }


@@ -1,21 +1,33 @@
 import DefaultLayout from '@/layouts/DefaultLayout/DefaultLayout'
 import React from 'react'
-import { getStaticEntries } from '@/lib/content'
 import { NextSeo } from 'next-seo'
 import StaticContentList from '@/components/StaticContentList/StaticContentList'
+import { fetchPosts } from '@/services/content-service'
+import { FailedFetchPostsError } from '@/errors'

-export const getStaticProps = () => ({
-  props: {
-    postEntries: getStaticEntries('content/writing')
-      .sort((a, b) =>
-        new Date(b.attributes.pubdate).getTime() - new Date(a.attributes.pubdate).getTime()
-      )
-  }
-})
+export const getServerSideProps = async () => {
+  const res = await fetchPosts(['title', 'date_published', 'slug'], {
+    status: 'published'
+  })
+
+  if (!res.ok) {
+    throw new FailedFetchPostsError(await res.text())
+  }
+
+  const json = await res.json()
+  const posts = json.data
+    .sort((a, b) => new Date(b.date_published).getTime() - new Date(a.date_published).getTime())
+
+  return {
+    props: {
+      posts
+    }
+  }
+}

 export const Title = 'Writing'

-export default function Writing ({ postEntries }) {
+export default function Writing ({ posts }) {
   return (
     <DefaultLayout>
       <NextSeo
@@ -29,7 +41,7 @@ export default function Writing ({ postEntries }) {
       <h1>{Title}</h1>
       <section>
-        <StaticContentList entries={postEntries} urlPrefix='writing/' />
+        <StaticContentList entries={posts} urlPrefix='writing/' />
       </section>
     </DefaultLayout>
   )

@@ -0,0 +1,58 @@
import showdown from 'showdown'
const baseUrl = process.env.NEXT_PUBLIC_CONTENT_API_BASE_URL
// @ts-ignore
export const fetchPosts = async (...args) => fetchItems('post', ...args)
export const fetchBookReviews = async (...args) => fetchItems('book_review', ...args)
export const fetchBasicPages = async (...args) => fetchItems('basic_pages', ...args)
export const fetchCV = async (...args) => fetchItems('cv', ...args)
export async function fetchItems (type, fields = undefined, filter = undefined) {
const url = new URL(`${baseUrl}/items/${type}`)
console.log(`Getting items '${type}' with fields`, fields, 'and filter', filter)
if (fields?.length) {
url.searchParams.append('fields', fields.join(','))
}
if (filter) {
url.searchParams.append(
'filter',
JSON.stringify(filter)
)
}
return await apiFetch(url.toString())
}
export function getTagsFromPosts (posts) {
const allTags = {}
for (const post of posts) {
if (!post.tags) { continue }
for (const tag of post.tags) {
allTags[tag] = !allTags[tag] ? 1 : allTags[tag] + 1
}
}
return allTags
}
export function markdownToHtml (content) {
const converter = new showdown.Converter({
tables: true,
tablesHeaderId: true
})
const html = converter.makeHtml(content)
return html
}
async function apiFetch (...args) {
console.log('API fetch:', args)
// @ts-ignore
const res = await fetch(...args)
console.log('API fetch response:', res.status, res.statusText)
return res
}
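For illustration, a typical call into this service from a page looks like the sketch below; it simply mirrors the fetchPosts usage elsewhere in this commit, with the field list and filter serialised by fetchItems into what appears to be a Directus-style /items query.

// Usage sketch (mirrors the calls made from the pages in this commit):
const res = await fetchPosts(['title', 'date_published', 'slug'], { status: 'published' })
if (!res.ok) throw new FailedFetchPostsError(await res.text())
const posts = (await res.json()).data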

tmp/.gitkeep (new empty file)

util/books-as-json.js (new file, 33 lines)

@@ -0,0 +1,33 @@
import { readdirSync, readFileSync } from 'fs'
import path from 'path'
import fm from 'front-matter'
const dirPath = './content/books'
const output = []
const files = readdirSync(dirPath)
for (const file of files) {
const filePath = path.join(dirPath, file)
const content = readFileSync(filePath, {
encoding: 'utf-8'
})
const { attributes, body } = fm(content, {
allowUnsafe: true
})
const entry = {
title: attributes.title,
author: attributes.author,
read_date: attributes.readDate,
rating: Math.round(attributes.stars * 2),
// "image": attributes.thumbnailUrl,
tags: attributes.tags.split(', '),
review: body,
url: attributes.url
}
output.push(entry)
}
console.log(JSON.stringify(output))
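For a sense of the output shape, one element of the JSON array this script prints would look roughly like the object below; the values are hypothetical, but the field names and the rating conversion follow the mapping above.

// Hypothetical output element (field names follow the mapping above):
// {
//   "title": "Some Book",
//   "author": "Some Author",
//   "read_date": "2024-01-01T00:00:00.000Z",
//   "rating": 8,                         // Math.round(stars * 2), e.g. 4 stars -> 8
//   "tags": ["fiction", "classics"],
//   "review": "Front-matter body text...",
//   "url": "https://app.thestorygraph.com/books/..."
// }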

util/writing-as-json.js (new file, 32 lines)

@@ -0,0 +1,32 @@
import { readdirSync, readFileSync } from 'fs'
import path from 'path'
import fm from 'front-matter'
import { stringToSlug } from '../src/lib/helpers.js'
const dirPath = './content/writing'
const output = []
const files = readdirSync(dirPath)
for (const file of files) {
const filePath = path.join(dirPath, file)
const content = readFileSync(filePath, {
encoding: 'utf-8'
})
const { attributes, body } = fm(content, {
allowUnsafe: true
})
const entry = {
slug: stringToSlug(attributes.title),
title: attributes.title,
excerpt: attributes.desc,
date_published: attributes.pubdate,
tags: attributes.tags || [],
content: body
}
output.push(entry)
}
console.log(JSON.stringify(output))