How I sync my Hashnode with my Hugo blog

The Brown BoxThe Brown Box
5 min read

Hashnode provides a visual editor that is easy to use for creating blog posts.
Hashnode also provides automatic backup to GitHub in markdown format.
So I am going to create posts in Hashnode and sync them to my Hugo blog.

Step 1: make a script to convert from hashnode format to hugo format

Even though they're both markdown, each has special properties used to format the blog.
So here we mostly have to convert those properties; the body doesn't need much touching.

Main actions:

  • Find all markdown files in a specific folder.

  • Parse the file and convert as expected format.

  • Save them to hugo content post folder

File convertPostsToHugo.js

"use strict";
const { promisify } = require("util");
const exec = promisify(require("child_process").exec);
const fs = require("fs");
const path = require("path");

let BASE_PATH = ".";

/**
 * Derive the output file name for a post from the `slug:` line in its
 * Hashnode front matter.
 *
 * @param {string} content - Full markdown file content.
 * @returns {string|undefined} "<slug>.md", or undefined when no slug line
 *   exists (caller skips the file in that case).
 */
function getNewFileName(content) {
  const headLines = content.split("\n");
  for (const line of headLines) {
    if (line.startsWith("slug:")) {
      // Strip only the "slug:" prefix, then trim. The original stripped
      // "slug: " (with a space), which silently dropped the slug's first
      // character whenever the colon was not followed by a space.
      return line.substring("slug:".length).trim() + ".md";
    }
  }
  // No slug found: fall through and return undefined.
}

/**
 * Strip Hashnode-specific `align="…"` attributes from a markdown image row,
 * e.g. `![alt](url align="center")` becomes `![alt](url)`.
 * Rows without an aligned image are returned unchanged.
 *
 * @param {string} row - One line of the post body.
 * @returns {string} The cleaned line.
 */
function convertRow(row) {
  // Cheap pre-check: only touch rows that contain an image with an align
  // attribute. Regex literal instead of rebuilding `new RegExp` per call.
  if (!/!\[.+align="/i.test(row)) {
    return row;
  }
  // One global regex removes EVERY occurrence. The original used string
  // replace(), which only removes the first occurrence of each variant,
  // leaving extras behind when a row holds several aligned images.
  return row.replace(/ align="(?:right|left|center)"/g, "");
}

/**
 * Run every line of the post body through convertRow.
 * Each output line keeps a trailing newline, matching the input split.
 *
 * @param {string} body - Post body (markdown, after the front matter).
 * @returns {string} The converted body.
 */
function convertBody(body) {
  const rows = body.split("\n");
  return rows.map((row) => convertRow(row) + "\n").join("");
}

/**
 * Convert a Hashnode markdown post into Hugo format: rebuild the front
 * matter with Hugo's fields, clean the body rows, and append a back-link
 * footer pointing at the original Hashnode post.
 *
 * @param {string} content - Full Hashnode markdown file (front matter + body).
 * @returns {string} The Hugo-formatted markdown.
 */
function convert(content) {
  // Front matter sits between the leading "---" and the next "---".
  const headEnd = content.indexOf("---", 3);
  const body = content.substring(headEnd + 3);

  const headData = { title: "", tags: "", slug: "" };
  // Pull the fields we care about out of the Hashnode front matter.
  for (const line of content.substring(3, headEnd).split("\n")) {
    if (line.startsWith("title: ")) {
      headData.title = line.substring("title: ".length);
    }
    if (line.startsWith("tags: ")) {
      // Hugo expects tags as a bracketed list.
      headData.tags = `[${line.substring("tags: ".length)}]`;
    }
    if (line.startsWith("slug: ")) {
      headData.slug = line.substring("slug: ".length);
    }
  }

  // Hugo front matter; date is set to the conversion time.
  const newHead = `---
title: ${headData.title}
date: ${new Date().toISOString()}
tags: ${headData.tags}
showToc: true # do you want to show table of content ?
draft: false
description: ""
hideSummary: false
cover:
  image: "<image path/url>" # image path/url
  alt: "<alt text>" # alt text
  caption: "<text>" # display caption under cover
  relative: false # when using page bundles set this to true
  hidden: true # only hide on current single page
---`;

  // Only add the back-link when a slug exists to link to.
  const footer = headData.slug
    ? `\n\n > This post is imported from: https://thebrownbox.hashnode.dev/${headData.slug}`
    : "";

  return newHead + "\n" + convertBody(body) + "\n" + footer;
}

/**
 * Convert one Hashnode markdown file and write the result into the Hugo
 * content/post folder, named after the post's slug.
 * Files whose front matter has no slug are skipped silently.
 *
 * @param {string} filePath - Path to the Hashnode markdown file.
 */
async function convertPost(filePath) {
  console.log(`Converting... ${filePath}`);

  const content = fs.readFileSync(filePath, "utf8");
  const newFileName = getNewFileName(content);

  // Guard clause: no slug means no target file name, so skip this post.
  if (!newFileName) {
    console.log("== DONE ==");
    return;
  }

  const newFilePath = path.join("./content/post/", `${newFileName}`);
  console.log({ newFilePath });
  try {
    fs.writeFileSync(newFilePath, convert(content), "utf8");
    console.log("File created/overwritten successfully");
  } catch (err) {
    console.error("Error writing to file:", err);
  }

  console.log("== DONE ==");
}

/**
 * Convert every markdown file directly inside parentFolderPath to Hugo
 * format, then commit and push the results with git when anything changed.
 *
 * @param {string} parentFolderPath - Folder holding the Hashnode backups.
 */
async function convertPostsToHugo(parentFolderPath) {
  if (!parentFolderPath) {
    console.log("-- missing parent folder --");
    return;
  }
  BASE_PATH = parentFolderPath;

  let isChanged = false;

  for (const entry of fs.readdirSync(BASE_PATH)) {
    const filePath = path.join(BASE_PATH, entry);
    const fileStat = fs.statSync(filePath);

    // Skip directories and anything that is not a markdown file.
    if (!fileStat.isFile() || !entry.endsWith(".md")) {
      continue;
    }
    await convertPost(filePath.toString());
    isChanged = true;
  }

  console.log({ isChanged });

  // Only touch git when at least one post was converted.
  if (!isChanged) {
    return;
  }
  await exec(`git config --global user.email "you@example.com"`);
  await exec(`git config --global user.name "Your Name"`);
  await exec(`git add .`);
  await exec(`git commit -m "Update post from Hashnode!"`);
  await exec(`git push`);
}

convertPostsToHugo(process.argv[2]);

Step 2: Create a workflow to trigger the procedure

Currently, both the Hugo site and the Hashnode backup are stored in the same repo (this makes it easier to interact with both). What we're trying to achieve here is:

  • Turn on backup in Hashnode (already did)

  • Whenever there is a push to the hashnode branch, copy all the files to the hugo branch

  • Then trigger the convert procedure

Some note:

  • If you commit/push in a workflow/action and want later steps to see that commit (i.e. to check out the newest state), you have to do it in another workflow. This is why I had to create 2 workflows and trigger the 2nd from the 1st — it doesn't work in a single workflow.

Procedure:

  • The actions are stored in the hashnode branch, and triggered by its pushes.

  • Move files to a specific folder to handle the case where we update a post (updating a post is quite tricky, so this is my solution)

  • Then merge hashnode to hugo

  • Trigger convert process

File move-to-hashnode-folder.yml

# Workflow 1 of 2: on every push to main, move newly backed-up Hashnode
# markdown files into a dated hashnode/ subfolder, push the result, then
# dispatch the merge-and-convert workflow via the REST API (a push made
# inside a workflow does not retrigger workflows, hence the explicit call).
name: Move to hashnode folder

on:
  push:
    branches:
      - main

jobs:
  Move-To-Hashnode-Folder:
    runs-on: ubuntu-latest
    steps:
      - name: Checkout main branch
        uses: actions/checkout@v3
        with:
          ref: main

      - name: Move all files to hashnode folder
        run: |
          folder_name=$(date +"%Y-%m-%d")
          mkdir -p "hashnode/$folder_name"
          # Guard: a bare `mv *.md` exits non-zero (failing the job) when
          # no markdown files exist at the repo root.
          if ls *.md >/dev/null 2>&1; then
            mv *.md "hashnode/$folder_name/"
          fi

      - name: Commit changes
        run: |
          git config user.name "cong"
          git config user.email "conghv123@gmail.com"
          git add .
          # Only commit when something is staged: `git commit` exits
          # non-zero on an empty commit, which would fail the job.
          git diff --cached --quiet || git commit -m "move files to hashnode"

      - name: Push changes
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
        run: |
          git push origin main
          sleep 10

      - name: Sleep
        run: sleep 20

  trigger-merge-action:
    needs: Move-To-Hashnode-Folder
    runs-on: ubuntu-latest
    steps:
      - name: Trigger Merge action
        run: |
          curl -X POST \
            -H "Accept: application/vnd.github+json" \
            -H "Authorization: Bearer ${{ secrets.GITHUB_TOKEN }}" \
            https://api.github.com/repos/${{ github.repository }}/actions/workflows/merge-and-convert-post-to-hugo.yml/dispatches \
            -d '{"ref":"main"}'

merge-and-convert-post-to-hugo.yml

# Workflow 2 of 2: merge main into the Hugo branch, then run the Node
# conversion script against today's hashnode/ folder on the Hugo branch.
name: Merge and convert Post to Hugo

on:
  workflow_dispatch: # This allows the workflow to be manually triggered or triggered by another workflow

jobs:
  Merge-Main-To-Hugo:
    runs-on: ubuntu-latest
    steps:
      # Pinned to v3 for consistency with the other checkouts; the original
      # used @master, an unpinned moving ref that can break without warning.
      - uses: actions/checkout@v3

      - name: Merge main -> Hugo
        uses: devmasx/merge-branch@master
        with:
          type: now
          target_branch: Hugo
          github_token: ${{ secrets.GITHUB_TOKEN }}
      - name: Sleep
        run: sleep 20

  Convert-Post:
    needs: [Merge-Main-To-Hugo]
    runs-on: ubuntu-latest
    steps:
      - name: Checkout Hugo branch
        uses: actions/checkout@v3
        with:
          ref: Hugo

      - name: Set up Node.js environment
        uses: actions/setup-node@v3
        with:
          node-version: "18"

      # NOTE(review): the date is recomputed here; if the run crosses
      # midnight UTC relative to workflow 1, the folder name may differ.
      - name: Run convertPostsToHugo.js script
        run: |
          node convertPostsToHugo.js hashnode/$(date +"%Y-%m-%d")
0
Subscribe to my newsletter

Read articles from The Brown Box directly inside your inbox. Subscribe to the newsletter, and don't miss out.

Written by

The Brown Box
The Brown Box