diff --git a/scripts/build-post-list.js b/scripts/build-post-list.js
index 288d7dc0c54..d6254e96f73 100644
--- a/scripts/build-post-list.js
+++ b/scripts/build-post-list.js
@@ -1,5 +1,5 @@
const { readdirSync, statSync, existsSync, readFileSync, writeFileSync } = require('fs')
-const { resolve, basename } = require('path')
+const { basename } = require('path')
const frontMatter = require('gray-matter')
const toc = require('markdown-toc')
const { slugify } = require('markdown-toc/lib/utils')
@@ -15,36 +15,36 @@ const result = {
docsTree: {}
}
const releaseNotes = []
-const basePath = 'pages'
-const postDirectories = [
- // order of these directories is important, as the blog should come before docs, to create a list of available release notes, which will later be used to release-note-link for spec docs
- [`${basePath}/blog`, '/blog'],
- [`${basePath}/docs`, '/docs'],
- [`${basePath}/about`, '/about']
-];
const addItem = (details) => {
- if(details.slug.startsWith('/docs'))
+ if (details.slug.startsWith('/docs'))
result["docs"].push(details)
- else if(details.slug.startsWith('/blog'))
+ else if (details.slug.startsWith('/blog'))
result["blog"].push(details)
- else if(details.slug.startsWith('/about'))
+ else if (details.slug.startsWith('/about'))
result["about"].push(details)
- else {}
+ else { }
}
-module.exports = async function buildPostList() {
- walkDirectories(postDirectories, result)
- const treePosts = buildNavTree(result["docs"].filter((p) => p.slug.startsWith('/docs/')))
- result["docsTree"] = treePosts
- result["docs"] = addDocButtons(result["docs"], treePosts)
- if (process.env.NODE_ENV === 'production') {
- // console.log(inspect(result, { depth: null, colors: true }))
+async function buildPostList(postDirectories, basePath, writeFilePath) {
+ try {
+ if (postDirectories.length === 0) {
+ throw new Error('Error while building post list: No post directories provided');
+ }
+ walkDirectories(postDirectories, result, basePath)
+ const treePosts = buildNavTree(result["docs"].filter((p) => p.slug.startsWith('/docs/')))
+ result["docsTree"] = treePosts
+ result["docs"] = addDocButtons(result["docs"], treePosts)
+ if (process.env.NODE_ENV === 'production') {
+ // console.log(inspect(result, { depth: null, colors: true }))
+ }
+ writeFileSync(writeFilePath, JSON.stringify(result, null, ' '))
+ } catch (error) {
+ throw new Error(`Error while building post list: ${error.message}`);
}
- writeFileSync(resolve(__dirname, '..', 'config', 'posts.json'), JSON.stringify(result, null, ' '))
}
-function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, sectionId, rootSectionId) {
+function walkDirectories(directories, result, basePath, sectionWeight = 0, sectionTitle, sectionId, rootSectionId) {
for (let dir of directories) {
let directory = dir[0]
let sectionSlug = dir[1] || ''
@@ -68,8 +68,8 @@ function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, s
}
details.isSection = true
if (slugElements.length > 3) {
- details.parent = slugElements[slugElements.length - 2]
- details.sectionId = slugElements[slugElements.length - 1]
+ details.parent = slugElements[slugElements.length - 2]
+ details.sectionId = slugElements[slugElements.length - 1]
}
if (!details.parent) {
details.isRootSection = true
@@ -79,7 +79,7 @@ function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, s
details.slug = slug
addItem(details)
const rootId = details.parent || details.rootSectionId
- walkDirectories([[fileName, slug]], result, details.weight, details.title, details.sectionId, rootId)
+ walkDirectories([[fileName, slug]], result, basePath, details.weight, details.title, details.sectionId, rootId)
} else if (file.endsWith('.mdx') && !fileName.endsWith('/_section.mdx')) {
const fileContent = readFileSync(fileName, 'utf-8')
// Passing a second argument to frontMatter disables cache. See https://github.com/asyncapi/website/issues/1057
@@ -96,18 +96,18 @@ function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, s
details.id = fileName
details.isIndex = fileName.endsWith('/index.mdx')
details.slug = details.isIndex ? sectionSlug : slug.replace(/\.mdx$/, '')
- if(details.slug.includes('/reference/specification/') && !details.title) {
+ if (details.slug.includes('/reference/specification/') && !details.title) {
const fileBaseName = basename(data.slug) // ex. v2.0.0 | v2.1.0-next-spec.1
const fileName = fileBaseName.split('-')[0] // v2.0.0 | v2.1.0
details.weight = specWeight--
if (fileName.startsWith('v')) {
- details.title = capitalize(fileName.slice(1))
+ details.title = capitalize(fileName.slice(1))
} else {
details.title = capitalize(fileName)
}
- if(releaseNotes.includes(details.title)){
+ if (releaseNotes.includes(details.title)) {
details.releaseNoteLink = `/blog/release-notes-${details.title}`
}
@@ -122,10 +122,10 @@ function walkDirectories(directories, result, sectionWeight = 0, sectionTitle, s
}
// To create a list of available release notes, which will later be used to add the details.releaseNoteLink attribute.
- if(file.startsWith("release-notes") && dir[1] === "/blog"){
- const fileName_without_extension = file.slice(0,-4)
+ if (file.startsWith("release-notes") && dir[1] === "/blog") {
+ const fileName_without_extension = file.slice(0, -4)
// removes the file extension. For example, release-notes-2.1.0.md -> release-notes-2.1.0
- const version = fileName_without_extension.slice(fileName_without_extension.lastIndexOf("-")+1)
+ const version = fileName_without_extension.slice(fileName_without_extension.lastIndexOf("-") + 1)
// gets the version from the name of the releaseNote .md file (from /blog). For example, version = 2.1.0 if fileName_without_extension = release-notes-2.1.0
releaseNotes.push(version)
@@ -159,3 +159,5 @@ function isDirectory(dir) {
function capitalize(text) {
return text.split(/[\s\-]/g).map(word => `${word[0].toUpperCase()}${word.substr(1)}`).join(' ')
}
+
+module.exports = { slugifyToC, buildPostList }
diff --git a/scripts/index.js b/scripts/index.js
index 33125fe7533..0fbbe394085 100644
--- a/scripts/index.js
+++ b/scripts/index.js
@@ -1,13 +1,23 @@
const { resolve } = require('path');
const fs = require('fs');
const rssFeed = require('./build-rss');
-const buildPostList = require('./build-post-list');
+const { buildPostList } = require('./build-post-list');
const buildCaseStudiesList = require('./casestudies');
const buildAdoptersList = require('./adopters');
const buildFinanceInfoList = require('./finance');
async function start() {
- await buildPostList();
+
+ const postDirectories = [
+ ['pages/blog', '/blog'],
+ ['pages/docs', '/docs'],
+ ['pages/about', '/about']
+ ];
+ const basePath = 'pages';
+ const writeFilePath = resolve(__dirname, '../config', 'posts.json');
+
+ await buildPostList(postDirectories, basePath, writeFilePath);
+
rssFeed(
'blog',
'AsyncAPI Initiative Blog RSS Feed',
diff --git a/tests/build-post-list.test.js b/tests/build-post-list.test.js
new file mode 100644
index 00000000000..75a25597df6
--- /dev/null
+++ b/tests/build-post-list.test.js
@@ -0,0 +1,192 @@
+const { existsSync, readFileSync, writeFileSync, mkdirSync, rmSync } = require('fs');
+const { resolve, join } = require('path');
+const { buildPostList, slugifyToC } = require('../scripts/build-post-list');
+
+describe('buildPostList', () => {
+ let tempDir;
+ let writeFilePath;
+ let postDirectories;
+
+ beforeEach(() => {
+ tempDir = resolve(__dirname, 'test-config');
+ writeFilePath = resolve(tempDir, 'posts.json');
+ postDirectories = [
+ [join(tempDir, 'blog'), '/blog'],
+ [join(tempDir, 'docs'), '/docs'],
+ [join(tempDir, 'about'), '/about'],
+ ];
+
+ mkdirSync(tempDir, { recursive: true });
+
+ mkdirSync(join(tempDir, 'blog'), { recursive: true });
+ writeFileSync(join(tempDir, 'blog', 'release-notes-2.1.0.mdx'), '---\ntitle: Release Notes 2.1.0\n---\nThis is a release note.');
+
+ mkdirSync(join(tempDir, 'docs'), { recursive: true });
+ writeFileSync(join(tempDir, 'docs', 'index.mdx'), '---\ntitle: Docs Home\n---\nThis is the documentation homepage.');
+
+ mkdirSync(join(tempDir, 'about'), { recursive: true });
+ writeFileSync(join(tempDir, 'about', 'index.mdx'), '---\ntitle: About Us\n---\nThis is the about page.');
+
+ mkdirSync(join(tempDir, 'docs', 'reference', 'specification'), { recursive: true });
+ });
+
+ afterEach(() => {
+ rmSync(tempDir, { recursive: true, force: true });
+ });
+
+ it('builds a post list and writes the result to a file', async () => {
+
+ await buildPostList(postDirectories, tempDir, writeFilePath);
+
+ const outputExists = existsSync(writeFilePath);
+ expect(outputExists).toBe(true);
+
+ const output = JSON.parse(readFileSync(writeFilePath, 'utf-8'));
+
+ expect(output).toHaveProperty('docs');
+ expect(output).toHaveProperty('blog');
+ expect(output).toHaveProperty('about');
+ expect(output).toHaveProperty('docsTree');
+
+ const blogEntry = output.blog.find(item => item.slug === '/blog/release-notes-2.1.0');
+ expect(blogEntry).toBeDefined();
+ expect(blogEntry.title).toBe('Release Notes 2.1.0');
+ });
+
+ it('handles a directory with only section files', async () => {
+ mkdirSync(join(tempDir, 'docs', 'section1'), { recursive: true });
+ writeFileSync(join(tempDir, 'docs', 'section1', '_section.mdx'), '---\ntitle: Section 1\n---\nThis is section 1.');
+
+ await buildPostList(postDirectories, tempDir, writeFilePath);
+
+ const output = JSON.parse(readFileSync(writeFilePath, 'utf-8'));
+
+ expect(output.docs.length).toBeGreaterThan(0);
+ expect(output.docs.find(item => item.title === 'Section 1')).toBeDefined();
+ });
+
+ it('handles multiple release notes correctly', async () => {
+ writeFileSync(join(tempDir, 'blog', 'release-notes-2.1.1.mdx'), '---\ntitle: Release Notes 2.1.1\n---\nThis is a release note.');
+
+ await buildPostList(postDirectories, tempDir, writeFilePath);
+
+ const output = JSON.parse(readFileSync(writeFilePath, 'utf-8'));
+
+ const firstReleaseNote = output.blog.find(item => item.slug === '/blog/release-notes-2.1.0');
+ const secondReleaseNote = output.blog.find(item => item.slug === '/blog/release-notes-2.1.1');
+
+ expect(firstReleaseNote).toBeDefined();
+ expect(firstReleaseNote.title).toBe('Release Notes 2.1.0');
+
+ expect(secondReleaseNote).toBeDefined();
+ expect(secondReleaseNote.title).toBe('Release Notes 2.1.1');
+ });
+
+ it('handles errors gracefully', async () => {
+ const invalidDir = [join(tempDir, 'non-existent-dir'), '/invalid'];
+ await expect(buildPostList([invalidDir], tempDir, writeFilePath)).rejects.toThrow();
+ });
+
+ it('handles heading ids like {# myHeadingId}', () => {
+ const input = '## My Heading {#custom-id}';
+ expect(slugifyToC(input)).toBe('custom-id');
+ });
+
+ it('handles heading ids like {<a name="myHeadingId"/>}', () => {
+ const input = '## My Heading {<a name="custom-anchor-id"/>}';
+ expect(slugifyToC(input)).toBe('custom-anchor-id');
+ });
+
+ it('handles empty strings', () => {
+ expect(slugifyToC('')).toBe('');
+ });
+
+ it('does not process specification files without a title', async () => {
+ const specDir = join(tempDir, 'docs', 'reference', 'specification');
+ writeFileSync(
+ join(specDir, 'v2.1.0-no-title.mdx'),
+ '---\n---\nContent of specification without a title.'
+ );
+
+ await buildPostList(postDirectories, tempDir, writeFilePath);
+
+ const output = JSON.parse(readFileSync(writeFilePath, 'utf-8'));
+ const noTitleEntry = output.docs.find(item => item.slug.includes('/reference/specification/v2.1.0-no-title'));
+
+ expect(noTitleEntry).toBeUndefined();
+ });
+
+ it('does not process specification files with "next-spec" in the filename', async () => {
+ const specDir = join(tempDir, 'docs', 'reference', 'specification');
+ writeFileSync(
+ join(specDir, 'v2.1.0-next-spec.1.mdx'),
+ '---\n---\nContent of pre-release specification v2.1.0-next-spec.1.'
+ );
+
+ await buildPostList(postDirectories, tempDir, writeFilePath);
+
+ const output = JSON.parse(readFileSync(writeFilePath, 'utf-8'));
+ const nextSpecEntry = output.docs.find(item => item.slug.includes('/reference/specification/v2.1.0-next-spec.1'));
+
+ expect(nextSpecEntry).toBeUndefined();
+ });
+
+ it('does not process specification files with "explorer" in the filename', async () => {
+ const specDir = join(tempDir, 'docs', 'reference', 'specification');
+ writeFileSync(
+ join(specDir, 'explorer.mdx'),
+ '---\n---\nContent of explorer specification.'
+ );
+
+ await buildPostList(postDirectories, tempDir, writeFilePath);
+
+ const output = JSON.parse(readFileSync(writeFilePath, 'utf-8'));
+ const explorerEntry = output.docs.find(item => item.slug.includes('/reference/specification/explorer'));
+
+ expect(explorerEntry).toBeUndefined();
+ });
+
+ it('throws an error if the directory cannot be read', async () => {
+ const invalidDir = [join(tempDir, 'non-existent-dir'), '/invalid'];
+
+ let error;
+ try {
+ await buildPostList([invalidDir], tempDir, writeFilePath);
+ } catch (err) {
+ error = err;
+ }
+
+ expect(error).toBeDefined();
+ expect(error.message).toMatch(/Error while building post list/);
+ });
+
+
+ it('throws an error if the front matter cannot be parsed', async () => {
+ writeFileSync(join(tempDir, 'docs', 'invalid.mdx'), '---\ninvalid front matter\n---\nContent');
+
+ let error;
+ try {
+ await buildPostList(postDirectories, tempDir, writeFilePath);
+ } catch (err) {
+ error = err;
+ }
+
+ expect(error).toBeDefined();
+ expect(error.message).toMatch(/Error while building post list/);
+ });
+
+ it('throws an error if no post directories are provided', async () => {
+
+ let error;
+
+ try {
+ await buildPostList([], tempDir, writeFilePath);
+ } catch (err) {
+ error = err;
+ }
+
+ expect(error).toBeDefined();
+ expect(error.message).toMatch(/Error while building post list/);
+ });
+
+});
diff --git a/tests/index.test.js b/tests/index.test.js
index 78e2c216958..37b124547ef 100644
--- a/tests/index.test.js
+++ b/tests/index.test.js
@@ -1,5 +1,5 @@
const rssFeed = require('../scripts/build-rss');
-const buildPostList = require('../scripts/build-post-list');
+const { buildPostList } = require('../scripts/build-post-list');
const buildCaseStudiesList = require('../scripts/casestudies');
const buildAdoptersList = require('../scripts/adopters');
const buildFinanceInfoList = require('../scripts/finance');