Skip to content

Commit

Permalink
refactor: remove experimental import assertions (#9955)
Browse files Browse the repository at this point in the history
* refactor(build): extract getWebFeatureStatus() without import assertion

* refactor(build): extract getWebSpec() without import assertion

* chore(kumascript): remove import assertion

* fix(build): use cwd() as base for node_modules
  • Loading branch information
caugner authored Nov 7, 2023
1 parent 54eba1d commit 378ff81
Show file tree
Hide file tree
Showing 8 changed files with 156 additions and 62 deletions.
2 changes: 1 addition & 1 deletion build/blog.ts
Original file line number Diff line number Diff line change
Expand Up @@ -403,7 +403,7 @@ export async function buildPost(
postProcessSmallerHeadingIDs($);
wrapTables($);
try {
const [sections] = extractSections($);
const [sections] = await extractSections($);
doc.body = sections;
} catch (error) {
console.error(
Expand Down
33 changes: 20 additions & 13 deletions build/extract-sections.ts
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,9 @@ import { extractSpecifications } from "./extract-specifications.js";

type SectionsAndFlaws = [Section[], string[]];

export function extractSections($: cheerio.CheerioAPI): [Section[], string[]] {
export async function extractSections(
$: cheerio.CheerioAPI
): Promise<[Section[], string[]]> {
const flaws: string[] = [];
const sections: Section[] = [];
const section = cheerio
Expand All @@ -17,13 +19,13 @@ export function extractSections($: cheerio.CheerioAPI): [Section[], string[]] {
const iterable = [...(body.childNodes as cheerio.Element[])];

let c = 0;
iterable.forEach((child) => {
for (const child of iterable) {
if (
(child as cheerio.Element).tagName === "h2" ||
(child as cheerio.Element).tagName === "h3"
) {
if (c) {
const [subSections, subFlaws] = addSections(section.clone());
const [subSections, subFlaws] = await addSections(section.clone());
sections.push(...subSections);
flaws.push(...subFlaws);
section.empty();
Expand All @@ -35,10 +37,11 @@ export function extractSections($: cheerio.CheerioAPI): [Section[], string[]] {
// That might make the DOM nodes more compact and memory efficient.
c++;
section.append(child);
});
}

if (c) {
// last straggler
const [subSections, subFlaws] = addSections(section);
const [subSections, subFlaws] = await addSections(section);
sections.push(...subSections);
flaws.push(...subFlaws);
}
Expand Down Expand Up @@ -160,7 +163,9 @@ export function extractSections($: cheerio.CheerioAPI): [Section[], string[]] {
* specifications: {....}
* }]
*/
function addSections($: cheerio.Cheerio<cheerio.Element>): SectionsAndFlaws {
async function addSections(
$: cheerio.Cheerio<cheerio.Element>
): Promise<SectionsAndFlaws> {
const flaws: string[] = [];

const countPotentialSpecialDivs = $.find("div.bc-data, div.bc-specs").length;
Expand Down Expand Up @@ -215,7 +220,7 @@ function addSections($: cheerio.Cheerio<cheerio.Element>): SectionsAndFlaws {
const iterable = [...(div.childNodes as cheerio.Element[])];
let c = 0;
let countSpecialDivsFound = 0;
iterable.forEach((child) => {
for (const child of iterable) {
if (
child.tagName === "div" &&
child.attribs &&
Expand All @@ -237,13 +242,15 @@ function addSections($: cheerio.Cheerio<cheerio.Element>): SectionsAndFlaws {
// XXX That `_addSingleSpecialSection(section.clone())` might return a
      // an empty array and that means it failed and we should
// bail.
subSections.push(..._addSingleSpecialSection(section.clone()));
subSections.push(
...(await _addSingleSpecialSection(section.clone()))
);
section.empty();
} else {
section.append(child);
c++;
}
});
}
if (c) {
const [proseSections, proseFlaws] = _addSectionProse(section.clone());
subSections.push(...proseSections);
Expand All @@ -258,7 +265,7 @@ function addSections($: cheerio.Cheerio<cheerio.Element>): SectionsAndFlaws {
}
return [subSections, flaws];
}
const specialSections = _addSingleSpecialSection($);
const specialSections = await _addSingleSpecialSection($);

// The _addSingleSpecialSection() function will have sucked up the <h2> or <h3>
// and the `div.bc-data` or `div.bc-specs` to turn it into a special section.
Expand All @@ -282,9 +289,9 @@ function addSections($: cheerio.Cheerio<cheerio.Element>): SectionsAndFlaws {
return [proseSections, flaws];
}

function _addSingleSpecialSection(
async function _addSingleSpecialSection(
$: cheerio.Cheerio<cheerio.Element>
): Section[] {
): Promise<Section[]> {
let id: string | null = null;
let title: string | null = null;
let isH3 = false;
Expand Down Expand Up @@ -347,7 +354,7 @@ function _addSingleSpecialSection(
},
];
} else if (specialSectionType === "specifications") {
const specifications = extractSpecifications(query, specURLsString);
const specifications = await extractSpecifications(query, specURLsString);

return [
{
Expand Down
56 changes: 25 additions & 31 deletions build/extract-specifications.ts
Original file line number Diff line number Diff line change
@@ -1,13 +1,13 @@
import { packageBCD } from "./resolve-bcd.js";
import bcd from "@mdn/browser-compat-data/types";
import { Specification } from "../libs/types/document.js";
import specs from "web-specs/index.json" assert { type: "json" };
import web from "../kumascript/src/api/web.js";
import { getWebSpec } from "./web-specs.js";

export function extractSpecifications(
export async function extractSpecifications(
query: string | undefined,
specURLsString: string
): Specification[] {
): Promise<Specification[]> {
if (query === undefined && specURLsString === "") {
return [];
}
Expand Down Expand Up @@ -92,36 +92,30 @@ export function extractSpecifications(

// Use BCD specURLs to look up more specification data
// from the web-specs package
const specifications = specURLs
.map((specURL) => {
const spec = specs.find(
(spec) =>
specURL.startsWith(spec.url) ||
specURL.startsWith(spec.nightly.url) ||
spec.nightly.alternateUrls.some((s) => specURL.startsWith(s)) ||
// When grabbing series nightly, make sure we're grabbing the latest spec version
(spec.shortname === spec.series.currentSpecification &&
specURL.startsWith(spec.series.nightlyUrl))
);
const specificationsData = {
bcdSpecificationURL: specURL,
title: "Unknown specification",
};
if (spec) {
specificationsData.title = spec.title;
} else {
const specList = web.getJSONData("SpecData");
const titleFromSpecData = Object.keys(specList).find(
(key) => specList[key]["url"] === specURL.split("#")[0]
);
if (titleFromSpecData) {
specificationsData.title = titleFromSpecData;
const specifications = (
await Promise.all(
specURLs.map(async (specURL) => {
const spec = await getWebSpec(specURL);
const specificationsData = {
bcdSpecificationURL: specURL,
title: "Unknown specification",
};
if (spec) {
specificationsData.title = spec.title;
} else {
const specList = web.getJSONData("SpecData");
const titleFromSpecData = Object.keys(specList).find(
(key) => specList[key]["url"] === specURL.split("#")[0]
);
if (titleFromSpecData) {
specificationsData.title = titleFromSpecData;
}
}
}

return specificationsData;
})
.filter(Boolean);
return specificationsData;
})
)
).filter(Boolean);

return specifications;
}
23 changes: 8 additions & 15 deletions build/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ import path from "node:path";

import chalk from "chalk";
import * as cheerio from "cheerio";
import webFeatures from "web-features/index.json" assert { type: "json" };
import * as Sentry from "@sentry/node";

import {
Expand All @@ -12,7 +11,7 @@ import {
MacroRedirectedLinkError,
} from "../kumascript/src/errors.js";

import { Doc, WebFeature, WebFeatureStatus } from "../libs/types/document.js";
import { Doc, WebFeatureStatus } from "../libs/types/document.js";
import { Document, execGit, slugToFolder } from "../content/index.js";
import { CONTENT_ROOT, REPOSITORY_URLS } from "../libs/env/index.js";
import * as kumascript from "../kumascript/index.js";
Expand Down Expand Up @@ -44,6 +43,7 @@ import {
postProcessExternalLinks,
postProcessSmallerHeadingIDs,
} from "./utils.js";
import { getWebFeatureStatus } from "./web-features.js";
export { default as SearchIndex } from "./search-index.js";
export { gather as gatherGitHistory } from "./git-history.js";
export { buildSPAs } from "./spas.js";
Expand Down Expand Up @@ -380,7 +380,7 @@ export async function buildDocument(
browserCompat &&
(Array.isArray(browserCompat) ? browserCompat : [browserCompat]);

doc.baseline = addBaseline(doc);
doc.baseline = await addBaseline(doc);

// If the document contains <math> HTML, it will set `doc.hasMathML=true`.
// The client (<Document/> component) needs to know this for loading polyfills.
Expand Down Expand Up @@ -484,7 +484,7 @@ export async function buildDocument(
// section blocks are of type "prose" and their value is a string blob
// of HTML.
try {
const [sections, sectionFlaws] = extractSections($);
const [sections, sectionFlaws] = await extractSections($);
doc.body = sections;
if (sectionFlaws.length) {
injectSectionFlaws(doc, sectionFlaws, options);
Expand Down Expand Up @@ -558,18 +558,11 @@ export async function buildDocument(
return { doc: doc as Doc, liveSamples, fileAttachmentMap };
}

function addBaseline(doc: Partial<Doc>): WebFeatureStatus | undefined {
async function addBaseline(
doc: Partial<Doc>
): Promise<WebFeatureStatus | undefined> {
if (doc.browserCompat) {
for (const feature of Object.values<WebFeature>(webFeatures)) {
if (
feature.status &&
feature.compat_features?.some(
(query) => doc.browserCompat?.includes(query)
)
) {
return feature.status;
}
}
return await getWebFeatureStatus(...doc.browserCompat);
}
}

Expand Down
14 changes: 13 additions & 1 deletion build/utils.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
import { spawnSync } from "node:child_process";
import fs from "node:fs";
import { readFile } from "node:fs/promises";
import path from "node:path";
import { cwd } from "node:process";

import * as cheerio from "cheerio";
import got from "got";
Expand All @@ -17,7 +20,6 @@ import {
VALID_MIME_TYPES,
} from "../libs/constants/index.js";
import { FileAttachment } from "../content/index.js";
import { spawnSync } from "node:child_process";
import { BLOG_ROOT } from "../libs/env/index.js";

const { default: imageminPngquant } = imageminPngquantPkg;
Expand Down Expand Up @@ -343,3 +345,13 @@ const POST_URL_RE = /^\/en-US\/blog\/([^/]+)\/?$/;
export function getSlugByBlogPostUrl(url: string): string | null {
return url.match(POST_URL_RE)?.[1] || null;
}

/**
 * Reads and parses a JSON file without using the experimental
 * `assert { type: "json" }` import syntax.
 *
 * Bare specifiers (e.g. "web-specs/index.json") are resolved against the
 * node_modules directory under the current working directory; relative
 * ("./…") and absolute paths are read as-is.
 *
 * @param jsonPath - Relative path, absolute path, or bare package path.
 * @returns The parsed JSON, cast to T (not validated at runtime).
 */
export async function importJSON<T>(jsonPath: string): Promise<T> {
  // Only bare specifiers should be resolved inside node_modules; the
  // original check missed absolute paths, which would wrongly be joined
  // under node_modules as well.
  if (!jsonPath.startsWith(".") && !path.isAbsolute(jsonPath)) {
    jsonPath = path.join(cwd(), "node_modules", jsonPath);
  }

  const json = await readFile(jsonPath, "utf-8");

  return JSON.parse(json) as T;
}
30 changes: 30 additions & 0 deletions build/web-features.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,30 @@
import { WebFeature, WebFeatureStatus } from "../libs/types/document.js";
import { importJSON } from "./utils.js";

let promise: Promise<Record<string, WebFeature>> | null = null;

/**
 * Loads the web-features dataset from node_modules.
 *
 * The pending promise is cached in the module-level `promise` variable so
 * the JSON file is read and parsed at most once per process.
 */
export async function getWebFeatures(): Promise<Record<string, WebFeature>> {
  promise ??= importJSON<Record<string, WebFeature>>("web-features/index.json");
  return promise;
}

/**
 * Returns the status of the first web feature whose `compat_features`
 * list contains any of the given BCD query strings.
 *
 * @param features - BCD feature query strings to look up.
 * @returns The matching feature's status, or undefined if no feature
 *   matches (or no queries were given).
 */
export async function getWebFeatureStatus(
  ...features: string[]
): Promise<WebFeatureStatus | undefined> {
  if (features.length === 0) {
    return;
  }

  const webFeatures = await getWebFeatures();
  for (const feature of Object.values(webFeatures)) {
    if (
      feature.status &&
      // `query` (was `feature`) avoids shadowing the outer loop variable.
      feature.compat_features?.some((query) => features.includes(query))
    ) {
      return feature.status;
    }
  }
}
58 changes: 58 additions & 0 deletions build/web-specs.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,58 @@
import { importJSON } from "./utils.js";

// Shape of entries in web-specs/index.json (fields as shipped by the
// web-specs package; only a subset is used in this module).
type WebSpecs = WebSpec[];
interface WebSpec {
  // Canonical (published) URL of the specification.
  url: string;
  seriesComposition: string;
  shortname: string;
  // The spec series (e.g. all levels of a CSS module) this spec belongs to.
  series: {
    shortname: string;
    // Shortname of the latest spec in the series — used to pick the
    // current version when matching series nightly URLs.
    currentSpecification: string;
    title: string;
    shortTitle: string;
    nightlyUrl: string;
  };
  // Nightly/editor's-draft variant of the spec.
  nightly: {
    url: string;
    status: string;
    sourcePath: string;
    // Additional URLs the nightly may be reachable under.
    alternateUrls: string[];
    repository: string;
    filename: string;
  };
  organization: string;
  groups: { name: string; url: string }[];
  title: string;
  source: string;
  shortTitle: string;
  categories: string[];
  standing: string;
  tests: { repository: string; testPaths: string[] };
}

let promise: Promise<WebSpecs> | null = null;

/**
 * Loads the web-specs dataset from node_modules.
 *
 * The pending promise is cached in the module-level `promise` variable so
 * the JSON file is read and parsed at most once per process.
 */
export async function getWebSpecs(): Promise<WebSpecs> {
  promise ??= importJSON<WebSpecs>("web-specs/index.json");
  return promise;
}

/**
 * Looks up the web-specs entry matching the given specification URL.
 *
 * A spec matches when the URL starts with its canonical URL, its nightly
 * URL, one of its nightly alternate URLs, or — for the current spec of a
 * series — the series nightly URL.
 *
 * @param url - A specification URL (e.g. a BCD spec_url).
 * @returns The first matching spec entry, or undefined if none match.
 */
export async function getWebSpec(url: string): Promise<WebSpec | undefined> {
  if (!url) {
    return;
  }

  const matchesSpec = (spec: WebSpec): boolean => {
    if (url.startsWith(spec.url) || url.startsWith(spec.nightly.url)) {
      return true;
    }
    if (spec.nightly.alternateUrls.some((alt) => url.startsWith(alt))) {
      return true;
    }
    // When grabbing series nightly, make sure we're grabbing the latest
    // spec version.
    return (
      spec.shortname === spec.series.currentSpecification &&
      url.startsWith(spec.series.nightlyUrl)
    );
  };

  const specs = await getWebSpecs();
  return specs.find(matchesSpec);
}
2 changes: 1 addition & 1 deletion kumascript/tests/fixtures/server/macros/require-used.ejs
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
<%
import bcd from '@mdn/browser-compat-data' assert { type: "json" };
const bcd = require('@mdn/browser-compat-data');
var queryString = $0;
Expand Down

0 comments on commit 378ff81

Please sign in to comment.