diff --git a/framework/definitions.d.ts b/framework/definitions.d.ts
new file mode 100644
index 0000000..aca66bc
--- /dev/null
+++ b/framework/definitions.d.ts
@@ -0,0 +1,2 @@
+declare function UNWRAP<T>(value: T | null | undefined): T;
+declare function ASSERT(value: unknown, ...log: unknown[]): asserts value;
diff --git a/framework/fs.ts b/framework/fs.ts
index b6d2d61..30554c4 100644
--- a/framework/fs.ts
+++ b/framework/fs.ts
@@ -1,12 +1,18 @@
-// File System APIs
-import { readFileSync, writeFileSync, readdirSync, statSync, existsSync, mkdirSync as nodeMkdirSync, rmSync } from 'node:fs';
-import { readFile, writeFile, readdir, stat, mkdir as nodeMkdir, rm } from 'node:fs/promises';
-
-// Re-export a mix of built-in Node.js sync+promise fs methods.
+// File System APIs. Some custom APIs, but mostly a re-export of a mix of
+// built-in Node.js sync+promise fs methods, for convenience.
 export {
-  readFileSync, writeFileSync, readdirSync, statSync, existsSync, rmSync ,
-  readFile, writeFile, readdir, stat, mkdir, rm
-}
+  existsSync,
+  readdir,
+  readdirSync,
+  readFile,
+  readFileSync,
+  rm,
+  rmSync,
+  stat,
+  statSync,
+  writeFile,
+  writeFileSync,
+};
 export function mkdir(dir: string) {
   return nodeMkdir(dir, { recursive: true });
 }
@@ -21,5 +27,35 @@ export async function writeMkdir(file: string, contents: Buffer | string) {
   return writeFile(file, contents);
 }
 
-import * as path from 'node:path';
+export function writeMkdirSync(file: string, contents: Buffer | string) {
+  mkdirSync(path.dirname(file));
+  return writeFileSync(file, contents);
+}
+export function readDirRecOptionalSync(dir: string) {
+  try {
+    // Recursive listing so files nested inside the directory are found.
+    return readdirSync(dir, { withFileTypes: true, recursive: true });
+  } catch (err: any) {
+    if (err.code === "ENOENT") return [];
+    throw err;
+  }
+}
+
+import * as path from "node:path";
+import {
+  existsSync,
+  mkdirSync as nodeMkdirSync,
+  readdirSync,
+  readFileSync,
+  rmSync,
+  statSync,
+  writeFileSync,
+} from "node:fs";
+import {
+  mkdir as nodeMkdir,
+  readdir,
+  readFile,
+  rm,
+  stat,
+  writeFile,
+} from "node:fs/promises";
 
diff --git a/framework/sitegen.tsx b/framework/sitegen.tsx
index 3df1caf..3220718 100644
--- a/framework/sitegen.tsx
+++ b/framework/sitegen.tsx
@@ -9,20 +9,44 @@ function main() {
   }, sitegen);
 }
 
-async function sitegen(status) {
+/**
+ * A filesystem object associated with some ID,
+ * such as a page's route to its source file.
+ */
+interface FileItem {
+  id: string;
+  file: string;
+}
+
+async function sitegen(status: Spinner) {
   const startTime = performance.now();
-  let root = path.resolve(import_meta.dirname, "../src");
-  const join = (...sub) => path.join(root, ...sub);
+  let root = path.resolve(import.meta.dirname, "../src");
+  const join = (...sub: string[]) => path.join(root, ...sub);
   const incr = new Incremental();
+
+  // Sitegen reviews every defined section for resources to process
   const sections: Section[] = require(path.join(root, "sections.ts")).siteSections;
+
+  // Static files are compressed and served as-is.
+  // - "{section}/static/*.png"
+  let staticFiles: FileItem[] = [];
+  // Pages are rendered then served as static files.
+  // - "{section}/pages/*.marko"
+  let pages: FileItem[] = [];
+  // Views are dynamically rendered pages called via backend code.
+  // - "{section}/views/*.tsx"
+  let views: FileItem[] = [];
+  // Public scripts are bundled for the client as static assets under "/js/[...]"
+  // This is used for the file viewer's canvases.
+  // Note that '.client.ts' can be placed anywhere in the file structure.
+  // - "{section}/scripts/*.client.ts"
+  let scripts: FileItem[] = [];
+  // 'backend.ts'
   const backendFiles = [];
+
+  // -- Scan for files --
   status.text = "Scanning Project";
   for (const section of sections) {
     const { root: sectionRoot } = section;
@@ -39,16 +63,21 @@
       },
       { dir: sectionPath("static"), list: staticFiles, prefix: "/", ext: true },
       { dir: sectionPath("scripts"), list: scripts, prefix: rootPrefix },
-      { dir: sectionPath("views"), list: views, prefix: rootPrefix },
+      {
+        dir: sectionPath("views"),
+        list: views,
+        prefix: rootPrefix,
+        exclude: [".css", ".client.ts", ".client.tsx"],
+      },
     ];
     for (const { dir, list, prefix, exclude = [], ext = false } of kinds) {
-      const pages2 = fs.readDirRecOptional(dir);
-      page: for (const page of pages2) {
-        if (page.isDirectory()) continue;
-        for (const ext2 of exclude) {
-          if (page.name.endsWith(ext2)) continue page;
+      const items = fs.readDirRecOptionalSync(dir);
+      item: for (const item of items) {
+        if (item.isDirectory()) continue;
+        for (const e of exclude) {
+          if (item.name.endsWith(e)) continue item;
         }
-        const file = path.relative(dir, page.parentPath + "/" + page.name);
+        const file = path.relative(dir, item.parentPath + "/" + item.name);
         const trim = ext
           ? file
           : file.slice(0, -path.extname(file).length).replaceAll(".", "/");
@@ -56,7 +85,7 @@
         if (prefix === "/" && id.endsWith("/index")) {
           id = id.slice(0, -"/index".length) || "/";
         }
-        list.push({ id, file: path.join(page.parentPath, page.name) });
+        list.push({ id, file: path.join(item.parentPath, item.name) });
       }
     }
     let backendFile = [
@@ -67,86 +96,110 @@
   }
   scripts = scripts.filter(({ file }) => !file.match(/\.client\.[tj]sx?/));
   const globalCssPath = join("global.css");
+
+  // TODO: invalidate incremental resources
+
+  // -- server side render --
   status.text = "Building";
   const cssOnce = new OnceMap();
-  const cssQueue = new Queue({
+  const cssQueue = new Queue<[string, string[], css.Theme], string>({
     name: "Bundle",
     fn: ([, files, theme]) => css.bundleCssFiles(files, theme),
     passive: true,
     getItemText: ([id]) => id,
     maxJobs: 2,
   });
-  const ssrResults = [];
-  function loadSsrModule(page) {
-    require(page.file);
+  interface RenderResult {
+    body: string;
+    head: string;
+    inlineCss: string;
+    scriptFiles: string[];
+    item: FileItem;
   }
-  async function doSsrPage(page) {
-    const module2 = require(page.file);
-    const Page = module2.default;
-    if (!Page) {
-      throw new Error("Page is missing a 'default' export.");
-    }
-    const metadata = module2.meta;
+  const renderResults: RenderResult[] = [];
+  async function loadPageModule({ file }: FileItem) {
+    require(file);
+  }
+  async function renderPage(item: FileItem) {
+    // -- load and validate module --
+    let { default: Page, meta: metadata, theme: pageTheme, layout } = require(
+      item.file,
+    );
+    if (!Page) throw new Error("Page is missing a 'default' export.");
     if (!metadata) {
-      throw new Error("Page is missing 'meta' attribute with a title.");
+      throw new Error("Page is missing 'meta' export with a title.");
     }
+    if (layout?.theme) pageTheme = layout.theme;
     const theme = {
       bg: "#fff",
       fg: "#050505",
       primary: "#2e7dab",
-      ...module2.theme,
+      ...pageTheme,
     };
+
+    // -- metadata --
     const renderedMetaPromise = Promise.resolve(
       typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
     ).then((m) => meta.resolveAndRenderMetadata(m));
-    const cssImports = [globalCssPath, ...hot.getCssImports(page.file)];
+    // -- css --
+    const cssImports = [globalCssPath, ...hot.getCssImports(item.file)];
     const cssPromise = cssOnce.get(
       cssImports.join(":") + JSON.stringify(theme),
-      () => cssQueue.add([page.id, cssImports, theme]),
+      () => cssQueue.add([item.id, cssImports, theme]),
     );
+    // -- html --
     const sitegenApi = sg.initRender();
-    const body = await (0, import_ssr.ssrAsync)(
-      /* @__PURE__ */ (0, import_jsx_runtime.jsx)(Page, {}),
-      {
-        sitegen: sitegenApi,
-      },
-    );
-    const inlineCss = await cssPromise;
-    const renderedMeta = await renderedMetaPromise;
+    const bodyPromise = ssr.ssrAsync(<Page />, {
+      sitegen: sitegenApi,
+    });
+
+    const [body, inlineCss, renderedMeta] = await Promise.all([
+      bodyPromise,
+      cssPromise,
+      renderedMetaPromise,
+    ]);
     if (!renderedMeta.includes("<title")) {
       throw new Error(
-        "Page is missing 'meta.title'. All pages need a title tag.",
+        "Page is missing 'meta.title'. " +
+          "All pages need a title tag.",
       );
     }
-    ssrResults.push({
+    // The script content is not ready; allow another page to render. The page
+    // contents will be rebuilt at the end. This is more convenient anyway
+    // because it means client scripts don't re-render the page.
+    renderResults.push({
       body,
       head: renderedMeta,
       inlineCss,
       scriptFiles: Array.from(sitegenApi.scripts),
-      page,
+      item: item,
     });
   }
+  // This is done in two passes so that a page that throws during evaluation
+  // will report "Load Render Module" instead of "Render Static Page".
   const spinnerFormat = status.format;
   status.format = () => "";
-  const moduleLoadQueue = new import_queue.Queue({
+  const moduleLoadQueue = new Queue({
     name: "Load Render Module",
-    fn: loadSSRModule,
+    fn: loadPageModule,
     getItemText,
     maxJobs: 1,
   });
   moduleLoadQueue.addMany(pages);
   await moduleLoadQueue.done({ method: "stop" });
-  const pageQueue = new import_queue.Queue({
-    name: "Render",
-    fn: doSsrPage,
+  const pageQueue = new Queue({
+    name: "Render Static Page",
+    fn: renderPage,
     getItemText,
     maxJobs: 2,
   });
   pageQueue.addMany(pages);
   await pageQueue.done({ method: "stop" });
   status.format = spinnerFormat;
+
+  // -- bundle scripts --
   const referencedScripts = Array.from(
-    new Set(ssrResults.flatMap((r) => r.scriptFiles)),
+    new Set(renderResults.flatMap((r) => r.scriptFiles)),
   );
   const extraPublicScripts = scripts.map((entry) => entry.file);
   const uniqueCount = new Set([
@@ -159,15 +212,17 @@
     extraPublicScripts,
     incr,
   );
-  async function doStaticFile(page) {
-    const body = await fs.readFile(page.file);
+
+  // -- copy/compress static files --
+  async function doStaticFile(item: FileItem) {
+    const body = await fs.readFile(item.file);
     incr.putAsset({
-      srcId: "static:" + page.file,
-      key: page.id,
+      srcId: "static:" + item.file,
+      key: item.id,
       body,
     });
   }
-  const staticQueue = new import_queue.Queue({
+  const staticQueue = new Queue({
     name: "Load Static",
     fn: doStaticFile,
     getItemText,
@@ -177,40 +232,68 @@
   staticQueue.addMany(staticFiles);
   await staticQueue.done({ method: "stop" });
   status.format = spinnerFormat;
-  status.text = `Concat ${ssrResults.length} Pages`;
+
+  // -- concatenate static rendered pages --
+  status.text = `Concat ${renderResults.length} Pages`;
   await Promise.all(
-    ssrResults.map(async ({ page, body, head, inlineCss, scriptFiles }) => {
-      const doc = wrapDocument({
-        body,
-        head,
-        inlineCss,
-        scripts: scriptFiles.map(
-          (id) =>
-            UNWRAP(
-              incr.out.script.get(
-                path.basename(id).replace(/\.client\.[jt]sx?$/, ""),
-              ),
-            ),
-        ).map((x) => `{${x}}`).join(""),
-      });
-      incr.putAsset({
-        srcId: "page:" + page.file,
-        key: page.id,
-        body: doc,
-        headers: {
-          "Content-Type": "text/html",
-        },
-      });
-    }),
+    renderResults.map(
+      async ({ item: page, body, head, inlineCss, scriptFiles }) => {
+        const doc = wrapDocument({
+          body,
+          head,
+          inlineCss,
+          scripts: scriptFiles.map(
+            (id) =>
+              UNWRAP(
+                incr.out.script.get(
+                  path.basename(id).replace(/\.client\.[jt]sx?$/, ""),
+                ),
+              ),
+          ).map((x) => `{${x}}`).join("\n"),
+        });
+        incr.putAsset({
+          srcId: "page:" + page.file,
+          key: page.id,
+          body: doc,
+          headers: {
+            "Content-Type": "text/html",
+          },
+        });
+      },
+    ),
   );
   status.format = () => "";
   status.text = ``;
+  // This will wait for all compression jobs to finish, which up
+  // to this point have been left as dangling promises.
   await incr.wait();
+
+  // Flush the site to disk.
   status.format = spinnerFormat;
   status.text = `Incremental Flush`;
   incr.flush();
-  incr.serializeToDisk();
-  return { elapsed: (performance.now() - startTime) / 1e3 };
+  incr.serializeToDisk(); // Allows picking up this state again
+  return { elapsed: (performance.now() - startTime) / 1000 };
+}
+
+function getItemText({ file }: FileItem) {
+  return path.relative(hot.projectSrc, file).replaceAll("\\", "/");
+}
+
+function wrapDocument({
+  body,
+  head,
+  inlineCss,
+  scripts,
+}: {
+  head: string;
+  body: string;
+  inlineCss: string;
+  scripts: string;
+}) {
+  return `<!doctype html><head>${head}${
+    inlineCss ? `<style>${inlineCss}</style>` : ""
+  }</head><body>${body}${scripts ? `<script>${scripts}</script>` : ""}</body>`;
 }
 
 import type { Section } from "./sitegen-lib.ts";
@@ -218,5 +301,10 @@
 import { OnceMap, Queue } from "./queue.ts";
 import { Incremental } from "./incremental.ts";
 import * as bundle from "./bundle.ts";
 import * as css from "./css.ts";
-import * as fs from './fs.ts';
-import { withSpinner, Spinner } from "@paperclover/console/Spinner";
+import * as fs from "./fs.ts";
+import { Spinner, withSpinner } from "@paperclover/console/Spinner";
+import * as meta from "./meta";
+import * as ssr from "./engine/ssr.ts";
+import * as sg from "./sitegen-lib.ts";
+import * as hot from "./hot.ts";
+import * as path from "node:path";
diff --git a/run.js b/run.js
index b502c89..8794cdb 100644
--- a/run.js
+++ b/run.js
@@ -1,6 +1,7 @@
 // This file allows using Node.js in combination with
-// available plugins. Usage: "node run <script>"
+// all available plugins. Usage: "node run <script>"
 import * as path from "node:path";
+import * as util from "node:util";
 import process from "node:process";
 
 // Disable experimental warnings (Type Stripping, etc)
@@ -24,6 +25,22 @@ globalThis.console.warn = console.warn;
 globalThis.console.error = console.error;
 globalThis.console.debug = console.scoped("dbg");
 
+globalThis.UNWRAP = (t, ...args) => {
+  if (t == null) {
+    throw new Error(
+      args.length > 0 ? util.format(...args) : "UNWRAP(" + t + ")",
+    );
+  }
+  return t;
+};
+globalThis.ASSERT = (t, ...args) => {
+  if (!t) {
+    throw new Error(
+      args.length > 0 ? util.format(...args) : "Assertion Failed",
+    );
+  }
+};
+
 // Load with hooks
 if (process.argv[1].startsWith(import.meta.filename.slice(0, -".js".length))) {
   if (process.argv.length == 2) {
diff --git a/tsconfig.json b/tsconfig.json
index e3093bf..147598e 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,13 +1,15 @@
 {
-  "allowImportingTSExtensions": true,
-  "baseDir": ".",
-  "incremental": true,
-  "lib": ["dom", "esnext"],
-  "moduleResolution": "node18",
-  "outdir": ".clover/ts",
-  "paths": { "@/*": ["src/*"] },
-  "strict": true,
-  "target": "es2022",
-  "jsxImportSource": "#ssr",
-  "jsx": "react-jsx",
+  "compilerOptions": {
+    "allowImportingTsExtensions": true,
+    "baseUrl": ".",
+    "incremental": true,
+    "lib": ["dom", "esnext", "esnext.iterator"],
+    "module": "nodenext",
+    "outDir": ".clover/ts",
+    "paths": { "@/*": ["src/*"] },
+    "strict": true,
+    "target": "es2022",
+    "jsxImportSource": "#ssr",
+    "jsx": "react-jsx"
+  }
 }
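
Usage sketch (not part of the patch): how the UNWRAP and ASSERT globals declared
in framework/definitions.d.ts and installed by run.js are intended to be called.
The Map and the "canvas" key below are hypothetical stand-ins; sitegen.tsx
applies the same pattern to incr.out.script.

    // TypeScript sketch; assumes run.js has installed the globals.
    const bundledScripts = new Map<string, string>([["canvas", "console.log(1)"]]);

    // UNWRAP throws when the value is null or undefined and narrows the type;
    // extra arguments are formatted into the error message via util.format().
    const source: string = UNWRAP(
      bundledScripts.get("canvas"),
      "missing bundled script for %s",
      "canvas",
    );

    // ASSERT throws unless the condition is truthy; its `asserts value`
    // signature lets TypeScript narrow types after the call.
    ASSERT(source.length > 0, "bundled script is empty");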
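
A second sketch (also not part of the patch) of how the new framework/fs.ts
helpers compose. The import specifier and the paths are hypothetical and depend
on where the snippet lives.

    import * as fs from "./framework/fs.ts";

    // writeMkdir creates the parent directory (recursively) before writing.
    await fs.writeMkdir(".clover/out/example/index.html", "<!doctype html>");

    // readDirRecOptionalSync returns [] for a missing directory instead of
    // throwing ENOENT, so the scan loop in sitegen.tsx can treat absent
    // "pages/" or "static/" folders as empty sections.
    for (const entry of fs.readDirRecOptionalSync("src/example-section/pages")) {
      if (!entry.isDirectory()) {
        console.log(entry.parentPath + "/" + entry.name);
      }
    }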