From a41569983f81008a75934854b98ff51bb4a11818 Mon Sep 17 00:00:00 2001 From: chloe caruso Date: Fri, 13 Jun 2025 00:13:22 -0700 Subject: [PATCH] incremental sitegen dev server! --- framework/bundle.ts | 231 ++++++++++++++++++--------------- framework/css.ts | 10 +- framework/generate.ts | 84 +++++++----- framework/hot.ts | 94 ++++++++------ framework/incremental.ts | 274 +++++++++++++++++++++++++++++++++------ framework/lib/assets.ts | 18 ++- framework/lib/fs.ts | 10 +- framework/watch.ts | 47 ++++++- 8 files changed, 535 insertions(+), 233 deletions(-) diff --git a/framework/bundle.ts b/framework/bundle.ts index 0f79bf0..3d3daaf 100644 --- a/framework/bundle.ts +++ b/framework/bundle.ts @@ -1,8 +1,4 @@ // This file implements client-side bundling, mostly wrapping esbuild. -const clientPlugins: esbuild.Plugin[] = [ - // There are currently no plugins needed by 'paperclover.net' -]; - export async function bundleClientJavaScript( referencedScripts: string[], extraPublicScripts: string[], @@ -11,7 +7,7 @@ export async function bundleClientJavaScript( ) { const entryPoints = [ ...new Set([ - ...referencedScripts, + ...referencedScripts.map((file) => path.resolve(hot.projectSrc, file)), ...extraPublicScripts, ]), ]; @@ -26,6 +22,10 @@ export async function bundleClientJavaScript( ); } + const clientPlugins: esbuild.Plugin[] = [ + // There are currently no plugins needed by 'paperclover.net' + ]; + const bundle = await esbuild.build({ bundle: true, chunkNames: "/js/c.[hash]", @@ -51,7 +51,6 @@ export async function bundleClientJavaScript( ); const { metafile } = bundle; const promises: Promise[] = []; - // TODO: add a shared build hash to entrypoints, derived from all the chunk hashes. for (const file of bundle.outputFiles) { const { text } = file; let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/"); @@ -61,7 +60,7 @@ export async function bundleClientJavaScript( // Register non-chunks as script entries. const chunk = route.startsWith("/js/c."); if (!chunk) { - const key = hot.getScriptId(sources[0]); + const key = hot.getScriptId(path.resolve(sources[0])); route = "/js/" + key + ".js"; incr.put({ sources, @@ -82,41 +81,40 @@ export async function bundleClientJavaScript( await Promise.all(promises); } -type ServerPlatform = "node" | "passthru"; +export type ServerPlatform = "node" | "passthru"; export async function bundleServerJavaScript( - /** Has 'export default app;' */ - _: string, - /** Views for dynamic loading */ - viewEntryPoints: FileItem[], + incr: Incremental, platform: ServerPlatform = "node", ) { - const scriptMagic = "CLOVER_CLIENT_SCRIPTS_DEFINITION"; + if (incr.hasArtifact("backendBundle", platform)) return; + + // Comment + const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_"); + const viewSource = [ - ...viewEntryPoints.map((view, i) => - `import * as view${i} from ${JSON.stringify(view.file)}` + ...Array.from( + incr.out.viewMetadata, + ([, view], i) => `import * as view${i} from ${JSON.stringify(view.file)}`, ), - `const styles = ${scriptMagic}[-2]`, - `export const scripts = ${scriptMagic}[-1]`, + `const styles = ${magicWord}[-2]`, + `export const scripts = ${magicWord}[-1]`, "export const views = {", - ...viewModules.flatMap(({ view, module }, i) => [ - ` ${JSON.stringify(view.id)}: {`, - ` component: view${i}.default,`, - ` meta: view${i}.meta,`, - ` layout: ${ - module.layout?.default ? `view${i}.layout?.default` : "undefined" - },`, - ` theme: ${ - module.layout?.theme - ? `view${i}.layout?.theme` - : module.theme - ? 
`view${i}.theme` - : "undefined" - },`, - ` inlineCss: styles[${scriptMagic}[${i}]]`, - ` },`, - ]), + ...Array.from(incr.out.viewMetadata, ([key, view], i) => + [ + ` ${JSON.stringify(key)}: {`, + ` component: view${i}.default,`, + // ` meta: ${ + // view.staticMeta ? JSON.stringify(view.staticMeta) : `view${i}.meta` + // },`, + ` meta: view${i}.meta,`, + ` layout: ${view.hasLayout ? `view${i}.layout?.default` : "null"},`, + ` inlineCss: styles[${magicWord}[${i}]]`, + ` },`, + ].join("\n")), "}", ].join("\n"); + + // -- plugins -- const serverPlugins: esbuild.Plugin[] = [ virtualFiles({ "$views": viewSource, @@ -125,10 +123,21 @@ export async function bundleServerJavaScript( { name: "marko via build cache", setup(b) { - b.onLoad({ filter: /\.marko$/ }, async ({ path: file }) => ({ - loader: "ts", - contents: hot.getSourceCode(file), - })); + b.onLoad( + { filter: /\.marko$/ }, + async ({ path: file }) => { + const key = path.relative(hot.projectRoot, file) + .replaceAll("\\", "/"); + const cacheEntry = incr.out.serverMarko.get(key); + if (!cacheEntry) { + throw new Error("Marko file not in cache: " + file); + } + return ({ + loader: "ts", + contents: cacheEntry.src, + }); + }, + ); }, }, { @@ -145,10 +154,10 @@ export async function bundleServerJavaScript( }, }, ]; - const bundle = await esbuild.build({ + const { metafile, outputFiles, warnings } = await esbuild.build({ bundle: true, chunkNames: "c.[hash]", - entryNames: "[name]", + entryNames: "server", entryPoints: [ path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"), ], @@ -161,81 +170,96 @@ export async function bundleServerJavaScript( write: false, metafile: true, }); - const viewModules = viewEntryPoints.map((view) => { - const module = require(view.file); - if (!module.meta) { - throw new Error(`${view.file} is missing 'export const meta'`); + + const files: Record = {}; + let fileWithMagicWord: string | null = null; + for (const output of outputFiles) { + const basename = output.path.replace(/^.*?!/, ""); + const key = "out!" + basename.replaceAll("\\", "/"); + // If this contains the generated "$views" file, then + // mark this file as the one for replacement. Because + // `splitting` is `true`, esbuild will not emit this + // file in more than one chunk. + if (metafile.outputs[key].inputs["framework/lib/view.ts"]) { + fileWithMagicWord = basename; } - if (!module.default) { - throw new Error(`${view.file} is missing a default export.`); - } - return { module, view }; + files[basename] = Buffer.from(output.contents); + } + incr.put({ + kind: "backendBundle", + key: platform, + value: { + magicWord, + files, + fileWithMagicWord, + }, + sources: Object.keys(metafile.inputs).filter((x) => + !x.startsWith("vfs:") && + !x.startsWith("dropped:") && + !x.includes("node_modules") + ), }); - const viewData = viewModules.map(({ module, view }) => { - return { - id: view.id, - theme: module.theme, - cssImports: hot.getCssImports(view.file) - .concat("src/global.css") - .map((file) => path.resolve(file)), - clientRefs: hot.getClientScriptRefs(view.file), - }; - }); - return { - views: viewData, - bundle, - scriptMagic, - }; } -type Await = T extends Promise ? 
R : T; - -export function finalizeServerJavaScript( - backend: Await>, - viewCssBundles: css.Output[], +export async function finalizeServerJavaScript( incr: Incremental, + platform: ServerPlatform, ) { - const { metafile, outputFiles } = backend.bundle; + if (incr.hasArtifact("backendReplace", platform)) return; + const { + files, + fileWithMagicWord, + magicWord, + } = UNWRAP(incr.getArtifact("backendBundle", platform)); - // Only the reachable script files need to be inserted into the bundle. - const reachableScriptKeys = new Set( - backend.views.flatMap((view) => view.clientRefs), - ); - const reachableScripts = Object.fromEntries( - Array.from(incr.out.script) - .filter(([k]) => reachableScriptKeys.has(k)), + if (!fileWithMagicWord) return; + + // Only the reachable resources need to be inserted into the bundle. + const viewScriptsList = new Set( + Array.from(incr.out.viewMetadata.values()) + .flatMap((view) => view.clientRefs), ); + const viewStyleKeys = Array.from(incr.out.viewMetadata.values()) + .map((view) => css.styleKey(view.cssImports, view.theme)); + const viewCssBundles = viewStyleKeys + .map((key) => UNWRAP(incr.out.style.get(key), "Style key: " + key)); // Deduplicate styles const styleList = Array.from(new Set(viewCssBundles)); - for (const output of outputFiles) { - const basename = output.path.replace(/^.*?!/, ""); - const key = "out!" + basename.replaceAll("\\", "/"); + // Replace the magic word + let text = files[fileWithMagicWord].toString("utf-8"); + text = text.replace( + new RegExp(magicWord + "\\[(-?\\d)\\]", "gs"), + (_, i) => { + i = Number(i); + // Inline the styling data + if (i === -2) { + return JSON.stringify(styleList.map((cssText) => cssText)); + } + // Inline the script data + if (i === -1) { + return JSON.stringify(Object.fromEntries(incr.out.script)); + } + // Reference an index into `styleList` + return `${styleList.indexOf(viewCssBundles[i])}`; + }, + ); - // If this contains the generated "$views" file, then - // replace the IDs with the bundled results. 
- let text = output.text; - if (metafile.outputs[key].inputs["framework/lib/view.ts"]) { - text = text.replace( - /CLOVER_CLIENT_SCRIPTS_DEFINITION\[(-?\d)\]/gs, - (_, i) => { - i = Number(i); - // Inline the styling data - if (i === -2) { - return JSON.stringify(styleList.map((item) => item.text)); - } - // Inline the script data - if (i === -1) { - return JSON.stringify(Object.fromEntries(incr.out.script)); - } - // Reference an index into `styleList` - return `${styleList.indexOf(viewCssBundles[i])}`; - }, - ); - } - fs.writeMkdirSync(path.join(".clover/backend/" + basename), text); - } + incr.put({ + kind: "backendReplace", + key: platform, + sources: [ + // Backend input code (includes view code) + ...incr.sourcesFor("backendBundle", platform), + // Script + ...Array.from(viewScriptsList) + .flatMap((key) => incr.sourcesFor("script", hot.getScriptId(key))), + // Style + ...viewStyleKeys.flatMap((key) => incr.sourcesFor("style", key)), + ], + value: Buffer.from(text), + }); } import * as esbuild from "esbuild"; @@ -248,7 +272,4 @@ import { virtualFiles, } from "./esbuild-support.ts"; import { Incremental } from "./incremental.ts"; -import type { FileItem } from "#sitegen"; -import * as marko from "@marko/compiler"; import * as css from "./css.ts"; -import * as fs from "./lib/fs.ts"; diff --git a/framework/css.ts b/framework/css.ts index 6a1ab22..ddf7df9 100644 --- a/framework/css.ts +++ b/framework/css.ts @@ -51,10 +51,12 @@ export function styleKey( ) { cssImports = cssImports .map((file) => - path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file + (path.isAbsolute(file) ? path.relative(hot.projectSrc, file) : file) + .replaceAll("\\", "/") ) .sort(); - return cssImports.join(":") + JSON.stringify(theme); + return cssImports.join(":") + ":" + + Object.entries(theme).map(([k, v]) => `${k}=${v}`); } export async function bundleCssFiles( @@ -62,9 +64,7 @@ export async function bundleCssFiles( theme: Theme, dev: boolean = false, ): Promise { - cssImports = cssImports.map((file) => - path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file - ); + cssImports = cssImports.map((file) => path.resolve(hot.projectSrc, file)); const plugin = { name: "clover css", setup(b) { diff --git a/framework/generate.ts b/framework/generate.ts index 3310769..6707fb9 100644 --- a/framework/generate.ts +++ b/framework/generate.ts @@ -2,23 +2,35 @@ // By using `Incremental`'s ability to automatically purge stale // assets, the `sitegen` function performs partial rebuilds. -export function main(incremental?: Incremental) { +export function main() { return withSpinner, any>({ text: "Recovering State", successText, failureText: () => "sitegen FAIL", }, async (spinner) => { - // const incr = Incremental.fromDisk(); - // await incr.statAllFiles(); - const incr = new Incremental(); + const incr = Incremental.fromDisk(); + await incr.statAllFiles(); + // const incr = new Incremental(); const result = await sitegen(spinner, incr); incr.toDisk(); // Allows picking up this state again return result; }) as ReturnType; } -export function successText({ elapsed }: { elapsed: number }) { - return "sitegen! update in " + elapsed.toFixed(1) + "s"; +export function successText({ + elapsed, + inserted, + referenced, + unreferenced, +}: Awaited>) { + const s = (array: unknown[]) => array.length === 1 ? "" : "s"; + const kind = inserted.length === referenced.length ? "build" : "update"; + const status = inserted.length > 0 + ? `${kind} ${inserted.length} key${s(inserted)}` + : unreferenced.length > 0 + ? 
`pruned ${unreferenced.length} key${s(unreferenced)}` + : `checked ${referenced.length} key${s(referenced)}`; + return `sitegen! ${status} in ${elapsed.toFixed(1)}s`; } export async function sitegen( @@ -159,7 +171,10 @@ export async function sitegen( ...css.defaultTheme, ...pageTheme, }; - const cssImports = [globalCssPath, ...hot.getCssImports(item.file)]; + const cssImports = Array.from( + new Set([globalCssPath, ...hot.getCssImports(item.file)]), + (file) => path.relative(hot.projectSrc, file), + ); ensureCssGetsBuilt(cssImports, theme, item.id); // -- metadata -- @@ -216,7 +231,7 @@ export async function sitegen( }; const cssImports = hot.getCssImports(view.file) .concat("src/global.css") - .map((file) => path.relative(hot.projectRoot, path.resolve(file))); + .map((file) => path.relative(hot.projectSrc, path.resolve(file))); incr.put({ kind: "viewMetadata", key: view.id, @@ -250,6 +265,9 @@ export async function sitegen( return !existing; }); + // Load the marko cache before render modules are loaded + incr.loadMarkoCache(); + // This is done in two passes so that a page that throws during evaluation // will report "Load Render Module" instead of "Render Static Page". const spinnerFormat = status.format; @@ -281,22 +299,12 @@ export async function sitegen( await viewQueue.done({ method: "stop" }); status.format = spinnerFormat; - // -- bundle backend and views -- - // status.text = "Bundle backend code"; - // const backend = await bundle.bundleServerJavaScript( - // join("backend.ts"), - // views, - // ); - // const viewCssPromise = await Promise.all( - // backend.views.map((view) => - // cssOnce.get( - // view.cssImports.join(":") + JSON.stringify(view.theme), - // () => cssQueue.add([view.id, view.cssImports, view.theme ?? {}]), - // ) - // ), - // ); + // -- bundle server javascript (backend and views) -- + status.text = "Bundle JavaScript"; + incr.snapshotMarkoCache(); + const serverJavaScriptPromise = bundle.bundleServerJavaScript(incr, "node"); - // -- bundle scripts -- + // -- bundle client javascript -- const referencedScripts = Array.from( new Set( [ @@ -317,19 +325,17 @@ export async function sitegen( (script) => path.resolve(hot.projectSrc, script), ).filter((file) => !incr.hasArtifact("script", hot.getScriptId(file))); const extraPublicScripts = scripts.map((entry) => entry.file); - const uniqueCount = new Set([ - ...referencedScripts, - ...extraPublicScripts, - ]).size; - status.text = `Bundle ${uniqueCount} Scripts`; - await bundle.bundleClientJavaScript( + const clientJavaScriptPromise = bundle.bundleClientJavaScript( referencedScripts, extraPublicScripts, incr, ); - - // -- finalize backend bundle -- - // await bundle.finalizeServerJavaScript(backend, await viewCssPromise, incr); + await Promise.all([ + serverJavaScriptPromise, + clientJavaScriptPromise, + cssQueue.done({ method: "stop" }), + ]); + await bundle.finalizeServerJavaScript(incr, "node"); // -- copy/compress static files -- async function doStaticFile(item: FileItem) { @@ -353,8 +359,6 @@ export async function sitegen( await staticQueue.done({ method: "stop" }); status.format = spinnerFormat; - await cssQueue.done({ method: "stop" }); - // -- concatenate static rendered pages -- status.text = `Concat Pages`; await Promise.all(pages.map(async (page) => { @@ -399,11 +403,19 @@ export async function sitegen( // to this point have been left as dangling promises. await incr.wait(); + const { inserted, referenced, unreferenced } = incr.shake(); + // Flush the site to disk. 
status.format = spinnerFormat; status.text = `Incremental Flush`; - incr.flush(); // Write outputs - return { incr, elapsed: (performance.now() - startTime) / 1000 }; + incr.flush("node"); // Write outputs + return { + incr, + inserted, + referenced, + unreferenced, + elapsed: (performance.now() - startTime) / 1000, + }; } function getItemText({ file }: FileItem) { diff --git a/framework/hot.ts b/framework/hot.ts index c25b650..a60ff6f 100644 --- a/framework/hot.ts +++ b/framework/hot.ts @@ -35,9 +35,10 @@ export interface FileStat { lastModified: number; imports: string[]; } -let fsGraph = new Map(); -export function getFsGraph() { - return fsGraph; +const fileStats = new Map(); + +export function getFileStat(filepath: string) { + return fileStats.get(path.resolve(filepath)); } function shouldTrackPath(filename: string) { @@ -63,18 +64,16 @@ Module.prototype._compile = function ( const cssImportsMaybe: string[] = []; const imports: string[] = []; for (const { filename: file } of this.children) { - const relative = path.relative(projectRoot, file); - if (file.endsWith(".css")) cssImportsMaybe.push(relative); + if (file.endsWith(".css")) cssImportsMaybe.push(file); else { - const child = fsGraph.get(relative); + const child = fileStats.get(file); if (!child) continue; const { cssImportsRecursive } = child; if (cssImportsRecursive) cssImportsMaybe.push(...cssImportsRecursive); - imports.push(relative); + imports.push(file); } } - const relative = path.relative(projectRoot, filename); - fsGraph.set(relative, { + fileStats.set(filename, { cssImportsRecursive: cssImportsMaybe.length > 0 ? Array.from(new Set(cssImportsMaybe)) : null, @@ -155,36 +154,46 @@ function resolveClientRef(sourcePath: string, ref: string) { ) { throw new Error("addScript must be a .client.ts or .client.tsx"); } - return filePath; + return path.relative(projectSrc, filePath); } +// TODO: extract the marko compilation tools out, lazy load them +export interface MarkoCacheEntry { + src: string; + scannedClientRefs: string[]; +} +export const markoCache = new Map(); function loadMarko(module: NodeJS.Module, filepath: string) { - let src = fs.readFileSync(filepath, "utf8"); - // A non-standard thing here is Clover Sitegen implements - // its own client side scripting stuff, so it overrides - // bare client import statements to it's own usage. - const scannedClientRefs = new Set(); - if (src.match(/^\s*client\s+import\s+["']/m)) { - src = src.replace( - /^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m, - (_, src) => { - const ref = JSON.parse(`"${src.slice(1, -1)}"`); - const resolved = resolveClientRef(filepath, ref); - scannedClientRefs.add(resolved); - return ``; - }, - ) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n'; + let cache = markoCache.get(filepath); + if (!cache) { + let src = fs.readFileSync(filepath, "utf8"); + // A non-standard thing here is Clover Sitegen implements + // its own client side scripting stuff, so it overrides + // bare client import statements to it's own usage. 
+ const scannedClientRefs = new Set(); + if (src.match(/^\s*client\s+import\s+["']/m)) { + src = src.replace( + /^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m, + (_, src) => { + const ref = JSON.parse(`"${src.slice(1, -1)}"`); + const resolved = resolveClientRef(filepath, ref); + scannedClientRefs.add(resolved); + return ``; + }, + ) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n'; + } + + src = marko.compileSync(src, filepath).code; + src = src.replace("marko/debug/html", "#ssr/marko"); + cache = { src, scannedClientRefs: Array.from(scannedClientRefs) }; + markoCache.set(filepath, cache); } - src = marko.compileSync(src, filepath).code; - src = src.replace("marko/debug/html", "#ssr/marko"); - - module.cloverSourceCode = src; - + const { src, scannedClientRefs } = cache; return loadEsbuildCode(module, filepath, src, { - scannedClientRefs: Array.from(scannedClientRefs), + scannedClientRefs, }); } @@ -202,12 +211,19 @@ function loadCss(module: NodeJS.Module, _filepath: string) { export function reloadRecursive(filepath: string) { filepath = path.resolve(filepath); const existing = cache[filepath]; - if (existing) deleteRecursive(filepath, existing); - fsGraph.clear(); + if (existing) deleteRecursiveInner(filepath, existing); + fileStats.clear(); return require(filepath); } -function deleteRecursive(id: string, module: any) { +export function unload(filepath: string) { + filepath = path.resolve(filepath); + const existing = cache[filepath]; + if (existing) delete cache[filepath]; + fileStats.delete(filepath); +} + +function deleteRecursiveInner(id: string, module: any) { if (id.includes(path.sep + "node_modules" + path.sep)) { return; } @@ -215,15 +231,14 @@ function deleteRecursive(id: string, module: any) { for (const child of module.children) { if (child.filename.includes("/engine/")) return; const existing = cache[child.filename]; - if (existing === child) deleteRecursive(child.filename, existing); + if (existing === child) deleteRecursiveInner(child.filename, existing); } } export function getCssImports(filepath: string) { filepath = path.resolve(filepath); if (!require.cache[filepath]) throw new Error(filepath + " was never loaded"); - return fsGraph.get(path.relative(projectRoot, filepath)) - ?.cssImportsRecursive ?? []; + return fileStats.get(filepath)?.cssImportsRecursive ?? []; } export function getClientScriptRefs(filepath: string) { @@ -296,7 +311,6 @@ export function resolveClientRefs( export function getScriptId(file: string) { return (path.isAbsolute(file) ? path.relative(projectSrc, file) : file) - .replace(/^\/?src\//, "") .replaceAll("\\", "/"); } diff --git a/framework/incremental.ts b/framework/incremental.ts index d20dda4..fd69232 100644 --- a/framework/incremental.ts +++ b/framework/incremental.ts @@ -20,8 +20,19 @@ interface ArtifactMap { pageMetadata: PageMetadata; /* Metadata about a dynamic view */ viewMetadata: ViewMetadata; + /* Cached '.marko' server compilation */ + serverMarko: hot.MarkoCacheEntry; + /* Backend source code, pre-replacement. Keyed by platform type. */ + backendBundle: BackendBundle; + /* One file in the backend receives post-processing. */ + backendReplace: Buffer; } type ArtifactKind = keyof ArtifactMap; +/* Automatic path tracing is performed to make it so that + * specifying 'sources: [file]' refers to it and everything it imports. 
+ * These kinds do not have that behavior + */ +const exactDependencyKinds = ["serverMarko"]; export interface Asset { buffer: Buffer; headers: Record; @@ -50,6 +61,11 @@ export interface ViewMetadata { clientRefs: string[]; hasLayout: boolean; } +export interface BackendBundle { + magicWord: string; + fileWithMagicWord: string | null; + files: Record; +} // -- incremental support types -- export interface PutBase { @@ -76,6 +92,9 @@ export class Incremental { style: new Map(), pageMetadata: new Map(), viewMetadata: new Map(), + serverMarko: new Map(), + backendBundle: new Map(), + backendReplace: new Map(), }; /** Tracking filesystem entries to `srcId` */ invals = new Map(); @@ -92,16 +111,52 @@ export class Incremental { getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`, }); + /** Reset at the end of each update */ + round = { + inserted: new Set(), + referenced: new Set(), + }; + getArtifact(kind: T, key: string) { + this.round.referenced.add(`${kind}\0${key}`); return this.out[kind].get(key); } - hasArtifact(kind: T, key: string) { - return this.out[kind].has(key); + hasArtifact(kind: ArtifactKind, key: string) { + return this.getArtifact(kind, key) != null; } sourcesFor(kind: ArtifactKind, key: string) { - return UNWRAP(this.sources.get(kind + "\0" + key)); + return UNWRAP( + this.sources.get(kind + "\0" + key), + `No artifact '${kind}' '${key}'`, + ); + } + + shake() { + const toPublic = (str: string) => { + const [kind, key] = str.split("\0"); + return { kind: kind as ArtifactKind, key }; + }; + const inserted = Array.from(this.round.inserted, toPublic); + const referenced = Array.from(this.round.referenced, toPublic); + const unreferenced: { kind: ArtifactKind; key: string }[] = []; + + for (const kind in this.out) { + const map = this.out[kind as keyof typeof this.out]; + if (!map) continue; + for (const key of map.keys()) { + if (!this.round.referenced.has(`${kind}\0${key}`)) { + unreferenced.push({ kind: kind as ArtifactKind, key }); + console.warn("unreferened " + kind + " : " + key); + } + } + } + + this.round.inserted.clear(); + this.round.referenced.clear(); + + return { inserted, referenced, unreferenced }; } /* @@ -115,9 +170,25 @@ export class Incremental { key, value, }: Put) { - console.log("put " + kind + ": " + key); + // These three invariants affect incremental accuracy. + if (this.round.inserted.has(`${kind}\0${key}`)) { + console.error( + `Artifact ${kind}:${key} was inserted multiple times in the same round!`, + ); + } else if (!this.round.referenced.has(`${kind}\0${key}`)) { + console.error( + `Artifact ${kind}:${key} was inserted without checking if (!hasArtifact())`, + ); + } else if (this.out[kind].has(key)) { + console.error( + `Artifact ${kind}:${key} is not stale, but overwritten.`, + ); + } + this.out[kind].set(key, value); + this.round.inserted.add(`${kind}\0${key}`); + // Update sources information ASSERT(sources.length > 0, "Missing sources for " + kind + " " + key); sources = sources.map((src) => path.normalize(src)); @@ -150,8 +221,7 @@ export class Incremental { #getOrInitInvals(source: string) { let invals = this.invals.get(source); if (!invals) { - const g = hot.getFsGraph().get(source); - const lastModified = g?.lastModified ?? + const lastModified = hot.getFileStat(source)?.lastModified ?? 
fs.statSync(path.resolve(hot.projectRoot, source)).mtimeMs; this.invals.set( source, @@ -166,8 +236,7 @@ export class Incremental { } #followImports(file: string) { - const graph = hot.getFsGraph(); - const stat = graph.get(file); + const stat = hot.getFileStat(file); if (!stat) return; for (const i of stat.imports) { const invals = this.#getOrInitInvals(i); @@ -178,19 +247,23 @@ export class Incremental { async statAllFiles() { for (const file of this.invals.keys()) { - const mtime = fs.statSync(file).mtimeMs; - this.updateStat(file, mtime); + try { + const mtime = fs.statSync(file).mtimeMs; + this.updateStat(file, mtime); + } catch (err) { + } } } - updateStat(file: string, newLastModified: number) { + updateStat(file: string, newLastModified: number | null) { file = path.relative(hot.projectRoot, file); const stat = this.invals.get(file); ASSERT(stat, "Updated stat on untracked file " + file); - const hasUpdate = stat.lastModified < newLastModified; + const hasUpdate = !newLastModified || stat.lastModified < newLastModified; if (hasUpdate) { // Invalidate - console.info(file + " updated"); + console.info(file + " " + (newLastModified ? "updated" : "deleted")); + hot.unload(file); const invalidQueue = [file]; let currentInvalid; while (currentInvalid = invalidQueue.pop()) { @@ -204,12 +277,15 @@ export class Incremental { for (const out of outputs) { const [kind, artifactKey] = out.split("\0"); this.out[kind as ArtifactKind].delete(artifactKey); - console.log("stale " + kind + ": " + artifactKey); } invalidQueue.push(...files); } } - stat.lastModified = newLastModified; + if (newLastModified) { + stat.lastModified = newLastModified; + } else { + this.invals.delete(file); + } return hasUpdate; } @@ -266,6 +342,7 @@ export class Incremental { serialize() { const writer = new BufferWriter(); + // -- artifact -- const asset = Array.from( this.out.asset, ([key, { buffer, hash, headers }]) => { @@ -283,6 +360,31 @@ export class Incremental { }, ); const script = Array.from(this.out.script); + const style = Array.from(this.out.style); + const pageMetadata = Array.from(this.out.pageMetadata); + const viewMetadata = Array.from(this.out.viewMetadata); + const serverMarko = Array.from(this.out.serverMarko); + const backendBundle = Array.from(this.out.backendBundle, ([k, v]) => { + return [k, { + magicWord: v.magicWord, + fileWithMagicWord: v.fileWithMagicWord, + files: Object.entries(v.files).map( + ([file, contents]) => [ + file, + writer.write(contents, "backendBundle" + k + ":" + file), + ], + ), + }] satisfies SerializedMeta["backendBundle"][0]; + }); + const backendReplace = Array.from( + this.out.backendReplace, + ([k, v]) => + [ + k, + writer.write(v, "backendReplace" + k), + ] satisfies SerializedMeta["backendReplace"][0], + ); + // -- incremental metadata -- const invals = Array.from(this.invals, ([key, value]) => { const { lastModified, files, outputs } = value; return [key, { @@ -299,6 +401,12 @@ export class Incremental { script, invals, sources, + style, + pageMetadata, + viewMetadata, + serverMarko, + backendBundle, + backendReplace, } satisfies SerializedMeta; const meta = Buffer.from(JSON.stringify(json), "utf-8"); @@ -317,21 +425,39 @@ export class Incremental { buffer.subarray(4 + metaLength + start, 4 + metaLength + end); const incr = new Incremental(); - incr.out.asset = new Map(meta.asset.map(([key, value]) => { - const { hash, raw, gzip, zstd, headers } = value; - if ((gzip || zstd) && !incr.compress.has(hash)) { - incr.compress.set(hash, { - gzip: gzip ? 
view(gzip) : undefined, - zstd: zstd ? view(zstd) : undefined, - }); - } - return [key, { - buffer: view(raw), - headers: headers, - hash: hash, - }]; - })); - incr.out.script = new Map(meta.script); + incr.out = { + asset: new Map(meta.asset.map(([key, value]) => { + const { hash, raw, gzip, zstd, headers } = value; + if ((gzip || zstd) && !incr.compress.has(hash)) { + incr.compress.set(hash, { + gzip: gzip ? view(gzip) : undefined, + zstd: zstd ? view(zstd) : undefined, + }); + } + return [key, { + buffer: view(raw), + headers: headers, + hash: hash, + }]; + })), + script: new Map(meta.script), + style: new Map(meta.style), + pageMetadata: new Map(meta.pageMetadata), + viewMetadata: new Map(meta.viewMetadata), + serverMarko: new Map(meta.serverMarko), + backendBundle: new Map(meta.backendBundle.map(([key, value]) => { + return [key, { + magicWord: value.magicWord, + fileWithMagicWord: value.fileWithMagicWord, + files: Object.fromEntries( + value.files.map(([file, contents]) => [file, view(contents)]), + ), + }]; + })), + backendReplace: new Map( + meta.backendReplace.map(([key, contents]) => [key, view(contents)]), + ), + }; incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => { return [key, { lastModified: m, @@ -343,6 +469,37 @@ export class Incremental { return incr; } + /* + * Move the cached (server) marko transpilations from this incremental + * into the running process. + */ + loadMarkoCache() { + hot.markoCache.clear(); + for (const [key, value] of this.out.serverMarko) { + hot.markoCache.set(path.resolve(hot.projectRoot, key), value); + } + } + + /* + * Move the cached (server) marko transpilations from this incremental + * into the running process. + */ + snapshotMarkoCache() { + for (const [file, value] of hot.markoCache) { + const key = path.relative(hot.projectRoot, file).replaceAll("\\", "/"); + // Only insert if it doesn't exist. Calling 'put' when it + // already exists would inform the user of extra calls to put. + if (!this.hasArtifact("serverMarko", key)) { + this.put({ + kind: "serverMarko", + sources: [file], + key, + value, + }); + } + } + } + toDisk(file = ".clover/incr.state") { const buffer = this.serialize(); fs.writeFileSync(file, buffer); @@ -362,23 +519,52 @@ export class Incremental { await this.compressQueue.done({ method: "success" }); } - async flush() { + async flush( + platform: bundle.ServerPlatform, + dir = path.resolve(".clover/out"), + ) { ASSERT(!this.compressQueue.active); + const join = (...args: string[]) => path.join(dir, ...args); const writer = new BufferWriter(); - // TODO: ensure all assets are actually compressed and not fake lying. + + // TODO: ensure all compressed got compressed + const asset = Object.fromEntries( Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => { const raw = writer.write(buffer, hash); const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {}; - const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null; - const zstd = zstdBuf ? 
writer.write(zstdBuf, hash + ".zstd") : null; + const gzip = writer.write(UNWRAP(gzipBuf), hash + ".gz"); + const zstd = writer.write(UNWRAP(zstdBuf), hash + ".zstd"); return [key, { raw, gzip, zstd, headers }]; }), ); - await Promise.all([ - fs.writeFile(".clover/static.json", JSON.stringify(asset)), - fs.writeFile(".clover/static.blob", writer.get()), - ]); + const backendBundle = UNWRAP(this.out.backendBundle.get(platform)); + + // Arrange output files + const outFiles: Array<[file: string, contents: string | Buffer]> = [ + // Asset manifest + ["static.json", JSON.stringify(asset)], + ["static.blob", writer.get()], + + // Backend + ...Object.entries(backendBundle.files).map(([subPath, contents]) => + [ + subPath, + subPath === backendBundle.fileWithMagicWord + ? UNWRAP(this.out.backendReplace.get(platform)) + : contents, + ] as [string, Buffer] + ), + ]; + + // TODO: check duplicates + + // Perform all i/o + await Promise.all( + outFiles.map(([subPath, contents]) => + fs.writeMkdir(join(subPath), contents, { flush: true }) + ), + ); } } @@ -440,6 +626,17 @@ export interface SerializedMeta { headers: Record; }]>; script: Array<[key: string, value: string]>; + style: Array<[key: string, value: string]>; + pageMetadata: Array<[key: string, PageMetadata]>; + viewMetadata: Array<[key: string, ViewMetadata]>; + serverMarko: Array<[key: string, hot.MarkoCacheEntry]>; + backendBundle: Array<[platform: string, { + magicWord: string; + fileWithMagicWord: string | null; + files: Array<[string, View]>; + }]>; + backendReplace: Array<[key: string, View]>; + invals: Array<[key: string, { /** Modified */ m: number; @@ -461,3 +658,4 @@ import * as mime from "#sitegen/mime"; import * as path from "node:path"; import { Buffer } from "node:buffer"; import * as css from "./css.ts"; +import type * as bundle from "./bundle.ts"; diff --git a/framework/lib/assets.ts b/framework/lib/assets.ts index 751571f..2c24e1d 100644 --- a/framework/lib/assets.ts +++ b/framework/lib/assets.ts @@ -8,9 +8,10 @@ export type StaticPageId = string; export async function reload() { const [map, buf] = await Promise.all([ - fs.readFile(".clover/static.json", "utf8"), - fs.readFile(".clover/static.blob"), + fs.readFile(path.join(import.meta.dirname, "static.json"), "utf8"), + fs.readFile(path.join(import.meta.dirname, "static.blob")), ]); + console.log("new buffer loaded"); assets = { map: JSON.parse(map), buf, @@ -18,8 +19,11 @@ export async function reload() { } export async function reloadSync() { - const map = fs.readFileSync(".clover/static.json", "utf8"); - const buf = fs.readFileSync(".clover/static.blob"); + const map = fs.readFileSync( + path.join(import.meta.dirname, "static.json"), + "utf8", + ); + const buf = fs.readFileSync(path.join(import.meta.dirname, "static.blob")); assets = { map: JSON.parse(map), buf, @@ -105,8 +109,14 @@ function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) { return c.res = new Response(body, { headers, status }); } +process.on("message", (msg: any) => { + console.log({ msg }); + if (msg?.type === "clover.assets.reload") reload(); +}); + import * as fs from "#sitegen/fs"; import type { Context, Next } from "hono"; import type { StatusCode } from "hono/utils/http-status"; import type { BuiltAsset, BuiltAssetMap, View } from "../incremental.ts"; import { Buffer } from "node:buffer"; +import * as path from "node:path"; diff --git a/framework/lib/fs.ts b/framework/lib/fs.ts index 974ded3..bc4bef9 100644 --- a/framework/lib/fs.ts +++ b/framework/lib/fs.ts @@ -22,9 +22,15 @@ export 
function mkdirSync(dir: string) { return nodeMkdirSync(dir, { recursive: true }); } -export async function writeMkdir(file: string, contents: Buffer | string) { +export type WriteFileAsyncOptions = Parameters[2]; + +export async function writeMkdir( + file: string, + contents: Buffer | string, + options?: WriteFileAsyncOptions, +) { await mkdir(path.dirname(file)); - return writeFile(file, contents); + return writeFile(file, contents, options); } export function writeMkdirSync(file: string, contents: Buffer | string) { diff --git a/framework/watch.ts b/framework/watch.ts index 706b7ba..852fe23 100644 --- a/framework/watch.ts +++ b/framework/watch.ts @@ -3,12 +3,39 @@ const debounceMilliseconds = 25; export async function main() { + let subprocess: child_process.ChildProcess | null = null; + // Catch up state by running a main build. const { incr } = await generate.main(); // ...and watch the files that cause invals. const watch = new Watch(rebuild); watch.add(...incr.invals.keys()); statusLine(); + // ... an + serve(); + + function serve() { + if (subprocess) { + subprocess.removeListener("close", onSubprocessClose); + subprocess.kill(); + } + subprocess = child_process.fork(".clover/out/server.js", [ + "--development", + ], { + stdio: "inherit", + }); + subprocess.on("close", onSubprocessClose); + } + + function onSubprocessClose(code: number | null, signal: string | null) { + subprocess = null; + const status = code != null ? `code ${code}` : `signal ${signal}`; + console.error(`Backend process exited with ${status}`); + } + + process.on("beforeExit", () => { + subprocess?.removeListener("close", onSubprocessClose); + }); function rebuild(files: string[]) { files = files.map((file) => path.relative(hot.projectRoot, file)); @@ -20,7 +47,7 @@ export async function main() { console.warn("Files were modified but the 'modify' time did not change."); return; } - withSpinner, any>({ + withSpinner>>({ text: "Rebuilding", successText: generate.successText, failureText: () => "sitegen FAIL", @@ -39,6 +66,20 @@ export async function main() { if (!incr.invals.has(relative)) watch.remove(file); } return result; + }).then((result) => { + // Restart the server if it was changed or not running. + if ( + !subprocess || + result.inserted.some(({ kind }) => kind === "backendReplace") + ) { + serve(); + } else if ( + subprocess && + result.inserted.some(({ kind }) => kind === "asset") + ) { + subprocess.send({ type: "clover.assets.reload" }); + } + return result; }).catch((err) => { console.error(util.inspect(err)); }).finally(statusLine); @@ -142,10 +183,10 @@ class Watch { } } -import { Incremental } from "./incremental.ts"; import * as fs from "node:fs"; -import { Spinner, withSpinner } from "@paperclover/console/Spinner"; +import { withSpinner } from "@paperclover/console/Spinner"; import * as generate from "./generate.ts"; import * as path from "node:path"; import * as util from "node:util"; import * as hot from "./hot.ts"; +import * as child_process from "node:child_process";
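
The new Incremental surface is easiest to follow as a lifecycle. Below is a minimal sketch that mirrors the calls framework/generate.ts and framework/watch.ts make after this patch; the wrapper script itself is hypothetical and only the ordering of statAllFiles, put, shake, flush, and toDisk is taken from the diff.

    // Sketch of one build round, assuming it runs from the project root.
    // The API names come from this patch; only this wrapper script is invented.
    import { Incremental } from "./framework/incremental.ts";

    const incr = Incremental.fromDisk();   // recover .clover/incr.state
    await incr.statAllFiles();             // re-stat sources; stale artifacts get purged

    // Producers (bundling, CSS, page rendering) then insert artifacts with:
    //   if (!incr.hasArtifact(kind, key)) {
    //     incr.put({ kind, key, sources, value });
    //   }
    // hasArtifact() marks the key as referenced, so skipped work still counts
    // toward the round, and put() without the prior check logs an invariant error.

    await incr.wait();                     // let queued compression jobs finish
    const { inserted, referenced, unreferenced } = incr.shake(); // prune unused keys
    await incr.flush("node");              // write .clover/out (static.json/.blob + backend)
    incr.toDisk();                         // persist state for the next round / dev server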