diff --git a/framework/bundle.ts b/framework/bundle.ts
index 300d3d2..8bc4295 100644
--- a/framework/bundle.ts
+++ b/framework/bundle.ts
@@ -38,6 +38,7 @@ export async function bundleClientJavaScript(
     plugins,
     splitting: true,
     write: false,
+    metafile: true,
   });
   if (bundle.errors.length || bundle.warnings.length) {
     throw new AggregateError(
@@ -45,29 +46,34 @@ export async function bundleClientJavaScript(
       "JS bundle failed",
     );
   }
-  incr.invalidate("bundle-script");
   const publicScriptRoutes = extraPublicScripts.map((file) =>
     path.basename(file).replace(/\.client\.[tj]sx?/, "")
   );
 
+  const { metafile } = bundle;
+  console.log(metafile);
   const promises: Promise<unknown>[] = [];
   // TODO: add a shared build hash to entrypoints, derived from all the chunk hashes.
   for (const file of bundle.outputFiles) {
+    const { text } = file;
     let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
-    const text = file.text;
+    const { inputs } = UNWRAP(metafile.outputs["out!" + route]);
+    const sources = Object.keys(inputs);
+
+    // Register non-chunks as script entries.
     const chunk = route.startsWith("/js/c.");
     if (!chunk) {
       route = route.replace(".client.js", ".js");
       incr.put({
-        srcId: "bundle-script",
+        sources,
         type: "script",
         key: route.slice("/js/".length, -".js".length),
         value: text,
       });
     }
+    // Register chunks and public scripts as assets.
    if (chunk || publicScriptRoutes.includes(route)) {
       promises.push(incr.putAsset({
-        srcId: "bundle-script",
+        sources,
         key: route,
         body: text,
       }));
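The `metafile: true` flag is what makes the per-output `sources` lists possible: esbuild reports, for every output file, exactly which inputs contributed to it. A minimal sketch of that lookup, with hypothetical entry paths (the `out!` outdir mirrors the route-mangling replace above):

```ts
// Sketch: mapping esbuild outputs back to their inputs via the metafile.
// Shape follows esbuild's documented metafile format; paths are illustrative.
import * as esbuild from "esbuild";

const bundle = await esbuild.build({
  entryPoints: ["src/pages/index.client.ts"], // hypothetical entry
  bundle: true,
  splitting: true,
  format: "esm",
  outdir: "out!",
  write: false,
  metafile: true,
});

for (const [outPath, output] of Object.entries(bundle.metafile.outputs)) {
  // Object.keys(output.inputs) is exactly what `incr.put({ sources, ... })`
  // receives: every source file that can invalidate this output.
  console.log(outPath, "<-", Object.keys(output.inputs));
}
```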
diff --git a/framework/css.ts b/framework/css.ts
index bc7ba8f..eda4d32 100644
--- a/framework/css.ts
+++ b/framework/css.ts
@@ -34,11 +34,19 @@ export function preprocess(css: string, theme: Theme): string {
   );
 }
 
+export interface Output {
+  text: string;
+  sources: string[];
+}
+
 export async function bundleCssFiles(
   cssImports: string[],
   theme: Theme,
   dev: boolean = false,
-): Promise<string> {
+): Promise<Output> {
+  cssImports = cssImports.map((file) =>
+    path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
+  );
   const plugin = {
     name: "clover",
     setup(b) {
@@ -68,13 +76,14 @@ export async function bundleCssFiles(
   const build = await esbuild.build({
     bundle: true,
     entryPoints: ["$input$"],
-    write: false,
-    external: ["*.woff2"],
-    target: ["ie11"],
-    plugins: [plugin],
+    external: ["*.woff2", "*.ttf", "*.png", "*.jpeg"],
+    metafile: true,
     minify: !dev,
+    plugins: [plugin],
+    target: ["ie11"],
+    write: false,
   });
-  const { errors, warnings, outputFiles } = build;
+  const { errors, warnings, outputFiles, metafile } = build;
   if (errors.length > 0) {
     throw new AggregateError(errors, "CSS Build Failed");
   }
@@ -82,8 +91,15 @@ export async function bundleCssFiles(
     throw new AggregateError(warnings, "CSS Build Failed");
   }
   if (outputFiles.length > 1) throw new Error("Too many output files");
-  return outputFiles[0].text;
+  return {
+    text: outputFiles[0].text,
+    sources: Object.keys(metafile.outputs["$input$.css"].inputs)
+      .filter((x) => x !== "input:."),
+  };
 }
 
 import * as esbuild from "esbuild";
 import * as fs from "#sitegen/fs";
+import * as hot from "./hot.ts";
+import * as path from "node:path";
+import { Incremental } from "./incremental.ts";
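Callers now receive the stylesheet and its contributing files as one object. A sketch from the caller's side, assuming a `css.Theme` named `theme` is already in scope and using a hypothetical entry path:

```ts
import * as css from "./css.ts";

const output: css.Output = await css.bundleCssFiles(["src/global.css"], theme);
output.text;    // the bundled stylesheet; previously the entire return value
output.sources; // project-relative inputs, e.g. ["src/global.css", ...],
                // which generate.tsx feeds into incr.putAsset's `sources`
```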
diff --git a/framework/generate.tsx b/framework/generate.tsx
index 69825f2..63964b6 100644
--- a/framework/generate.tsx
+++ b/framework/generate.tsx
@@ -21,7 +21,8 @@ async function sitegen(status: Spinner) {
   let root = path.resolve(import.meta.dirname, "../src");
   const join = (...sub: string[]) => path.join(root, ...sub);
 
-  const incr = new Incremental();
+  const incr = Incremental.fromDisk();
+  await incr.statAllFiles();
 
   // Sitegen reviews every defined section for resources to process
   const sections: sg.Section[] =
@@ -68,20 +69,24 @@ async function sitegen(status: Spinner) {
   ];
   for (const { dir, list, prefix, exclude = [], ext = false } of kinds) {
     const items = fs.readDirRecOptionalSync(dir);
-    item: for (const item of items) {
-      if (item.isDirectory()) continue;
+    item: for (const subPath of items) {
+      const file = path.join(dir, subPath);
+      const stat = fs.statSync(file);
+      if (stat.isDirectory()) continue;
       for (const e of exclude) {
-        if (item.name.endsWith(e)) continue item;
+        if (subPath.endsWith(e)) continue item;
       }
-      const file = path.relative(dir, item.parentPath + "/" + item.name);
       const trim = ext
-        ? file
-        : file.slice(0, -path.extname(file).length).replaceAll(".", "/");
+        ? subPath
+        : subPath.slice(0, -path.extname(subPath).length).replaceAll(
+          ".",
+          "/",
+        );
       let id = prefix + trim.replaceAll("\\", "/");
       if (prefix === "/" && id.endsWith("/index")) {
         id = id.slice(0, -"/index".length) || "/";
       }
-      list.push({ id, file: path.join(item.parentPath, item.name) });
+      list.push({ id, file: file });
     }
   }
@@ -92,8 +97,8 @@ async function sitegen(status: Spinner) {
 
   // -- server side render --
   status.text = "Building";
-  const cssOnce = new OnceMap<string>();
-  const cssQueue = new Queue<[string, string[], css.Theme], string>({
+  const cssOnce = new OnceMap<css.Output>();
+  const cssQueue = new Queue<[string, string[], css.Theme], css.Output>({
     name: "Bundle",
     fn: ([, files, theme]) => css.bundleCssFiles(files, theme),
     passive: true,
@@ -103,7 +108,7 @@ async function sitegen(status: Spinner) {
   interface RenderResult {
     body: string;
     head: string;
-    inlineCss: string;
+    css: css.Output;
     scriptFiles: string[];
     item: FileItem;
   }
@@ -139,11 +144,16 @@ async function sitegen(status: Spinner) {
       () => cssQueue.add([item.id, cssImports, theme]),
    );
     // -- html --
-    const bodyPromise = await ssr.ssrAsync(<Page />, {
+    let page = <Page />;
+    if (layout?.default) {
+      const Layout = layout.default;
+      page = <Layout>{page}</Layout>;
+    }
+    const bodyPromise = await ssr.ssrAsync(page, {
       sitegen: sg.initRender(),
     });
 
-    const [{ text, addon }, inlineCss, renderedMeta] = await Promise.all([
+    const [{ text, addon }, cssBundle, renderedMeta] = await Promise.all([
       bodyPromise,
       cssPromise,
       renderedMetaPromise,
@@ -160,13 +170,14 @@ async function sitegen(status: Spinner) {
     renderResults.push({
       body: text,
       head: renderedMeta,
-      inlineCss,
+      css: cssBundle,
       scriptFiles: Array.from(addon.sitegen.scripts),
       item: item,
     });
   }
   // This is done in two passes so that a page that throws during evaluation
   // will report "Load Render Module" instead of "Render Static Page".
+  const neededPages = pages.filter((page) => incr.needsBuild("asset", page.id));
   const spinnerFormat = status.format;
   status.format = () => "";
   const moduleLoadQueue = new Queue({
@@ -175,7 +186,7 @@ async function sitegen(status: Spinner) {
     getItemText,
     maxJobs: 1,
   });
-  moduleLoadQueue.addMany(pages);
+  moduleLoadQueue.addMany(neededPages);
   await moduleLoadQueue.done({ method: "stop" });
   const pageQueue = new Queue({
     name: "Render Static Page",
@@ -183,7 +194,7 @@ async function sitegen(status: Spinner) {
     getItemText,
     maxJobs: 2,
   });
-  pageQueue.addMany(pages);
+  pageQueue.addMany(neededPages);
   await pageQueue.done({ method: "stop" });
   status.format = spinnerFormat;
@@ -207,7 +218,7 @@ async function sitegen(status: Spinner) {
   async function doStaticFile(item: FileItem) {
     const body = await fs.readFile(item.file);
     await incr.putAsset({
-      srcId: "static:" + item.file,
+      sources: [item.file],
       key: item.id,
       body,
     });
@@ -219,7 +230,9 @@ async function sitegen(status: Spinner) {
     maxJobs: 16,
   });
   status.format = () => "";
-  staticQueue.addMany(staticFiles);
+  staticQueue.addMany(
+    staticFiles.filter((file) => incr.needsBuild("asset", file.id)),
+  );
   await staticQueue.done({ method: "stop" });
   status.format = spinnerFormat;
 
@@ -227,11 +240,13 @@ async function sitegen(status: Spinner) {
   status.text = `Concat ${renderResults.length} Pages`;
   await Promise.all(
     renderResults.map(
-      async ({ item: page, body, head, inlineCss, scriptFiles }) => {
+      async (
+        { item: page, body, head, css, scriptFiles },
+      ) => {
         const doc = wrapDocument({
           body,
           head,
-          inlineCss,
+          inlineCss: css.text,
           scripts: scriptFiles.map(
             (id) =>
               UNWRAP(
@@ -242,7 +257,7 @@ async function sitegen(status: Spinner) {
           ).map((x) => `{${x}}`).join("\n"),
         });
         await incr.putAsset({
-          srcId: "page:" + page.file,
+          sources: [page.file, ...css.sources],
           key: page.id,
           body: doc,
           headers: {
@@ -262,7 +277,7 @@ async function sitegen(status: Spinner) {
   status.format = spinnerFormat;
   status.text = `Incremental Flush`;
   incr.flush();
-  incr.serializeToDisk(); // Allows picking up this state again
+  incr.toDisk(); // Allows picking up this state again
 
   return { elapsed: (performance.now() - startTime) / 1000 };
 }
@@ -281,9 +296,11 @@ function wrapDocument({
   inlineCss: string;
   scripts: string;
 }) {
-  return `${head}${
+  return `${head}${
     inlineCss ? `<style>${inlineCss}</style>` : ""
-  }${body}${scripts ? `<script>${scripts}</script>` : ""}`;
+  }${body}${
+    scripts ? `<script>${scripts}</script>` : ""
+  }`;
 }
 
 import { OnceMap, Queue } from "./queue.ts";
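Taken together, the generator now runs a load-check-build-persist cycle around the cache. Condensed from the changes above, with rendering details elided:

```ts
// The incremental lifecycle as wired up in sitegen() above.
const incr = Incremental.fromDisk();  // load previous state, or start fresh
await incr.statAllFiles();            // mtime sweep; stale artifacts are deleted
const neededPages = pages.filter((p) => incr.needsBuild("asset", p.id));
// ...load and render only `neededPages`; untouched pages are served from
// incr.out.asset exactly as they were last built...
incr.flush();
incr.toDisk(); // persist for the next run
```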
diff --git a/framework/hot.ts b/framework/hot.ts
index 1f612c4..728845e 100644
--- a/framework/hot.ts
+++ b/framework/hot.ts
@@ -34,8 +34,6 @@ export interface FileStat {
   cssImportsRecursive: string[] | null;
   lastModified: number;
   imports: string[];
-  /* Used by 'incremental.ts' */
-  srcIds: string[];
 }
 let fsGraph = new Map<string, FileStat>();
 export function setFsGraph(g: Map<string, FileStat>) {
@@ -60,14 +58,6 @@ Module.prototype._compile = function (
   filename: string,
   format: "module" | "commonjs",
 ) {
-  fs.writeMkdirSync(
-    ".clover/debug-transpilation/" +
-      path.relative(projectRoot, filename).replaceAll("\\", "/").replaceAll(
-        "../",
-        "_/",
-      ).replaceAll("/", "."),
-    content,
-  );
   const result = ModulePrototypeUnderscoreCompile.call(
     this,
     content,
@@ -96,7 +86,6 @@ Module.prototype._compile = function (
         : null,
       imports,
       lastModified: stat.mtimeMs,
-      srcIds: [],
     });
   }
   return result;
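With `srcIds` gone, the module graph is read-only from the incremental system's point of view: each compiled file contributes only `imports` and `lastModified`, which `#followImports` in incremental.ts below walks. One hypothetical entry:

```ts
// What the graph holds for one file after _compile runs; paths hypothetical.
hot.getFsGraph().set("src/pages/index.tsx", {
  cssImportsRecursive: ["src/global.css"],
  lastModified: 1718000000000, // stat.mtimeMs at compile time
  imports: ["src/components/Nav.tsx"],
});
```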
`` : "" + }`; } import { OnceMap, Queue } from "./queue.ts"; diff --git a/framework/hot.ts b/framework/hot.ts index 1f612c4..728845e 100644 --- a/framework/hot.ts +++ b/framework/hot.ts @@ -34,8 +34,6 @@ export interface FileStat { cssImportsRecursive: string[] | null; lastModified: number; imports: string[]; - /* Used by 'incremental.ts' */ - srcIds: string[]; } let fsGraph = new Map(); export function setFsGraph(g: Map) { @@ -60,14 +58,6 @@ Module.prototype._compile = function ( filename: string, format: "module" | "commonjs", ) { - fs.writeMkdirSync( - ".clover/debug-transpilation/" + - path.relative(projectRoot, filename).replaceAll("\\", "/").replaceAll( - "../", - "_/", - ).replaceAll("/", "."), - content, - ); const result = ModulePrototypeUnderscoreCompile.call( this, content, @@ -96,7 +86,6 @@ Module.prototype._compile = function ( : null, imports, lastModified: stat.mtimeMs, - srcIds: [], }); } return result; diff --git a/framework/incremental.ts b/framework/incremental.ts index d77f676..196a7a7 100644 --- a/framework/incremental.ts +++ b/framework/incremental.ts @@ -1,54 +1,50 @@ // `Incremental` contains multiple maps for the different parts of a site // build, and tracks reused items across builds. It also handles emitting and // updating the built site. This structure is self contained and serializable. -// -// Tracking is simple: Files map to one or more 'source IDs', which map to one -// or more 'artifact'. This two layer approach allows many files (say a page + -// all its imports) to map to the build of a page, which produces an HTML file -// plus a list of scripts. + +type SourceId = string; // relative to project root, e.g. 'src/global.css' +type ArtifactId = string; // `${ArtifactType}#${string}` +type Sha1Id = string; // Sha1 hex string interface ArtifactMap { asset: Asset; script: string; } -type AllArtifactMaps = { - [K in keyof ArtifactMap]: Map; -}; type ArtifactType = keyof ArtifactMap; - interface Asset { buffer: Buffer; headers: Record; hash: string; } - export interface PutBase { - srcTag?: string; // deprecated - srcId: string; + sources: SourceId[]; key: string; } - export interface Put extends PutBase { type: T; value: ArtifactMap[T]; } - -export interface Output { - type: ArtifactType; - key: string; +export interface Invalidations { + lastModified: number; + outputs: Set; + files: Set; } -const gzip = util.promisify(zlib.gzip); -const zstd = util.promisify(zlib.zstdCompress); - export class Incremental { /** The generated artifacts */ - out: AllArtifactMaps = { + out: { + [K in keyof ArtifactMap]: Map; + } = { asset: new Map(), script: new Map(), }; + /** Tracking filesystem entries to `srcId` */ + invals = new Map(); + /** Tracking output keys to files */ + sources = new Map(); + /** Compressed resources */ - compress = new Map(); + compress = new Map(); compressQueue = new Queue({ name: "Compress", maxJobs: 5, @@ -56,59 +52,114 @@ export class Incremental { passive: true, getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`, }); - /** Tracking filesystem entries to `srcId` */ - files = new Map(); - srcIds = new Map(); - static fromSerialized() { - } - serialize() { - const writer = new BufferWriter(); - - const asset = Array.from( - this.out.asset, - ([key, { buffer, hash, headers }]) => { - const raw = writer.write(buffer, hash); - const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {}; - const gzip = gzipBuf ? writer.write(gzipBuf, hash) : null; - const zstd = zstdBuf ? 
@@ -161,7 +212,99 @@ export class Incremental {
     entry![algo] = out;
   }
 
-  invalidate(srcId: string) {
+  serialize() {
+    const writer = new BufferWriter();
+
+    const asset = Array.from(
+      this.out.asset,
+      ([key, { buffer, hash, headers }]) => {
+        const raw = writer.write(buffer, hash);
+        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
+        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
+        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
+        return [key, {
+          raw,
+          gzip,
+          zstd,
+          hash,
+          headers: headers as Record<string, string>,
+        }] satisfies SerializedMeta["asset"][0];
+      },
+    );
+    const script = Array.from(this.out.script);
+    const invals = Array.from(this.invals, ([key, value]) => {
+      const { lastModified, files, outputs } = value;
+      return [key, {
+        m: lastModified,
+        f: [...files],
+        o: [...outputs],
+      }] satisfies SerializedMeta["invals"][0];
+    });
+    const sources = Array.from(this.sources, ([key, value]) => {
+      return [key, ...value] as [string, ...string[]];
+    });
+    const json = {
+      asset,
+      script,
+      invals,
+      sources,
+    } satisfies SerializedMeta;
+    const meta = Buffer.from(JSON.stringify(json), "utf-8");
+
+    const lengthBuffer = Buffer.alloc(4);
+    lengthBuffer.writeUInt32LE(meta.byteLength, 0);
+
+    return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
+  }
+
+  static fromSerialized(buffer: Buffer): Incremental {
+    const metaLength = buffer.readUint32LE(0);
+    const meta: SerializedMeta = JSON.parse(
+      buffer.subarray(4, 4 + metaLength).toString("utf8"),
+    );
+    const view = ([start, end]: View) =>
+      buffer.subarray(4 + metaLength + start, 4 + metaLength + end);
+
+    const incr = new Incremental();
+    incr.out.asset = new Map(meta.asset.map(([key, value]) => {
+      const { hash, raw, gzip, zstd, headers } = value;
+      if ((gzip || zstd) && !incr.compress.has(hash)) {
+        incr.compress.set(hash, {
+          gzip: gzip ? view(gzip) : undefined,
+          zstd: zstd ? view(zstd) : undefined,
+        });
+      }
+      return [key, {
+        buffer: view(raw),
+        headers: headers,
+        hash: hash,
+      }];
+    }));
+    incr.out.script = new Map(meta.script);
+    incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => {
+      return [key, {
+        lastModified: m,
+        files: new Set(f),
+        outputs: new Set(o),
+      }];
+    }));
+    incr.sources = new Map(meta.sources.map(([key, ...value]) => [key, value]));
+    return incr;
+  }
+
+  toDisk(file = ".clover/incr.state") {
+    const buffer = this.serialize();
+    fs.writeFileSync(file, buffer);
+  }
+
+  static fromDisk(file = ".clover/incr.state"): Incremental {
+    try {
+      const buffer = fs.readFileSync(file);
+      return Incremental.fromSerialized(buffer);
+    } catch (err: any) {
+      if (err.code === "ENOENT") return new Incremental();
+      throw err;
+    }
   }
 
   async wait() {
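The state file written by `serialize` is a four-byte little-endian length prefix, a JSON metadata block, then concatenated blobs; every `View` is a `[start, end]` pair relative to the end of the JSON. A standalone reader sketch that mirrors `fromSerialized`:

```ts
// Layout: [u32 LE meta length][JSON meta][blob][blob]...
import * as fs from "node:fs";

const buf = fs.readFileSync(".clover/incr.state");
const metaLength = buf.readUInt32LE(0);
const meta = JSON.parse(buf.subarray(4, 4 + metaLength).toString("utf8"));
const view = ([start, end]: [number, number]) =>
  buf.subarray(4 + metaLength + start, 4 + metaLength + end);

// e.g. the raw bytes of the first serialized asset:
const [route, asset] = meta.asset[0];
console.log(route, view(asset.raw).byteLength, "bytes");
```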
@@ -171,18 +314,14 @@ export class Incremental {
   async flush() {
     ASSERT(!this.compressQueue.active);
     const writer = new BufferWriter();
+    // TODO: ensure all assets are actually compressed and not fake lying.
     const asset = Object.fromEntries(
       Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
         const raw = writer.write(buffer, hash);
         const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
         const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
         const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
-        return [key, {
-          raw,
-          gzip,
-          zstd,
-          headers,
-        }];
+        return [key, { raw, gzip, zstd, headers }];
       }),
     );
     await Promise.all([
@@ -230,10 +369,10 @@ class BufferWriter {
 
 export type View = [start: number, end: number];
 
-// Alongside this type is a byte buffer, containing all the assets.
 export interface BuiltAssetMap {
   [route: string]: BuiltAsset;
 }
+
 export interface BuiltAsset {
   raw: View;
   gzip: View;
@@ -249,13 +388,24 @@ export interface SerializedMeta {
     hash: string;
     headers: Record<string, string>;
   }]>;
-  script: [key: string, value: string][];
+  script: Array<[key: string, value: string]>;
+  invals: Array<[key: string, {
+    /** Modified */
+    m: number;
+    f: SourceId[];
+    o: ArtifactId[];
+  }]>;
+  sources: Array<[string, ...string[]]>;
 }
 
+const gzip = util.promisify(zlib.gzip);
+const zstd = util.promisify(zlib.zstdCompress);
+
 import * as fs from "#sitegen/fs";
 import * as zlib from "node:zlib";
 import * as util from "node:util";
 import { Queue } from "./queue.ts";
 import * as hot from "./hot.ts";
 import * as mime from "#sitegen/mime";
+import * as path from "node:path";
 import { Buffer } from "node:buffer";
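Storing `gzip`/`zstd` as `View`s means a server can answer requests with precompressed bytes and never touch zlib at request time. A hypothetical consumer of the flushed asset map, not part of this patch (per `flush`, either view may be null if compression hadn't run):

```ts
// Pick the best precompressed variant for a request's Accept-Encoding.
function selectEncoding(acceptEncoding: string, asset: BuiltAsset) {
  if (asset.zstd && acceptEncoding.includes("zstd")) {
    return { view: asset.zstd, encoding: "zstd" };
  }
  if (asset.gzip && acceptEncoding.includes("gzip")) {
    return { view: asset.gzip, encoding: "gzip" };
  }
  return { view: asset.raw, encoding: null };
}
```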
diff --git a/framework/lib/fs.ts b/framework/lib/fs.ts
index 4761f37..632093e 100644
--- a/framework/lib/fs.ts
+++ b/framework/lib/fs.ts
@@ -34,7 +34,7 @@ export function writeMkdirSync(file: string, contents: Buffer | string) {
 
 export function readDirRecOptionalSync(dir: string) {
   try {
-    return readdirSync(dir, { recursive: true, withFileTypes: true });
+    return readdirSync(dir, { recursive: true, encoding: "utf8" });
   } catch (err: any) {
     if (err.code === "ENOENT") return [];
     throw err;
diff --git a/readme.md b/readme.md
index 37bd508..e31405f 100644
--- a/readme.md
+++ b/readme.md
@@ -33,6 +33,11 @@ Included is `src`, which contains `paperclover.net`. Website highlights:
 
 ## Development
 
+minimum system requirements:
+- a cpu with at least 1 core.
+- random access memory.
+- windows 7 or later, macos, or linux operating system.
+
 ```
 npm install
diff --git a/src/q+a/components/QuestionForm.marko b/src/q+a/components/QuestionForm.marko
new file mode 100644
index 0000000..4b1ee59
--- /dev/null
+++ b/src/q+a/components/QuestionForm.marko
@@ -0,0 +1,13 @@
+
+