// Incremental contains multiple maps for the different kinds
// of Artifact, each storing the list of source files that were
// used to produce it. When files change, Incremental sees that
// the `mtime` is newer, and purges the referenced artifacts.
type SourceId = string; // relative to project root, e.g. 'src/global.css'
type ArtifactId = string; // `${ArtifactType}\0${string}`
type Sha1Id = string; // Sha1 hex string

// -- artifact types --
interface ArtifactMap {
  /* An asset (serve with "#sitegen/asset") */
  asset: Asset;
  /* The bundled text of a '.client.ts' script */
  // TODO: track imports this has into `asset`
  script: string;
  /* The bundled style tag contents. Keyed by 'css.styleKey' */
  style: string;
  /* Metadata about a static page */
  pageMetadata: PageMetadata;
  /* Metadata about a dynamic view */
  viewMetadata: ViewMetadata;
}
type ArtifactKind = keyof ArtifactMap;

export interface Asset {
  buffer: Buffer;
  headers: Record<string, string | undefined>;
  hash: string;
}

/**
 * This interface intentionally omits the *contents*
 * of its scripts and styles for fine-grained rebuilds.
 */
export interface PageMetadata {
  html: string;
  meta: string;
  cssImports: string[];
  theme: css.Theme;
  clientRefs: string[];
}

/**
 * Like a page, this intentionally omits resources,
 * but additionally omits the bundled server code.
 */
export interface ViewMetadata {
  file: string;
  // staticMeta: string | null; TODO
  cssImports: string[];
  theme: css.Theme;
  clientRefs: string[];
  hasLayout: boolean;
}

// -- incremental support types --
export interface PutBase {
  sources: SourceId[];
  key: string;
}

export interface Put<T extends ArtifactKind> extends PutBase {
  kind: T;
  value: ArtifactMap[T];
}

export interface Invalidations {
  lastModified: number;
  outputs: Set<ArtifactId>;
  files: Set<SourceId>;
}

export class Incremental {
  /** The generated artifacts */
  out: {
    [K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
  } = {
    asset: new Map(),
    script: new Map(),
    style: new Map(),
    pageMetadata: new Map(),
    viewMetadata: new Map(),
  };
  /** Tracking filesystem entries to `SourceId` */
  invals = new Map<SourceId, Invalidations>();
  /** Tracking output keys to source files */
  sources = new Map<ArtifactId, SourceId[]>();
  /** Compressed resources, keyed by content hash */
  compress = new Map<Sha1Id, Compressed>();
  compressQueue = new Queue({
    name: "Compress",
    maxJobs: 5,
    fn: this.compressImpl.bind(this),
    passive: true,
    getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`,
  });

  getArtifact<T extends ArtifactKind>(kind: T, key: string) {
    return this.out[kind].get(key);
  }

  hasArtifact<T extends ArtifactKind>(kind: T, key: string) {
    return this.out[kind].has(key);
  }

  sourcesFor(kind: ArtifactKind, key: string) {
    return UNWRAP(this.sources.get(kind + "\0" + key));
  }

  /**
   * Put built artifacts into the incremental cache. The entry points
   * used to build this must be provided. 'Incremental' will trace JS
   * imports and file modification times tracked by 'hot.ts'.
   */
  put<T extends ArtifactKind>({
    sources,
    kind,
    key,
    value,
  }: Put<T>) {
    console.log("put " + kind + ": " + key);
    this.out[kind].set(key, value);

    // Update sources information
    ASSERT(sources.length > 0, "Missing sources for " + kind + " " + key);
    sources = sources.map((src) => path.normalize(src));
    const fullKey = `${kind}\0${key}`;
    const prevSources = this.sources.get(fullKey);
    const newSources = new Set(
      sources.map((file) =>
        path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
      ),
    );
    this.sources.set(fullKey, [...newSources]);
    // Unlink sources that no longer produce this artifact.
    for (const source of prevSources ?? []) {
      if (sources.includes(source)) {
        newSources.delete(source);
        continue;
      }
      const invals = UNWRAP(this.invals.get(source));
      ASSERT(invals.outputs.has(fullKey));
      invals.outputs.delete(fullKey);
    }
    // Use reflection from the plugin system to get imports.
    for (const source of newSources) {
      const invals = this.#getOrInitInvals(source);
      invals.outputs.add(fullKey);
      this.#followImports(source);
    }
  }
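  // Illustrative usage, not part of this file: `bundledText` stands in for
  // the output of a hypothetical bundling step on 'src/index.client.ts'.
  // Registering it ties the artifact to its source, so a later `updateStat`
  // on that file purges the cache entry.
  //
  //   incr.put({
  //     kind: "script",
  //     key: "index",
  //     value: bundledText,
  //     sources: ["src/index.client.ts"],
  //   });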
  // TODO: this doesn't remove entries when the underlying file disappears
  #getOrInitInvals(source: string) {
    let invals = this.invals.get(source);
    if (!invals) {
      const g = hot.getFsGraph().get(source);
      const lastModified = g?.lastModified ??
        fs.statSync(path.resolve(hot.projectRoot, source)).mtimeMs;
      this.invals.set(
        source,
        invals = {
          lastModified,
          files: new Set(),
          outputs: new Set(),
        },
      );
    }
    return invals;
  }

  #followImports(file: string) {
    const graph = hot.getFsGraph();
    const stat = graph.get(file);
    if (!stat) return;
    for (const i of stat.imports) {
      const invals = this.#getOrInitInvals(i);
      invals.files.add(file);
      this.#followImports(i);
    }
  }

  async statAllFiles() {
    for (const file of this.invals.keys()) {
      const mtime = fs.statSync(file).mtimeMs;
      this.updateStat(file, mtime);
    }
  }

  updateStat(file: string, newLastModified: number) {
    file = path.relative(hot.projectRoot, file);
    const stat = this.invals.get(file);
    ASSERT(stat, "Updated stat on untracked file " + file);
    const hasUpdate = stat.lastModified < newLastModified;
    if (hasUpdate) {
      // Invalidate this file and everything that transitively imports it.
      console.info(file + " updated");
      const invalidQueue = [file];
      let currentInvalid;
      while (currentInvalid = invalidQueue.pop()) {
        const invalidations = this.invals.get(currentInvalid);
        ASSERT(
          invalidations,
          "No reason to track file '" + currentInvalid +
            "' if it has no invalidations",
        );
        const { files, outputs } = invalidations;
        for (const out of outputs) {
          const [kind, artifactKey] = out.split("\0");
          this.out[kind as ArtifactKind].delete(artifactKey);
          console.log("stale " + kind + ": " + artifactKey);
        }
        invalidQueue.push(...files);
      }
    }
    stat.lastModified = newLastModified;
    return hasUpdate;
  }

  async putAsset(info: PutAsset) {
    const { body, headers, key } = info;
    const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body);
    const hash = Buffer.from(await crypto.subtle.digest("sha-1", buffer))
      .toString("hex");
    const value: Asset = {
      buffer,
      headers: {
        "Content-Type": headers?.["Content-Type"] ?? mime.contentTypeFor(key),
        "ETag": JSON.stringify(hash),
        ...headers,
      },
      hash,
    };
    const a = this.put({ ...info, kind: "asset", value });
    if (!this.compress.has(hash)) {
      const label = info.key;
      this.compress.set(hash, {
        zstd: undefined,
        gzip: undefined,
      });
      this.compressQueue.add({ label, buffer, algo: "zstd", hash });
      this.compressQueue.add({ label, buffer, algo: "gzip", hash });
    }
    return a;
  }

  async compressImpl({ algo, buffer, hash }: CompressJob) {
    let out;
    switch (algo) {
      case "zstd":
        out = await zstd(buffer);
        break;
      case "gzip":
        out = await gzip(buffer, { level: 9 });
        break;
    }
    let entry = this.compress.get(hash);
    if (!entry) {
      this.compress.set(
        hash,
        entry = {
          zstd: undefined,
          gzip: undefined,
        },
      );
    }
    entry![algo] = out;
  }
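  // On-disk layout produced by `serialize` below (and read back by
  // `fromSerialized`):
  //
  //   [u32le metaLength][JSON-encoded SerializedMeta][concatenated blobs]
  //
  // The JSON section stores `View` tuples ([start, end] byte offsets,
  // relative to the end of the JSON section) instead of the blobs
  // themselves, so asset buffers can be sliced back out of the tail with
  // `subarray` without copying.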
  serialize() {
    const writer = new BufferWriter();

    const asset = Array.from(
      this.out.asset,
      ([key, { buffer, hash, headers }]) => {
        const raw = writer.write(buffer, hash);
        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
        return [key, {
          raw,
          gzip,
          zstd,
          hash,
          headers: headers as Record<string, string>,
        }] satisfies SerializedMeta["asset"][0];
      },
    );
    const script = Array.from(this.out.script);
    const invals = Array.from(this.invals, ([key, value]) => {
      const { lastModified, files, outputs } = value;
      return [key, {
        m: lastModified,
        f: [...files],
        o: [...outputs],
      }] satisfies SerializedMeta["invals"][0];
    });
    const sources = Array.from(this.sources, ([key, value]) => {
      return [key, ...value] as [string, ...string[]];
    });
    const json = {
      asset,
      script,
      invals,
      sources,
    } satisfies SerializedMeta;
    const meta = Buffer.from(JSON.stringify(json), "utf-8");

    const lengthBuffer = Buffer.alloc(4);
    lengthBuffer.writeUInt32LE(meta.byteLength, 0);

    return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
  }

  static fromSerialized(buffer: Buffer): Incremental {
    const metaLength = buffer.readUInt32LE(0);
    const meta: SerializedMeta = JSON.parse(
      buffer.subarray(4, 4 + metaLength).toString("utf8"),
    );
    const view = ([start, end]: View) =>
      buffer.subarray(4 + metaLength + start, 4 + metaLength + end);

    const incr = new Incremental();
    incr.out.asset = new Map(meta.asset.map(([key, value]) => {
      const { hash, raw, gzip, zstd, headers } = value;
      if ((gzip || zstd) && !incr.compress.has(hash)) {
        incr.compress.set(hash, {
          gzip: gzip ? view(gzip) : undefined,
          zstd: zstd ? view(zstd) : undefined,
        });
      }
      return [key, {
        buffer: view(raw),
        headers: headers,
        hash: hash,
      }];
    }));
    incr.out.script = new Map(meta.script);
    incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => {
      return [key, {
        lastModified: m,
        files: new Set(f),
        outputs: new Set(o),
      }];
    }));
    incr.sources = new Map(meta.sources.map(([key, ...value]) => [key, value]));
    return incr;
  }

  toDisk(file = ".clover/incr.state") {
    const buffer = this.serialize();
    fs.writeFileSync(file, buffer);
  }

  static fromDisk(file = ".clover/incr.state"): Incremental {
    try {
      const buffer = fs.readFileSync(file);
      return Incremental.fromSerialized(buffer);
    } catch (err: any) {
      if (err.code === "ENOENT") return new Incremental();
      throw err;
    }
  }

  async wait() {
    await this.compressQueue.done({ method: "success" });
  }

  async flush() {
    ASSERT(!this.compressQueue.active);
    const writer = new BufferWriter();
    // TODO: ensure all assets are actually compressed and not fake lying.
    const asset = Object.fromEntries(
      Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
        const raw = writer.write(buffer, hash);
        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
        return [key, { raw, gzip, zstd, headers }];
      }),
    );
    await Promise.all([
      fs.writeFile(".clover/static.json", JSON.stringify(asset)),
      fs.writeFile(".clover/static.blob", writer.get()),
    ]);
  }
}
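// A sketch of the intended lifecycle (the rebuild step in the middle is
// hypothetical; only the Incremental calls are from this file):
//
//   const incr = Incremental.fromDisk();  // empty state on first run
//   await incr.statAllFiles();            // purge artifacts of changed files
//   // ... rebuild whatever is missing, calling put()/putAsset() ...
//   await incr.wait();                    // let background compression finish
//   incr.toDisk();                        // persist cache for the next run
//   await incr.flush();                   // emit static.json + static.blob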
export interface PutAsset extends PutBase {
  body: string | Buffer;
  headers?: Record<string, string | undefined>;
}

export interface Compressed {
  gzip?: Buffer;
  zstd?: Buffer;
}

export interface CompressJob {
  algo: "zstd" | "gzip";
  buffer: Buffer;
  label: string;
  hash: string;
}

class BufferWriter {
  size = 0;
  seen = new Map<string, View>();
  buffers: Buffer[] = [];

  /** Appends a buffer, deduplicated by hash, and returns its [start, end). */
  write(buffer: Buffer, hash: string): View {
    let view = this.seen.get(hash);
    if (view) return view;
    view = [this.size, this.size += buffer.byteLength];
    this.seen.set(hash, view);
    this.buffers.push(buffer);
    return view;
  }

  get() {
    return Buffer.concat(this.buffers);
  }
}

export type View = [start: number, end: number];

export interface BuiltAssetMap {
  [route: string]: BuiltAsset;
}

export interface BuiltAsset {
  raw: View;
  gzip: View;
  zstd: View;
  headers: Record<string, string>;
}

export interface SerializedMeta {
  asset: Array<[route: string, data: {
    raw: View;
    gzip: View | null;
    zstd: View | null;
    hash: string;
    headers: Record<string, string>;
  }]>;
  script: Array<[key: string, value: string]>;
  invals: Array<[key: string, {
    /** Last modified */
    m: number;
    f: SourceId[];
    o: ArtifactId[];
  }]>;
  sources: Array<[string, ...string[]]>;
}

const gzip = util.promisify(zlib.gzip);
const zstd = util.promisify(zlib.zstdCompress);

import * as fs from "#sitegen/fs";
import * as zlib from "node:zlib";
import * as util from "node:util";
import { Queue } from "./queue.ts";
import * as hot from "./hot.ts";
import * as mime from "#sitegen/mime";
import * as path from "node:path";
import { Buffer } from "node:buffer";
import * as css from "./css.ts";