// `Incremental` contains multiple maps for the different parts of a site
// build, and tracks reused items across builds. It also handles emitting and
// updating the built site. This structure is self-contained and serializable.
//
// Tracking is simple: Files map to one or more 'source IDs', which map to one
// or more 'artifacts'. This two-layer approach allows many files (say a page +
// all its imports) to map to the build of a page, which produces an HTML file
// plus a list of scripts.
import { Buffer } from "node:buffer";

interface ArtifactMap {
  asset: Asset;
  script: string;
}
type AllArtifactMaps = {
  [K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
};
type ArtifactType = keyof ArtifactMap;

interface Asset {
  buffer: Buffer;
  headers: Record<string, string>;
  hash: string;
}
export interface PutBase {
  srcTag?: string; // deprecated
  srcId: string;
  key: string;
}
export interface Put<T extends ArtifactType> extends PutBase {
  type: T;
  value: ArtifactMap[T];
}
export interface Output {
  type: ArtifactType;
  key: string;
}

const gzip = util.promisify(zlib.gzip);
const zstd = util.promisify(zlib.zstdCompress);

export class Incremental {
  /** The generated artifacts */
  out: AllArtifactMaps = {
    asset: new Map(),
    script: new Map(),
  };
  /** Compressed resources, keyed by the asset's content hash */
  compress = new Map<string, Compressed>();
  compressQueue = new Queue({
    name: "Compress",
    maxJobs: 5,
    fn: this.compressImpl.bind(this),
    passive: true,
    getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`,
  });
  /** Tracking filesystem entries to `srcId` */
  files = new Map();
  srcIds = new Map();

  static fromSerialized() {
  }

  /**
   * Pack all artifacts into a single buffer: a u32 length prefix, the JSON
   * metadata (`SerializedMeta`), then the concatenated byte blob that the
   * metadata's `View` ranges index into.
   */
  serialize() {
    const writer = new BufferWriter();
    const asset = Array.from(
      this.out.asset,
      ([key, { buffer, hash, headers }]) => {
        const raw = writer.write(buffer, hash);
        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
        // Compressed variants get their own dedupe keys so they do not
        // collide with the raw buffer's entry in the writer.
        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zst") : null;
        return [key, {
          raw,
          gzip,
          zstd,
          hash,
          headers,
        }];
      },
    );
    const script = Array.from(this.out.script);
    const meta = Buffer.from(
      JSON.stringify({
        asset,
        script,
      }),
      "utf-8",
    );
    const lengthBuffer = Buffer.alloc(4);
    lengthBuffer.writeUInt32LE(meta.byteLength, 0);
    return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
  }

  serializeToDisk(file = ".clover/incr.state") {
    const buffer = this.serialize();
    fs.writeFileSync(file, buffer);
  }

  put<T extends ArtifactType>({
    srcId,
    type,
    key,
    value,
  }: Put<T>) {
    this.out[type].set(key, value);
  }

  /**
   * Hash the asset body, derive response headers, and queue gzip/zstd
   * compression once per unique content hash.
   */
  async putAsset(info: PutAsset) {
    const { body, headers, key } = info;
    const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body);
    const hash = Buffer.from(await crypto.subtle.digest("sha-1", buffer))
      .toString("hex");
    const value: Asset = {
      buffer,
      headers: {
        "Content-Type": headers?.["Content-Type"] ??
          mime.contentTypeFor(key),
        "ETag": JSON.stringify(hash),
        ...headers,
      },
      hash,
    };
    if (!this.compress.has(hash)) {
      const label = info.key;
      this.compress.set(hash, {
        zstd: undefined,
        gzip: undefined,
      });
      await Promise.all([
        this.compressQueue.add({ label, buffer, algo: "zstd", hash }),
        this.compressQueue.add({ label, buffer, algo: "gzip", hash }),
      ]);
    }
    return this.put({ ...info, type: "asset", value });
  }

  async compressImpl({ algo, buffer, hash }: CompressJob) {
    let out;
    switch (algo) {
      case "zstd":
        out = await zstd(buffer);
        break;
      case "gzip":
        out = await gzip(buffer, { level: 9 });
        break;
    }
    let entry = this.compress.get(hash);
    if (!entry) {
      this.compress.set(
        hash,
        entry = {
          zstd: undefined,
          gzip: undefined,
        },
      );
    }
    entry![algo] = out;
  }

  invalidate(srcId: string) {
  }

  /** Wait for the compression queue to drain. */
  async wait() {
    await this.compressQueue.done({ method: "stop" });
  }

  /**
   * Write the serving manifest (`.clover/static.json`) and the deduplicated
   * byte blob (`.clover/static.blob`) that its `View` ranges point into.
   */
  async flush() {
    const writer = new BufferWriter();
    const asset = Object.fromEntries(
      Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
        const raw = writer.write(buffer, hash);
        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
        // Same keying scheme as `serialize`: compressed variants must not
        // collide with the raw buffer in the dedupe map.
        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zst") : null;
        return [key, {
          raw,
          gzip,
          zstd,
          headers,
        }];
      }),
    );
    await Promise.all([
      fs.writeFile(".clover/static.json", JSON.stringify(asset)),
      fs.writeFile(".clover/static.blob", writer.get()),
    ]);
  }
}

export interface PutAsset extends PutBase {
  body: string | Buffer;
  headers?: Record<string, string>;
}

export interface Compressed {
  gzip?: Buffer;
  zstd?: Buffer;
}

export interface CompressJob {
  algo: "zstd" | "gzip";
  buffer: Buffer;
  label: string;
  hash: string;
}

/**
 * Appends buffers into one contiguous blob, deduplicating by hash. `write`
 * returns a `[start, end)` view into the final blob.
 */
class BufferWriter {
  size = 0;
  seen = new Map<string, View>();
  buffers: Buffer[] = [];

  write(buffer: Buffer, hash: string): View {
    let view = this.seen.get(hash);
    if (view) return view;
    view = [this.size, this.size += buffer.byteLength];
    this.seen.set(hash, view);
    this.buffers.push(buffer);
    return view;
  }

  get() {
    return Buffer.concat(this.buffers);
  }
}

export type View = [start: number, end: number];

// Alongside this type is a byte buffer, containing all the assets.
export interface BuiltAssetMap {
  [route: string]: BuiltAsset;
}

export interface BuiltAsset {
  raw: View;
  gzip: View;
  zstd: View;
  headers: Record<string, string>;
}

export interface SerializedMeta {
  asset: Array<[route: string, data: {
    raw: View;
    gzip: View | null;
    zstd: View | null;
    hash: string;
    headers: Record<string, string>;
  }]>;
  script: [key: string, value: string][];
}

function never(): never {
  throw new Error("Impossible");
}

import * as path from "node:path";
import * as fs from "./fs.ts";
import * as zlib from "node:zlib";
import * as util from "node:util";
import { Queue } from "./queue.ts";
import * as hot from "./hot.ts";
import * as mime from "./mime.ts";
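
// A minimal sketch of how a reader could decode the buffer produced by
// `serialize()` above. Illustrative only; the helper name `readSerialized`
// is hypothetical. It assumes the layout written by `serialize()`: a u32
// little-endian length prefix, the JSON `SerializedMeta` document, then the
// concatenated byte blob that each `View` indexes into.
function readSerialized(buffer: Buffer): { meta: SerializedMeta; blob: Buffer } {
  const metaLength = buffer.readUInt32LE(0);
  const meta: SerializedMeta = JSON.parse(
    buffer.subarray(4, 4 + metaLength).toString("utf-8"),
  );
  const blob = buffer.subarray(4 + metaLength);
  return { meta, blob };
}

// Resolving one asset's raw bytes from a `View` would then look like
// (assuming the `./fs.ts` wrapper exposes a `readFileSync`):
//   const { meta, blob } = readSerialized(fs.readFileSync(".clover/incr.state"));
//   const [route, data] = meta.asset[0];
//   const rawBytes = blob.subarray(...data.raw);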
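
// A minimal usage sketch, assuming a build step drives this class directly;
// the `srcId` and route values are made up for illustration.
async function exampleBuild(incr: Incremental) {
  // Record an asset; this hashes the body and queues gzip/zstd compression.
  await incr.putAsset({
    srcId: "page:/index",
    key: "/index.html",
    body: "<!doctype html><h1>hello</h1>",
    headers: { "Content-Type": "text/html" },
  });
  // Let the compression queue drain, then emit the serving manifest
  // (.clover/static.json) and the byte blob (.clover/static.blob).
  await incr.wait();
  await incr.flush();
}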