it was weird. i pressed delete on a subfolder, i think one of the pages.off folders i was using, and then nvim on windows 7 decided to delete every file in the directory. they weren't shredded from the space-time continuum, just marked deleted, but i still had to pay $80 for software that could see them. bleh! watching all my work, a little over a week of it, vanish was pretty heart-shattering. but i remembered that long ago, a close friend said i could call them whenever i was feeling sad. i finally took them up on that offer. the first time i've ever called someone for emotional support. but it's ok. i got it all back, and the site framework is better than ever. i'm gonna commit and push more often. the repo is private anyways.

// `Incremental` contains multiple maps for the different parts of a site
// build, and tracks reused items across builds. It also handles emitting and
// updating the built site. This structure is self-contained and serializable.
//
// Tracking is simple: files map to one or more 'source IDs', which map to one
// or more 'artifacts'. This two-layer approach allows many files (say a page +
// all its imports) to map to the build of a page, which produces an HTML file
// plus a list of scripts.

import { Buffer } from "node:buffer";

interface ArtifactMap {
  asset: Asset;
  script: string;
}
type AllArtifactMaps = {
  [K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
};
type ArtifactType = keyof ArtifactMap;

interface Asset {
  buffer: Buffer;
  headers: Record<string, string | undefined>;
  hash: string;
}

export interface PutBase {
  srcTag?: string; // deprecated
  srcId: string;
  key: string;
}

export interface Put<T extends ArtifactType> extends PutBase {
  type: T;
  value: ArtifactMap[T];
}

export interface Output {
  type: ArtifactType;
  key: string;
}
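
// Callback-style compressors from node:zlib, promisified. `zlib.zstdCompress`
// is a newer addition to Node, so this assumes a recent runtime.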
const gzip = util.promisify(zlib.gzip);
const zstd = util.promisify(zlib.zstdCompress);

export class Incremental {
  /** The generated artifacts */
  out: AllArtifactMaps = {
    asset: new Map(),
    script: new Map(),
  };
  /** Compressed resources */
  compress = new Map<string, Compressed>();
  compressQueue = new Queue<CompressJob, void>({
    name: "Compress",
    maxJobs: 5,
    fn: this.compressImpl.bind(this),
    passive: true,
    getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`,
  });
  /** Tracking filesystem entries to `srcId` */
  files = new Map<string, hot.FileStat>();
  srcIds = new Map<string, Output[]>();

  // TODO: parse the buffer layout produced by `serialize`.
  static fromSerialized() {
  }
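  /**
   * Pack the whole state into one buffer: a little-endian u32 length
   * prefix, the JSON metadata (see `SerializedMeta`), then every unique
   * asset blob back to back. The `View` ranges in the metadata point
   * into that blob section.
   */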
  serialize() {
    const writer = new BufferWriter();

    const asset = Array.from(
      this.out.asset,
      ([key, { buffer, hash, headers }]) => {
        const raw = writer.write(buffer, hash);
        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
        // Compressed variants need their own dedup keys, or the writer
        // would hand back the raw buffer's view for the same hash.
        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
        return [key, {
          raw,
          gzip,
          zstd,
          hash,
          headers,
        }];
      },
    );
    const script = Array.from(this.out.script);

    const meta = Buffer.from(
      JSON.stringify({
        asset,
        script,
      }),
      "utf-8",
    );

    const lengthBuffer = Buffer.alloc(4);
    lengthBuffer.writeUInt32LE(meta.byteLength, 0);

    // The length prefix goes first so a reader can tell where the JSON
    // metadata ends and the blob data begins.
    return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
  }
  serializeToDisk(file = ".clover/incr.state") {
    const buffer = this.serialize();
    fs.writeFileSync(file, buffer);
  }

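  /**
   * Record a built artifact under its type and key. `srcId` is accepted
   * for future invalidation tracking but not used yet.
   */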
  put<T extends ArtifactType>({
    srcId,
    type,
    key,
    value,
  }: Put<T>) {
    this.out[type].set(key, value);
  }

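  /**
   * Hash the asset body, fill in default `Content-Type` and `ETag`
   * headers, and queue gzip + zstd compression once per unique hash.
   */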
  async putAsset(info: PutAsset) {
    const { body, headers, key } = info;
    const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body);
    const hash = Buffer.from(await crypto.subtle.digest("sha-1", buffer))
      .toString("hex");
    const value: Asset = {
      buffer,
      headers: {
        "Content-Type": headers?.["Content-Type"] ?? mime.contentTypeFor(key),
        "ETag": JSON.stringify(hash),
        ...headers,
      },
      hash,
    };
    if (!this.compress.has(hash)) {
      const label = info.key;
      this.compress.set(hash, {
        zstd: undefined,
        gzip: undefined,
      });
      await Promise.all([
        this.compressQueue.add({ label, buffer, algo: "zstd", hash }),
        this.compressQueue.add({ label, buffer, algo: "gzip", hash }),
      ]);
    }
    return this.put({ ...info, type: "asset", value });
  }

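  /** Queue worker: compress the buffer and store the result on its entry. */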
  async compressImpl({ algo, buffer, hash }: CompressJob) {
    let out;
    switch (algo) {
      case "zstd":
        out = await zstd(buffer);
        break;
      case "gzip":
        out = await gzip(buffer, { level: 9 });
        break;
      default:
        never();
    }
    let entry = this.compress.get(hash);
    if (!entry) {
      this.compress.set(
        hash,
        entry = {
          zstd: undefined,
          gzip: undefined,
        },
      );
    }
    entry[algo] = out;
  }

  // TODO: drop artifacts derived from this srcId. not implemented yet.
  invalidate(srcId: string) {
  }

  async wait() {
    await this.compressQueue.done({ method: "stop" });
  }
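  /**
   * Emit the built site: `static.json` maps each route to headers plus
   * `View` byte ranges, and `static.blob` is the concatenated bytes those
   * views index into.
   */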
  async flush() {
    const writer = new BufferWriter();
    const asset = Object.fromEntries(
      Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
        const raw = writer.write(buffer, hash);
        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
        // Same as in `serialize`: distinct dedup keys per variant.
        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
        return [key, {
          raw,
          gzip,
          zstd,
          headers,
        }];
      }),
    );
    await Promise.all([
      fs.writeFile(".clover/static.json", JSON.stringify(asset)),
      fs.writeFile(".clover/static.blob", writer.get()),
    ]);
  }
}

export interface PutAsset extends PutBase {
  body: string | Buffer;
  headers?: Record<string, string | undefined>;
}

export interface Compressed {
  gzip?: Buffer;
  zstd?: Buffer;
}

export interface CompressJob {
  algo: "zstd" | "gzip";
  buffer: Buffer;
  label: string;
  hash: string;
}

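/**
 * Concatenates buffers into one flat blob, deduplicating by key: writing
 * the same key twice returns the original [start, end) view instead of
 * appending again.
 */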
class BufferWriter {
  size = 0;
  seen = new Map<string, View>();
  buffers: Buffer[] = [];

  write(buffer: Buffer, hash: string): View {
    let view = this.seen.get(hash);
    if (view) return view;
    view = [this.size, this.size += buffer.byteLength];
    this.seen.set(hash, view);
    this.buffers.push(buffer);
    return view;
  }

  get() {
    return Buffer.concat(this.buffers);
  }
}

export type View = [start: number, end: number];

// Alongside this type is a byte buffer, containing all the assets.
export interface BuiltAssetMap {
  [route: string]: BuiltAsset;
}
export interface BuiltAsset {
  raw: View;
  gzip: View;
  zstd: View;
  headers: Record<string, string>;
}

export interface SerializedMeta {
  asset: Array<[route: string, data: {
    raw: View;
    gzip: View | null;
    zstd: View | null;
    hash: string;
    headers: Record<string, string>;
  }]>;
  script: [key: string, value: string][];
}

/** Exhaustiveness helper: reaching this at runtime is a bug. */
function never(): never {
  throw new Error("Impossible");
}

import * as path from "node:path";
import * as fs from "./fs.ts";
import * as zlib from "node:zlib";
import * as util from "node:util";
import { Queue } from "./queue.ts";
import * as hot from "./hot.ts";
import * as mime from "./mime.ts";
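
for posterity, here's roughly how the rest of the build would drive this class. this is just a sketch i'm writing out, not code from the repo: the file name `incremental.ts`, the source IDs, routes, and bodies are all made up, but every call matches the class above.

import { Incremental } from "./incremental.ts"; // hypothetical file name

const incr = new Incremental();

// register a page's output; putAsset hashes the body, fills in headers,
// and queues gzip/zstd compression on the passive queue
await incr.putAsset({
  srcId: "page:index", // made-up source ID
  key: "/",
  body: "<!doctype html><h1>hello</h1>",
});

// scripts are plain strings in the artifact map
incr.put({
  srcId: "page:index",
  type: "script",
  key: "index",
  value: "console.log('hydrated');",
});

await incr.wait(); // let the compression queue drain
await incr.flush(); // writes .clover/static.json + .clover/static.blob
incr.serializeToDisk(); // snapshot for the next incremental build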