// Incremental contains multiple maps for the different kinds
// of Artifact, which contain a list of source files which
// were used to produce it. When files change, Incremental sees
// that the `mtime` is newer, and purges the referenced artifacts.

type SourceId = string; // relative to project root, e.g. 'src/global.css'
type ArtifactId = string; // `${ArtifactType}\0${string}`
type Sha1Id = string; // Sha1 hex string

// -- artifact types --
interface ArtifactMap {
  /* An asset (serve with "#sitegen/asset") */
  asset: Asset;
  /* The bundled text of a '.client.ts' script */
  // TODO: track imports this has into `asset`
  script: string;
  /* The bundled style tag contents. Keyed by 'css.styleKey' */
  style: string;
  /* Metadata about a static page */
  pageMetadata: PageMetadata;
  /* Metadata about a dynamic view */
  viewMetadata: ViewMetadata;
  /* Cached '.marko' server compilation */
  serverMarko: hot.MarkoCacheEntry;
  /* Backend source code, pre-replacement. Keyed by platform type. */
  backendBundle: BackendBundle;
  /* One file in the backend receives post-processing. */
  backendReplace: Buffer;
}
type ArtifactKind = keyof ArtifactMap;

/* Automatic path tracing is performed to make it so that
 * specifying 'sources: [file]' refers to it and everything it imports.
 * These kinds do not have that behavior.
 */
const exactDependencyKinds = ["serverMarko"];

export interface Asset {
  buffer: Buffer;
  headers: Record<string, string | undefined>;
  hash: string;
}

/**
 * This interface intentionally omits the *contents*
 * of its scripts and styles for fine-grained rebuilds.
 */
export interface PageMetadata {
  html: string;
  meta: string;
  cssImports: string[];
  theme: css.Theme;
  clientRefs: string[];
}

/**
 * Like a page, this intentionally omits resources,
 * but additionally omits the bundled server code.
 */
export interface ViewMetadata {
  file: string;
  // staticMeta: string | null; TODO
  cssImports: string[];
  theme: css.Theme;
  clientRefs: string[];
  hasLayout: boolean;
}

export interface BackendBundle {
  magicWord: string;
  fileWithMagicWord: string | null;
  files: Record<string, Buffer>;
}

// -- incremental support types --
export interface PutBase {
  sources: SourceId[];
  key: string;
}

export interface Put<T extends ArtifactKind> extends PutBase {
  kind: T;
  value: ArtifactMap[T];
}

export interface Invalidations {
  lastModified: number;
  outputs: Set<ArtifactId>;
  files: Set<SourceId>;
}
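
// A minimal sketch of one build round, assuming a caller such as the site
// generator's build entry point. The artifact key, source path, and
// 'compiledCssText' below are hypothetical, not part of this module:
//
//   const incr = Incremental.fromDisk();
//   await incr.statAllFiles(); // purge artifacts whose sources changed
//   if (!incr.hasArtifact("style", "main")) {
//     incr.put({
//       kind: "style",
//       key: "main",
//       sources: ["src/global.css"],
//       value: compiledCssText,
//     });
//   }
//   incr.shake(); // drop artifacts nothing referenced this round
//   incr.toDisk(); // persist for the next build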

export class Incremental {
  /** The generated artifacts */
  out: {
    [K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
  } = {
    asset: new Map(),
    script: new Map(),
    style: new Map(),
    pageMetadata: new Map(),
    viewMetadata: new Map(),
    serverMarko: new Map(),
    backendBundle: new Map(),
    backendReplace: new Map(),
  };
  /** Tracking filesystem entries to `srcId` */
  invals = new Map<SourceId, Invalidations>();
  /** Tracking output keys to files */
  sources = new Map<ArtifactId, SourceId[]>();

  /** Compressed resources */
  compress = new Map<Sha1Id, Compressed>();
  compressQueue = new Queue<CompressJob, void>({
    name: "Compress",
    maxJobs: 5,
    fn: this.compressImpl.bind(this),
    passive: true,
    getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`,
  });

  /** Reset at the end of each update */
  round = {
    inserted: new Set<ArtifactId>(),
    referenced: new Set<ArtifactId>(),
  };

  getArtifact<T extends ArtifactKind>(kind: T, key: string) {
    this.round.referenced.add(`${kind}\0${key}`);
    return this.out[kind].get(key);
  }

  hasArtifact(kind: ArtifactKind, key: string) {
    return this.getArtifact(kind, key) != null;
  }

  sourcesFor(kind: ArtifactKind, key: string) {
    return UNWRAP(
      this.sources.get(kind + "\0" + key),
      `No artifact '${kind}' '${key}'`,
    );
  }

  shake() {
    const toPublic = (str: string) => {
      const [kind, key] = str.split("\0");
      return { kind: kind as ArtifactKind, key };
    };
    const inserted = Array.from(this.round.inserted, toPublic);
    const referenced = Array.from(this.round.referenced, toPublic);
    const unreferenced: { kind: ArtifactKind; key: string }[] = [];

    for (const kind in this.out) {
      const map = this.out[kind as keyof typeof this.out];
      if (!map) continue;
      for (const key of map.keys()) {
        if (!this.round.referenced.has(`${kind}\0${key}`)) {
          unreferenced.push({ kind: kind as ArtifactKind, key });
          this.out[kind as ArtifactKind].delete(key);
        }
      }
    }

    this.round.inserted.clear();
    this.round.referenced.clear();

    return { inserted, referenced, unreferenced };
  }

  /*
   * Put built artifacts into the incremental cache. The entry points
   * used to build this must be provided. 'Incremental' will trace JS
   * imports and file modification times tracked by 'hot.ts'.
   */
  put<T extends ArtifactKind>({
    sources,
    kind,
    key,
    value,
  }: Put<T>) {
    // These three invariants affect incremental accuracy.
    if (this.round.inserted.has(`${kind}\0${key}`)) {
      console.error(
        `Artifact ${kind}:${key} was inserted multiple times in the same round!`,
      );
    } else if (!this.round.referenced.has(`${kind}\0${key}`)) {
      console.error(
        `Artifact ${kind}:${key} was inserted without checking if (!hasArtifact())`,
      );
    } else if (this.out[kind].has(key)) {
      console.error(
        `Artifact ${kind}:${key} is not stale, but overwritten.`,
      );
    }

    this.out[kind].set(key, value);

    this.round.inserted.add(`${kind}\0${key}`);

    // Update sources information
    ASSERT(sources.length > 0, "Missing sources for " + kind + " " + key);
    sources = sources.map((src) => path.normalize(src));
    const fullKey = `${kind}\0${key}`;
    const prevSources = this.sources.get(fullKey);
    const newSources = new Set(
      sources.map((file) =>
        path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
      ),
    );
    this.sources.set(fullKey, [...newSources]);
    for (const source of prevSources ?? []) {
      if (sources.includes(source)) {
        newSources.delete(source);
        continue;
      }
      const invals = UNWRAP(this.invals.get(source));
      ASSERT(invals.outputs.has(fullKey));
      invals.outputs.delete(fullKey);
    }
    // Use reflection from the plugin system to get imports.
    for (const source of newSources) {
      const invals = this.#getOrInitInvals(source);
      invals.outputs.add(fullKey);
      this.#followImports(source);
    }
  }

  // TODO: this doesn't remove stuff when it disappears
  #getOrInitInvals(source: string) {
    let invals = this.invals.get(source);
    if (!invals) {
      const lastModified = hot.getFileStat(source)?.lastModified ??
        fs.statSync(path.resolve(hot.projectRoot, source)).mtimeMs;
      this.invals.set(
        source,
        invals = {
          lastModified,
          files: new Set(),
          outputs: new Set(),
        },
      );
    }
    return invals;
  }

  #followImports(file: string) {
    const stat = hot.getFileStat(file);
    if (!stat) return;
    for (const i of stat.imports) {
      const invals = this.#getOrInitInvals(i);
      invals.files.add(file);
      this.#followImports(i);
    }
  }

  async statAllFiles() {
    for (const file of this.invals.keys()) {
      try {
        const mtime = fs.statSync(file).mtimeMs;
        this.updateStat(file, mtime);
      } catch (err) {
      }
    }
  }

  updateStat(file: string, newLastModified: number | null) {
    file = path.relative(hot.projectRoot, file);
    const stat = this.invals.get(file);
    ASSERT(stat, "Updated stat on untracked file " + file);
    const hasUpdate = !newLastModified || stat.lastModified < newLastModified;
    if (hasUpdate) {
      // Invalidate
      console.info(file + " " + (newLastModified ? "updated" : "deleted"));
      hot.unload(file);
      const invalidQueue = [file];
      let currentInvalid;
      while (currentInvalid = invalidQueue.pop()) {
        const invalidations = this.invals.get(currentInvalid);
        ASSERT(
          invalidations,
          "No reason to track file '" + currentInvalid +
            "' if it has no invalidations",
        );
        const { files, outputs } = invalidations;
        for (const out of outputs) {
          const [kind, artifactKey] = out.split("\0");
          this.out[kind as ArtifactKind].delete(artifactKey);
        }
        invalidQueue.push(...files);
      }
    }
    if (newLastModified) {
      stat.lastModified = newLastModified;
    } else {
      this.invals.delete(file);
    }
    return hasUpdate;
  }
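
  // Worked example of the invalidation cascade above (the file names and the
  // artifact key are hypothetical): suppose 'put' recorded 'src/index.marko'
  // as a source of 'pageMetadata\0/', and import tracing linked
  // 'src/global.css' back to 'src/index.marko'. Touching 'src/global.css'
  // then makes updateStat walk files -> outputs: it reaches 'src/index.marko'
  // and deletes its 'pageMetadata' artifact, so the next build regenerates it.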

  async putAsset(info: PutAsset) {
    const { body, headers, key } = info;
    const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body);
    const hash = Buffer.from(await crypto.subtle.digest("sha-1", buffer))
      .toString("hex");
    const value: Asset = {
      buffer,
      headers: {
        "Content-Type": headers?.["Content-Type"] ?? mime.contentTypeFor(key),
        "ETag": JSON.stringify(hash),
        ...headers,
      },
      hash,
    };
    const a = this.put({ ...info, kind: "asset", value });
    if (!this.compress.has(hash)) {
      const label = info.key;
      this.compress.set(hash, {
        zstd: undefined,
        gzip: undefined,
      });
      this.compressQueue.add({ label, buffer, algo: "zstd", hash });
      this.compressQueue.add({ label, buffer, algo: "gzip", hash });
    }
    return a;
  }

  async compressImpl({ algo, buffer, hash }: CompressJob) {
    let out;
    switch (algo) {
      case "zstd":
        out = await zstd(buffer);
        break;
      case "gzip":
        out = await gzip(buffer, { level: 9 });
        break;
    }
    let entry = this.compress.get(hash);
    if (!entry) {
      this.compress.set(
        hash,
        entry = {
          zstd: undefined,
          gzip: undefined,
        },
      );
    }
    entry![algo] = out;
  }

  serialize() {
    const writer = new BufferWriter();

    // -- artifact --
    const asset = Array.from(
      this.out.asset,
      ([key, { buffer, hash, headers }]) => {
        const raw = writer.write(buffer, hash);
        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
        return [key, {
          raw,
          gzip,
          zstd,
          hash,
          headers: headers as Record<string, string>,
        }] satisfies SerializedMeta["asset"][0];
      },
    );
    const script = Array.from(this.out.script);
    const style = Array.from(this.out.style);
    const pageMetadata = Array.from(this.out.pageMetadata);
    const viewMetadata = Array.from(this.out.viewMetadata);
    const serverMarko = Array.from(this.out.serverMarko);
    const backendBundle = Array.from(this.out.backendBundle, ([k, v]) => {
      return [k, {
        magicWord: v.magicWord,
        fileWithMagicWord: v.fileWithMagicWord,
        files: Object.entries(v.files).map(
          ([file, contents]) => [
            file,
            writer.write(contents, "backendBundle" + k + ":" + file),
          ],
        ),
      }] satisfies SerializedMeta["backendBundle"][0];
    });
    const backendReplace = Array.from(
      this.out.backendReplace,
      ([k, v]) =>
        [
          k,
          writer.write(v, "backendReplace" + k),
        ] satisfies SerializedMeta["backendReplace"][0],
    );

    // -- incremental metadata --
    const invals = Array.from(this.invals, ([key, value]) => {
      const { lastModified, files, outputs } = value;
      return [key, {
        m: lastModified,
        f: [...files],
        o: [...outputs],
      }] satisfies SerializedMeta["invals"][0];
    });
    const sources = Array.from(this.sources, ([key, value]) => {
      return [key, ...value] as [string, ...string[]];
    });
    const json = {
      asset,
      script,
      invals,
      sources,
      style,
      pageMetadata,
      viewMetadata,
      serverMarko,
      backendBundle,
      backendReplace,
    } satisfies SerializedMeta;
    const meta = Buffer.from(JSON.stringify(json), "utf-8");

    const lengthBuffer = Buffer.alloc(4);
    lengthBuffer.writeUInt32LE(meta.byteLength, 0);

    return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
  }
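
  // Rough sketch of the buffer layout produced above and parsed by
  // 'fromSerialized' below:
  //
  //   [ u32-LE meta length ][ JSON SerializedMeta ][ blob0 | blob1 | ... ]
  //
  // Each 'View' in the metadata is a [start, end] pair of byte offsets into
  // the blob region (i.e. relative to 4 + meta length).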

  static fromSerialized(buffer: Buffer): Incremental {
    const metaLength = buffer.readUint32LE(0);
    const meta: SerializedMeta = JSON.parse(
      buffer.subarray(4, 4 + metaLength).toString("utf8"),
    );
    const view = ([start, end]: View) =>
      buffer.subarray(4 + metaLength + start, 4 + metaLength + end);

    const incr = new Incremental();
    incr.out = {
      asset: new Map(meta.asset.map(([key, value]) => {
        const { hash, raw, gzip, zstd, headers } = value;
        if ((gzip || zstd) && !incr.compress.has(hash)) {
          incr.compress.set(hash, {
            gzip: gzip ? view(gzip) : undefined,
            zstd: zstd ? view(zstd) : undefined,
          });
        }
        return [key, {
          buffer: view(raw),
          headers: headers,
          hash: hash,
        }];
      })),
      script: new Map(meta.script),
      style: new Map(meta.style),
      pageMetadata: new Map(meta.pageMetadata),
      viewMetadata: new Map(meta.viewMetadata),
      serverMarko: new Map(meta.serverMarko),
      backendBundle: new Map(meta.backendBundle.map(([key, value]) => {
        return [key, {
          magicWord: value.magicWord,
          fileWithMagicWord: value.fileWithMagicWord,
          files: Object.fromEntries(
            value.files.map(([file, contents]) => [file, view(contents)]),
          ),
        }];
      })),
      backendReplace: new Map(
        meta.backendReplace.map(([key, contents]) => [key, view(contents)]),
      ),
    };
    incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => {
      return [key, {
        lastModified: m,
        files: new Set(f),
        outputs: new Set(o),
      }];
    }));
    incr.sources = new Map(meta.sources.map(([key, ...value]) => [key, value]));
    return incr;
  }

  /*
   * Move the cached (server) marko transpilations from this incremental
   * into the running process.
   */
  loadMarkoCache() {
    hot.markoCache.clear();
    for (const [key, value] of this.out.serverMarko) {
      hot.markoCache.set(path.resolve(hot.projectRoot, key), value);
    }
  }

  /*
   * Snapshot the running process's cached (server) marko transpilations
   * into this incremental.
   */
  snapshotMarkoCache() {
    for (const [file, value] of hot.markoCache) {
      const key = path.relative(hot.projectRoot, file).replaceAll("\\", "/");
      // Only insert if it doesn't exist. Calling 'put' when it
      // already exists would inform the user of extra calls to put.
      if (!this.hasArtifact("serverMarko", key)) {
        this.put({
          kind: "serverMarko",
          sources: [file],
          key,
          value,
        });
      }
    }
  }

  toDisk(file = ".clover/incr.state") {
    const buffer = this.serialize();
    fs.writeFileSync(file, buffer);
  }

  static fromDisk(file = ".clover/incr.state"): Incremental {
    try {
      const buffer = fs.readFileSync(file);
      return Incremental.fromSerialized(buffer);
    } catch (err: any) {
      if (err.code === "ENOENT") return new Incremental();
      throw err;
    }
  }

  async wait() {
    await this.compressQueue.done({ method: "success" });
  }

  async flush(
    platform: bundle.ServerPlatform,
    dir = path.resolve(".clover/out"),
  ) {
    ASSERT(!this.compressQueue.active);
    const join = (...args: string[]) => path.join(dir, ...args);
    const writer = new BufferWriter();

    // TODO: ensure all compressed got compressed

    const asset = Object.fromEntries(
      Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
        const raw = writer.write(buffer, hash);
        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
        const gzip = writer.write(UNWRAP(gzipBuf), hash + ".gz");
        const zstd = writer.write(UNWRAP(zstdBuf), hash + ".zstd");
        return [key, { raw, gzip, zstd, headers }];
      }),
    );
    const backendBundle = UNWRAP(this.out.backendBundle.get(platform));

    // Arrange output files
    const outFiles: Array<[file: string, contents: string | Buffer]> = [
      // Asset manifest
      ["static.json", JSON.stringify(asset)],
      ["static.blob", writer.get()],

      // Backend
      ...Object.entries(backendBundle.files).map(([subPath, contents]) =>
        [
          subPath,
          subPath === backendBundle.fileWithMagicWord
            ? UNWRAP(this.out.backendReplace.get(platform))
            : contents,
        ] as [string, Buffer]
      ),
    ];

    // TODO: check duplicates

    // Perform all i/o
    await Promise.all(
      outFiles.map(([subPath, contents]) =>
        fs.writeMkdir(join(subPath), contents, { flush: true })
      ),
    );
  }
}

export interface PutAsset extends PutBase {
  body: string | Buffer;
  headers?: Record<string, string | undefined>;
}

export interface Compressed {
  gzip?: Buffer;
  zstd?: Buffer;
}

export interface CompressJob {
  algo: "zstd" | "gzip";
  buffer: Buffer;
  label: string;
  hash: string;
}

class BufferWriter {
  size = 0;
  seen = new Map<string, View>();
  buffers: Buffer[] = [];

  write(buffer: Buffer, hash: string): View {
    let view = this.seen.get(hash);
    if (view) return view;
    view = [this.size, this.size += buffer.byteLength];
    this.seen.set(hash, view);
    this.buffers.push(buffer);
    return view;
  }

  get() {
    return Buffer.concat(this.buffers);
  }
}
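
// Minimal sketch of how BufferWriter deduplicates by hash (the buffers and
// hash strings here are illustrative only):
//
//   const w = new BufferWriter();
//   const a = w.write(Buffer.from("hello"), "abc"); // [0, 5]
//   const b = w.write(Buffer.from("hello"), "abc"); // same View, not re-added
//   const c = w.write(Buffer.from("!"), "def");     // [5, 6]
//   w.get(); // Buffer containing "hello!"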

export type View = [start: number, end: number];

export interface BuiltAssetMap {
  [route: string]: BuiltAsset;
}

export interface BuiltAsset {
  raw: View;
  gzip: View;
  zstd: View;
  headers: Record<string, string>;
}

export interface SerializedMeta {
  asset: Array<[route: string, data: {
    raw: View;
    gzip: View | null;
    zstd: View | null;
    hash: string;
    headers: Record<string, string>;
  }]>;
  script: Array<[key: string, value: string]>;
  style: Array<[key: string, value: string]>;
  pageMetadata: Array<[key: string, PageMetadata]>;
  viewMetadata: Array<[key: string, ViewMetadata]>;
  serverMarko: Array<[key: string, hot.MarkoCacheEntry]>;
  backendBundle: Array<[platform: string, {
    magicWord: string;
    fileWithMagicWord: string | null;
    files: Array<[string, View]>;
  }]>;
  backendReplace: Array<[key: string, View]>;

  invals: Array<[key: string, {
    /** Modified */
    m: number;
    f: SourceId[];
    o: ArtifactId[];
  }]>;
  sources: Array<[string, ...string[]]>;
}

const gzip = util.promisify(zlib.gzip);
const zstd = util.promisify(zlib.zstdCompress);

import * as fs from "#sitegen/fs";
import * as zlib from "node:zlib";
import * as util from "node:util";
import { Queue } from "#sitegen/async";
import * as hot from "./hot.ts";
import * as mime from "#sitegen/mime";
import * as path from "node:path";
import { Buffer } from "node:buffer";
import * as css from "./css.ts";
import type * as bundle from "./bundle.ts";