incremental generator

parent 92ddecc37e
commit 399ccec226

12 changed files with 435 additions and 337 deletions
@@ -38,6 +38,7 @@ export async function bundleClientJavaScript(
     plugins,
     splitting: true,
     write: false,
+    metafile: true,
   });
   if (bundle.errors.length || bundle.warnings.length) {
     throw new AggregateError(
@@ -45,29 +46,34 @@ export async function bundleClientJavaScript(
       "JS bundle failed",
     );
   }
-  incr.invalidate("bundle-script");
   const publicScriptRoutes = extraPublicScripts.map((file) =>
    path.basename(file).replace(/\.client\.[tj]sx?/, "")
   );
+  const { metafile } = bundle;
+  console.log(metafile);
   const promises: Promise<void>[] = [];
   // TODO: add a shared build hash to entrypoints, derived from all the chunk hashes.
   for (const file of bundle.outputFiles) {
-    const { text } = file;
     let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
+    const text = file.text;
+    const { inputs } = UNWRAP(metafile.outputs["out!" + route]);
+    const sources = Object.keys(inputs);

     // Register non-chunks as script entries.
     const chunk = route.startsWith("/js/c.");
     if (!chunk) {
       route = route.replace(".client.js", ".js");
       incr.put({
+        srcId: "bundle-script",
+        sources,
         type: "script",
         key: route.slice("/js/".length, -".js".length),
         value: text,
       });
     }
     // Register chunks and public scripts as assets.
     if (chunk || publicScriptRoutes.includes(route)) {
       promises.push(incr.putAsset({
         srcId: "bundle-script",
         sources,
         key: route,
         body: text,
       }));
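Note on the route math above: the `/^.*!/` strip only lines up with the `"out!" + route` metafile lookup if the in-memory outdir literally ends in `!`. A sketch of that assumption (the path below is hypothetical, not from this repo):

```ts
// Route/metafile-key derivation, assuming an outdir literally named "out!".
const filePath = "C:\\work\\site\\out!\\js\\q+a.client.js"; // hypothetical
const route = filePath.replace(/^.*!/, "").replaceAll("\\", "/");
// route        === "/js/q+a.client.js"
// metafile key === "out!" + route === "out!/js/q+a.client.js"
// chunk check  === route.startsWith("/js/c.")
```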
@@ -34,11 +34,19 @@ export function preprocess(css: string, theme: Theme): string {
   );
 }

+export interface Output {
+  text: string;
+  sources: string[];
+}
+
 export async function bundleCssFiles(
   cssImports: string[],
   theme: Theme,
   dev: boolean = false,
-): Promise<string> {
+): Promise<Output> {
   cssImports = cssImports.map((file) =>
     path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
   );
   const plugin = {
     name: "clover",
     setup(b) {
@@ -68,13 +76,14 @@ export async function bundleCssFiles(
   const build = await esbuild.build({
     bundle: true,
     entryPoints: ["$input$"],
-    write: false,
-    external: ["*.woff2"],
-    target: ["ie11"],
-    plugins: [plugin],
+    external: ["*.woff2", "*.ttf", "*.png", "*.jpeg"],
+    metafile: true,
+    minify: !dev,
+    plugins: [plugin],
+    target: ["ie11"],
+    write: false,
   });
-  const { errors, warnings, outputFiles } = build;
+  const { errors, warnings, outputFiles, metafile } = build;
   if (errors.length > 0) {
     throw new AggregateError(errors, "CSS Build Failed");
   }
@@ -82,8 +91,15 @@ export async function bundleCssFiles(
     throw new AggregateError(warnings, "CSS Build Failed");
   }
   if (outputFiles.length > 1) throw new Error("Too many output files");
-  return outputFiles[0].text;
+  return {
+    text: outputFiles[0].text,
+    sources: Object.keys(metafile.outputs["$input$.css"].inputs)
+      .filter((x) => x !== "input:."),
+  };
 }

 import * as esbuild from "esbuild";
 import * as fs from "#sitegen/fs";
 import * as hot from "./hot.ts";
 import * as path from "node:path";
 import { Incremental } from "./incremental.ts";
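The `$input$.css` key works because the build's only entrypoint is the virtual module `$input$`, so esbuild names its lone CSS output `$input$.css` in the metafile; its `inputs` are the real stylesheets, minus the synthetic `input:.` module from the clover plugin. An illustrative shape (paths hypothetical):

```ts
// Illustrative metafile for the virtual "$input$" entry; "input:." is the
// plugin's synthetic entry module and gets filtered out.
const metafile = {
  outputs: {
    "$input$.css": {
      inputs: {
        "input:.": { bytesInOutput: 0 },
        "src/global.css": { bytesInOutput: 840 },   // hypothetical
        "src/q+a/style.css": { bytesInOutput: 213 }, // hypothetical
      },
    },
  },
};
const sources = Object.keys(metafile.outputs["$input$.css"].inputs)
  .filter((x) => x !== "input:.");
// sources === ["src/global.css", "src/q+a/style.css"]
```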
@@ -21,7 +21,8 @@ async function sitegen(status: Spinner) {
   let root = path.resolve(import.meta.dirname, "../src");
   const join = (...sub: string[]) => path.join(root, ...sub);
-  const incr = new Incremental();
+  const incr = Incremental.fromDisk();
+  await incr.statAllFiles();

   // Sitegen reviews every defined section for resources to process
   const sections: sg.Section[] =
@@ -68,20 +69,24 @@ async function sitegen(status: Spinner) {
   ];
   for (const { dir, list, prefix, exclude = [], ext = false } of kinds) {
     const items = fs.readDirRecOptionalSync(dir);
-    item: for (const item of items) {
-      if (item.isDirectory()) continue;
+    item: for (const subPath of items) {
+      const file = path.join(dir, subPath);
+      const stat = fs.statSync(file);
+      if (stat.isDirectory()) continue;
       for (const e of exclude) {
-        if (item.name.endsWith(e)) continue item;
+        if (subPath.endsWith(e)) continue item;
       }
-      const file = path.relative(dir, item.parentPath + "/" + item.name);
       const trim = ext
-        ? file
-        : file.slice(0, -path.extname(file).length).replaceAll(".", "/");
+        ? subPath
+        : subPath.slice(0, -path.extname(subPath).length).replaceAll(
+          ".",
+          "/",
+        );
       let id = prefix + trim.replaceAll("\\", "/");
       if (prefix === "/" && id.endsWith("/index")) {
         id = id.slice(0, -"/index".length) || "/";
       }
-      list.push({ id, file: path.join(item.parentPath, item.name) });
+      list.push({ id, file: file });
     }
   }
 }
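This loop rewrite follows from the `readDirRecOptionalSync` change further down: with `recursive: true` plus a string encoding, Node's `readdirSync` returns plain paths relative to `dir` rather than `Dirent` objects, so directory checks and full paths have to be rebuilt by hand. Minimal illustration (directory name hypothetical):

```ts
import { readdirSync, statSync } from "node:fs";
import * as path from "node:path";

// With { recursive: true, encoding: "utf8" }, entries come back as strings
// relative to `dir` (e.g. "q+a/index.marko"), not Dirent objects.
const dir = "src/pages"; // hypothetical directory
for (const subPath of readdirSync(dir, { recursive: true, encoding: "utf8" })) {
  const file = path.join(dir, subPath);
  if (statSync(file).isDirectory()) continue;
  console.log(subPath.replaceAll("\\", "/")); // feeds the route id above
}
```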
@@ -92,8 +97,8 @@ async function sitegen(status: Spinner) {

   // -- server side render --
   status.text = "Building";
-  const cssOnce = new OnceMap<string>();
-  const cssQueue = new Queue<[string, string[], css.Theme], string>({
+  const cssOnce = new OnceMap<css.Output>();
+  const cssQueue = new Queue<[string, string[], css.Theme], css.Output>({
     name: "Bundle",
     fn: ([, files, theme]) => css.bundleCssFiles(files, theme),
     passive: true,
@@ -103,7 +108,7 @@ async function sitegen(status: Spinner) {
 interface RenderResult {
   body: string;
   head: string;
-  inlineCss: string;
+  css: css.Output;
   scriptFiles: string[];
   item: FileItem;
 }
@@ -139,11 +144,16 @@ async function sitegen(status: Spinner) {
     () => cssQueue.add([item.id, cssImports, theme]),
   );
   // -- html --
-  const bodyPromise = await ssr.ssrAsync(<Page />, {
+  let page = <Page />;
+  if (layout?.default) {
+    const Layout = layout.default;
+    page = <Layout>{page}</Layout>;
+  }
+  const bodyPromise = await ssr.ssrAsync(page, {
     sitegen: sg.initRender(),
   });

-  const [{ text, addon }, inlineCss, renderedMeta] = await Promise.all([
+  const [{ text, addon }, cssBundle, renderedMeta] = await Promise.all([
     bodyPromise,
     cssPromise,
     renderedMetaPromise,
@@ -160,13 +170,14 @@ async function sitegen(status: Spinner) {
   renderResults.push({
     body: text,
     head: renderedMeta,
-    inlineCss,
+    css: cssBundle,
     scriptFiles: Array.from(addon.sitegen.scripts),
     item: item,
   });
 }
 // This is done in two passes so that a page that throws during evaluation
 // will report "Load Render Module" instead of "Render Static Page".
+const neededPages = pages.filter((page) => incr.needsBuild("asset", page.id));
 const spinnerFormat = status.format;
 status.format = () => "";
 const moduleLoadQueue = new Queue({
@@ -175,7 +186,7 @@ async function sitegen(status: Spinner) {
   getItemText,
   maxJobs: 1,
 });
-moduleLoadQueue.addMany(pages);
+moduleLoadQueue.addMany(neededPages);
 await moduleLoadQueue.done({ method: "stop" });
 const pageQueue = new Queue({
   name: "Render Static Page",
@@ -183,7 +194,7 @@ async function sitegen(status: Spinner) {
   getItemText,
   maxJobs: 2,
 });
-pageQueue.addMany(pages);
+pageQueue.addMany(neededPages);
 await pageQueue.done({ method: "stop" });
 status.format = spinnerFormat;
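Both queues now receive only `neededPages`. Because invalidation physically deletes artifacts (see `needsBuild` in incremental.ts below), the warm-build fast path costs one Map lookup per page. A sketch under those semantics (page list hypothetical):

```ts
import { Incremental } from "./incremental.ts"; // path as used in this repo

// Warm-build fast path: a cached artifact means the page is skipped.
const incr = Incremental.fromDisk(); // reload previous build state
await incr.statAllFiles();           // mtime bumps delete stale artifacts

const pages = [{ id: "/", file: "src/pages/index.tsx" }]; // hypothetical
const neededPages = pages.filter((page) => incr.needsBuild("asset", page.id));
// No files changed since the last run → neededPages is empty and both the
// module-load and render queues drain immediately.
```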
@@ -207,7 +218,7 @@ async function sitegen(status: Spinner) {
 async function doStaticFile(item: FileItem) {
   const body = await fs.readFile(item.file);
   await incr.putAsset({
-    srcId: "static:" + item.file,
+    sources: [item.file],
     key: item.id,
     body,
   });
@@ -219,7 +230,9 @@ async function sitegen(status: Spinner) {
   maxJobs: 16,
 });
 status.format = () => "";
-staticQueue.addMany(staticFiles);
+staticQueue.addMany(
+  staticFiles.filter((file) => incr.needsBuild("asset", file.id)),
+);
 await staticQueue.done({ method: "stop" });
 status.format = spinnerFormat;
@@ -227,11 +240,13 @@ async function sitegen(status: Spinner) {
   status.text = `Concat ${renderResults.length} Pages`;
   await Promise.all(
     renderResults.map(
-      async ({ item: page, body, head, inlineCss, scriptFiles }) => {
+      async (
+        { item: page, body, head, css, scriptFiles },
+      ) => {
         const doc = wrapDocument({
           body,
           head,
-          inlineCss,
+          inlineCss: css.text,
           scripts: scriptFiles.map(
             (id) =>
               UNWRAP(
@@ -242,7 +257,7 @@ async function sitegen(status: Spinner) {
           ).map((x) => `{${x}}`).join("\n"),
         });
         await incr.putAsset({
-          srcId: "page:" + page.file,
+          sources: [page.file, ...css.sources],
           key: page.id,
           body: doc,
           headers: {
@@ -262,7 +277,7 @@ async function sitegen(status: Spinner) {
   status.format = spinnerFormat;
   status.text = `Incremental Flush`;
   incr.flush();
-  incr.serializeToDisk(); // Allows picking up this state again
+  incr.toDisk(); // Allows picking up this state again
   return { elapsed: (performance.now() - startTime) / 1000 };
 }
@@ -281,9 +296,11 @@ function wrapDocument({
   inlineCss: string;
   scripts: string;
 }) {
-  return `<!doctype html><head>${head}${
+  return `<!doctype html><html lang=en><head>${head}${
     inlineCss ? `<style>${inlineCss}</style>` : ""
-  }</head><body>${body}${scripts ? `<script>${scripts}</script>` : ""}</body>`;
+  }</head><body>${body}${
+    scripts ? `<script>${scripts}</script>` : ""
+  }</body></html>`;
 }

 import { OnceMap, Queue } from "./queue.ts";
@@ -34,8 +34,6 @@ export interface FileStat {
   cssImportsRecursive: string[] | null;
   lastModified: number;
   imports: string[];
-  /* Used by 'incremental.ts' */
-  srcIds: string[];
 }
 let fsGraph = new Map<string, FileStat>();
 export function setFsGraph(g: Map<string, FileStat>) {
@@ -60,14 +58,6 @@ Module.prototype._compile = function (
   filename: string,
   format: "module" | "commonjs",
 ) {
-  fs.writeMkdirSync(
-    ".clover/debug-transpilation/" +
-      path.relative(projectRoot, filename).replaceAll("\\", "/").replaceAll(
-        "../",
-        "_/",
-      ).replaceAll("/", "."),
-    content,
-  );
   const result = ModulePrototypeUnderscoreCompile.call(
     this,
     content,
@@ -96,7 +86,6 @@ Module.prototype._compile = function (
       : null,
     imports,
     lastModified: stat.mtimeMs,
-    srcIds: [],
   });
 }
 return result;
@@ -1,54 +1,50 @@
 // `Incremental` contains multiple maps for the different parts of a site
 // build, and tracks reused items across builds. It also handles emitting and
 // updating the built site. This structure is self contained and serializable.
 //
 // Tracking is simple: Files map to one or more 'source IDs', which map to one
 // or more 'artifact'. This two layer approach allows many files (say a page +
 // all its imports) to map to the build of a page, which produces an HTML file
 // plus a list of scripts.

 type SourceId = string; // relative to project root, e.g. 'src/global.css'
 type ArtifactId = string; // `${ArtifactType}#${string}`
 type Sha1Id = string; // Sha1 hex string

 interface ArtifactMap {
   asset: Asset;
   script: string;
 }
-type AllArtifactMaps = {
-  [K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
-};
 type ArtifactType = keyof ArtifactMap;

 interface Asset {
   buffer: Buffer;
   headers: Record<string, string | undefined>;
   hash: string;
 }

 export interface PutBase {
   srcTag?: string; // deprecated
   srcId: string;
   sources: SourceId[];
   key: string;
 }

 export interface Put<T extends ArtifactType> extends PutBase {
   type: T;
   value: ArtifactMap[T];
 }

-export interface Output {
-  type: ArtifactType;
-  key: string;
+export interface Invalidations {
+  lastModified: number;
+  outputs: Set<ArtifactId>;
+  files: Set<SourceId>;
 }

-const gzip = util.promisify(zlib.gzip);
-const zstd = util.promisify(zlib.zstdCompress);
-
 export class Incremental {
   /** The generated artifacts */
-  out: AllArtifactMaps = {
+  out: {
+    [K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
+  } = {
     asset: new Map(),
     script: new Map(),
   };
+  /** Tracking filesystem entries to `srcId` */
+  invals = new Map<SourceId, Invalidations>();
+  /** Tracking output keys to files */
+  sources = new Map<ArtifactId, SourceId[]>();

   /** Compressed resources */
-  compress = new Map<string, Compressed>();
+  compress = new Map<Sha1Id, Compressed>();
   compressQueue = new Queue<CompressJob, void>({
     name: "Compress",
     maxJobs: 5,
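Concretely, the two-layer graph described in the header comment looks like this for a page that imports one stylesheet (hand-written illustration with hypothetical paths, not repo data; types mirror the declarations above):

```ts
type SourceId = string;
interface Invalidations {
  lastModified: number;
  files: Set<SourceId>;   // files that import this one
  outputs: Set<string>;   // ArtifactId = `${type}#${key}`
}

// Editing the stylesheet walks `files` up to the importer, whose `outputs`
// name the artifacts to delete.
const invals = new Map<SourceId, Invalidations>([
  ["src/q+a/style.css", {
    lastModified: 1700000000000,
    files: new Set(["src/q+a/index.marko"]),
    outputs: new Set(),
  }],
  ["src/q+a/index.marko", {
    lastModified: 1700000000000,
    files: new Set(),
    outputs: new Set(["asset#/q+a"]),
  }],
]);
// touch style.css → updateStat walks to index.marko → deletes "asset#/q+a"
// → needsBuild("asset", "/q+a") is true on the next run
```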
@@ -56,59 +52,114 @@ export class Incremental {
     passive: true,
     getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`,
   });
-  /** Tracking filesystem entries to `srcId` */
-  files = new Map<string, hot.FileStat>();
-  srcIds = new Map<string, Output[]>();
-
-  static fromSerialized() {
-  }
-  serialize() {
-    const writer = new BufferWriter();
-
-    const asset = Array.from(
-      this.out.asset,
-      ([key, { buffer, hash, headers }]) => {
-        const raw = writer.write(buffer, hash);
-        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
-        const gzip = gzipBuf ? writer.write(gzipBuf, hash) : null;
-        const zstd = zstdBuf ? writer.write(zstdBuf, hash) : null;
-        return [key, {
-          raw,
-          gzip,
-          zstd,
-          hash,
-          headers,
-        }];
-      },
-    );
-    const script = Array.from(this.out.script);
-
-    const meta = Buffer.from(
-      JSON.stringify({
-        asset,
-        script,
-      }),
-      "utf-8",
-    );
-
-    const lengthBuffer = Buffer.alloc(4);
-    lengthBuffer.writeUInt32LE(meta.byteLength, 0);
-
-    return Buffer.concat([meta, lengthBuffer, ...writer.buffers]);
-  }
-
-  serializeToDisk(file = ".clover/incr.state") {
-    const buffer = this.serialize();
-    fs.writeFileSync(file, buffer);
-  }
+  /** Invalidation deletes build artifacts so the check is trivial. */
+  needsBuild(type: ArtifactType, key: string) {
+    return !this.out[type].has(key);
+  }

+  /*
+   * Put built artifacts into the incremental cache. The entry points
+   * used to build this must be provided. 'Incremental' will trace JS
+   * imports and file modification times tracked by 'hot.ts'.
+   */
+  put<T extends ArtifactType>({
+    srcId,
+    sources,
+    type,
+    key,
+    value,
+  }: Put<T>) {
+    this.out[type].set(key, value);
+
+    // Update sources information
+    ASSERT(sources.length > 0, "Missing sources for " + type + " " + key);
+    sources = sources.map((src) => path.normalize(src));
+    const fullKey = `${type}#${key}`;
+    const prevSources = this.sources.get(fullKey);
+    const newSources = new Set(
+      sources.map((file) =>
+        path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
+      ),
+    );
+    this.sources.set(fullKey, [...newSources]);
+    for (const source of prevSources ?? []) {
+      if (sources.includes(source)) {
+        newSources.delete(source);
+        continue;
+      }
+      const invals = UNWRAP(this.invals.get(source));
+      ASSERT(invals.outputs.has(fullKey));
+      invals.outputs.delete(fullKey);
+    }
+    // Use reflection from the plugin system to get imports.
+    for (const source of newSources) {
+      const invals = this.#getOrInitInvals(source);
+      invals.outputs.add(fullKey);
+      this.#followImports(source);
+    }
+  }
+
+  // TODO: this doesn't remove stuff when it disappears
+  #getOrInitInvals(source: string) {
+    let invals = this.invals.get(source);
+    if (!invals) {
+      const g = hot.getFsGraph().get(source);
+      const lastModified = g?.lastModified ??
+        fs.statSync(path.resolve(hot.projectRoot, source)).mtimeMs;
+      this.invals.set(
+        source,
+        invals = {
+          lastModified,
+          files: new Set(),
+          outputs: new Set(),
+        },
+      );
+    }
+    return invals;
+  }
+
+  #followImports(file: string) {
+    const graph = hot.getFsGraph();
+    const stat = graph.get(file);
+    if (!stat) return;
+    for (const i of stat.imports) {
+      const invals = this.#getOrInitInvals(i);
+      invals.files.add(file);
+      this.#followImports(i);
+    }
+  }
+
+  async statAllFiles() {
+    for (const file of this.invals.keys()) {
+      const mtime = fs.statSync(file).mtimeMs;
+      this.updateStat(file, mtime);
+    }
+  }
+
+  updateStat(fileKey: string, newLastModified: number) {
+    const stat = this.invals.get(fileKey);
+    ASSERT(stat, "Updated stat on untracked file " + fileKey);
+    if (stat.lastModified < newLastModified) {
+      // Invalidate
+      console.log(fileKey + " updated");
+      const invalidQueue = [fileKey];
+      let currentInvalid;
+      while (currentInvalid = invalidQueue.pop()) {
+        const invalidations = this.invals.get(currentInvalid);
+        ASSERT(
+          invalidations,
+          "No reason to track file '" + currentInvalid +
+            "' if it has no invalidations",
+        );
+        const { files, outputs } = invalidations;
+        for (const out of outputs) {
+          const [type, artifactKey] = out.split("#", 2);
+          this.out[type as ArtifactType].delete(artifactKey);
+        }
+        invalidQueue.push(...files);
+      }
+    }
+    stat.lastModified = newLastModified;
+  }

   async putAsset(info: PutAsset) {
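End to end, the new members behave like this (usage sketch; the entry path and key are hypothetical, and the sketch assumes the source file exists so the internal stat succeeds):

```ts
import { Incremental } from "./incremental.ts";

// Register an artifact, then watch a stat bump invalidate it.
const incr = new Incremental();
const compiledJs = "console.log('hi')"; // stand-in for bundler output

incr.put({
  srcId: "bundle-script",
  sources: ["src/pages/index.client.ts"], // hypothetical entry
  type: "script",
  key: "pages/index",
  value: compiledJs,
});
console.log(incr.needsBuild("script", "pages/index")); // false: cached

// statAllFiles (or a watcher) later reports a newer mtime:
incr.updateStat("src/pages/index.client.ts", Date.now());
console.log(incr.needsBuild("script", "pages/index")); // true: deleted
```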
@@ -161,7 +212,99 @@ export class Incremental {
     entry![algo] = out;
   }

-  invalidate(srcId: string) {
+  serialize() {
+    const writer = new BufferWriter();
+
+    const asset = Array.from(
+      this.out.asset,
+      ([key, { buffer, hash, headers }]) => {
+        const raw = writer.write(buffer, hash);
+        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
+        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
+        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
+        return [key, {
+          raw,
+          gzip,
+          zstd,
+          hash,
+          headers: headers as Record<string, string>,
+        }] satisfies SerializedMeta["asset"][0];
+      },
+    );
+    const script = Array.from(this.out.script);
+    const invals = Array.from(this.invals, ([key, value]) => {
+      const { lastModified, files, outputs } = value;
+      return [key, {
+        m: lastModified,
+        f: [...files],
+        o: [...outputs],
+      }] satisfies SerializedMeta["invals"][0];
+    });
+    const sources = Array.from(this.sources, ([key, value]) => {
+      return [key, ...value] as [string, ...string[]];
+    });
+    const json = {
+      asset,
+      script,
+      invals,
+      sources,
+    } satisfies SerializedMeta;
+    const meta = Buffer.from(JSON.stringify(json), "utf-8");
+
+    const lengthBuffer = Buffer.alloc(4);
+    lengthBuffer.writeUInt32LE(meta.byteLength, 0);
+
+    return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
+  }
+
+  static fromSerialized(buffer: Buffer): Incremental {
+    const metaLength = buffer.readUint32LE(0);
+    const meta: SerializedMeta = JSON.parse(
+      buffer.subarray(4, 4 + metaLength).toString("utf8"),
+    );
+    const view = ([start, end]: View) =>
+      buffer.subarray(4 + metaLength + start, 4 + metaLength + end);
+
+    const incr = new Incremental();
+    incr.out.asset = new Map<string, Asset>(meta.asset.map(([key, value]) => {
+      const { hash, raw, gzip, zstd, headers } = value;
+      if ((gzip || zstd) && !incr.compress.has(hash)) {
+        incr.compress.set(hash, {
+          gzip: gzip ? view(gzip) : undefined,
+          zstd: zstd ? view(zstd) : undefined,
+        });
+      }
+      return [key, {
+        buffer: view(raw),
+        headers: headers,
+        hash: hash,
+      }];
+    }));
+    incr.out.script = new Map(meta.script);
+    incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => {
+      return [key, {
+        lastModified: m,
+        files: new Set(f),
+        outputs: new Set(o),
+      }];
+    }));
+    incr.sources = new Map(meta.sources.map(([key, ...value]) => [key, value]));
+    return incr;
+  }
+
+  toDisk(file = ".clover/incr.state") {
+    const buffer = this.serialize();
+    fs.writeFileSync(file, buffer);
+  }
+
+  static fromDisk(file = ".clover/incr.state"): Incremental {
+    try {
+      const buffer = fs.readFileSync(file);
+      return Incremental.fromSerialized(buffer);
+    } catch (err: any) {
+      if (err.code === "ENOENT") return new Incremental();
+      throw err;
+    }
+  }

   async wait() {
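Worth noting: the old `serialize` concatenated `[meta, lengthBuffer, ...]`, writing the length after the JSON, while the new reader expects the length first; the new `[lengthBuffer, meta, ...]` order matches `fromSerialized`. The resulting `.clover/incr.state` layout, with a tiny reader sketch:

```ts
// .clover/incr.state, as serialize()/fromSerialized() now agree on it:
//
//   [u32 LE metaLength][metaLength bytes of JSON SerializedMeta][blob region]
//
// Every View in the meta is [start, end] relative to the blob region,
// i.e. absolute offset = 4 + metaLength + start.
import * as fs from "node:fs";

function peekState(file = ".clover/incr.state") {
  const buffer = fs.readFileSync(file);
  const metaLength = buffer.readUInt32LE(0);
  const meta = JSON.parse(buffer.subarray(4, 4 + metaLength).toString("utf8"));
  return Object.keys(meta); // ["asset", "script", "invals", "sources"]
}
```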
@@ -171,18 +314,14 @@ export class Incremental {
   async flush() {
     ASSERT(!this.compressQueue.active);
     const writer = new BufferWriter();
+    // TODO: ensure all assets are actually compressed and not fake lying.
     const asset = Object.fromEntries(
       Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
         const raw = writer.write(buffer, hash);
         const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
         const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
         const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
-        return [key, {
-          raw,
-          gzip,
-          zstd,
-          headers,
-        }];
+        return [key, { raw, gzip, zstd, headers }];
       }),
     );
     await Promise.all([
@@ -230,10 +369,10 @@ class BufferWriter {

 export type View = [start: number, end: number];

 // Alongside this type is a byte buffer, containing all the assets.
 export interface BuiltAssetMap {
   [route: string]: BuiltAsset;
 }

 export interface BuiltAsset {
   raw: View;
   gzip: View;
@@ -249,13 +388,24 @@ export interface SerializedMeta {
     hash: string;
     headers: Record<string, string>;
   }]>;
-  script: [key: string, value: string][];
+  script: Array<[key: string, value: string]>;
+  invals: Array<[key: string, {
+    /** Modified */
+    m: number;
+    f: SourceId[];
+    o: ArtifactId[];
+  }]>;
+  sources: Array<[string, ...string[]]>;
 }

+const gzip = util.promisify(zlib.gzip);
+const zstd = util.promisify(zlib.zstdCompress);
+
 import * as fs from "#sitegen/fs";
 import * as zlib from "node:zlib";
 import * as util from "node:util";
 import { Queue } from "./queue.ts";
 import * as hot from "./hot.ts";
 import * as mime from "#sitegen/mime";
 import * as path from "node:path";
 import { Buffer } from "node:buffer";
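`BufferWriter` itself sits outside this diff, but the `(buffer, key)` signature plus the `hash + ".gz"` / `hash + ".zstd"` keys imply key-based deduplication, which would also explain why the compressed variants stopped writing under the bare `hash`: they would collide with the raw blob. A plausible minimal shape, offered as an assumption rather than the repo's code:

```ts
import type { View } from "./incremental.ts";

// Assumed-minimal BufferWriter: dedupes by key so identical blobs are
// stored once, and returns [start, end] Views into the blob region.
class BufferWriter {
  size = 0;
  seen = new Map<string, View>();
  buffers: Buffer[] = [];

  write(buffer: Buffer, key: string): View {
    const cached = this.seen.get(key);
    if (cached) return cached;
    const view: View = [this.size, this.size += buffer.byteLength];
    this.seen.set(key, view);
    this.buffers.push(buffer);
    return view;
  }
}
```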
@@ -34,7 +34,7 @@ export function writeMkdirSync(file: string, contents: Buffer | string) {

 export function readDirRecOptionalSync(dir: string) {
   try {
-    return readdirSync(dir, { recursive: true, withFileTypes: true });
+    return readdirSync(dir, { recursive: true, encoding: "utf8" });
   } catch (err: any) {
     if (err.code === "ENOENT") return [];
     throw err;
@@ -33,6 +33,11 @@ Included is `src`, which contains `paperclover.net`. Website highlights:

 ## Development

+minimum system requirements:
+
+- a cpu with at least 1 core.
+- random access memory.
+- windows 7 or later, macos, or linux operating system.
+
 ```
 npm install
src/q+a/components/QuestionForm.marko (new file, 13 lines)
@@ -0,0 +1,13 @@
+<form action="/q+a" method="POST">
+  <textarea
+    name="text"
+    placeholder="ask clover a question..."
+    required
+    minlength="1"
+    maxlength="10000"
+  />
+  <div aria-hidden class="title">ask a question</div>
+  <button type="submit">send</button>
+  <div class="disabled-button">send</div>
+</form>
@@ -3,7 +3,6 @@ export * as layout from "../layout.tsx";
 export interface Input {
   admin?: boolean;
 }

 export const meta: Metadata = {
   title: "paper clover q+a",
   description: "ask clover a question",
@@ -12,24 +11,16 @@ export const meta: Metadata = {
 <const/{ admin = false } = input />
 <const/questions = [...Question.getAll()] />

-<form action="/q+a" method="POST">
-  <textarea
-    name="text"
-    placeholder="ask clover a question..."
-    required
-    minlength="1"
-    maxlength="10000"
-  />
-  <div aria-hidden class="title">ask a question</div>
-  <button type="submit">send</button>
-  <div class="disabled-button">send</div>
-</form>
+<if=!admin>
+  <QuestionForm />
+</>
 <for|q| of=questions>
-  <QuestionRender question=q admin=admin />;
+  <QuestionRender question=q admin=admin />
 </>
 <footer>
   fun fact: clover has answered ${questions.length} questions
 </footer>

 import { Question } from "@/q+a/models/Question.ts";
+import { QuestionForm } from "@/q+a/components/QuestionForm.marko";
 import QuestionRender from '@/q+a/components/Question.marko';
@@ -1,59 +1,6 @@
-// __esModule
-true
-
-// getEnd
-function getEnd(file, node) {
-  // Restore if merged: https://github.com/babel/babel/pull/16849
-  // if (node.end != null) {
-  //   return node.end;
-  // }
-
-  if (node.loc) {
-    return locToIndex(file, node.loc.end);
-  }
-
-  return null;
-}
-
-// getLoc
-function getLoc(file, index) {
-  return findLoc(getLineIndexes(file), 0, index);
-}
-
-// getLocRange
-function getLocRange(file, start, end) {
-  const lineIndexes = getLineIndexes(file);
-  const startLoc = findLoc(lineIndexes, 0, start);
-
-  if (startLoc) {
-    const endLoc =
-      start === end ? startLoc : findLoc(lineIndexes, startLoc.line - 1, end);
-
-    return {
-      start: startLoc,
-      end: endLoc
-    };
-  }
-}
-
-// getStart
-function getStart(file, node) {
-  // Restore if merged: https://github.com/babel/babel/pull/16849
-  // if (node.start != null) {
-  //   return node.start;
-  // }
-
-  if (node.loc) {
-    return locToIndex(file, node.loc.start);
-  }
-
-  return null;
-}
-
-// withLoc
-function withLoc(file, node, start, end) {
-  node.loc = getLocRange(file, start, end);
-  node.start = start;
-  node.end = end;
-  return node;
-}
+User-agent: *
+Allow: /
+
+# Restrict access to media files to ensure bots do not unintentionally
+# break their license agreement (https://paperclover.net/license)
+Disallow: /file
@@ -1,115 +1,43 @@
-// __esModule
-true
-
-// parseArgs
-function parseArgs(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(file, `_(${str})`, sourceStart, sourceEnd, 2);
-
-  if (parsed.type === "CallExpression") {
-    return parsed.arguments;
-  }
-
-  return [ensureParseError(file, parsed, sourceStart, sourceEnd)];
-}
-
-// parseExpression
-function parseExpression(
-  file,
-  str,
-  sourceStart,
-  sourceEnd,
-  sourceOffset)
-{
-  return tryParse(file, true, str, sourceStart, sourceEnd, sourceOffset);
-}
-
-// parseParams
-function parseParams(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(
-    file,
-    `(${str})=>{}`,
-    sourceStart,
-    sourceEnd,
-    1
-  );
-
-  if (parsed.type === "ArrowFunctionExpression") {
-    return parsed.params;
-  }
-
-  return [ensureParseError(file, parsed, sourceStart, sourceEnd)];
-}
-
-// parseStatements
-function parseStatements(
-  file,
-  str,
-  sourceStart,
-  sourceEnd,
-  sourceOffset)
-{
-  return tryParse(file, false, str, sourceStart, sourceEnd, sourceOffset);
-}
-
-// parseTemplateLiteral
-function parseTemplateLiteral(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(
-    file,
-    "`" + str + "`",
-    sourceStart,
-    sourceEnd,
-    1
-  );
-
-  if (parsed.type === "TemplateLiteral") {
-    return _compiler.types.templateLiteral(parsed.quasis, parsed.expressions);
-  }
-
-  return ensureParseError(file, parsed, sourceStart, sourceEnd);
-}
-
-// parseTypeArgs
-function parseTypeArgs(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(file, `_<${str}>`, sourceStart, sourceEnd, 2);
-
-  if (parsed.type === "TSInstantiationExpression") {
-    // typeArguments is Flow only (not TS), we need to use typeParameters
-    return parsed.typeParameters;
-  }
-
-  return [ensureParseError(file, parsed, sourceStart, sourceEnd)];
-}
-
-// parseTypeParams
-function parseTypeParams(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(
-    file,
-    `<${str}>()=>{}`,
-    sourceStart,
-    sourceEnd,
-    1
-  );
-
-  if (parsed.type === "ArrowFunctionExpression") {
-    return parsed.typeParameters;
-  }
-
-  return [ensureParseError(file, parsed, sourceStart, sourceEnd)];
-}
-
-// parseVar
-function parseVar(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(
-    file,
-    `(${str})=>{}`,
-    sourceStart,
-    sourceEnd,
-    1
-  );
-
-  if (parsed.type === "ArrowFunctionExpression" && parsed.params.length === 1) {
-    return parsed.params[0];
-  }
-
-  return ensureParseError(file, parsed, sourceStart, sourceEnd);
-}
+<?xml version="1.0" encoding="UTF-8" ?>
+<rss version="2.0" xmlns:media="http://search.yahoo.com/mrss/">
+
+  <channel>
+    <title>paper clover</title>
+    <description>art</description>
+    <link>https://paperclover.net</link>
+    <item>
+      <title>clo's files & cotyledon</title>
+      <link>https://paperclover.net/file</link>
+      <description>
+        i rewrote the file viewer to be nicer to use. as well as uploading about
+        10gb of old content. there's over a dozen hours worth of shit to explore.
+      </description>
+      <pubDate>Wed, 30 Apr 2025 00:00:00 -0000</pubDate>
+      <guid>https://paperclover.net/file</guid>
+    </item>
+    <item>
+      <title>waterfalls and gender identity update</title>
+      <link>https://paperclover.net/waterfalls</link>
+      <description>
+        this is a song about identity, jealousness, and the seasons. more
+        specifically, it's a personal account of how it felt realizing i
+        was a transgender woman. enjoy.
+      </description>
+      <pubDate>Wed, 01 Jan 2025 00:00:00 -0000</pubDate>
+      <guid>https://paperclover.net/waterfalls</guid>
+
+      <enclosure url="https://paperclover.net/file/2025/waterfalls/waterfalls.mp4"
+        type="video/mp4"/>
+      <media:content url="https://paperclover.net/file/2025/waterfalls/waterfalls.mp4"
+        type="video/mp4"
+        width="1920"
+        height="1080">
+      </media:content>
+      <media:thumbnail url="https://paperclover.net/file/2025/waterfalls/thumbnail.jpeg"
+        width="1920"
+        height="1080"
+      />
+    </item>
+  </channel>
+</rss>
@@ -1,30 +1,66 @@
-// __esModule
-true
-
-// getTagDefForTagName
-function getTagDefForTagName(file, tagName) {
-  const tagDef = getTaglibLookup(file).getTag(tagName);
-
-  if (tagDef) {
-    let seen = file.metadata.marko[SEEN_TAGS_KEY];
-    if (!seen) {
-      seen = file.metadata.marko[SEEN_TAGS_KEY] = new Set();
-    }
-
-    if (!seen.has(tagDef)) {
-      seen.add(tagName);
-      const { filePath } = tagDef;
-      const len = filePath.length;
-
-      if (filePath[len - 14] === "m" && filePath.endsWith("marko-tag.json")) {
-        file.metadata.marko.watchFiles.push(filePath);
-      }
-    }
-  }
-  return tagDef;
-}
-
-// getTaglibLookup
-function getTaglibLookup(file) {
-  return file.___taglibLookup;
-}
+<html lang="en">
+  <head>
+    <title>paper clover</title>
+  </head>
+  <body bgcolor="black" style="word-wrap: initial">
+    <main style="display: flex; flex-direction: column; justify-content: center; align-items: center; height: 100vh">
+      <div>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/waterfalls"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#FFB5E8">water</font><font color="#FFE3F6">falls</font>
+          </a>
+        </p>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/q+a"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#97E5B9">questions</font>
+            <font color="#D4F5E2">and answers</font>
+          </a>
+        </p>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/file"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#B28DFF">file</font>
+            <font color="#DDC6FF">browser</font>
+          </a>
+        </p>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/discord"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#5865F2">dis</font><font color="#7289DA">cord</font>
+          </a>
+        </p>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/github"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#ffffff">git</font><font color="#959DA5">hub</font>
+          </a>
+        </p>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/rss.xml"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#F26522">rss</font>
+            <font color="#FF8147">feed</font>
+          </a>
+        </p>
+        <h1 style="margin: -1.5rem 0 3rem 0; font-size: 7rem; font-weight: 400; font-family: times">
+          <font color="#B8E1FF">paper</font>
+          <font color="#E8F4FF">clover</font>
+        </h1>
+      </div>
+    </main>
+  </body>
+</html>