Compare commits

..

35 commits

Author SHA1 Message Date
clo
9795249ab4 Update run.js 2025-07-15 13:45:19 -07:00
cb12014ecb tidy 2025-07-10 00:48:39 -07:00
8a3a36f0c2 sot: fix deployment 2025-07-09 23:45:21 -07:00
c5db92203c use port from env in servers 2025-07-09 23:22:46 -07:00
47215df902 sot: fix missing mime.txt 2025-07-09 22:45:20 -07:00
8c72184d19 setup source of truth server 2025-07-09 22:34:07 -07:00
caf4da47e0 add "source of truth" server to replace the old cache url 2025-07-08 23:10:41 -07:00
7ef08faaec finish scan3 2025-07-08 20:48:30 -07:00
4c2a4f7578 start the markdown parser 2025-07-08 01:09:55 -07:00
ea5f2bc325 format 2025-07-07 20:58:02 -07:00
f1b1c650ce initial run of scan3 on sandwich 2025-07-07 09:42:04 -07:00
502786b689 actually run ffmpeg fr fr 2025-07-03 10:34:19 -07:00
2320091125 move discovered ffmpeg presets 2025-07-03 01:22:59 -07:00
8d1dc0d825 start av1 encoding ??? 2025-06-27 22:35:03 -07:00
4f89374ee0 stuff for file view 2025-06-27 19:40:19 -07:00
71a072b0be file viewer work 2025-06-22 14:38:36 -07:00
a367dfdb29 get clo file viewer running 2025-06-21 16:04:57 -07:00
c7dfbe1090 throw in the file viewer 2025-06-15 23:42:10 -07:00
7f5011bace finish q+a 2025-06-15 13:11:21 -07:00
db244583d7 work on porting paperclover.net and also some tests 2025-06-15 11:35:28 -07:00
c5113954a8 experiment: streaming suspense implementation 2025-06-15 01:26:38 -07:00
clo
50d245569c Update readme.md 2025-06-13 15:41:55 -07:00
a7220a7e74 update readme 2025-06-13 00:29:30 -07:00
a41569983f incremental sitegen dev server! 2025-06-13 00:13:22 -07:00
d5ef829f01 fine grained incremental rebuilding 2025-06-11 00:17:58 -07:00
15a4600c48 clean up watching 2025-06-10 22:29:12 -07:00
925366e79e add a file watcher, live rebuild.
this is only verified functional on windows 7
2025-06-10 20:06:32 -07:00
c8b5e91251 almost implement views 2025-06-10 01:13:59 -07:00
a1d17a5d61 stuff 2025-06-09 21:13:51 -07:00
399ccec226 incremental generator 2025-06-09 00:12:41 -07:00
92ddecc37e more organize 2025-06-08 17:31:03 -07:00
2767bf4455 add readme 2025-06-08 17:00:07 -07:00
0c5db556f1 primative backend support 2025-06-08 15:12:04 -07:00
46a67453a1 add content type library 2025-06-08 12:38:25 -07:00
7242c6eb89 fix all type errors 2025-06-07 17:01:34 -07:00
171 changed files with 67372 additions and 2936 deletions

4
.dockerignore Normal file
View file

@ -0,0 +1,4 @@
.clover
.env
node_modules

1
.gitignore vendored
View file

@ -1,3 +1,4 @@
.clover
.env
node_modules

View file

@ -1,6 +1,6 @@
{
"lint": {
"exclude": ["framework/meta"], // OLD
"exclude": ["src"], // OLD
"rules": {
"exclude": [
"no-explicit-any" // TODO

61
flake.lock Normal file
View file

@ -0,0 +1,61 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1751271578,
"narHash": "sha256-P/SQmKDu06x8yv7i0s8bvnnuJYkxVGBWLWHaU+tt4YY=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "3016b4b15d13f3089db8a41ef937b13a9e33a8df",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs",
"utils": "utils"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

29
flake.nix Normal file
View file

@ -0,0 +1,29 @@
# Nix flake providing the development environment for this project.
{
  inputs = {
    nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
    utils.url = "github:numtide/flake-utils";
  };
  outputs =
    { nixpkgs, utils, ... }:
    # Generate a devShell for every default system (linux/darwin, x86/arm).
    utils.lib.eachDefaultSystem (
      system:
      let
        pkgs = nixpkgs.legacyPackages.${system};
      in
      {
        # `nix develop` shell with the tools the build expects on PATH.
        devShells.default = pkgs.mkShell {
          buildInputs = [
            pkgs.nodejs_24 # runtime
            pkgs.deno # formatter
            # ffmpeg built with the codecs used here: opus audio, svt-av1
            # video, jpeg-xl and webp images.
            (pkgs.ffmpeg.override {
              withOpus = true;
              withSvtav1 = true;
              withJxl = true;
              withWebp = true;
            })
            # NOTE(review): presumably used by deployment/sync scripts — confirm.
            pkgs.rsync
          ];
        };
      }
    );
}

View file

@ -0,0 +1,42 @@
// Standalone HTTP entry point: loads a built server bundle and serves its
// Hono-style `fetch` handler via @hono/node-server.
import "@paperclover/console/inject";
import "#debug";
// The bundle to serve is chosen at build time via the esbuild `--define`
// documented below, with a CLI argument fallback for manual runs.
const app = require(globalThis.CLOVER_SERVER_ENTRY ?? process.argv[2]);
const protocol = "http";
const server = serve({
  fetch: app.default.fetch,
  // PORT env var overrides the default development port.
  port: Number(process.env.PORT ?? 3000),
}, ({ address, port }) => {
  // "::" (all interfaces) is not a routable hostname; print "::1" instead.
  if (address === "::") address = "::1";
  console.info(url.format({
    protocol,
    hostname: address,
    port,
  }));
});
// Ctrl+C: stop listening and exit immediately.
// NOTE(review): unlike SIGTERM below, this does not wait for close() to
// complete — confirm the asymmetry is intentional.
process.on("SIGINT", () => {
  server.close();
  process.exit(0);
});
// SIGTERM (e.g. from a process supervisor): close gracefully and report
// a failed shutdown through the exit code.
process.on("SIGTERM", () => {
  server.close((err) => {
    if (err) {
      console.error(err);
      process.exit(1);
    }
    process.exit(0);
  });
});
declare global {
  /* Control via --define:globalThis.CLOVER_SERVER_ENTRY="..." */
  var CLOVER_SERVER_ENTRY: string;
}
// Imports are hoisted; this codebase keeps them at the bottom of the file.
import url from "node:url";
import { serve } from "@hono/node-server";
import process from "node:process";

View file

@ -0,0 +1,4 @@
// "passthru" server entry: re-export the backend app unchanged so an
// external runtime can serve its `fetch` handler directly.
import "@paperclover/console/inject";
export default app;
// Imports are hoisted, so `app` is initialized before the export is read.
import app from "#backend";

View file

@ -1,9 +1,4 @@
// This file implements client-side bundling, mostly wrapping esbuild.
import process from "node:process";
const plugins: esbuild.Plugin[] = [
// There are currently no plugins needed by 'paperclover.net'
];
export async function bundleClientJavaScript(
referencedScripts: string[],
extraPublicScripts: string[],
@ -12,7 +7,7 @@ export async function bundleClientJavaScript(
) {
const entryPoints = [
...new Set([
...referencedScripts,
...referencedScripts.map((file) => path.resolve(hot.projectSrc, file)),
...extraPublicScripts,
]),
];
@ -22,23 +17,39 @@ export async function bundleClientJavaScript(
if (invalidFiles.length > 0) {
const cwd = process.cwd();
throw new Error(
"All client-side scripts should be named like '.client.ts'. Exceptions: " +
invalidFiles.map((x) => path.join(cwd, x)).join(","),
"All client-side scripts should be named like '.client.ts'. Exceptions: \n" +
invalidFiles.map((x) => path.join(cwd, x)).join("\n"),
);
}
const clientPlugins: esbuild.Plugin[] = [
projectRelativeResolution(),
markoViaBuildCache(incr),
];
const bundle = await esbuild.build({
assetNames: "/asset/[hash]",
bundle: true,
chunkNames: "/js/c.[hash]",
entryNames: "/js/[name]",
assetNames: "/asset/[hash]",
entryPoints,
format: "esm",
jsx: "automatic",
jsxDev: dev,
jsxImportSource: "#ssr",
logLevel: "silent",
metafile: true,
minify: !dev,
outdir: "/out!",
plugins,
splitting: true,
outdir: "out!",
plugins: clientPlugins,
write: false,
define: {
"ASSERT": "console.assert",
MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
},
}).catch((err: any) => {
err.message = `Client ${err.message}`;
throw err;
});
if (bundle.errors.length || bundle.warnings.length) {
throw new AggregateError(
@ -46,40 +57,264 @@ export async function bundleClientJavaScript(
"JS bundle failed",
);
}
incr.invalidate("bundle-script");
const publicScriptRoutes = extraPublicScripts.map((file) =>
path.basename(file).replace(/\.client\.[tj]sx?/, "")
"/js/" +
path.relative(hot.projectSrc, file).replaceAll("\\", "/").replace(
/\.client\.[tj]sx?/,
".js",
)
);
const promises: Promise<unknown>[] = [];
// TODO: add a shared build hash to entrypoints, derived from all the chunk hashes.
for (const file of bundle.outputFiles) {
const { metafile, outputFiles } = bundle;
const promises: Promise<void>[] = [];
for (const file of outputFiles) {
const { text } = file;
let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
const text = file.text;
const { inputs } = UNWRAP(metafile.outputs["out!" + route]);
const sources = Object.keys(inputs)
.filter((x) => !x.startsWith("<define:"));
// Register non-chunks as script entries.
const chunk = route.startsWith("/js/c.");
if (!chunk) {
route = route.replace(".client.js", ".js");
const key = hot.getScriptId(path.resolve(sources[sources.length - 1]));
route = "/js/" + key.replace(/\.client\.tsx?/, ".js");
incr.put({
srcId: "bundle-script",
type: "script",
key: route.slice("/js/".length, -".js".length),
sources,
kind: "script",
key,
value: text,
});
}
// Register chunks and public scripts as assets.
if (chunk || publicScriptRoutes.includes(route)) {
promises.push(incr.putAsset({
srcId: "bundle-script",
sources,
key: route,
body: text,
}));
}
}
if (promises.length > 0) {
await Promise.all(promises);
}
await Promise.all(promises);
}
export type ServerPlatform = "node" | "passthru";
/**
 * Bundles the backend entry point for `platform` with esbuild and stores the
 * result in the incremental cache as a "backendBundle" artifact. Views are
 * compiled in through a generated virtual "$views" module; their css/script
 * payloads are left as magic-word placeholders for
 * `finalizeServerJavaScript` to patch in later.
 */
export async function bundleServerJavaScript(
  incr: Incremental,
  platform: ServerPlatform = "node",
) {
  // Skip when a cached bundle for this platform already exists.
  if (incr.hasArtifact("backendBundle", platform)) return;
  // Unique token that survives bundling; `finalizeServerJavaScript` later
  // replaces `<magicWord>[i]` expressions with the real style/script data.
  const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");
  // Source text of the virtual "$views" module: import every view and expose
  // `views`/`scripts`, with css/script payloads as magic-word indices.
  const viewSource = [
    ...Array.from(
      incr.out.viewMetadata,
      ([, view], i) => `import * as view${i} from ${JSON.stringify(view.file)}`,
    ),
    `const styles = ${magicWord}[-2]`,
    `export const scripts = ${magicWord}[-1]`,
    "export const views = {",
    ...Array.from(incr.out.viewMetadata, ([key, view], i) =>
      [
        ` ${JSON.stringify(key)}: {`,
        ` component: view${i}.default,`,
        // ` meta: ${
        //   view.staticMeta ? JSON.stringify(view.staticMeta) : `view${i}.meta`
        // },`,
        ` meta: view${i}.meta,`,
        ` layout: ${view.hasLayout ? `view${i}.layout?.default` : "null"},`,
        ` inlineCss: styles[${magicWord}[${i}]]`,
        ` },`,
      ].join("\n")),
    "}",
  ].join("\n");
  // -- plugins --
  const serverPlugins: esbuild.Plugin[] = [
    virtualFiles({
      "$views": viewSource,
    }),
    projectRelativeResolution(),
    markoViaBuildCache(incr),
    {
      // Rewrite references to client-side scripts via hot.resolveClientRefs.
      name: "replace client references",
      setup(b) {
        b.onLoad({ filter: /\.tsx?$/ }, async ({ path: file }) => ({
          contents:
            hot.resolveClientRefs(await fs.readFile(file, "utf-8"), file).code,
          loader: path.extname(file).slice(1) as esbuild.Loader,
        }));
      },
    },
    {
      // CSS is handled separately (inlined into views); drop css imports
      // from the server build by resolving them to an empty module.
      name: "mark css external",
      setup(b) {
        b.onResolve(
          { filter: /\.css$/ },
          () => ({ path: ".", namespace: "dropped" }),
        );
        b.onLoad(
          { filter: /./, namespace: "dropped" },
          () => ({ contents: "" }),
        );
      },
    },
  ];
  const pkg = await fs.readJson("package.json") as {
    dependencies: Record<string, string>;
  };
  const { metafile, outputFiles } = await esbuild.build({
    bundle: true,
    chunkNames: "c.[hash]",
    entryNames: "server",
    entryPoints: [
      path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"),
    ],
    platform: "node",
    format: "esm",
    minify: false,
    outdir: "out!",
    plugins: serverPlugins,
    splitting: true,
    logLevel: "silent",
    write: false,
    metafile: true,
    jsx: "automatic",
    jsxImportSource: "#ssr",
    jsxDev: false,
    define: {
      MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
    },
    // Keep real npm dependencies external; bundle workspace packages.
    external: Object.keys(pkg.dependencies)
      .filter((x) => !x.startsWith("@paperclover")),
  });
  const files: Record<string, Buffer> = {};
  let fileWithMagicWord: string | null = null;
  for (const output of outputFiles) {
    const basename = output.path.replace(/^.*?!/, "");
    const key = "out!" + basename.replaceAll("\\", "/");
    // If this contains the generated "$views" file, then
    // mark this file as the one for replacement. Because
    // `splitting` is `true`, esbuild will not emit this
    // file in more than one chunk.
    if (metafile.outputs[key].inputs["framework/lib/view.ts"]) {
      fileWithMagicWord = basename;
    }
    files[basename] = Buffer.from(output.contents);
  }
  incr.put({
    kind: "backendBundle",
    key: platform,
    value: {
      magicWord,
      files,
      fileWithMagicWord,
    },
    // Dependency sources, excluding esbuild-internal and virtual inputs.
    sources: Object.keys(metafile.inputs).filter((x) =>
      !x.includes("<define:") &&
      !x.startsWith("vfs:") &&
      !x.startsWith("dropped:") &&
      !x.includes("node_modules")
    ),
  });
}
/**
 * Takes the cached "backendBundle" artifact for `platform` and substitutes
 * the magic-word placeholders with the final inline css and client script
 * data, storing the patched bundle as a "backendReplace" artifact.
 */
export async function finalizeServerJavaScript(
  incr: Incremental,
  platform: ServerPlatform,
) {
  if (incr.hasArtifact("backendReplace", platform)) return;
  const {
    files,
    fileWithMagicWord,
    magicWord,
  } = UNWRAP(incr.getArtifact("backendBundle", platform));
  // No chunk referenced the "$views" module; nothing to replace.
  if (!fileWithMagicWord) return;
  // Only the reachable resources need to be inserted into the bundle.
  const viewScriptsList = new Set(
    Array.from(incr.out.viewMetadata.values())
      .flatMap((view) => view.clientRefs),
  );
  const viewStyleKeys = Array.from(incr.out.viewMetadata.values())
    .map((view) => css.styleKey(view.cssImports, view.theme));
  const viewCssBundles = viewStyleKeys
    .map((key) => UNWRAP(incr.out.style.get(key), "Style key: " + key));
  // Deduplicate styles
  const styleList = Array.from(new Set(viewCssBundles));
  // Replace the magic word
  let text = files[fileWithMagicWord].toString("utf-8");
  text = text.replace(
    new RegExp(magicWord + "\\[(-?\\d+)\\]", "gs"),
    (_, i) => {
      i = Number(i);
      // Inline the styling data
      if (i === -2) {
        return JSON.stringify(styleList.map((cssText) => cssText));
      }
      // Inline the script data
      if (i === -1) {
        return JSON.stringify(Object.fromEntries(incr.out.script));
      }
      // Reference an index into `styleList`
      return `${styleList.indexOf(viewCssBundles[i])}`;
    },
  );
  incr.put({
    kind: "backendReplace",
    key: platform,
    sources: [
      // Backend input code (includes view code)
      ...incr.sourcesFor("backendBundle", platform),
      // Script
      ...Array.from(viewScriptsList)
        .flatMap((key) => incr.sourcesFor("script", hot.getScriptId(key))),
      // Style
      ...viewStyleKeys.flatMap((key) => incr.sourcesFor("style", key)),
    ],
    value: Buffer.from(text),
  });
}
/**
 * esbuild plugin that loads `.marko` files from the incremental build cache
 * (`incr.out.serverMarko`) instead of compiling them; the cache is expected
 * to be warm by the time bundling runs.
 */
function markoViaBuildCache(incr: Incremental): esbuild.Plugin {
  return {
    name: "marko via build cache",
    setup(b) {
      b.onLoad(
        { filter: /\.marko$/ },
        async ({ path: file }) => {
          // Cache keys are project-relative with forward slashes.
          const key = path.relative(hot.projectRoot, file)
            .replaceAll("\\", "/");
          const cacheEntry = incr.out.serverMarko.get(key);
          if (!cacheEntry) {
            // Distinguish "missing on disk" from "not compiled yet".
            if (!fs.existsSync(file)) {
              console.log(`File does not exist: ${file}`);
            }
            throw new Error("Marko file not in cache: " + file);
          }
          return ({
            // Cached marko output is TypeScript source.
            loader: "ts",
            contents: cacheEntry.src,
            resolveDir: path.dirname(file),
          });
        },
      );
    },
  };
}
import * as path from "node:path";
import * as esbuild from "esbuild";
import * as path from "node:path";
import process from "node:process";
import * as hot from "./hot.ts";
import { projectRelativeResolution, virtualFiles } from "./esbuild-support.ts";
import { Incremental } from "./incremental.ts";
import * as css from "./css.ts";
import * as fs from "#sitegen/fs";
import * as mime from "#sitegen/mime";

View file

@ -5,6 +5,12 @@ export interface Theme {
h1?: string;
}
export const defaultTheme: Theme = {
bg: "#ffffff",
fg: "#050505",
primary: "#2e7dab",
};
export function stringifyTheme(theme: Theme) {
return [
":root {",
@ -34,28 +40,34 @@ export function preprocess(css: string, theme: Theme): string {
);
}
export interface Output {
text: string;
sources: string[];
}
/**
 * Derives a stable cache key for a css bundle from its import list and
 * theme. Paths are normalized so the key is identical across platforms
 * and working directories.
 */
export function styleKey(
  cssImports: string[],
  theme: Theme,
) {
  const normalized = cssImports
    .map((file) => {
      const rel = path.isAbsolute(file)
        ? path.relative(hot.projectSrc, file)
        : file;
      return rel.replaceAll("\\", "/");
    })
    .sort();
  // Fold theme entries in so differing themes never share a bundle.
  const themePart = Object.entries(theme)
    .map(([k, v]) => `${k}=${v}`)
    .join(",");
  return normalized.join(":") + ":" + themePart;
}
export async function bundleCssFiles(
cssImports: string[],
theme: Theme,
dev: boolean = false,
): Promise<string> {
): Promise<Output> {
cssImports = cssImports.map((file) => path.resolve(hot.projectSrc, file));
const plugin = {
name: "clover",
name: "clover css",
setup(b) {
b.onResolve(
{ filter: /^\$input\$$/ },
() => ({ path: ".", namespace: "input" }),
);
b.onLoad(
{ filter: /./, namespace: "input" },
() => ({
loader: "css",
contents:
cssImports.map((path) => `@import url(${JSON.stringify(path)});`)
.join("\n") + stringifyTheme(theme),
resolveDir: ".",
}),
);
b.onLoad(
{ filter: /\.css$/ },
async ({ path: file }) => ({
@ -64,17 +76,29 @@ export async function bundleCssFiles(
}),
);
},
} satisfies Plugin;
} satisfies esbuild.Plugin;
const build = await esbuild.build({
bundle: true,
entryPoints: ["$input$"],
write: false,
external: ["*.woff2"],
target: ["ie11"],
plugins: [plugin],
external: ["*.woff2", "*.ttf", "*.png", "*.jpeg"],
metafile: true,
minify: !dev,
plugins: [
virtualFiles({
"$input$": {
contents: cssImports.map((path) =>
`@import url(${JSON.stringify(path)});`
)
.join("\n") + stringifyTheme(theme),
loader: "css",
},
}),
plugin,
],
target: ["ie11"],
write: false,
});
const { errors, warnings, outputFiles } = build;
const { errors, warnings, outputFiles, metafile } = build;
if (errors.length > 0) {
throw new AggregateError(errors, "CSS Build Failed");
}
@ -82,9 +106,15 @@ export async function bundleCssFiles(
throw new AggregateError(warnings, "CSS Build Failed");
}
if (outputFiles.length > 1) throw new Error("Too many output files");
return outputFiles[0].text;
return {
text: outputFiles[0].text,
sources: Object.keys(metafile.outputs["$input$.css"].inputs)
.filter((x) => !x.startsWith("vfs:")),
};
}
import type { Plugin } from "esbuild";
import * as esbuild from "esbuild";
import * as fs from "./fs.ts";
import * as fs from "#sitegen/fs";
import * as hot from "./hot.ts";
import * as path from "node:path";
import { virtualFiles } from "./esbuild-support.ts";

17
framework/debug.safe.ts Normal file
View file

@ -0,0 +1,17 @@
// Global runtime helpers, installed on globalThis for use anywhere.
// UNWRAP returns its argument unchanged, throwing if it is null/undefined;
// ASSERT throws when its condition is falsy. Extra arguments are formatted
// into the error message with util.format.
globalThis.UNWRAP = (value, ...message) => {
  if (value != null) return value;
  const text = message.length > 0
    ? util.format(...message)
    : "UNWRAP(" + value + ")";
  throw new Error(text);
};
globalThis.ASSERT = (condition, ...message) => {
  if (condition) return;
  throw new Error(
    message.length > 0 ? util.format(...message) : "Assertion Failed",
  );
};
import * as util from "node:util";

View file

@ -1,2 +1,4 @@
declare function UNWRAP<T>(value: T | null | undefined): T;
declare function ASSERT(value: unknown, ...log: unknown[]): asserts value;
declare function UNWRAP<T>(value: T | null | undefined, ...log: unknown[]): T;
declare function ASSERT(value: unknown, ...log: unknown[]): asserts value;
type Timer = ReturnType<typeof setTimeout>;

View file

@ -1,46 +1,54 @@
export const Fragment = ({ children }: { children: engine.Node[] }) => children;
export function jsx(
type: string | engine.Component,
props: Record<string, unknown>,
): engine.Element {
if (typeof type !== "function" && typeof type !== "string") {
throw new Error("Invalid component type: " + engine.inspect(type));
}
return [engine.kElement, type, props];
}
export function jsxDEV(
type: string | engine.Component,
props: Record<string, unknown>,
// Unused with the clover engine
_key: string,
// Unused with the clover engine
_isStaticChildren: boolean,
// Unused with the clover engine
_source: unknown,
): engine.Element {
if (typeof type !== "function" && typeof type !== "string") {
throw new Error("Invalid component type: " + engine.inspect(type));
}
return [engine.kElement, type, props];
}
// jsxs
export { jsx as jsxs };
declare global {
namespace JSX {
interface IntrinsicElements {
[name: string]: Record<string, unknown>;
}
interface ElementChildrenAttribute {
children: {};
}
type Element = engine.Node;
type ElementType = keyof IntrinsicElements | engine.Component;
type ElementClass = ReturnType<engine.Component>;
}
}
import * as engine from "./ssr.ts";
// JSX runtime for the Clover SSR engine (used via jsxImportSource "#ssr").
// Fragments pass their children straight through; the engine handles arrays.
export const Fragment = ({ children }: { children: engine.Node[] }) => children;
/** Production JSX factory: builds an engine element triple. */
export function jsx(
  type: string | engine.Component,
  props: Record<string, unknown>,
): engine.Element {
  if (typeof type !== "function" && typeof type !== "string") {
    throw new Error("Invalid component type: " + engine.inspect(type));
  }
  return [engine.kElement, type, props];
}
/**
 * Development JSX factory: like `jsx`, but receives the call-site source
 * location, which is embedded in the element and surfaced in errors.
 */
export function jsxDEV(
  type: string | engine.Component,
  props: Record<string, unknown>,
  // Unused with the clover engine
  _key: string,
  // Unused with the clover engine
  _isStaticChildren: boolean,
  source: engine.SrcLoc,
): engine.Element {
  const { fileName, lineNumber, columnNumber } = source;
  // Assert the component type is valid to render.
  if (typeof type !== "function" && typeof type !== "string") {
    throw new Error(
      `Invalid component type at ${fileName}:${lineNumber}:${columnNumber}: ` +
        engine.inspect(type) +
        ". Clover SSR element must be a function or string",
    );
  }
  // Construct an `ssr.Element`
  return [engine.kElement, type, props, "", source];
}
// jsxs
export { jsx as jsxs };
declare global {
  namespace JSX {
    interface IntrinsicElements {
      [name: string]: Record<string, unknown>;
    }
    interface ElementChildrenAttribute {
      // NOTE(review): `Node` here resolves to the global (DOM) Node type,
      // not `engine.Node` — confirm this is intended.
      children: Node;
    }
    type Element = engine.Element;
    type ElementType = keyof IntrinsicElements | engine.Component;
    type ElementClass = ReturnType<engine.Component>;
  }
}
import * as engine from "./ssr.ts";

View file

@ -1,125 +1,147 @@
// This file is used to integrate Marko into the Clover Engine and Sitegen
// To use, replace the "marko/html" import with this file.
export * from "#marko/html";
interface BodyContentObject {
[x: PropertyKey]: unknown;
content: ServerRenderer;
}
export const createTemplate = (
templateId: string,
renderer: ServerRenderer,
) => {
const { render } = marko.createTemplate(templateId, renderer);
function wrap(props: Record<string, unknown>, n: number) {
// Marko components
const cloverAsyncMarker = { isAsync: false };
let r: engine.Render | undefined = undefined;
try {
r = engine.getCurrentRender();
} catch {}
// Support using Marko outside of Clover SSR
if (r) {
const markoResult = render.call(renderer, {
...props,
$global: { clover: r, cloverAsyncMarker },
});
if (cloverAsyncMarker.isAsync) {
return markoResult.then(engine.html);
}
const rr = markoResult.toString();
return engine.html(rr);
} else {
return renderer(props, n);
}
}
wrap.render = render;
wrap.unwrapped = renderer;
return wrap;
};
export const dynamicTag = (
scopeId: number,
accessor: Accessor,
tag: unknown | string | ServerRenderer | BodyContentObject,
inputOrArgs: unknown,
content?: (() => void) | 0,
inputIsArgs?: 1,
serializeReason?: 1 | 0,
) => {
marko.dynamicTag;
if (typeof tag === "function") {
clover: {
const unwrapped = (tag as any).unwrapped;
if (unwrapped) {
tag = unwrapped;
break clover;
}
let r: engine.Render;
try {
r = engine.getCurrentRender();
if (!r) throw 0;
} catch {
r = marko.$global().clover as engine.Render;
}
if (!r) throw new Error("No Clover Render Active");
const subRender = engine.initRender(r.async !== -1, r.addon);
const resolved = engine.resolveNode(subRender, [
engine.kElement,
tag,
inputOrArgs,
]);
if (subRender.async > 0) {
const marker = marko.$global().cloverAsyncMarker;
marker.isAsync = true;
// Wait for async work to finish
const { resolve, reject, promise } = Promise.withResolvers<string>();
subRender.asyncDone = () => {
const rejections = subRender.rejections;
if (!rejections) return resolve(engine.renderNode(resolved));
(r.rejections ??= []).push(...rejections);
return reject(new Error("Render had errors"));
};
marko.fork(
scopeId,
accessor,
promise,
(string: string) => marko.write(string),
0,
);
} else {
marko.write(engine.renderNode(resolved));
}
return;
}
}
return marko.dynamicTag(
scopeId,
accessor,
tag,
inputOrArgs,
content,
inputIsArgs,
serializeReason,
);
};
export function fork(
scopeId: string,
accessor: Accessor,
promise: Promise<unknown>,
callback: (data: unknown) => void,
serializeMarker?: 0 | 1,
) {
const marker = marko.$global().cloverAsyncMarker;
marker.isAsync = true;
marko.fork(scopeId, accessor, promise, callback, serializeMarker);
}
import * as engine from "./ssr.ts";
import type { ServerRenderer } from "marko/html/template";
import { type Accessor } from "marko/common/types";
import * as marko from "#marko/html";
// This file is used to integrate Marko into the Clover Engine and Sitegen
// To use, replace the "marko/html" import with this file.
export * from "#marko/html";
interface BodyContentObject {
[x: PropertyKey]: unknown;
content: ServerRenderer;
}
export const createTemplate = (
templateId: string,
renderer: ServerRenderer,
) => {
const { render } = marko.createTemplate(templateId, renderer);
function wrap(props: Record<string, unknown>, n: number) {
// Marko Custom Tags
const cloverAsyncMarker = { isAsync: false };
let r: engine.Render | undefined = undefined;
try {
r = engine.getCurrentRender();
} catch {}
// Support using Marko outside of Clover SSR
if (r) {
engine.setCurrentRender(null);
const markoResult = render.call(renderer, {
...props,
$global: { clover: r, cloverAsyncMarker },
});
if (cloverAsyncMarker.isAsync) {
return markoResult.then(engine.html);
}
const rr = markoResult.toString();
return engine.html(rr);
} else {
return renderer(props, n);
}
}
wrap.render = render;
wrap.unwrapped = renderer;
return wrap;
};
export const dynamicTag = (
scopeId: number,
accessor: Accessor,
tag: unknown | string | ServerRenderer | BodyContentObject,
inputOrArgs: unknown,
content?: (() => void) | 0,
inputIsArgs?: 1,
serializeReason?: 1 | 0,
) => {
if (typeof tag === "function") {
clover: {
const unwrapped = (tag as any).unwrapped;
if (unwrapped) {
tag = unwrapped;
break clover;
}
let r: engine.Render;
try {
r = engine.getCurrentRender();
if (!r) throw 0;
} catch {
r = marko.$global().clover as engine.Render;
}
if (!r) throw new Error("No Clover Render Active");
const subRender = engine.initRender(r.async !== -1, r.addon);
const resolved = engine.resolveNode(subRender, [
engine.kElement,
tag,
inputOrArgs,
]);
if (subRender.async > 0) {
const marker = marko.$global().cloverAsyncMarker as Async;
marker.isAsync = true;
// Wait for async work to finish
const { resolve, reject, promise } = Promise.withResolvers<string>();
subRender.asyncDone = () => {
const rejections = subRender.rejections;
if (!rejections) return resolve(engine.renderNode(resolved));
(r.rejections ??= []).push(...rejections);
return reject(new Error("Render had errors"));
};
marko.fork(
scopeId,
accessor,
promise,
(string: string) => marko.write(string),
0,
);
} else {
marko.write(engine.renderNode(resolved));
}
return;
}
}
return marko.dynamicTag(
scopeId,
accessor,
tag,
inputOrArgs,
content,
inputIsArgs,
serializeReason,
);
};
export function fork(
scopeId: number,
accessor: Accessor,
promise: Promise<unknown>,
callback: (data: unknown) => void,
serializeMarker?: 0 | 1,
) {
const marker = marko.$global().cloverAsyncMarker as Async;
marker.isAsync = true;
marko.fork(scopeId, accessor, promise, callback, serializeMarker);
}
/**
 * Wraps marko's escapeXML with a guard against values whose stringification
 * is meaningless to the end user: null/undefined, and objects that still
 * have the default `Object.prototype.toString` (which would print
 * "[object Object]").
 */
export function escapeXML(input: unknown) {
  // The rationale of this check is that the default toString method
  // creating `[object Object]` is universally useless to any end user.
  if (
    input == null ||
    (typeof input === "object" && input &&
      // only block this if it's the default `toString`
      input.toString === Object.prototype.toString)
  ) {
    throw new Error(
      `Unexpected value in template placeholder: '` +
        engine.inspect(input) + "'. " +
        `To emit a literal '${input}', use \${String(value)}`,
    );
  }
  return marko.escapeXML(input);
}
// Marker object shared through Marko's `$global` to record that a render
// produced asynchronous work.
interface Async {
  isAsync: boolean;
}
import * as engine from "./ssr.ts";
import type { ServerRenderer } from "marko/html/template";
import { type Accessor } from "marko/common/types";
import * as marko from "#marko/html";

View file

@ -0,0 +1,41 @@
// Unit tests for the SSR engine's synchronous rendering path.
import { test } from "node:test";
import * as engine from "./ssr.ts";
test("sanity", (t) => t.assert.equal(engine.ssrSync("gm <3").text, "gm &lt;3"));
// Numbers render unescaped; booleans/null/undefined render as nothing.
test("simple tree", (t) =>
  t.assert.equal(
    engine.ssrSync(
      <main class={["a", "b"]}>
        <h1 style="background-color:red">hello world</h1>
        <p>haha</p>
        {1}|
        {0}|
        {true}|
        {false}|
        {null}|
        {undefined}|
      </main>,
    ).text,
    '<main class="a b"><h1 style=background-color:red>hello world</h1><p>haha</p>1|0|||||</main>',
  ));
// engine.html() injects raw markup; plain strings are entity-escaped.
test("unescaped/escaped html", (t) =>
  t.assert.equal(
    engine.ssrSync(<div>{engine.html("<fuck>")}{"\"&'`<>"}</div>).text,
    "<div><fuck>&quot;&amp;&#x27;&#x60;&lt;&gt;</div>",
  ));
// `class` accepts strings, arrays, objects, and nested mixtures (clsx-style);
// falsy entries are dropped and quoting is omitted when unnecessary.
test("clsx built-in", (t) =>
  t.assert.equal(
    engine.ssrSync(
      <>
        <a class="a" />
        <b class={null} />
        <c class={undefined} />
        <d class={["a", "b", null]} />
        <e class={{ a: true, b: false }} />
        <e
          class={[null, "x", { z: true }, [{ m: true }, null, { v: false }]]}
        />
      </>,
    ).text,
    '<a class=a></a><b></b><c></c><d class="a b"></d><e class=a></e><e class="x z m"></e>',
  ));

View file

@ -1,299 +1,304 @@
// Clover's Rendering Engine is the backbone of her website generator. It
// converts objects and components (functions returning 'Node') into HTML. The
// engine is simple and self-contained, with integrations for JSX and Marko
// (which can interop with each-other) are provided next to this file.
//
// Add-ons to the rendering engine can provide opaque data, And retrieve it
// within component calls with 'getAddonData'. For example, 'sitegen' uses this
// to track needed client scripts without introducing patches to the engine.
type AddonData = Record<string | symbol, unknown>;
export function ssrSync(node: Node): Result;
export function ssrSync<A extends AddonData>(
node: Node,
addon: AddonData,
): Result<A>;
export function ssrSync(node: Node, addon: AddonData = {}) {
const r = initRender(false, addon);
const resolved = resolveNode(r, node);
return { text: renderNode(resolved), addon };
}
export function ssrAsync(node: Node): Promise<Result>;
export function ssrAsync<A extends AddonData>(
node: Node,
addon: AddonData,
): Promise<Result<A>>;
export function ssrAsync(node: Node, addon: AddonData = {}) {
const r = initRender(true, addon);
const resolved = resolveNode(r, node);
if (r.async === 0) {
return Promise.resolve({ text: renderNode(resolved), addon });
}
const { resolve, reject, promise } = Promise.withResolvers<Result>();
r.asyncDone = () => {
const rejections = r.rejections;
if (!rejections) return resolve({ text: renderNode(resolved), addon });
if (rejections.length === 1) return reject(rejections[0]);
return reject(new AggregateError(rejections));
};
return promise;
}
/** Inline HTML into a render without escaping it */
export function html(rawText: string) {
return [kDirectHtml, rawText];
}
interface Result<A extends AddonData = AddonData> {
text: string;
addon: A;
}
export interface Render {
/**
* Set to '-1' if rendering synchronously
* Number of async promises the render is waiting on.
*/
async: number | -1;
asyncDone: null | (() => void);
/** When components reject, those are logged here */
rejections: unknown[] | null;
/** Add-ons to the rendering engine store state here */
addon: AddonData;
}
export const kElement = Symbol("Element");
export const kDirectHtml = Symbol("DirectHtml");
/** Node represents a webpage that can be 'rendered' into HTML. */
export type Node =
| number
| string // Escape HTML
| Node[] // Concat
| Element // Render
| DirectHtml // Insert
| Promise<Node> // Await
// Ignore
| undefined
| null
| boolean;
export type Element = [
tag: typeof kElement,
type: string | Component,
props: Record<string, unknown>,
];
export type DirectHtml = [tag: typeof kDirectHtml, html: string];
/**
* Components must return a value; 'undefined' is prohibited here
* to avoid functions that are missing a return statement.
*/
export type Component = (
props: Record<string, unknown>,
) => Exclude<Node, undefined>;
/**
* Resolution narrows the type 'Node' into 'ResolvedNode'. Async trees are
* marked in the 'Render'. This operation performs everything besides the final
* string concatenation. This function is agnostic across async/sync modes.
*/
/**
 * Resolution narrows the type 'Node' into 'ResolvedNode'. Async trees are
 * marked in the 'Render'. This operation performs everything besides the final
 * string concatenation. This function is agnostic across async/sync modes.
 *
 * Fix: component invocation now clears `currentRender` in a `finally` block.
 * Previously a throwing component left the module-level `currentRender` set,
 * leaking the render into unrelated later calls of `getCurrentRender`.
 */
export function resolveNode(r: Render, node: unknown): ResolvedNode {
  if (!node && node !== 0) return ""; // falsy, non numeric
  if (typeof node !== "object") {
    if (node === true) return ""; // booleans are ignored
    if (typeof node === "string") return escapeHTML(node);
    if (typeof node === "number") return String(node); // no escaping ever
    throw new Error(`Cannot render ${inspect(node)} to HTML`);
  }
  if (node instanceof Promise) {
    if (r.async === -1) {
      throw new Error(`Asynchronous rendering is not supported here.`);
    }
    const placeholder: InsertionPoint = [null];
    r.async += 1;
    node
      .then((result) => void (placeholder[0] = resolveNode(r, result)))
      // Intentionally catching errors in `resolveNode`
      .catch((e) => (r.rejections ??= []).push(e))
      .finally(() => {
        if (--r.async == 0) {
          if (r.asyncDone == null) throw new Error("r.asyncDone == null");
          r.asyncDone();
          r.asyncDone = null;
        }
      });
    // This lie is checked with an assertion in `renderNode`
    return placeholder as [ResolvedNode];
  }
  if (!Array.isArray(node)) {
    throw new Error(`Invalid node type: ${inspect(node)}`);
  }
  const type = node[0];
  if (type === kElement) {
    const { 1: tag, 2: props } = node;
    if (typeof tag === "function") {
      // Expose the active render to the component; always clear it, even
      // when the component throws.
      currentRender = r;
      let result;
      try {
        result = tag(props);
      } finally {
        currentRender = null;
      }
      return resolveNode(r, result);
    }
    if (typeof tag !== "string") throw new Error("Unexpected " + typeof type);
    const children = props?.children;
    if (children) return [kElement, tag, props, resolveNode(r, children)];
    return node;
  }
  if (type === kDirectHtml) return node[1];
  return node.map((elem) => resolveNode(r, elem));
}
/** A Node with all components invoked and all strings pre-escaped. */
export type ResolvedNode =
  | ResolvedNode[] // Concat
  | ResolvedElement // Render
  | string; // Direct HTML
/** Element with a string tag and already-resolved children. */
export type ResolvedElement = [
  tag: typeof kElement,
  type: string,
  props: Record<string, unknown>,
  children: ResolvedNode,
];
/**
 * Async rendering is done by creating an array of one item,
 * which is already a valid 'Node', but the element is written
 * once the data is available. The 'Render' contains a count
 * of how many async jobs are left.
 */
export type InsertionPoint = [null | ResolvedNode];
/**
 * Convert 'ResolvedNode' into HTML text. This operation happens after all
 * async work is settled. The HTML is emitted as concisely as possible.
 */
export function renderNode(node: ResolvedNode): string {
  if (typeof node === "string") return node;
  ASSERT(node, "Unresolved Render Node");
  // Element tuples carry the `kElement` symbol in slot 0.
  if (node[0] === kElement) {
    return renderElement(node as ResolvedElement);
  }
  // Otherwise this is a concat array. Falsy slots (empty strings, unfilled
  // async placeholders) contribute nothing to the output.
  const parts = node as ResolvedNode[];
  let html = "";
  for (const part of parts) {
    if (part) html += renderNode(part);
  }
  return html;
}
/** Serialize one element: open tag, attributes, children, closing tag. */
function renderElement(element: ResolvedElement) {
  const { 1: tag, 2: props, 3: children } = element;
  let out = "<" + tag;
  let needSpace = true;
  for (const prop in props) {
    const value = props[prop];
    // Falsy values (false, 0, "", null, undefined) and event handlers are
    // dropped entirely.
    if (!value || typeof value === "function") continue;
    let attr;
    switch (prop) {
      default:
        attr = `${prop}=${quoteIfNeeded(escapeHTML(String(value)))}`;
        break;
      case "className":
        // Legacy React Compat
      case "class":
        attr = `class=${quoteIfNeeded(escapeHTML(clsx(value as ClsxInput)))}`;
        break;
      case "htmlFor":
        throw new Error("Do not use the `htmlFor` attribute. Use `for`");
      // Do not process these
      case "children":
      case "ref":
      case "dangerouslySetInnerHTML":
      case "key":
        continue;
    }
    // Size optimization: a separating space is only needed when the previous
    // attribute did not end in a closing quote (`a="x"b=y` is valid HTML).
    if (needSpace) out += " ", needSpace = !attr.endsWith('"');
    out += attr;
  }
  out += ">";
  if (children) out += renderNode(children);
  // Void elements must not receive a closing tag.
  if (
    tag !== "br" && tag !== "img" && tag !== "input" && tag !== "meta" &&
    tag !== "link" && tag !== "hr"
  ) {
    out += `</${tag}>`;
  }
  return out;
}
/**
 * Build a `style=...` attribute from a camelCase property map. Property names
 * are converted to kebab-case; values are HTML-escaped.
 */
export function renderStyleAttribute(style: Record<string, string>) {
  const declarations: string[] = [];
  for (const styleName in style) {
    const cssName = styleName.replace(/[A-Z]/g, "-$&").toLowerCase();
    declarations.push(`${cssName}:${escapeHTML(String(style[styleName]))}`);
  }
  return "style=" + quoteIfNeeded(declarations.join(";"));
}
/**
 * Emit an attribute value unquoted when that is valid HTML, to keep output
 * concise. Previously only a space triggered quoting, which produced invalid
 * markup for empty values and values containing `=`, `<`, `>`, quotes,
 * backticks, or non-space whitespace. (Callers pre-escape with escapeHTML,
 * so quotes/backticks are normally already entities.)
 */
export function quoteIfNeeded(text: string) {
  if (text === "" || /[\s"'=<>`]/.test(text)) return '"' + text + '"';
  return text;
}
// -- utility functions --
/**
 * Create the mutable bookkeeping state for one render pass. `allowAsync:
 * false` sets `async` to -1 so `resolveNode` rejects promises.
 * NOTE(review): `AddonData` is not defined in this revision (a later
 * revision of this file names the type `Addons`) — confirm it resolves.
 */
export function initRender(allowAsync: boolean, addon: AddonData): Render {
  return {
    async: allowAsync ? 0 : -1,
    rejections: null,
    asyncDone: null,
    addon,
  };
}
// Module-level slot holding the render whose component is currently running.
let currentRender: Render | null = null;
/** The active render; throws when called outside of a component invocation. */
export function getCurrentRender() {
  const active = currentRender;
  if (active === null) throw new Error("No Render Active");
  return active;
}
/** Install the active render, or clear it when called with no argument. */
export function setCurrentRender(r?: Render | null) {
  currentRender = r == null ? null : r;
}
/**
 * Fetch (or lazily create via `def`) the add-on state stored under
 * `namespace` on the currently active render.
 */
export function getUserData<T>(namespace: PropertyKey, def: () => T): T {
  const { addon } = getCurrentRender();
  let existing = addon[namespace];
  if (existing == null) {
    existing = addon[namespace] = def();
  }
  return existing as T;
}
/**
 * Best-effort pretty-printer used in error messages. Falls back to `typeof`
 * when `node:util` is unavailable (non-Node runtimes).
 */
export function inspect(object: unknown) {
  try {
    const util = require("node:util");
    return util.inspect(object);
  } catch {
    return typeof object;
  }
}
export type ClsxInput = string | Record<string, boolean | null> | ClsxInput[];
/**
 * Build a space-separated class string: strings pass through, arrays recurse,
 * and object keys are included when their value is truthy. Port of the
 * `clsx` micro-library.
 */
export function clsx(mix: ClsxInput) {
  if (typeof mix === "string") return mix;
  let out = "";
  const append = (part: string) => {
    if (part) out += (out ? " " : "") + part;
  };
  if (typeof mix === "object") {
    if (Array.isArray(mix)) {
      for (const item of mix) {
        if (item) append(clsx(item));
      }
    } else {
      for (const key in mix) {
        if (mix[key]) append(key);
      }
    }
  }
  return out;
}
/** Escape the six HTML-significant characters in one pass over the string. */
export const escapeHTML = (unsafeText: string) =>
  String(unsafeText).replace(
    /[&<>"'`]/g,
    (ch) =>
      ({
        "&": "&amp;",
        "<": "&lt;",
        ">": "&gt;",
        '"': "&quot;",
        "'": "&#x27;",
        "`": "&#x60;",
      } as Record<string, string>)[ch],
  );
// Clover's Rendering Engine is the backbone of her website generator. It
// converts objects and components (functions returning 'Node') into HTML. The
// engine is simple and self-contained, with integrations for JSX and Marko
// (which can interop with each-other) are provided next to this file.
//
// Add-ons to the rendering engine can provide opaque data, And retrieve it
// within component calls with 'getAddonData'. For example, 'sitegen' uses this
// to track needed client scripts without introducing patches to the engine.
/** Opaque per-render state bag; each add-on keys its data by string/symbol. */
export type Addons = Record<string | symbol, unknown>;
/**
 * Render `node` fully synchronously. The render is created with async
 * disabled, so any Promise in the tree makes `resolveNode` throw.
 */
export function ssrSync<A extends Addons>(node: Node, addon: A = {} as A) {
  const r = initRender(false, addon);
  const resolved = resolveNode(r, node);
  return { text: renderNode(resolved), addon };
}
/**
 * Render `node`, waiting on any promises encountered in the tree. Resolves
 * with the final HTML, or rejects with the sole rejection (or an
 * AggregateError when several components rejected).
 */
export function ssrAsync<A extends Addons>(node: Node, addon: A = {} as A) {
  const r = initRender(true, addon);
  const resolved = resolveNode(r, node);
  // Fast path: nothing suspended during the synchronous walk.
  if (r.async === 0) {
    return Promise.resolve({ text: renderNode(resolved), addon });
  }
  // Requires `Promise.withResolvers` (ES2024 / recent runtimes).
  const { resolve, reject, promise } = Promise.withResolvers<Result>();
  // Invoked by `resolveNode` when the pending promise count reaches zero.
  r.asyncDone = () => {
    const rejections = r.rejections;
    if (!rejections) return resolve({ text: renderNode(resolved), addon });
    if (rejections.length === 1) return reject(rejections[0]);
    return reject(new AggregateError(rejections));
  };
  return promise;
}
/** Inline HTML into a render without escaping it */
export function html(rawText: ResolvedNode): DirectHtml {
  return [kDirectHtml, rawText];
}
/** Outcome of a completed render: final HTML plus the add-on state bag. */
interface Result<A extends Addons = Addons> {
  text: string;
  addon: A;
}
/** Mutable bookkeeping for one render pass; see `initRender`. */
export interface Render {
  /**
   * Set to '-1' if rendering synchronously
   * Number of async promises the render is waiting on.
   */
  async: number | -1;
  /** Fired when `async` drops to zero; cleared after it runs. */
  asyncDone: null | (() => void);
  /** When components reject, those are logged here */
  rejections: unknown[] | null;
  /** Add-ons to the rendering engine store state here */
  addon: Addons;
}
// Runtime tags stored in tuple slot 0 to distinguish node kinds.
export const kElement = Symbol("Element");
export const kDirectHtml = Symbol("DirectHtml");
/** Node represents a webpage that can be 'rendered' into HTML. */
export type Node =
  | number
  | string // Escape HTML
  | Node[] // Concat
  | Element // Render
  | DirectHtml // Insert
  | Promise<Node> // Await
  // Ignore
  | undefined
  | null
  | boolean;
export type Element = [
  tag: typeof kElement,
  type: string | Component,
  props: Record<string, unknown>,
  // Padding so `source` sits at index 4, which `resolveNode` reads as
  // `node[4]` when attaching error locations.
  _?: "",
  source?: SrcLoc,
];
/** Raw HTML (or an already-resolved subtree) inserted without escaping. */
export type DirectHtml = [tag: typeof kDirectHtml, html: ResolvedNode];
/**
 * Components must return a value; 'undefined' is prohibited here
 * to avoid functions that are missing a return statement.
 */
export type Component = (
  props: Record<any, any>,
) => Exclude<Node, undefined>;
/** Emitted by JSX runtime */
export interface SrcLoc {
  fileName: string;
  lineNumber: number;
  columnNumber: number;
}
/**
* Resolution narrows the type 'Node' into 'ResolvedNode'. Async trees are
* marked in the 'Render'. This operation performs everything besides the final
* string concatenation. This function is agnostic across async/sync modes.
*/
export function resolveNode(r: Render, node: unknown): ResolvedNode {
if (!node && node !== 0) return ""; // falsy, non numeric
if (typeof node !== "object") {
if (node === true) return ""; // booleans are ignored
if (typeof node === "string") return escapeHtml(node);
if (typeof node === "number") return String(node); // no escaping ever
throw new Error(`Cannot render ${inspect(node)} to HTML`);
}
if (node instanceof Promise) {
if (r.async === -1) {
throw new Error(`Asynchronous rendering is not supported here.`);
}
const placeholder: InsertionPoint = [null];
r.async += 1;
node
.then((result) => void (placeholder[0] = resolveNode(r, result)))
// Intentionally catching errors in `resolveNode`
.catch((e) => (r.rejections ??= []).push(e))
.finally(() => {
if (--r.async == 0) {
if (r.asyncDone == null) throw new Error("r.asyncDone == null");
r.asyncDone();
r.asyncDone = null;
}
});
// This lie is checked with an assertion in `renderNode`
return placeholder as [ResolvedNode];
}
if (!Array.isArray(node)) {
throw new Error(`Invalid node type: ${inspect(node)}`);
}
const type = node[0];
if (type === kElement) {
const { 1: tag, 2: props } = node;
if (typeof tag === "function") {
currentRender = r;
try {
return resolveNode(r, tag(props));
} catch (e) {
const { 4: src } = node;
if (e && typeof e === "object") (e as { src?: string }).src = src;
throw e;
} finally {
currentRender = null;
}
}
if (typeof tag !== "string") throw new Error("Unexpected " + inspect(type));
const children = props?.children;
if (children) return [kElement, tag, props, resolveNode(r, children)];
return node;
}
if (type === kDirectHtml) return node[1];
return node.map((elem) => resolveNode(r, elem));
}
/** A Node with all components invoked and all strings pre-escaped. */
export type ResolvedNode =
  | ResolvedNode[] // Concat
  | ResolvedElement // Render
  | string; // Direct HTML
/** Element with a string tag and already-resolved children. */
export type ResolvedElement = [
  tag: typeof kElement,
  type: string,
  props: Record<string, unknown>,
  children: ResolvedNode,
];
/**
 * Async rendering is done by creating an array of one item,
 * which is already a valid 'Node', but the element is written
 * once the data is available. The 'Render' contains a count
 * of how many async jobs are left.
 */
export type InsertionPoint = [null | ResolvedNode];
/**
 * Convert 'ResolvedNode' into HTML text. This operation happens after all
 * async work is settled. The HTML is emitted as concisely as possible.
 */
export function renderNode(node: ResolvedNode): string {
  if (typeof node === "string") return node;
  // A nullish value reaching here means a render slot was never filled.
  ASSERT(node, "Unresolved Render Node");
  const type = node[0];
  if (type === kElement) {
    return renderElement(node as ResolvedElement);
  }
  node = node as ResolvedNode[]; // TS cannot infer.
  // Concat array: slot 0 was already read into `type`, so render it directly
  // (when truthy) and append the remaining slots. Falsy slots emit nothing.
  let out = type ? renderNode(type) : "";
  let len = node.length;
  for (let i = 1; i < len; i++) {
    const elem = node[i];
    if (elem) out += renderNode(elem);
  }
  return out;
}
/** Serialize one element: open tag, attributes, children, closing tag. */
function renderElement(element: ResolvedElement) {
  const { 1: tag, 2: props, 3: children } = element;
  let out = "<" + tag;
  let needSpace = true;
  for (const prop in props) {
    const value = props[prop];
    // Falsy values (false, 0, "", null, undefined) and event handlers are
    // dropped entirely.
    if (!value || typeof value === "function") continue;
    let attr;
    switch (prop) {
      default:
        attr = `${prop}=${quoteIfNeeded(escapeHtml(String(value)))}`;
        break;
      case "className":
        // Legacy React Compat
      case "class":
        attr = `class=${quoteIfNeeded(escapeHtml(clsx(value as ClsxInput)))}`;
        break;
      case "htmlFor":
        throw new Error("Do not use the `htmlFor` attribute. Use `for`");
      // Do not process these
      case "children":
      case "ref":
      case "dangerouslySetInnerHTML":
      case "key":
        continue;
    }
    // Size optimization: a separating space is only needed when the previous
    // attribute did not end in a closing quote (`a="x"b=y` is valid HTML).
    if (needSpace) out += " ", needSpace = !attr.endsWith('"');
    out += attr;
  }
  out += ">";
  if (children) out += renderNode(children);
  // Void elements must not receive a closing tag.
  if (
    tag !== "br" && tag !== "img" && tag !== "input" && tag !== "meta" &&
    tag !== "link" && tag !== "hr"
  ) {
    out += `</${tag}>`;
  }
  return out;
}
/**
 * Build a `style=...` attribute from a camelCase property map. Names are
 * kebab-cased; values are HTML-escaped.
 */
export function renderStyleAttribute(style: Record<string, string>) {
  let out = ``;
  for (const styleName in style) {
    if (out) out += ";";
    out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${
      escapeHtml(String(style[styleName]))
    }`;
  }
  return "style=" + quoteIfNeeded(out);
}
/**
 * Emit an attribute value unquoted when that is valid HTML, to keep output
 * concise. Previously only a space triggered quoting, which produced invalid
 * markup for empty values and values containing `=`, `<`, `>`, quotes,
 * backticks, or non-space whitespace. (Callers pre-escape with escapeHtml,
 * so quotes/backticks are normally already entities.)
 */
export function quoteIfNeeded(text: string) {
  if (text === "" || /[\s"'=<>`]/.test(text)) return '"' + text + '"';
  return text;
}
// -- utility functions --
/**
 * Construct the mutable bookkeeping state for one render pass. `-1`
 * permanently disables promise support; `0` counts outstanding promises.
 */
export function initRender(allowAsync: boolean, addon: Addons): Render {
  const async = allowAsync ? 0 : -1;
  return { async, rejections: null, asyncDone: null, addon };
}
// The render whose component is currently executing (see resolveNode).
let currentRender: Render | null = null;
/** The active render; throws when called outside of a component call. */
export function getCurrentRender() {
  if (!currentRender) throw new Error("No Render Active");
  return currentRender;
}
/** Install the active render, or clear it when called with no argument. */
export function setCurrentRender(r?: Render | null) {
  currentRender = r ?? null;
}
/**
 * Fetch (or lazily create via `def`) the add-on state stored under
 * `namespace` on the currently active render.
 */
export function getUserData<T>(namespace: PropertyKey, def: () => T): T {
  return (getCurrentRender().addon[namespace] ??= def()) as T;
}
/**
 * Best-effort pretty-printer used in error messages. Falls back to `typeof`
 * when `node:util` cannot be required (non-Node runtimes).
 */
export function inspect(object: unknown) {
  try {
    return require("node:util").inspect(object);
  } catch {
    return typeof object;
  }
}
export type ClsxInput = string | Record<string, boolean | null> | ClsxInput[];
/**
 * Build a space-separated class string: strings pass through, arrays recurse,
 * and object keys are included when their value is truthy. Port of the
 * `clsx` micro-library.
 */
export function clsx(mix: ClsxInput) {
  var k, y, str = "";
  if (typeof mix === "string") {
    return mix;
  } else if (typeof mix === "object") {
    if (Array.isArray(mix)) {
      for (k = 0; k < mix.length; k++) {
        // Skip falsy entries and entries that flatten to "".
        if (mix[k] && (y = clsx(mix[k]))) {
          str && (str += " ");
          str += y;
        }
      }
    } else {
      for (k in mix) {
        if (mix[k]) {
          str && (str += " ");
          str += k;
        }
      }
    }
  }
  return str;
}
/** Escape the six HTML-significant characters in one pass over the string. */
export const escapeHtml = (unsafeText: string) =>
  String(unsafeText).replace(
    /[&<>"'`]/g,
    (ch) =>
      ({
        "&": "&amp;",
        "<": "&lt;",
        ">": "&gt;",
        '"': "&quot;",
        "'": "&#x27;",
        "`": "&#x60;",
      } as Record<string, string>)[ch],
  );

View file

@ -0,0 +1,40 @@
import { test } from "node:test";
import { renderStreaming, Suspense } from "./suspense.ts";

// Smoke test for out-of-order streaming: one suspended boundary should emit
// (1) the app shell inside a declarative shadow-DOM <template> containing a
// named fallback <slot>, then (2) the real content targeted at that slot.
test("sanity", async (t) => {
  let resolve: () => void = null!;
  // @ts-expect-error
  async function AsyncComponent() {
    // Suspends until the test releases it via `resolve()` below.
    await new Promise<void>((done) => resolve = done);
    return <button>wow!</button>;
  }
  const example = (
    <main>
      <h1>app shell</h1>
      <Suspense fallback="loading...">
        <AsyncComponent />
      </Suspense>
      <footer>(c) 2025</footer>
    </main>
  );
  const iterator = renderStreaming(example);
  const assertContinue = (actual: unknown, value: unknown) =>
    t.assert.deepEqual(actual, { done: false, value });
  // First chunk: the shell with the fallback slotted in.
  assertContinue(
    await iterator.next(),
    "<template shadowrootmode=open><main><h1>app shell</h1><slot name=suspended_1>loading...</slot><footer>(c) 2025</footer></main></template>",
  );
  // Release the async component; its content streams tagged with the slot.
  t.assert.ok(resolve !== null), resolve();
  assertContinue(
    await iterator.next(),
    "<button slot=suspended_1>wow!</button>",
  );
  // Generator finishes by returning the (empty) add-on output.
  t.assert.deepEqual(
    await iterator.next(),
    { done: true, value: {} },
  );
});

View file

@ -0,0 +1,102 @@
// This file implements out-of-order HTML streaming, mimicking the React
// Suspense API. To use, place Suspense around an expensive async component
// and render the page with 'renderStreaming'.
//
// Implementation of this article:
// https://lamplightdev.com/blog/2024/01/10/streaming-html-out-of-order-without-javascript/
//
// I would link to an article from Next.js or React, but their examples
// are too verbose and not informative to what they actually do.

// Key under which the streaming state hides inside the render's add-on bag.
const kState = Symbol("SuspenseState");
interface SuspenseProps {
  children: ssr.Node;
  fallback?: ssr.Node;
}
interface State {
  // True while a fallback tree is resolving; used to reject nested <Suspense>.
  nested: boolean;
  // Count of suspended boundaries; also names their slots ("suspended_N").
  nextId: number;
  // Count of boundaries whose finished chunk has been pushed.
  completed: number;
  // Installed by 'renderStreaming' to receive each finished chunk.
  pushChunk(name: string, node: ssr.ResolvedNode): void;
}
/**
 * Mimics React's <Suspense>: resolves `children` in a nested async render.
 * If anything suspends, emits a named <slot> showing `fallback` now, and
 * pushes the real content to the stream once the nested render completes.
 */
export function Suspense({ children, fallback }: SuspenseProps): ssr.Node {
  const state = ssr.getUserData<State>(kState, () => {
    throw new Error("Can only use <Suspense> with 'renderStreaming'");
  });
  if (state.nested) throw new Error("<Suspense> cannot be nested");
  const parent = ssr.getCurrentRender()!;
  // Children get their own async render so the outer page can finish first.
  // The nested bag's kState lacks getUserData's initializer, so an inner
  // <Suspense> hits the `nested` guard instead.
  const r = ssr.initRender(true, { [kState]: { nested: true } });
  const resolved = ssr.resolveNode(r, children);
  // Nothing actually suspended: inline the finished subtree, no slot needed.
  if (r.async == 0) return ssr.html(resolved);
  const name = "suspended_" + (++state.nextId);
  // Guard against a nested <Suspense> inside the fallback tree, which is
  // resolved on the *parent* render.
  state.nested = true;
  const ip: [ssr.ResolvedNode] = [
    [
      ssr.kElement,
      "slot",
      { name },
      fallback ? ssr.resolveNode(parent, fallback) : "",
    ],
  ];
  state.nested = false;
  r.asyncDone = () => {
    const rejections = r.rejections;
    // NOTE(review): this throw happens inside the async-completion callback,
    // so it surfaces as an unhandled rejection rather than failing the
    // stream — needs real error routing.
    if (rejections && rejections.length > 0) throw new Error("TODO");
    state.pushChunk?.(name, ip[0] = resolved);
  };
  return ssr.html(ip);
}
// TODO: add a User-Agent parameter, which is used to determine if a
// fallback path must be used.
// - Before ~2024 needs to use a JS implementation.
// - IE should probably bail out entirely.
/**
 * Render `node` as an async iterator of HTML chunks: first the page shell
 * (wrapped in a declarative shadow-DOM template when anything suspended),
 * then each completed <Suspense> boundary's content as it becomes ready.
 * Returns the add-on output once every boundary has flushed.
 */
export async function* renderStreaming<
  T extends ssr.Addons = Record<never, unknown>,
>(
  node: ssr.Node,
  addon: T = {} as T,
) {
  const {
    text: begin,
    addon: { [kState]: state, ...addonOutput },
  } = await ssr.ssrAsync(node, {
    ...addon,
    [kState]: {
      nested: false,
      nextId: 0,
      completed: 0,
      pushChunk: () => {},
    } satisfies State as State,
  });
  // No boundary suspended: plain HTML, no streaming wrapper needed.
  if (state.nextId === 0) {
    yield begin;
    return addonOutput as unknown as T;
  }
  let resolve: (() => void) | null = null;
  let chunks: string[] = [];
  state.pushChunk = (slot, node) => {
    // Unwrap single-item concat arrays to find the element to tag with `slot`.
    while (node.length === 1 && Array.isArray(node)) node = node[0];
    if (node[0] === ssr.kElement) {
      (node as ssr.ResolvedElement)[2].slot = slot;
    } else {
      node = [ssr.kElement, "clover-suspense", {
        style: "display:contents",
        slot,
      }, node];
    }
    // Fixed: `completed` was never incremented, so the flush loop below
    // believed all work was done after its first pass.
    state.completed += 1;
    chunks.push(ssr.renderNode(node));
    resolve?.();
  };
  yield `<template shadowrootmode=open>${begin}</template>`;
  // Flush until every suspended boundary has delivered its chunk. The
  // original condition compared `nextId < completed` (always false), which
  // ended the stream after one flush and dropped later-resolving boundaries.
  while (state.completed < state.nextId || chunks.length > 0) {
    if (chunks.length === 0) {
      await new Promise<void>((done) => resolve = done);
    }
    yield* chunks;
    chunks = [];
  }
  return addonOutput as unknown as T;
}
import * as ssr from "./ssr.ts";

View file

@ -0,0 +1,79 @@
/**
 * esbuild plugin: serve in-memory contents for the exact paths in `map`
 * instead of reading from disk. Values may be raw source strings (loaded as
 * TS) or full esbuild OnLoadResult objects.
 */
export function virtualFiles(
  map: Record<string, string | esbuild.OnLoadResult>,
) {
  return {
    name: "clover vfs",
    setup(b) {
      b.onResolve(
        {
          // Alternation of all virtual paths, regex-escaped. (`\$` inside a
          // template literal is just "$", the end anchor.)
          filter: new RegExp(
            `^(?:${
              Object.keys(map).map((file) => string.escapeRegExp(file)).join(
                "|",
              )
            })\$`,
          ),
        },
        ({ path }) => ({ path, namespace: "vfs" }),
      );
      b.onLoad(
        { filter: /./, namespace: "vfs" },
        ({ path }) => {
          const entry = map[path];
          // String entries become TS sources; object entries may override
          // the defaults below.
          return ({
            resolveDir: ".",
            loader: "ts",
            ...typeof entry === "string" ? { contents: entry } : entry,
          });
        },
      );
    },
  } satisfies esbuild.Plugin;
}
/**
 * esbuild plugin: make any attempt to import one of `files` (exact match)
 * fail the build with a descriptive error.
 */
export function banFiles(
  files: string[],
) {
  return {
    // was "clover vfs", copy-pasted from `virtualFiles`; a duplicate plugin
    // name makes esbuild's error attribution ambiguous between the two.
    name: "clover ban files",
    setup(b) {
      b.onResolve(
        {
          filter: new RegExp(
            `^(?:${
              files.map((file) => string.escapeRegExp(file)).join("|")
            })\$`,
          ),
        },
        ({ path, importer }) => {
          throw new Error(
            `Loading ${path} (from ${importer}) is banned!`,
          );
        },
      );
    },
  } satisfies esbuild.Plugin;
}
/**
 * esbuild plugin implementing the project's import conventions:
 * - "@/..." resolves relative to the source root (default `cwd`/src)
 * - "#..."  resolves through `hot.resolveFrom` (package-imports style)
 */
export function projectRelativeResolution(root = process.cwd() + "/src") {
  return {
    name: "project relative resolution ('@/' prefix)",
    setup(b) {
      b.onResolve({ filter: /^@\// }, ({ path: id }) => {
        return {
          path: path.resolve(root, id.slice(2)),
        };
      });
      b.onResolve({ filter: /^#/ }, ({ path: id, importer }) => {
        return {
          path: hot.resolveFrom(importer, id),
        };
      });
    },
  } satisfies esbuild.Plugin;
}
import * as esbuild from "esbuild";
import * as string from "#sitegen/string";
import * as path from "node:path";
import * as hot from "./hot.ts";

View file

@ -1,61 +0,0 @@
// File System APIs. Some custom APIs, but mostly a re-export of a mix of
// built-in Node.js sync+promise fs methods. For convenience.
export {
  existsSync,
  readdir,
  readdirSync,
  readFile,
  readFileSync,
  rm,
  rmSync,
  stat,
  statSync,
  writeFile,
  writeFileSync,
};
/**
 * Like fs.promises.mkdir, but always recursive: missing parents are created
 * and an existing directory is not an error.
 */
export function mkdir(dir: string) {
  const recursively = { recursive: true } as const;
  return nodeMkdir(dir, recursively);
}
/** Synchronous variant of `mkdir`. */
export function mkdirSync(dir: string) {
  const recursively = { recursive: true } as const;
  return nodeMkdirSync(dir, recursively);
}
/** Write `contents` to `file`, creating parent directories as needed. */
export async function writeMkdir(file: string, contents: Buffer | string) {
  await mkdir(path.dirname(file));
  return writeFile(file, contents);
}
/** Synchronous variant of `writeMkdir`. */
export function writeMkdirSync(file: string, contents: Buffer | string) {
  mkdirSync(path.dirname(file));
  return writeFileSync(file, contents);
}
/**
 * Recursively list every entry under `dir` (paths relative to `dir`),
 * returning [] when the directory does not exist instead of throwing.
 */
export function readDirRecOptionalSync(dir: string) {
  try {
    // was `{ withFileTypes: true }`: that is non-recursive and yields Dirent
    // objects, but the function name promises a recursive listing and the
    // caller in generate.ts consumes relative path *strings*.
    // (`recursive` requires Node 18.17+.)
    return readdirSync(dir, { recursive: true }) as string[];
  } catch (err) {
    // A missing directory is the expected "optional" case; rethrow the rest.
    if ((err as { code?: string }).code === "ENOENT") return [];
    throw err;
  }
}
import * as path from "node:path";
import {
existsSync,
mkdirSync as nodeMkdirSync,
readdirSync,
readFileSync,
rmSync,
statSync,
writeFileSync,
} from "node:fs";
import {
mkdir as nodeMkdir,
readdir,
readFile,
rm,
stat,
writeFile,
} from "node:fs/promises";

456
framework/generate.ts Normal file
View file

@ -0,0 +1,456 @@
// This file contains the main site generator build process.
// By using `Incremental`'s ability to automatically purge stale
// assets, the `sitegen` function performs partial rebuilds.
/**
 * Entry point: run a full `sitegen` build under a progress spinner and
 * persist the incremental state for the next invocation.
 */
export function main() {
  return withSpinner<Record<string, unknown>, any>({
    text: "Recovering State",
    successText,
    failureText: () => "sitegen FAIL",
  }, async (spinner) => {
    // const incr = Incremental.fromDisk();
    // await incr.statAllFiles();
    // NOTE(review): disk recovery is commented out, so every run starts from
    // a fresh Incremental — confirm this is intentional.
    const incr = new Incremental();
    const result = await sitegen(spinner, incr);
    incr.toDisk(); // Allows picking up this state again
    return result;
  }) as ReturnType<typeof sitegen>;
}
/**
 * Format the spinner's success line from a finished `sitegen` result:
 * "sitegen! <build|update|pruned|checked N key(s)> in X.Xs".
 */
export function successText({
  elapsed,
  inserted,
  referenced,
  unreferenced,
}: Awaited<ReturnType<typeof sitegen>>) {
  // Pluralize "key" for any count other than exactly one.
  const plural = (list: unknown[]) => (list.length === 1 ? "" : "s");
  // A full build is when every referenced key was freshly inserted.
  const kind = inserted.length === referenced.length ? "build" : "update";
  let status: string;
  if (inserted.length > 0) {
    status = `${kind} ${inserted.length} key${plural(inserted)}`;
  } else if (unreferenced.length > 0) {
    status = `pruned ${unreferenced.length} key${plural(unreferenced)}`;
  } else {
    status = `checked ${referenced.length} key${plural(referenced)}`;
  }
  return `sitegen! ${status} in ${elapsed.toFixed(1)}s`;
}
/**
 * The whole build: scan sections for files, render pages, prepare dynamic
 * views, bundle JS/CSS, compress static files, and flush everything through
 * the `Incremental` store so unchanged work is skipped and stale keys pruned.
 */
export async function sitegen(
  status: Spinner,
  incr: Incremental,
) {
  const startTime = performance.now();
  let root = path.resolve(import.meta.dirname, "../src");
  const join = (...sub: string[]) => path.join(root, ...sub);
  // Sitegen reviews every defined section for resources to process
  const sections: sg.Section[] =
    require(path.join(root, "site.ts")).siteSections;
  // Static files are compressed and served as-is.
  // - "{section}/static/*.png"
  let staticFiles: FileItem[] = [];
  // Pages are rendered then served as static files.
  // - "{section}/pages/*.marko"
  let pages: FileItem[] = [];
  // Views are dynamically rendered pages called via backend code.
  // - "{section}/views/*.tsx"
  let views: FileItem[] = [];
  // Public scripts are bundled for the client as static assets under "/js/[...]"
  // This is used for the file viewer's canvases.
  // Note that '.client.ts' can be placed anywhere in the file structure.
  // - "{section}/scripts/*.client.ts"
  let scripts: FileItem[] = [];
  // -- Scan for files --
  status.text = "Scanning Project";
  for (const section of sections) {
    const { root: sectionRoot } = section;
    const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
    // Non-root sections prefix their script/view ids with their relative path.
    const rootPrefix = root === sectionRoot
      ? ""
      : path.relative(root, sectionRoot) + "/";
    // Each kind maps a section subdirectory to a target list + id rules.
    const kinds = [
      {
        dir: sectionPath("pages"),
        list: pages,
        prefix: "/",
        include: [".tsx", ".mdx", ".marko"],
        exclude: [".client.ts", ".client.tsx"],
      },
      {
        dir: sectionPath("static"),
        list: staticFiles,
        prefix: "/",
        ext: true,
      },
      {
        dir: sectionPath("scripts"),
        list: scripts,
        prefix: rootPrefix,
        include: [".client.ts", ".client.tsx"],
      },
      {
        dir: sectionPath("views"),
        list: views,
        prefix: rootPrefix,
        include: [".tsx", ".mdx", ".marko"],
        exclude: [".client.ts", ".client.tsx"],
      },
    ];
    for (
      const { dir, list, prefix, include = [""], exclude = [], ext = false }
        of kinds
    ) {
      const items = fs.readDirRecOptionalSync(dir);
      for (const subPath of items) {
        const file = path.join(dir, subPath);
        const stat = fs.statSync(file);
        if (stat.isDirectory()) continue;
        if (!include.some((e) => subPath.endsWith(e))) continue;
        if (exclude.some((e) => subPath.endsWith(e))) continue;
        // Unless extensions are kept (static files), drop the extension and
        // turn dots in the file name into path separators.
        const trim = ext
          ? subPath
          : subPath.slice(0, -path.extname(subPath).length).replaceAll(
            ".",
            "/",
          );
        let id = prefix + trim.replaceAll("\\", "/");
        // "/index" collapses onto its directory ("/" at the site root).
        if (prefix === "/" && id.endsWith("/index")) {
          id = id.slice(0, -"/index".length) || "/";
        }
        list.push({ id, file: file });
      }
    }
  }
  const globalCssPath = join("global.css");
  // TODO: make sure that `static` and `pages` does not overlap
  // -- inline style sheets, used and shared by pages and views --
  status.text = "Building";
  const cssOnce = new OnceMap();
  // NOTE(review): `passive` presumably defers queue failures until `done()`
  // is awaited — confirm against #sitegen/async.
  const cssQueue = new Queue({
    name: "Bundle",
    async fn([, key, files, theme]: [string, string, string[], css.Theme]) {
      const { text, sources } = await css.bundleCssFiles(files, theme);
      incr.put({
        kind: "style",
        key,
        sources,
        value: text,
      });
    },
    passive: true,
    getItemText: ([id]) => id,
    maxJobs: 2,
  });
  // Deduplicate by style key; reuse the incremental artifact when present.
  function ensureCssGetsBuilt(
    cssImports: string[],
    theme: css.Theme,
    referrer: string,
  ) {
    const key = css.styleKey(cssImports, theme);
    cssOnce.get(
      key,
      async () => {
        incr.getArtifact("style", key) ??
          await cssQueue.add([referrer, key, cssImports, theme]);
      },
    );
  }
  // -- server side render pages --
  // Importing the module is the whole job; rendering happens in a later pass
  // so load errors are attributed to "Load Render Module".
  async function loadPageModule({ file }: FileItem) {
    require(file);
  }
  async function renderPage(item: FileItem) {
    // -- load and validate module --
    let {
      default: Page,
      meta: metadata,
      theme: pageTheme,
      layout,
    } = require(item.file);
    if (!Page) {
      throw new Error("Page is missing a 'default' export.");
    }
    if (!metadata) {
      throw new Error("Page is missing 'meta' export with a title.");
    }
    // -- css --
    // A layout's theme takes precedence over the page's own.
    if (layout?.theme) pageTheme = layout.theme;
    const theme: css.Theme = {
      ...css.defaultTheme,
      ...pageTheme,
    };
    const cssImports = Array.from(
      new Set([globalCssPath, ...hot.getCssImports(item.file)]),
      (file) => path.relative(hot.projectSrc, file),
    );
    ensureCssGetsBuilt(cssImports, theme, item.id);
    // -- metadata --
    const renderedMetaPromise = Promise.resolve(
      typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
    ).then((m) => meta.renderMeta(m));
    // -- html --
    let page = [engine.kElement, Page, {}];
    if (layout?.default) {
      page = [engine.kElement, layout.default, { children: page }];
    }
    const bodyPromise = engine.ssrAsync(page, {
      sitegen: sg.initRender(),
    });
    const [{ text, addon }, renderedMeta] = await Promise.all([
      bodyPromise,
      renderedMetaPromise,
    ]);
    if (!renderedMeta.includes("<title>")) {
      throw new Error(
        "Page is missing 'meta.title'. " +
          "All pages need a title tag.",
      );
    }
    incr.put({
      kind: "pageMetadata",
      key: item.id,
      // Incremental integrates with `hot.ts` + `require`
      // to trace all the needed source files here.
      sources: [item.file],
      value: {
        html: text,
        meta: renderedMeta,
        cssImports,
        theme: theme ?? null,
        clientRefs: Array.from(addon.sitegen.scripts),
      },
    });
  }
  // Views are not rendered now; only their metadata (file, css, scripts,
  // layout presence) is recorded for the backend to render on demand.
  async function prepareView(item: FileItem) {
    const module = require(item.file);
    if (!module.meta) {
      throw new Error(`${item.file} is missing 'export const meta'`);
    }
    if (!module.default) {
      throw new Error(`${item.file} is missing a default export.`);
    }
    const pageTheme = module.layout?.theme ?? module.theme;
    const theme: css.Theme = {
      ...css.defaultTheme,
      ...pageTheme,
    };
    const cssImports = Array.from(
      new Set([globalCssPath, ...hot.getCssImports(item.file)]),
      (file) => path.relative(hot.projectSrc, file),
    );
    ensureCssGetsBuilt(cssImports, theme, item.id);
    incr.put({
      kind: "viewMetadata",
      key: item.id,
      sources: [item.file],
      value: {
        file: path.relative(hot.projectRoot, item.file),
        cssImports,
        theme,
        clientRefs: hot.getClientScriptRefs(item.file),
        hasLayout: !!module.layout?.default,
      },
    });
  }
  // Of the pages that are already built, a call to 'ensureCssGetsBuilt' is
  // required so that it's (1) re-built if needed, (2) not pruned from build.
  const neededPages = pages.filter((page) => {
    const existing = incr.getArtifact("pageMetadata", page.id);
    if (existing) {
      const { cssImports, theme } = existing;
      ensureCssGetsBuilt(cssImports, theme, page.id);
    }
    return !existing;
  });
  const neededViews = views.filter((view) => {
    const existing = incr.getArtifact("viewMetadata", view.id);
    if (existing) {
      const { cssImports, theme } = existing;
      ensureCssGetsBuilt(cssImports, theme, view.id);
    }
    return !existing;
  });
  // Load the marko cache before render modules are loaded
  incr.loadMarkoCache();
  // This is done in two passes so that a page that throws during evaluation
  // will report "Load Render Module" instead of "Render Static Page".
  const spinnerFormat = status.format;
  // Blank the spinner line while queues print their own progress.
  status.format = () => "";
  const moduleLoadQueue = new Queue({
    name: "Load Render Module",
    fn: loadPageModule,
    getItemText,
    maxJobs: 1,
  });
  moduleLoadQueue.addMany(neededPages);
  moduleLoadQueue.addMany(neededViews);
  await moduleLoadQueue.done({ method: "stop" });
  const pageQueue = new Queue({
    name: "Render Static Page",
    fn: renderPage,
    getItemText,
    maxJobs: 2,
  });
  pageQueue.addMany(neededPages);
  const viewQueue = new Queue({
    name: "Build Dynamic View",
    fn: prepareView,
    getItemText,
    maxJobs: 2,
  });
  viewQueue.addMany(neededViews);
  const pageAndViews = [
    pageQueue.done({ method: "stop" }),
    viewQueue.done({ method: "stop" }),
  ];
  // allSettled lets both queues drain fully; the following all() rethrows
  // the first failure, if any.
  await Promise.allSettled(pageAndViews);
  await Promise.all(pageAndViews);
  status.format = spinnerFormat;
  // -- bundle server javascript (backend and views) --
  status.text = "Bundle JavaScript";
  incr.snapshotMarkoCache();
  const serverJavaScriptPromise = bundle.bundleServerJavaScript(incr, "node");
  // -- bundle client javascript --
  // Every script referenced by any page or view, minus those already built.
  const referencedScripts = Array.from(
    new Set(
      [
        ...pages.map((item) =>
          UNWRAP(
            incr.getArtifact("pageMetadata", item.id),
            `Missing pageMetadata ${item.id}`,
          )
        ),
        ...views.map((item) =>
          UNWRAP(
            incr.getArtifact("viewMetadata", item.id),
            `Missing viewMetadata ${item.id}`,
          )
        ),
      ].flatMap((item) => item.clientRefs),
    ),
    (script) => path.resolve(hot.projectSrc, script),
  ).filter((file) => !incr.hasArtifact("script", hot.getScriptId(file)));
  const extraPublicScripts = scripts.map((entry) => entry.file);
  const clientJavaScriptPromise = bundle.bundleClientJavaScript(
    referencedScripts,
    extraPublicScripts,
    incr,
  );
  await Promise.all([
    serverJavaScriptPromise,
    clientJavaScriptPromise,
    cssQueue.done({ method: "stop" }),
  ]);
  await bundle.finalizeServerJavaScript(incr, "node");
  // -- copy/compress static files --
  async function doStaticFile(item: FileItem) {
    const body = await fs.readFile(item.file);
    await incr.putAsset({
      sources: [item.file],
      key: item.id,
      body,
    });
  }
  const staticQueue = new Queue({
    name: "Load Static",
    fn: doStaticFile,
    getItemText,
    maxJobs: 16,
  });
  status.format = () => "";
  staticQueue.addMany(
    staticFiles.filter((file) => !incr.hasArtifact("asset", file.id)),
  );
  await staticQueue.done({ method: "stop" });
  status.format = spinnerFormat;
  // -- concatenate static rendered pages --
  status.text = `Concat Pages`;
  await Promise.all(pages.map(async (page) => {
    if (incr.hasArtifact("asset", page.id)) return;
    const {
      html,
      meta,
      cssImports,
      theme,
      clientRefs,
    } = UNWRAP(incr.out.pageMetadata.get(page.id));
    const scriptIds = clientRefs.map(hot.getScriptId);
    const styleKey = css.styleKey(cssImports, theme);
    const style = UNWRAP(
      incr.out.style.get(styleKey),
      `Missing style ${styleKey}`,
    );
    const doc = wrapDocument({
      body: html,
      head: meta,
      inlineCss: style,
      scripts: scriptIds.map(
        (ref) => UNWRAP(incr.out.script.get(ref), `Missing script ${ref}`),
      ).map((x) => `{${x}}`).join("\n"),
    });
    // The finished page asset depends on its source, styles, and scripts so
    // a change to any of them invalidates it.
    await incr.putAsset({
      sources: [
        page.file,
        ...incr.sourcesFor("style", styleKey),
        ...scriptIds.flatMap((ref) => incr.sourcesFor("script", ref)),
      ],
      key: page.id,
      body: doc,
      headers: {
        "Content-Type": "text/html",
      },
    });
  }));
  status.format = () => "";
  status.text = ``;
  // This will wait for all compression jobs to finish, which up
  // to this point have been left as dangling promises.
  await incr.wait();
  const { inserted, referenced, unreferenced } = incr.shake();
  // Flush the site to disk.
  status.format = spinnerFormat;
  status.text = `Incremental Flush`;
  incr.flush("node"); // Write outputs
  return {
    incr,
    inserted,
    referenced,
    unreferenced,
    elapsed: (performance.now() - startTime) / 1000,
  };
}
/** Queue progress label: project-relative path with forward slashes. */
function getItemText({ file }: FileItem) {
  return path.relative(hot.projectSrc, file).replaceAll("\\", "/");
}
import { OnceMap, Queue } from "#sitegen/async";
import { Incremental } from "./incremental.ts";
import * as bundle from "./bundle.ts";
import * as css from "./css.ts";
import * as engine from "./engine/ssr.ts";
import * as hot from "./hot.ts";
import * as fs from "#sitegen/fs";
import * as sg from "#sitegen";
import type { FileItem } from "#sitegen";
import * as path from "node:path";
import * as meta from "#sitegen/meta";
import { Spinner, withSpinner } from "@paperclover/console/Spinner";
import { wrapDocument } from "./lib/view.ts";

View file

@ -34,18 +34,11 @@ export interface FileStat {
cssImportsRecursive: string[] | null;
lastModified: number;
imports: string[];
/* Used by 'incremental.ts' */
srcIds: string[];
}
let fsGraph = new Map<string, FileStat>();
export function setFsGraph(g: Map<string, FileStat>) {
if (fsGraph.size > 0) {
throw new Error("Cannot restore fsGraph when it has been written into");
}
fsGraph = g;
}
export function getFsGraph() {
return fsGraph;
const fileStats = new Map<string, FileStat>();
export function getFileStat(filepath: string) {
return fileStats.get(path.resolve(filepath));
}
function shouldTrackPath(filename: string) {
@ -60,14 +53,6 @@ Module.prototype._compile = function (
filename: string,
format: "module" | "commonjs",
) {
fs.writeMkdirSync(
".clover/debug-transpilation/" +
path.relative(projectRoot, filename).replaceAll("\\", "/").replaceAll(
"../",
"_/",
).replaceAll("/", "."),
content,
);
const result = ModulePrototypeUnderscoreCompile.call(
this,
content,
@ -78,25 +63,26 @@ Module.prototype._compile = function (
if (shouldTrackPath(filename)) {
const cssImportsMaybe: string[] = [];
const imports: string[] = [];
for (const { filename: file } of this.children) {
const relative = path.relative(projectRoot, file);
if (file.endsWith(".css")) cssImportsMaybe.push(relative);
for (const { filename: file, cloverClientRefs } of this.children) {
if (file.endsWith(".css")) cssImportsMaybe.push(file);
else {
const child = fsGraph.get(relative);
const child = fileStats.get(file);
if (!child) continue;
const { cssImportsRecursive } = child;
if (cssImportsRecursive) cssImportsMaybe.push(...cssImportsRecursive);
imports.push(relative);
imports.push(file);
if (cloverClientRefs && cloverClientRefs.length > 0) {
(this.cloverClientRefs ??= [])
.push(...cloverClientRefs);
}
}
}
const relative = path.relative(projectRoot, filename);
fsGraph.set(relative, {
fileStats.set(filename, {
cssImportsRecursive: cssImportsMaybe.length > 0
? Array.from(new Set(cssImportsMaybe))
: null,
imports,
lastModified: stat.mtimeMs,
srcIds: [],
});
}
return result;
@ -110,7 +96,9 @@ Module._resolveFilename = (...args) => {
try {
return require.resolve(replacedPath, { paths: [projectSrc] });
} catch (err: any) {
if (err.code === "MODULE_NOT_FOUND" && err.requireStack.length <= 1) {
if (
err.code === "MODULE_NOT_FOUND" && (err?.requireStack?.length ?? 0) <= 1
) {
err.message.replace(replacedPath, args[0]);
}
}
@ -119,26 +107,39 @@ Module._resolveFilename = (...args) => {
};
function loadEsbuild(module: NodeJS.Module, filepath: string) {
let src = fs.readFileSync(filepath, "utf8");
return loadEsbuildCode(module, filepath, src);
return loadEsbuildCode(module, filepath, fs.readFileSync(filepath, "utf8"));
}
function loadEsbuildCode(module: NodeJS.Module, filepath: string, src: string) {
interface LoadOptions {
scannedClientRefs?: string[];
}
function loadEsbuildCode(
module: NodeJS.Module,
filepath: string,
src: string,
opt: LoadOptions = {},
) {
if (filepath === import.meta.filename) {
module.exports = self;
return;
}
let loader: any = "tsx";
if (filepath.endsWith(".ts")) loader = "ts";
else if (filepath.endsWith(".jsx")) loader = "jsx";
else if (filepath.endsWith(".js")) loader = "js";
if (opt.scannedClientRefs) {
module.cloverClientRefs = opt.scannedClientRefs;
} else {
let { code, refs } = resolveClientRefs(src, filepath);
module.cloverClientRefs = refs;
src = code;
}
if (src.includes("import.meta")) {
src = `
import.meta.url = ${JSON.stringify(pathToFileURL(filepath).toString())};
import.meta.dirname = ${JSON.stringify(path.dirname(filepath))};
import.meta.filename = ${JSON.stringify(filepath)};
` + src;
`.trim().replace(/\n/g, "") + src;
}
src = esbuild.transformSync(src, {
loader,
@ -146,25 +147,61 @@ function loadEsbuildCode(module: NodeJS.Module, filepath: string, src: string) {
target: "esnext",
jsx: "automatic",
jsxImportSource: "#ssr",
jsxDev: true,
sourcefile: filepath,
}).code;
return module._compile(src, filepath, "commonjs");
}
// Resolve a client-script reference ('addScript' argument) from the file
// that mentions it, and return the project-src-relative path. Only
// '.client.ts' / '.client.tsx' files are valid client script targets.
function resolveClientRef(sourcePath: string, ref: string) {
  const resolved = resolveFrom(sourcePath, ref);
  const isClientFile = resolved.endsWith(".client.ts") ||
    resolved.endsWith(".client.tsx");
  if (!isClientFile) {
    throw new Error("addScript must be a .client.ts or .client.tsx");
  }
  return path.relative(projectSrc, resolved);
}
// TODO: extract the marko compilation tools out, lazy load them
export interface MarkoCacheEntry {
src: string;
scannedClientRefs: string[];
}
export const markoCache = new Map<string, MarkoCacheEntry>();
function loadMarko(module: NodeJS.Module, filepath: string) {
let src = fs.readFileSync(filepath, "utf8");
// A non-standard thing here is Clover Sitegen implements
// its own client side scripting stuff, so it overrides
// bare client import statements to it's own usage.
if (src.match(/^\s*client\s+import\s+["']/m)) {
src = src.replace(
/^\s*client\s+import\s+("[^"]+|'[^']+)[^\n]+/m,
"<CloverScriptInclude src=$1 />",
) + '\nimport { Script as CloverScriptInclude } from "#sitegen";';
let cache = markoCache.get(filepath);
if (!cache) {
let src = fs.readFileSync(filepath, "utf8");
// A non-standard thing here is Clover Sitegen implements
// its own client side scripting stuff, so it overrides
// bare client import statements to it's own usage.
const scannedClientRefs = new Set<string>();
if (src.match(/^\s*client\s+import\s+["']/m)) {
src = src.replace(
/^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
(_, src) => {
const ref = JSON.parse(`"${src.slice(1, -1)}"`);
const resolved = resolveClientRef(filepath, ref);
scannedClientRefs.add(resolved);
return `<CloverScriptInclude=${
JSON.stringify(getScriptId(resolved))
} />`;
},
) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
}
src = marko.compileSync(src, filepath).code;
src = src.replace("marko/debug/html", "#ssr/marko");
cache = { src, scannedClientRefs: Array.from(scannedClientRefs) };
markoCache.set(filepath, cache);
}
src = marko.compileSync(filepath, {}).code;
src = src.replace("marko/debug/html", "#ssr/marko");
return loadEsbuildCode(module, filepath, src);
const { src, scannedClientRefs } = cache;
return loadEsbuildCode(module, filepath, src, {
scannedClientRefs,
});
}
function loadMdx(module: NodeJS.Module, filepath: string) {
@ -174,19 +211,26 @@ function loadMdx(module: NodeJS.Module, filepath: string) {
return loadEsbuildCode(module, filepath, src);
}
function loadCss(module: NodeJS.Module, filepath: string) {
function loadCss(module: NodeJS.Module, _filepath: string) {
module.exports = {};
}
export function reloadRecursive(filepath: string) {
filepath = path.resolve(filepath);
const existing = cache[filepath];
if (existing) deleteRecursive(filepath, existing);
fsGraph.clear();
if (existing) deleteRecursiveInner(filepath, existing);
fileStats.clear();
return require(filepath);
}
function deleteRecursive(id: string, module: any) {
// Evict a single module from the require cache and drop its tracked file
// stats. Unlike 'reloadRecursive', dependencies are left untouched.
export function unload(filepath: string) {
  const resolved = path.resolve(filepath);
  if (cache[resolved]) {
    delete cache[resolved];
  }
  fileStats.delete(resolved);
}
function deleteRecursiveInner(id: string, module: any) {
if (id.includes(path.sep + "node_modules" + path.sep)) {
return;
}
@ -194,15 +238,31 @@ function deleteRecursive(id: string, module: any) {
for (const child of module.children) {
if (child.filename.includes("/engine/")) return;
const existing = cache[child.filename];
if (existing === child) deleteRecursive(child.filename, existing);
if (existing === child) deleteRecursiveInner(child.filename, existing);
}
}
export function getCssImports(filepath: string) {
filepath = path.resolve(filepath);
if (!require.cache[filepath]) throw new Error(filepath + " was never loaded");
return fsGraph.get(path.relative(projectRoot, filepath))
?.cssImportsRecursive ?? [];
return fileStats.get(filepath)?.cssImportsRecursive ?? [];
}
// Client script ids recorded for a loaded module (empty list when the
// module registered none). Throws if the module was never loaded.
export function getClientScriptRefs(filepath: string) {
  const resolved = path.resolve(filepath);
  const mod = require.cache[resolved];
  if (!mod) throw new Error(resolved + " was never loaded");
  return mod.cloverClientRefs ?? [];
}
// Original source text captured for a loaded module. Throws when the
// module was never loaded, or was loaded without recording its source.
export function getSourceCode(filepath: string) {
  const resolved = path.resolve(filepath);
  const mod = require.cache[resolved];
  if (!mod) {
    throw new Error(resolved + " was never loaded");
  }
  const source = mod.cloverSourceCode;
  if (!source) {
    throw new Error(resolved + " did not record source code");
  }
  return source;
}
export function resolveFrom(src: string, dest: string) {
@ -216,9 +276,57 @@ export function resolveFrom(src: string, dest: string) {
}
}
const importRegExp =
/import\s+(\*\sas\s([a-zA-Z0-9$_]+)|{[^}]+})\s+from\s+(?:"#sitegen"|'#sitegen')/s;
const getSitegenAddScriptRegExp = /addScript(?:\s+as\s+([a-zA-Z0-9$_]+))?/;
interface ResolvedClientRefs {
code: string;
refs: string[];
}
/**
 * Scan source code for calls to '#sitegen' addScript and rewrite each
 * call's string argument into a canonical script id (see getScriptId).
 * Returns the rewritten code and the list of resolved client refs.
 *
 * Matching is purely textual (regex-based), so it only recognizes the
 * common import/call shapes handled below — it is not a real parser.
 */
export function resolveClientRefs(
code: string,
filepath: string,
): ResolvedClientRefs {
// This match finds a call to 'import ... from "#sitegen"'
const importMatch = code.match(importRegExp);
if (!importMatch) return { code, refs: [] };
const items = importMatch[1];
let identifier = "";
if (items.startsWith("{")) {
// Named import: '{ addScript }' or '{ addScript as alias }'.
const clauseMatch = items.match(getSitegenAddScriptRegExp);
if (!clauseMatch) return { code, refs: [] }; // did not import addScript
identifier = clauseMatch[1] || "addScript";
} else if (items.startsWith("*")) {
// Namespace import: match 'ns.addScript' (with optional whitespace).
identifier = importMatch[2] + "\\s*\\.\\s*addScript";
} else {
// importRegExp only admits '{...}' or '* as ...' clauses.
throw new Error("Impossible");
}
identifier = identifier.replaceAll("$", "\\$"); // only needed escape
// Find 'identifier("...")' / "identifier('...')" call sites.
const findCallsRegExp = new RegExp(
`\\b(${identifier})\\s*\\(("[^"]+"|'[^']+')\\)`,
"gs",
);
const scannedClientRefs = new Set<string>();
code = code.replace(findCallsRegExp, (_, call, arg) => {
// Re-parse the quoted argument as a JSON string to unescape it.
const ref = JSON.parse(`"${arg.slice(1, -1)}"`);
const resolved = resolveClientRef(filepath, ref);
scannedClientRefs.add(resolved);
// Replace the raw specifier with the canonical script id.
return `${call}(${JSON.stringify(getScriptId(resolved))})`;
});
return { code, refs: Array.from(scannedClientRefs) };
}
// Canonical id for a client script: its path relative to the project
// source root, with backslashes normalized to forward slashes.
export function getScriptId(file: string) {
  const relative = path.isAbsolute(file)
    ? path.relative(projectSrc, file)
    : file;
  return relative.split("\\").join("/");
}
declare global {
namespace NodeJS {
interface Module {
cloverClientRefs?: string[];
cloverSourceCode?: string;
_compile(
this: NodeJS.Module,
content: string,
@ -228,8 +336,14 @@ declare global {
}
}
}
declare module "node:module" {
export function _resolveFilename(
id: string,
parent: NodeJS.Module,
): unknown;
}
import * as fs from "./fs.ts";
import * as fs from "./lib/fs.ts";
import * as path from "node:path";
import { pathToFileURL } from "node:url";
import * as esbuild from "esbuild";

View file

@ -1,55 +1,108 @@
// `Incremental` contains multiple maps for the different parts of a site
// build, and tracks reused items across builds. It also handles emitting and
// updating the built site. This structure is self contained and serializable.
//
// Tracking is simple: Files map to one or more 'source IDs', which map to one
// or more 'artifact'. This two layer approach allows many files (say a page +
// all its imports) to map to the build of a page, which produces an HTML file
// plus a list of scripts.
// Incremental contains multiple maps for the different kinds
// of Artifact, which contain a list of source files which
// were used to produce it. When files change, Incremental sees
// that the `mtime` is newer, and purges the referenced artifacts.
import { Buffer } from "node:buffer";
type SourceId = string; // relative to project root, e.g. 'src/global.css'
type ArtifactId = string; // `${ArtifactType}\0${string}`
type Sha1Id = string; // Sha1 hex string
// -- artifact types --
interface ArtifactMap {
/* An asset (serve with "#sitegen/asset" */
asset: Asset;
/* The bundled text of a '.client.ts' script */
// TODO: track imports this has into `asset`
script: string;
/* The bundled style tag contents. Keyed by 'css.styleKey' */
style: string;
/* Metadata about a static page */
pageMetadata: PageMetadata;
/* Metadata about a dynamic view */
viewMetadata: ViewMetadata;
/* Cached '.marko' server compilation */
serverMarko: hot.MarkoCacheEntry;
/* Backend source code, pre-replacement. Keyed by platform type. */
backendBundle: BackendBundle;
/* One file in the backend receives post-processing. */
backendReplace: Buffer;
}
type AllArtifactMaps = {
[K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
};
type ArtifactType = keyof ArtifactMap;
interface Asset {
type ArtifactKind = keyof ArtifactMap;
/* Automatic path tracing is performed to make it so that
* specifying 'sources: [file]' refers to it and everything it imports.
* These kinds do not have that behavior
*/
const exactDependencyKinds = ["serverMarko"];
export interface Asset {
buffer: Buffer;
headers: Record<string, string | undefined>;
hash: string;
}
export interface PutBase {
srcTag?: string; // deprecated
srcId: string;
key: string;
/**
* This interface intentionally omits the *contents*
* of its scripts and styles for fine-grained rebuilds.
*/
export interface PageMetadata {
html: string;
meta: string;
cssImports: string[];
theme: css.Theme;
clientRefs: string[];
}
/**
* Like a page, this intentionally omits resources,
* but additionally omits the bundled server code.
*/
export interface ViewMetadata {
file: string;
// staticMeta: string | null; TODO
cssImports: string[];
theme: css.Theme;
clientRefs: string[];
hasLayout: boolean;
}
export interface BackendBundle {
magicWord: string;
fileWithMagicWord: string | null;
files: Record<string, Buffer>;
}
export interface Put<T extends ArtifactType> extends PutBase {
type: T;
// -- incremental support types --
export interface PutBase {
sources: SourceId[];
key: string;
}
export interface Put<T extends ArtifactKind> extends PutBase {
kind: T;
value: ArtifactMap[T];
}
export interface Output {
type: ArtifactType;
key: string;
export interface Invalidations {
lastModified: number;
outputs: Set<ArtifactId>;
files: Set<SourceId>;
}
const gzip = util.promisify(zlib.gzip);
const zstd = util.promisify(zlib.zstdCompress);
export class Incremental {
/** The generated artifacts */
out: AllArtifactMaps = {
out: {
[K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
} = {
asset: new Map(),
script: new Map(),
style: new Map(),
pageMetadata: new Map(),
viewMetadata: new Map(),
serverMarko: new Map(),
backendBundle: new Map(),
backendReplace: new Map(),
};
/** Tracking filesystem entries to `srcId` */
invals = new Map<SourceId, Invalidations>();
/** Tracking output keys to files */
sources = new Map<ArtifactId, SourceId[]>();
/** Compressed resources */
compress = new Map<string, Compressed>();
compress = new Map<Sha1Id, Compressed>();
compressQueue = new Queue<CompressJob, void>({
name: "Compress",
maxJobs: 5,
@ -57,59 +110,183 @@ export class Incremental {
passive: true,
getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`,
});
/** Tracking filesystem entries to `srcId` */
files = new Map<string, hot.FileStat>();
srcIds = new Map<string, Output[]>();
static fromSerialized() {
/** Reset at the end of each update */
round = {
inserted: new Set<ArtifactId>(),
referenced: new Set<ArtifactId>(),
};
/** Fetch a cached artifact, marking it as referenced for this round. */
getArtifact<T extends ArtifactKind>(kind: T, key: string) {
  const id = `${kind}\0${key}`;
  this.round.referenced.add(id);
  return this.out[kind].get(key);
}
serialize() {
const writer = new BufferWriter();
const asset = Array.from(
this.out.asset,
([key, { buffer, hash, headers }]) => {
const raw = writer.write(buffer, hash);
const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
const gzip = gzipBuf ? writer.write(gzipBuf, hash) : null;
const zstd = zstdBuf ? writer.write(zstdBuf, hash) : null;
return [key, {
raw,
gzip,
zstd,
hash,
headers,
}];
},
/** True when the artifact exists; also marks it referenced this round. */
hasArtifact(kind: ArtifactKind, key: string) {
  const value = this.getArtifact(kind, key);
  return value !== null && value !== undefined;
}
sourcesFor(kind: ArtifactKind, key: string) {
return UNWRAP(
this.sources.get(kind + "\0" + key),
`No artifact '${kind}' '${key}'`,
);
const script = Array.from(this.out.script);
const meta = Buffer.from(
JSON.stringify({
asset,
script,
}),
"utf-8",
);
const lengthBuffer = Buffer.alloc(4);
lengthBuffer.writeUInt32LE(meta.byteLength, 0);
return Buffer.concat([meta, lengthBuffer, ...writer.buffers]);
}
serializeToDisk(file = ".clover/incr.state") {
const buffer = this.serialize();
fs.writeFileSync(file, buffer);
/**
 * End-of-round bookkeeping: report which artifacts were inserted and
 * referenced during this round, plus which cached artifacts were never
 * referenced (candidates for pruning — the delete itself is currently
 * commented out). Resets the per-round sets afterwards.
 */
shake() {
// ArtifactId strings are '<kind>\0<key>'; split back into parts.
const toPublic = (str: string) => {
const [kind, key] = str.split("\0");
return { kind: kind as ArtifactKind, key };
};
const inserted = Array.from(this.round.inserted, toPublic);
const referenced = Array.from(this.round.referenced, toPublic);
const unreferenced: { kind: ArtifactKind; key: string }[] = [];
// Walk every artifact map and collect entries nobody referenced.
for (const kind in this.out) {
const map = this.out[kind as keyof typeof this.out];
if (!map) continue;
for (const key of map.keys()) {
if (!this.round.referenced.has(`${kind}\0${key}`)) {
unreferenced.push({ kind: kind as ArtifactKind, key });
// this.out[kind as ArtifactKind].delete(key);
}
}
}
// Start the next round with clean slates.
this.round.inserted.clear();
this.round.referenced.clear();
return { inserted, referenced, unreferenced };
}
put<T extends ArtifactType>({
srcId,
type,
/*
* Put built artifacts into the incremental cache. The entry points
* used to build this must be provided. 'Incremental' will trace JS
* imports and file modification times tracked by 'hot.ts'.
*/
put<T extends ArtifactKind>({
sources,
kind,
key,
value,
}: Put<T>) {
this.out[type].set(key, value);
// These three invariants affect incremental accuracy.
if (this.round.inserted.has(`${kind}\0${key}`)) {
console.error(
`Artifact ${kind}:${key} was inserted multiple times in the same round!`,
);
} else if (!this.round.referenced.has(`${kind}\0${key}`)) {
console.error(
`Artifact ${kind}:${key} was inserted without checking if (!hasArtifact())`,
);
} else if (this.out[kind].has(key)) {
console.error(
`Artifact ${kind}:${key} is not stale, but overwritten.`,
);
}
this.out[kind].set(key, value);
this.round.inserted.add(`${kind}\0${key}`);
// Update sources information
ASSERT(sources.length > 0, "Missing sources for " + kind + " " + key);
sources = sources.map((src) => path.normalize(src));
const fullKey = `${kind}\0${key}`;
const prevSources = this.sources.get(fullKey);
const newSources = new Set(
sources.map((file) =>
path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
),
);
this.sources.set(fullKey, [...newSources]);
for (const source of prevSources ?? []) {
if (sources.includes(source)) {
newSources.delete(source);
continue;
}
const invals = UNWRAP(this.invals.get(source));
ASSERT(invals.outputs.has(fullKey));
invals.outputs.delete(fullKey);
}
// Use reflection from the plugin system to get imports.
for (const source of newSources) {
const invals = this.#getOrInitInvals(source);
invals.outputs.add(fullKey);
this.#followImports(source);
}
}
// TODO: invalidation records are not removed when a source file disappears.
/**
 * Look up the invalidation record for a source file, creating it on
 * first sight. The initial mtime comes from hot.ts's stat cache when
 * available, otherwise from a real stat relative to the project root.
 */
#getOrInitInvals(source: string) {
  const existing = this.invals.get(source);
  if (existing) return existing;
  const lastModified = hot.getFileStat(source)?.lastModified ??
    fs.statSync(path.resolve(hot.projectRoot, source)).mtimeMs;
  const created: Invalidations = {
    lastModified,
    files: new Set(),
    outputs: new Set(),
  };
  this.invals.set(source, created);
  return created;
}
/**
 * Record that each file `file` imports (transitively, per hot.ts's stat
 * cache) invalidates `file` when it changes.
 *
 * `seen` guards against circular imports — which are legal in JS and
 * present the same edges either way — so the walk always terminates.
 * Callers may omit it; the default preserves the original signature.
 */
#followImports(file: string, seen = new Set<string>()) {
  if (seen.has(file)) return;
  seen.add(file);
  const stat = hot.getFileStat(file);
  if (!stat) return;
  for (const i of stat.imports) {
    const invals = this.#getOrInitInvals(i);
    invals.files.add(file);
    this.#followImports(i, seen);
  }
}
/**
 * Re-stat every tracked source file and invalidate artifacts whose
 * inputs changed.
 *
 * Fixes over the previous version:
 * - Keys in `invals` are project-root relative (see `put` /
 *   `#getOrInitInvals`), so resolve them before hitting the filesystem
 *   instead of statting relative to an arbitrary cwd.
 * - A failed stat (file deleted/renamed) is reported as a deletion via
 *   `updateStat(file, null)` rather than silently swallowed, so stale
 *   artifacts get purged.
 */
async statAllFiles() {
  // Snapshot keys: updateStat(…, null) deletes entries while we iterate.
  for (const file of [...this.invals.keys()]) {
    const absolute = path.resolve(hot.projectRoot, file);
    try {
      const mtime = fs.statSync(absolute).mtimeMs;
      this.updateStat(absolute, mtime);
    } catch {
      // stat failed — treat the file as deleted.
      this.updateStat(absolute, null);
    }
  }
}
/**
 * Record a new modification time for a tracked source file and, when it
 * is newer than the stored one (or `newLastModified` is null, meaning
 * the file was deleted), purge every artifact that transitively depends
 * on it. Returns true when an invalidation actually happened.
 */
updateStat(file: string, newLastModified: number | null) {
// Invalidation records are keyed relative to the project root.
file = path.relative(hot.projectRoot, file);
const stat = this.invals.get(file);
ASSERT(stat, "Updated stat on untracked file " + file);
// null or a newer mtime both count as "changed".
const hasUpdate = !newLastModified || stat.lastModified < newLastModified;
if (hasUpdate) {
// Invalidate
console.info(file + " " + (newLastModified ? "updated" : "deleted"));
// NOTE(review): hot.unload resolves its argument against the cwd while
// 'file' here is project-root relative — confirm these agree.
hot.unload(file);
// Worklist walk: delete every artifact produced from an invalidated
// file, then queue every file that imports it for the same treatment.
const invalidQueue = [file];
let currentInvalid;
while (currentInvalid = invalidQueue.pop()) {
const invalidations = this.invals.get(currentInvalid);
ASSERT(
invalidations,
"No reason to track file '" + currentInvalid +
"' if it has no invalidations",
);
const { files, outputs } = invalidations;
for (const out of outputs) {
// ArtifactId is '<kind>\0<key>'.
const [kind, artifactKey] = out.split("\0");
this.out[kind as ArtifactKind].delete(artifactKey);
}
invalidQueue.push(...files);
}
}
if (newLastModified) {
stat.lastModified = newLastModified;
} else {
// Deleted files stop being tracked altogether.
this.invals.delete(file);
}
return hasUpdate;
}
async putAsset(info: PutAsset) {
@ -126,30 +303,25 @@ export class Incremental {
},
hash,
};
const a = this.put({ ...info, kind: "asset", value });
if (!this.compress.has(hash)) {
const label = info.key;
this.compress.set(hash, {
zstd: undefined,
gzip: undefined,
});
await Promise.all([
this.compressQueue.add({ label, buffer, algo: "zstd", hash }),
this.compressQueue.add({ label, buffer, algo: "gzip", hash }),
]);
this.compressQueue.add({ label, buffer, algo: "zstd", hash });
this.compressQueue.add({ label, buffer, algo: "gzip", hash });
}
return this.put({ ...info, type: "asset", value });
return a;
}
async compressImpl({ algo, buffer, hash }: CompressJob) {
let out;
switch (algo) {
case "zstd":
out = await zstd(buffer);
break;
case "gzip":
out = await gzip(buffer, { level: 9 });
break;
}
if (algo === "zstd") out = await zstd(buffer);
else if (algo === "gzip") out = await gzip(buffer, { level: 9 });
else algo satisfies never;
let entry = this.compress.get(hash);
if (!entry) {
this.compress.set(
@ -163,32 +335,232 @@ export class Incremental {
entry![algo] = out;
}
invalidate(srcId: string) {
}
async wait() {
await this.compressQueue.done({ method: "stop" });
}
async flush() {
serialize() {
const writer = new BufferWriter();
const asset = Object.fromEntries(
Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
// -- artifact --
const asset = Array.from(
this.out.asset,
([key, { buffer, hash, headers }]) => {
const raw = writer.write(buffer, hash);
const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
const gzip = gzipBuf ? writer.write(gzipBuf, hash) : null;
const zstd = zstdBuf ? writer.write(zstdBuf, hash) : null;
const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
return [key, {
raw,
gzip,
zstd,
headers,
hash,
headers: headers as Record<string, string>,
}] satisfies SerializedMeta["asset"][0];
},
);
const script = Array.from(this.out.script);
const style = Array.from(this.out.style);
const pageMetadata = Array.from(this.out.pageMetadata);
const viewMetadata = Array.from(this.out.viewMetadata);
const serverMarko = Array.from(this.out.serverMarko);
const backendBundle = Array.from(this.out.backendBundle, ([k, v]) => {
return [k, {
magicWord: v.magicWord,
fileWithMagicWord: v.fileWithMagicWord,
files: Object.entries(v.files).map(
([file, contents]) => [
file,
writer.write(contents, "backendBundle" + k + ":" + file),
],
),
}] satisfies SerializedMeta["backendBundle"][0];
});
const backendReplace = Array.from(
this.out.backendReplace,
([k, v]) =>
[
k,
writer.write(v, "backendReplace" + k),
] satisfies SerializedMeta["backendReplace"][0],
);
// -- incremental metadata --
const invals = Array.from(this.invals, ([key, value]) => {
const { lastModified, files, outputs } = value;
return [key, {
m: lastModified,
f: [...files],
o: [...outputs],
}] satisfies SerializedMeta["invals"][0];
});
const sources = Array.from(this.sources, ([key, value]) => {
return [key, ...value] as [string, ...string[]];
});
const json = {
asset,
script,
invals,
sources,
style,
pageMetadata,
viewMetadata,
serverMarko,
backendBundle,
backendReplace,
} satisfies SerializedMeta;
const meta = Buffer.from(JSON.stringify(json), "utf-8");
const lengthBuffer = Buffer.alloc(4);
lengthBuffer.writeUInt32LE(meta.byteLength, 0);
return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
}
/**
 * Rebuild an Incremental from a buffer produced by 'serialize()'.
 * Layout: u32le metadata length, JSON metadata, then a blob of
 * concatenated byte ranges which the metadata's 'View' pairs index.
 */
static fromSerialized(buffer: Buffer): Incremental {
const metaLength = buffer.readUint32LE(0);
const meta: SerializedMeta = JSON.parse(
buffer.subarray(4, 4 + metaLength).toString("utf8"),
);
// A View is a [start, end] pair relative to the start of the blob,
// which begins right after the 4-byte length prefix and the metadata.
const view = ([start, end]: View) =>
buffer.subarray(4 + metaLength + start, 4 + metaLength + end);
const incr = new Incremental();
incr.out = {
asset: new Map(meta.asset.map(([key, value]) => {
const { hash, raw, gzip, zstd, headers } = value;
// Restore pre-compressed variants into the compress cache so they
// are not re-compressed on the next build.
if ((gzip || zstd) && !incr.compress.has(hash)) {
incr.compress.set(hash, {
gzip: gzip ? view(gzip) : undefined,
zstd: zstd ? view(zstd) : undefined,
});
}
return [key, {
buffer: view(raw),
headers: headers,
hash: hash,
}];
})),
// These maps serialize as plain entry arrays; rebuild them directly.
script: new Map(meta.script),
style: new Map(meta.style),
pageMetadata: new Map(meta.pageMetadata),
viewMetadata: new Map(meta.viewMetadata),
serverMarko: new Map(meta.serverMarko),
backendBundle: new Map(meta.backendBundle.map(([key, value]) => {
return [key, {
magicWord: value.magicWord,
fileWithMagicWord: value.fileWithMagicWord,
// File contents are stored as Views into the blob.
files: Object.fromEntries(
value.files.map(([file, contents]) => [file, view(contents)]),
),
}];
})),
backendReplace: new Map(
meta.backendReplace.map(([key, contents]) => [key, view(contents)]),
),
};
// Invalidation records use short keys: m=lastModified, f=files, o=outputs.
incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => {
return [key, {
lastModified: m,
files: new Set(f),
outputs: new Set(o),
}];
}));
// Each 'sources' entry is [artifactId, ...sourceIds].
incr.sources = new Map(meta.sources.map(([key, ...value]) => [key, value]));
return incr;
}
/*
* Move the cached (server) marko transpilations from this incremental
* into the running process.
*/
loadMarkoCache() {
  // Replace the process-wide cache wholesale with this build's entries,
  // keyed by absolute path as hot.ts expects.
  hot.markoCache.clear();
  for (const [key, value] of this.out.serverMarko) {
    const absolute = path.resolve(hot.projectRoot, key);
    hot.markoCache.set(absolute, value);
  }
}
/*
 * Snapshot the marko transpilations cached by the running process
 * into this incremental, so later builds can reuse them.
 */
snapshotMarkoCache() {
  for (const [file, value] of hot.markoCache) {
    const key = path
      .relative(hot.projectRoot, file)
      .split("\\")
      .join("/");
    // 'put' warns when overwriting a non-stale artifact, so skip entries
    // that are already cached instead of re-inserting them.
    if (this.hasArtifact("serverMarko", key)) continue;
    this.put({ kind: "serverMarko", sources: [file], key, value });
  }
}
/** Serialize the whole cache and persist it to disk. */
toDisk(file = ".clover/incr.state") {
  fs.writeFileSync(file, this.serialize());
}
/** Load persisted state; a missing file means a cold (empty) start. */
static fromDisk(file = ".clover/incr.state"): Incremental {
  try {
    return Incremental.fromSerialized(fs.readFileSync(file));
  } catch (err: any) {
    if (err.code === "ENOENT") return new Incremental();
    throw err;
  }
}
/** Block until every queued compression job has finished successfully. */
async wait() {
  return await this.compressQueue.done({ method: "success" });
}
async flush(
platform: bundle.ServerPlatform,
dir = path.resolve(".clover/out"),
) {
ASSERT(!this.compressQueue.active);
const join = (...args: string[]) => path.join(dir, ...args);
const writer = new BufferWriter();
// TODO: ensure all compressed got compressed
const asset = Object.fromEntries(
Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
const raw = writer.write(buffer, hash);
const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
const gzip = writer.write(UNWRAP(gzipBuf), hash + ".gz");
const zstd = writer.write(UNWRAP(zstdBuf), hash + ".zstd");
return [key, { raw, gzip, zstd, headers }];
}),
);
await Promise.all([
fs.writeFile(".clover/static.json", JSON.stringify(asset)),
fs.writeFile(".clover/static.blob", writer.get()),
]);
const backendBundle = UNWRAP(this.out.backendBundle.get(platform));
// Arrange output files
const outFiles: Array<[file: string, contents: string | Buffer]> = [
// Asset manifest
["static.json", JSON.stringify(asset)],
["static.blob", writer.get()],
// Backend
...Object.entries(backendBundle.files).map(([subPath, contents]) =>
[
subPath,
subPath === backendBundle.fileWithMagicWord
? UNWRAP(this.out.backendReplace.get(platform))
: contents,
] as [string, Buffer]
),
];
// TODO: check duplicates
// Perform all i/o
await Promise.all(
outFiles.map(([subPath, contents]) =>
fs.writeMkdir(join(subPath), contents, { flush: true })
),
);
}
}
@ -230,10 +602,10 @@ class BufferWriter {
export type View = [start: number, end: number];
// Alongside this type is a byte buffer, containing all the assets.
export interface BuiltAssetMap {
[route: string]: BuiltAsset;
}
export interface BuiltAsset {
raw: View;
gzip: View;
@ -249,17 +621,37 @@ export interface SerializedMeta {
hash: string;
headers: Record<string, string>;
}]>;
script: [key: string, value: string][];
script: Array<[key: string, value: string]>;
style: Array<[key: string, value: string]>;
pageMetadata: Array<[key: string, PageMetadata]>;
viewMetadata: Array<[key: string, ViewMetadata]>;
serverMarko: Array<[key: string, hot.MarkoCacheEntry]>;
backendBundle: Array<[platform: string, {
magicWord: string;
fileWithMagicWord: string | null;
files: Array<[string, View]>;
}]>;
backendReplace: Array<[key: string, View]>;
invals: Array<[key: string, {
/** Modified */
m: number;
f: SourceId[];
o: ArtifactId[];
}]>;
sources: Array<[string, ...string[]]>;
}
/** Marks code paths the type system should prove unreachable; always throws. */
function never(): never {
  const message = "Impossible";
  throw new Error(message);
}
const gzip = util.promisify(zlib.gzip);
const zstd = util.promisify(zlib.zstdCompress);
import * as path from "node:path";
import * as fs from "./fs.ts";
import * as fs from "#sitegen/fs";
import * as zlib from "node:zlib";
import * as util from "node:util";
import { Queue } from "./queue.ts";
import { Queue } from "#sitegen/async";
import * as hot from "./hot.ts";
import * as mime from "./mime.ts";
import * as mime from "#sitegen/mime";
import * as path from "node:path";
import { Buffer } from "node:buffer";
import * as css from "./css.ts";
import type * as bundle from "./bundle.ts";

View file

@ -8,8 +8,8 @@ export type StaticPageId = string;
export async function reload() {
const [map, buf] = await Promise.all([
fs.readFile(".clover/static.json", "utf8"),
fs.readFile(".clover/static.blob"),
fs.readFile(path.join(import.meta.dirname, "static.json"), "utf8"),
fs.readFile(path.join(import.meta.dirname, "static.blob")),
]);
assets = {
map: JSON.parse(map),
@ -18,15 +18,18 @@ export async function reload() {
}
export async function reloadSync() {
const map = fs.readFileSync(".clover/static.json", "utf8");
const buf = fs.readFileSync(".clover/static.blob");
const map = fs.readFileSync(
path.join(import.meta.dirname, "static.json"),
"utf8",
);
const buf = fs.readFileSync(path.join(import.meta.dirname, "static.blob"));
assets = {
map: JSON.parse(map),
buf,
};
}
export async function assetMiddleware(c: Context, next: Next) {
export async function middleware(c: Context, next: Next) {
if (!assets) await reload();
const asset = assets!.map[c.req.path];
if (asset) {
@ -35,6 +38,19 @@ export async function assetMiddleware(c: Context, next: Next) {
return next();
}
export async function notFound(c: Context) {
if (!assets) await reload();
let pathname = c.req.path;
do {
const asset = assets!.map[pathname + "/404"];
if (asset) return assetInner(c, asset, 404);
pathname = pathname.slice(0, pathname.lastIndexOf("/"));
} while (pathname);
const asset = assets!.map["/404"];
if (asset) return assetInner(c, asset, 404);
return c.text("the 'Not Found' page was not found", 404);
}
export async function serveAsset(
c: Context,
id: StaticPageId,
@ -62,14 +78,13 @@ function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
if (ifnonematch) {
const etag = asset.headers.ETag;
if (etagMatches(etag, ifnonematch)) {
c.res = new Response(null, {
return c.res = new Response(null, {
status: 304,
statusText: "Not Modified",
headers: {
ETag: etag,
},
});
return;
}
}
const acceptEncoding = c.req.header("Accept-Encoding") ?? "";
@ -90,10 +105,16 @@ function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
} else {
body = subarrayAsset(asset.raw);
}
c.res = new Response(body, { headers, status });
return c.res = new Response(body, { headers, status });
}
import * as fs from "./fs.ts";
process.on("message", (msg: any) => {
if (msg?.type === "clover.assets.reload") reload();
});
import * as fs from "#sitegen/fs";
import type { Context, Next } from "hono";
import type { StatusCode } from "hono/utils/http-status";
import type { BuiltAsset, BuiltAssetMap, View } from "./incremental.ts";
import type { BuiltAsset, BuiltAssetMap, View } from "../incremental.ts";
import { Buffer } from "node:buffer";
import * as path from "node:path";

View file

@ -1,7 +1,4 @@
import { Progress } from "@paperclover/console/Progress";
import { Spinner } from "@paperclover/console/Spinner";
import * as path from "node:path";
import process from "node:process";
const five_minutes = 5 * 60 * 1000;
interface QueueOptions<T, R> {
name: string;
@ -37,6 +34,12 @@ export class Queue<T, R> {
this.#passive = options.passive ?? false;
}
/** Stop the progress bar (if one was created) and drop all pending jobs. */
cancel() {
  this.#cachedProgress?.stop();
  this.#queue = [];
}
get bar() {
const cached = this.#cachedProgress;
if (!cached) {
@ -68,10 +71,10 @@ export class Queue<T, R> {
return cached;
}
add(args: T) {
addReturn(args: T) {
this.#total += 1;
this.updateTotal();
if (this.#active.length > this.#maxJobs) {
if (this.#active.length >= this.#maxJobs) {
const { promise, resolve, reject } = Promise.withResolvers<R>();
this.#queue.push([args, resolve, reject]);
return promise;
@ -79,6 +82,10 @@ export class Queue<T, R> {
return this.#run(args);
}
add(args: T) {
return this.addReturn(args).then(() => {}, () => {});
}
addMany(items: T[]) {
this.#total += items.length;
this.updateTotal();
@ -95,10 +102,12 @@ export class Queue<T, R> {
const itemText = this.#getItemText(args);
const spinner = new Spinner(itemText);
spinner.stop();
(spinner as any).redraw = () => (bar as any).redraw();
const active = this.#active;
try {
active.unshift(spinner);
bar.props = { active };
// console.log(this.#name + ": " + itemText);
const result = await this.#fn(args, spinner);
this.#done++;
return result;
@ -107,6 +116,7 @@ export class Queue<T, R> {
(err as any).job = itemText;
}
this.#errors.push(err);
console.error(util.inspect(err, false, Infinity, true));
throw err;
} finally {
active.splice(active.indexOf(spinner), 1);
@ -142,7 +152,7 @@ export class Queue<T, R> {
}
}
async done(o: { method: "success" | "stop" }) {
async done(o?: { method: "success" | "stop" }) {
if (this.#active.length === 0) {
this.#end(o);
return;
@ -156,8 +166,8 @@ export class Queue<T, R> {
#end(
{ method = this.#passive ? "stop" : "success" }: {
method: "success" | "stop";
},
method?: "success" | "stop";
} = {},
) {
const bar = this.#cachedProgress;
if (this.#errors.length > 0) {
@ -170,6 +180,16 @@ export class Queue<T, R> {
if (bar) bar[method]();
}
get active(): boolean {
return this.#active.length !== 0;
}
[Symbol.dispose]() {
if (this.active) {
this.cancel();
}
}
}
const cwd = process.cwd();
@ -200,7 +220,80 @@ export class OnceMap<T> {
const result = compute();
this.ongoing.set(key, result);
result.finally(() => this.ongoing.delete(key));
return result;
}
}
interface ARCEValue<T> {
value: T;
[Symbol.dispose]: () => void;
}
export function RefCountedExpirable<T>(
init: () => Promise<T>,
deinit: (value: T) => void,
expire: number = five_minutes,
): () => Promise<ARCEValue<T>> {
let refs = 0;
let item: ARCEValue<T> | null = null;
let loading: Promise<ARCEValue<T>> | null = null;
let timer: ReturnType<typeof setTimeout> | null = null;
function deref() {
ASSERT(item !== null);
if (--refs !== 0) return;
ASSERT(timer === null);
timer = setTimeout(() => {
ASSERT(refs === 0);
ASSERT(loading === null);
ASSERT(item !== null);
deinit(item.value);
item = null;
timer = null;
}, expire);
}
return async function () {
if (timer !== null) {
clearTimeout(timer);
timer = null;
}
if (item !== null) {
refs++;
return item;
}
if (loading !== null) {
refs++;
return loading;
}
const p = Promise.withResolvers<ARCEValue<T>>();
loading = p.promise;
try {
const value = await init();
item = { value, [Symbol.dispose]: deref };
refs++;
p.resolve(item);
return item;
} catch (e) {
p.reject(e);
throw e;
} finally {
loading = null;
}
};
}
export function once<T>(fn: () => Promise<T>): () => Promise<T> {
let result: T | Promise<T> | null = null;
return async () => {
if (result) return result;
result = await fn();
return result;
};
}
import { Progress } from "@paperclover/console/Progress";
import { Spinner } from "@paperclover/console/Spinner";
import * as path from "node:path";
import process from "node:process";
import * as util from "node:util";

111
framework/lib/fs.ts Normal file
View file

@ -0,0 +1,111 @@
// File System APIs. Some custom APIs, but mostly a re-export of a mix of
// built-in Node.js sync+promise fs methods. For convenience.
export {
createReadStream,
createWriteStream,
existsSync,
open,
readdir,
readdirSync,
readFile,
readFileSync,
rm,
rmSync,
stat,
statSync,
writeFile,
writeFileSync,
type FileHandle,
};
/** Create `dir`, including any missing parent directories. */
export function mkdir(dir: string) {
  return nodeMkdir(dir, { recursive: true });
}
/** Synchronous `mkdir`; creates missing parent directories too. */
export function mkdirSync(dir: string) {
  return nodeMkdirSync(dir, { recursive: true });
}
export type WriteFileAsyncOptions = Parameters<typeof writeFile>[2];
/**
 * Write `contents` to `file`, creating the parent directory first so the
 * write cannot fail with ENOENT on a missing path.
 */
export async function writeMkdir(
  file: string,
  contents: Buffer | string,
  options?: WriteFileAsyncOptions,
) {
  await mkdir(path.dirname(file));
  return writeFile(file, contents, options);
}
/** Synchronous `writeMkdir`: ensure the parent directory, then write. */
export function writeMkdirSync(file: string, contents: Buffer | string) {
  mkdirSync(path.dirname(file));
  return writeFileSync(file, contents);
}
/**
 * Recursively list `dir`, returning `[]` when the directory is missing
 * instead of throwing ENOENT. Other errors propagate.
 * NOTE(review): with `encoding: "utf8"` this yields relative path strings,
 * not Dirent objects — confirm callers do not expect `isDirectory()`.
 */
export function readDirRecOptionalSync(dir: string) {
  try {
    return readdirSync(dir, { recursive: true, encoding: "utf8" });
  } catch (err: any) {
    if (err.code === "ENOENT") return [];
    throw err;
  }
}
/** Read `file` as UTF-8 and parse its contents as JSON, typed as `T`. */
export async function readJson<T>(file: string) {
  const text = await readFile(file, "utf-8");
  return JSON.parse(text) as T;
}
/** Synchronously read `file` as UTF-8 and parse it as JSON, typed `T`. */
export function readJsonSync<T>(file: string) {
  const text = readFileSync(file, "utf-8");
  return JSON.parse(text) as T;
}
/**
 * Recursively delete every empty directory underneath `dir`. When
 * `removeRoot` is set, `dir` itself is removed if it ends up empty.
 *
 * @returns true when the directory was empty (and, if requested, removed);
 *          false when it still has contents or does not exist.
 */
export async function removeEmptyDirectories(dir: string, removeRoot = false) {
  try {
    const entries = await readdir(dir, { withFileTypes: true });
    let len = entries.length;
    for (const entry of entries) {
      if (entry.isDirectory()) {
        const subDirPath = path.join(dir, entry.name);
        // Each fully emptied subdirectory no longer counts toward `len`.
        if (await removeEmptyDirectories(subDirPath, true)) len -= 1;
      }
    }
    if (len === 0) {
      if (removeRoot) {
        await rmdir(dir);
      }
      return true;
    }
  } catch (error: any) {
    // A missing directory is not an error; report "nothing removed".
    // (Previously this returned undefined, which was falsy but
    // inconsistent with the boolean returned on every other path.)
    if (error.code === "ENOENT") return false;
    throw error;
  }
  return false;
}
import * as path from "node:path";
import {
createReadStream,
createWriteStream,
existsSync,
mkdirSync as nodeMkdirSync,
readdirSync,
readFileSync,
rmSync,
statSync,
writeFileSync,
} from "node:fs";
import {
mkdir as nodeMkdir,
open,
readdir,
readFile,
rm,
rmdir,
stat,
writeFile,
type FileHandle,
} from "node:fs/promises";
export { Stats } from "node:fs";

192
framework/lib/markdown.tsx Normal file
View file

@ -0,0 +1,192 @@
/* Implementation of the CommonMark specification for markdown with support
* for custom syntax extensions via the parser options. Instead of
* returning an AST that has a second conversion pass to JSX, the
* returned value of 'parse' is 'engine.Node' which can be stringified
* via clover's SSR engine. This way, generation optimizations, async
* components, and other features are gained for free here.
*/
// TODO(review): block-level parsing is not implemented yet; this
// currently returns undefined, so 'Markdown' renders nothing.
function parse(src: string, options: Partial<ParseOpts> = {}) {
}
/* Render markdown content. Same function as 'parse', but JSX components
 * only take one argument and must start with a capital letter. */
export function Markdown(props: { src: string } & Partial<ParseOpts>) {
  const { src, ...options } = props;
  return parse(src, options);
}
/* Parse a run of inline markdown into engine nodes. Repeatedly tries
 * each rule's 'match' regex against the remaining source; the first rule
 * whose 'parse' accepts consumes its span, and unmatched text is emitted
 * verbatim.
 * NOTE(review): rules are tried in table order, not by leftmost match
 * position — an earlier rule matching later in the string wins over a
 * later rule matching earlier. Confirm this is intended. */
function parseInline(src: string, options: Partial<InlineOpts> = {}) {
  const { rules = inlineRules, links = new Map() } = options;
  const opts: InlineOpts = { rules, links };
  const parts: engine.Node[] = [];
  const ruleList = Object.values(rules);
  parse: while (true) {
    for (const rule of ruleList) {
      if (!rule.match) continue;
      const match = src.match(rule.match);
      if (!match) continue;
      const index = UNWRAP(match.index);
      const after = src.slice(index + match[0].length);
      const parse = rule.parse({ after, match: match[0], opts });
      if (!parse) continue;
      // parse before
      // Text preceding the match is kept as-is alongside the parsed node.
      parts.push(src.slice(0, index), parse.result);
      src = parse.rest ?? after;
      continue parse;
    }
    break;
  }
  // Whatever remains after the last successful rule is plain text.
  parts.push(src);
  return parts;
}
// -- interfaces --
interface ParseOpts {
gfm: boolean;
blockRules: Record<string, BlockRule>;
inlineRules: Record<string, InlineRule>;
}
interface InlineOpts {
rules: Record<string, InlineRule>;
links: Map<string, LinkRef>;
}
interface InlineRule {
match: RegExp;
parse(opts: {
after: string;
match: string;
opts: InlineOpts;
}): InlineParse | null;
}
interface InlineParse {
result: engine.Node;
rest?: string;
}
interface LinkRef {
href: string;
title: string | null;
}
interface BlockRule {
match: RegExp;
parse(opts: {}): unknown;
}
// Inline rule table. Each rule's 'match' locates a candidate in the
// source and 'parse' either consumes it (returning the rendered node and
// the remaining text) or returns null to decline.
export const inlineRules: Record<string, InlineRule> = {
  code: {
    match: /`+/,
    // 6.1 - code spans
    parse({ after, match }) {
      // The span closes at the next backtick run of identical length.
      const end = after.indexOf(match);
      if (end === -1) return null;
      let inner = after.slice(0, end);
      const rest = after.slice(end + match.length);
      // If the resulting string both begins and ends with a space
      // character, but does not consist entirely of space characters,
      // a single space character is removed from the front and back.
      if (inner.match(/^ [^ ]+ $/)) inner = inner.slice(1, -1);
      return { result: <code>{inner}</code>, rest };
    },
  },
  link: {
    match: /(?<!!)\[/,
    // 6.3 - links
    parse({ after, opts }) {
      // Match '[' to let the inner-most link win.
      const splitText = splitFirst(after, /[[\]]/);
      if (!splitText) return null;
      if (splitText.delim !== "]") return null;
      const { first: textSrc, rest: afterText } = splitText;
      let href: string, title: string | null = null, rest: string;
      if (afterText[0] === "(") {
        // Inline link
        const splitTarget = splitFirst(afterText.slice(1), /\)/);
        if (!splitTarget) return null;
        ({ rest } = splitTarget);
        const target = parseLinkTarget(splitTarget.first);
        if (!target) return null;
        ({ href, title } = target);
      } else if (afterText[0] === "[") {
        const splitTarget = splitFirst(afterText.slice(1), /]/);
        if (!splitTarget) return null;
        const name = splitTarget.first.trim().length === 0
          // Collapsed reference link
          ? textSrc.trim()
          // Full Reference Link
          : splitTarget.first.trim();
        const target = opts.links.get(name);
        if (!target) return null;
        ({ href, title } = target);
        ({ rest } = splitTarget);
      } else {
        // Shortcut reference link
        const target = opts.links.get(textSrc);
        if (!target) return null;
        ({ href, title } = target);
        rest = afterText;
      }
      // Link text may itself contain inline markdown.
      return {
        result: <a {...{ href, title }}>{parseInline(textSrc, opts)}</a>,
        rest,
      };
    },
  },
  image: {
    match: /!\[/,
    // 6.4 - images
    // NOTE(review): unfinished — the alt text is split out but nothing is
    // returned yet, so this rule currently never produces a node.
    parse({ after, opts }) {
      // Match '[' to let the inner-most link win.
      const splitText = splitFirst(after, /[[\]]/);
      if (!splitText) return null;
      if (splitText.delim !== "]") return null;
      const { first: textSrc, rest: afterText } = splitText;
    },
  },
  emphasis: {
    // detect left-flanking delimiter runs, but this expression does not
    // consider preceding escapes. instead, those are programmatically
    // checked inside the parse function.
    match: /(?:\*+|(?<!\p{P})_+)(?!\s|\p{P}|$)/u,
    // 6.2 - emphasis and strong emphasis
    // NOTE(review): unfinished — the body is empty, and the 'before'
    // parameter is not part of the InlineRule.parse contract; confirm the
    // intended signature.
    parse({ before, match, after, opts }) {
      // find out how long the delim sequence is
      // look for 'ends'
    },
  },
  // NOTE(review): stubs — these do not yet satisfy the InlineRule shape.
  autolink: {},
  html: {},
  br: {
    // Hard line break: trailing spaces or a backslash before the newline.
    match: / +\n|\\\n/,
    parse() {
      return { result: <br /> };
    },
  },
};
/* Parse the interior of an inline link target into an href and optional
 * title. Title extraction is not implemented yet, so the whole source is
 * treated as the href. */
function parseLinkTarget(src: string) {
  const title: string | null = null;
  return { href: src, title };
}
/* Find a delimiter while considering backslash escapes. Returns the text
 * before the first unescaped delimiter ('first'), the delimiter itself
 * ('delim'), and the remaining text ('rest'); null when no unescaped
 * delimiter exists. */
function splitFirst(text: string, match: RegExp) {
  let first = "", delim: string, escaped: boolean;
  do {
    const find = text.match(match);
    if (!find) return null;
    delim = find[0];
    const index = UNWRAP(find.index);
    // An odd-length run of backslashes immediately before the delimiter
    // means the delimiter is escaped.
    let i = index - 1;
    escaped = false;
    while (i >= 0 && text[i] === "\\") escaped = !escaped, i -= 1;
    // When escaped, the escaping backslash is dropped from 'first'.
    // NOTE(review): the escaped delimiter character itself is also
    // dropped (only text before the backslash is kept) — confirm whether
    // "\]" should instead yield a literal "]" in the output.
    first += text.slice(0, index - +escaped);
    text = text.slice(index + find[0].length);
  } while (escaped);
  return { first, delim, rest: text };
}
console.log(engine.ssrSync(parseInline("meow `bwaa` `` ` `` `` `z``")));
import * as engine from "#ssr";
import type { ParseOptions } from "node:querystring";

24
framework/lib/meta.ts Normal file
View file

@ -0,0 +1,24 @@
export interface Meta {
title: string;
description?: string | undefined;
openGraph?: OpenGraph;
alternates?: Alternates;
}
export interface OpenGraph {
title?: string;
description?: string | undefined;
type: string;
url: string;
}
export interface Alternates {
canonical: string;
types: { [mime: string]: AlternateType };
}
export interface AlternateType {
url: string;
title: string;
}
/**
 * Render page metadata to a raw HTML string for the document head.
 * NOTE(review): only 'title' is emitted so far; 'description',
 * 'openGraph', and 'alternates' exist on Meta but are not yet rendered.
 */
export function renderMeta({ title }: Meta): string {
  return `<title>${esc(title)}</title>`;
}
import { escapeHtml as esc } from "../engine/ssr.ts";

32
framework/lib/mime.ts Normal file
View file

@ -0,0 +1,32 @@
// The bundler may inline the parsed table as MIME_INLINE_DATA at build
// time; otherwise it is loaded from the adjacent mime.txt at startup.
declare const MIME_INLINE_DATA: never;
const entries = typeof MIME_INLINE_DATA !== "undefined"
  ? MIME_INLINE_DATA
  // Each non-comment line of mime.txt is "<key> <content-type>".
  : fs.readFileSync(path.join(import.meta.dirname, "mime.txt"), "utf8")
    .split("\n")
    .map((line) => line.trim())
    .filter((line) => line && !line.startsWith("#"))
    .map((line) => line.split(/\s+/, 2) as [string, string]);
export const rawEntriesText = entries;
// Keys starting with "." map extensions; other keys match whole basenames
// (e.g. "rss.xml").
const extensions = new Map(entries.filter((x) => x[0].startsWith(".")));
const fullNames = new Map(entries.filter((x) => !x[0].startsWith(".")));
/**
* Accepts:
* - Full file path or basename
* - Extension (with or without dot)
*/
/**
 * Look up the Content-Type for a file path, basename, or extension
 * (with or without the leading dot). Falls back to
 * "application/octet-stream" for unknown inputs.
 */
export function contentTypeFor(file: string) {
  // Reduce a full path to its basename. Use the LAST separator (the
  // first slash truncated nested paths like "a/b/rss.xml" to "b/rss.xml",
  // defeating the full-name table), and accept Windows backslashes.
  const slash = Math.max(file.lastIndexOf("/"), file.lastIndexOf("\\"));
  if (slash !== -1) file = file.slice(slash + 1);
  const dot = file.indexOf(".");
  if (dot === -1) file = "." + file; // bare extension such as "png"
  else if (dot > 0) {
    // Basename: special-cased full names (e.g. "rss.xml") win first.
    const entry = fullNames.get(file);
    if (entry) return entry;
    // Keep only the final extension so "app.min.js" resolves as ".js"
    // rather than the unknown ".min.js".
    file = file.slice(file.lastIndexOf("."));
  }
  return extensions.get(file) ?? "application/octet-stream";
}
import * as fs from "#sitegen/fs";
import * as path from "node:path";

99
framework/lib/mime.txt Normal file
View file

@ -0,0 +1,99 @@
# media types
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/MIME_types
.aac audio/x-aac
.aif audio/x-aiff
.aifc audio/x-aiff
.aiff audio/x-aiff
.asm text/x-asm
.avi video/x-msvideo
.bat application/x-msdownload
.c text/x-c
.chat text/x-clover-chatlog
.class application/java-vm
.cmd application/x-msdownload
.com application/x-msdownload
.conf text/plain
.cpp text/x-c
.css text/css
.csv text/csv
.cxx text/x-c
.def text/plain
.diff text/plain
.dll application/x-msdownload
.dmg application/octet-stream
.doc application/msword
.docx application/vnd.openxmlformats-officedocument.wordprocessingml.document
.epub application/epub+zip
.exe application/x-msdownload
.flv video/x-flv
.fbx application/fbx
.gz application/x-gzip
.h text/x-c
.h264 video/h264
.hh text/x-c
.htm text/html;charset=utf-8
.html text/html;charset=utf-8
.ico image/x-icon
.ics text/calendar
.in text/plain
.jar application/java-archive
.java text/x-java-source
.jpeg image/jpeg
.jpg image/jpeg
.jpgv video/jpeg
.jxl image/jxl
.js application/javascript
.json application/json
.latex application/x-latex
.list text/plain
.log text/plain
.m4a audio/mp4
.man text/troff
.mid audio/midi
.midi audio/midi
.mov video/quicktime
.mp3 audio/mpeg
.mp4 video/mp4
.msh model/mesh
.msi application/x-msdownload
.obj application/octet-stream
.ogg audio/ogg
.otf application/x-font-otf
.pdf application/pdf
.png image/png
.ppt application/vnd.ms-powerpoint
.pptx application/vnd.openxmlformats-officedocument.presentationml.presentation
.psd image/vnd.adobe.photoshop
.py text/x-python
.rar application/x-rar-compressed
.rss application/rss+xml
.rtf application/rtf
.rtx text/richtext
.s text/x-asm
.pem application/x-pem-file
.ser application/java-serialized-object
.sh application/x-sh
.sig application/pgp-signature
.silo model/mesh
.svg image/svg+xml
.t text/troff
.tar application/x-tar
.text text/plain
.tgz application/x-gzip
.tif image/tiff
.tiff image/tiff
.torrent application/x-bittorrent
.ttc application/x-font-ttf
.ttf application/x-font-ttf
.txt text/plain
.urls text/uri-list
.v text/x-v
.wav audio/x-wav
.wmv video/x-ms-wmv
.xls application/vnd.ms-excel
.xlsx application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
.xml application/xml
.xps application/vnd.ms-xpsdocument
# special cased based on file name
rss.xml application/rss+xml

44
framework/lib/sitegen.ts Normal file
View file

@ -0,0 +1,44 @@
// Import this file with 'import * as sg from "#sitegen";'
export type ScriptId = string;
/**
* A filesystem object associated with some ID,
* such as a page's route to its source file.
*/
export interface FileItem {
id: string;
file: string;
}
export interface SitegenRender {
scripts: Set<string>;
}
/** Create a fresh per-render sitegen state with no registered scripts. */
export function initRender(): SitegenRender {
  const scripts = new Set<string>();
  return { scripts };
}
/**
 * Fetch the SitegenRender state for the current SSR pass. Throws when
 * called outside of a page render (no "sitegen" user data present).
 */
export function getRender() {
  return ssr.getUserData<SitegenRender>("sitegen", () => {
    throw new Error(
      "This function can only be used in a page (static or view)",
    );
  });
}
/** True when the current SSR pass carries sitegen state (i.e. is a page). */
export function inRender() {
  return "sitegen" in ssr.getCurrentRender();
}
/**
 * Add a client-side script to the page. Accepts a raw script id or an
 * object wrapping one in its 'value' property.
 */
export function addScript(id: ScriptId | { value: ScriptId }) {
  getRender().scripts.add(typeof id === "string" ? id : id.value);
}
export interface Section {
root: string;
}
import * as ssr from "../engine/ssr.ts";

View file

@ -10,7 +10,7 @@ export function getDb(file: string) {
if (db) return db;
const fileWithExt = file.includes(".") ? file : file + ".sqlite";
db = new WrappedDatabase(
new DatabaseSync(path.join(".clover/", fileWithExt)),
new DatabaseSync(path.join(process.env.CLOVER_DB ?? ".clover", fileWithExt)),
);
map.set(file, db);
return db;
@ -41,61 +41,95 @@ export class WrappedDatabase {
(key, version) values (?, ?);
`),
));
const { changes, lastInsertRowid } = s.run(name, 1);
console.log(changes, lastInsertRowid);
if (changes === 1) {
this.node.exec(schema);
}
const { changes } = s.run(name, 1);
if (changes === 1) this.node.exec(schema);
}
prepare<Args extends unknown[] = [], Result = unknown>(
query: string,
): Stmt<Args, Result> {
return new Stmt(this.node.prepare(query));
query = query.trim();
const lines = query.split("\n");
const trim = Math.min(
...lines.map((line) =>
line.trim().length === 0 ? Infinity : line.match(/^\s*/)![0].length
),
);
query = lines.map((x) => x.slice(trim)).join("\n");
let prepared;
try {
prepared = this.node.prepare(query);
} catch (err) {
if (err) (err as { query: string }).query = query;
throw err;
}
return new Stmt(prepared);
}
}
export class Stmt<Args extends unknown[] = unknown[], Row = unknown> {
#node: StatementSync;
#class: any | null = null;
query: string;
constructor(node: StatementSync) {
this.#node = node;
this.query = node.sourceSQL;
}
/** Get one row */
get(...args: Args): Row | null {
const item = this.#node.get(...args as any) as Row;
if (!item) return null;
const C = this.#class;
if (C) Object.setPrototypeOf(item, C.prototype);
return item;
return this.#wrap(args, () => {
const item = this.#node.get(...args as any) as Row;
if (!item) return null;
const C = this.#class;
if (C) Object.setPrototypeOf(item, C.prototype);
return item;
});
}
getNonNull(...args: Args) {
const item = this.get(...args);
if (!item) throw new Error("Query returned no result");
if (!item) {
throw this.#wrap(args, () => new Error("Query returned no result"));
}
return item;
}
iter(...args: Args): Iterator<Row> {
return this.array(...args)[Symbol.iterator]();
iter(...args: Args): IterableIterator<Row> {
return this.#wrap(args, () => this.array(...args)[Symbol.iterator]());
}
/** Get all rows */
array(...args: Args): Row[] {
const array = this.#node.all(...args as any) as Row[];
const C = this.#class;
if (C) array.forEach((item) => Object.setPrototypeOf(item, C.prototype));
return array;
return this.#wrap(args, () => {
const array = this.#node.all(...args as any) as Row[];
const C = this.#class;
if (C) array.forEach((item) => Object.setPrototypeOf(item, C.prototype));
return array;
});
}
/** Return the number of changes / row ID */
run(...args: Args) {
return this.#node.run(...args as any);
return this.#wrap(args, () => this.#node.run(...args as any));
}
as<R>(Class: { new (): R }): Stmt<Args, R> {
this.#class = Class;
return this as any;
}
#wrap<T>(args: unknown[], fn: () => T) {
try {
return fn();
} catch (err: any) {
if (err && typeof err === "object") {
err.query = this.query;
args = args.flat(Infinity);
err.queryArgs = args.length === 1 ? args[0] : args;
}
throw err;
}
}
}
import { DatabaseSync, StatementSync } from "node:sqlite";
import * as fs from "./fs.ts";
import * as path from "node:path";

3
framework/lib/string.ts Normal file
View file

@ -0,0 +1,3 @@
/**
 * Escape `source` so it can be embedded literally inside a RegExp.
 * The previous character class ([$\\]) only escaped '$' and '\',
 * leaving '.', '*', '+', '?', parentheses, brackets, braces, '|' and '^'
 * active — any of those in the input changed the pattern's meaning.
 */
export function escapeRegExp(source: string) {
  return source.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

100
framework/lib/view.ts Normal file
View file

@ -0,0 +1,100 @@
// This import is generated by code 'bundle.ts'
export interface View {
component: engine.Component;
meta:
| meta.Meta
| ((props: { context?: hono.Context }) => Promise<meta.Meta> | meta.Meta);
layout?: engine.Component;
inlineCss: string;
scripts: Record<string, string>;
}
let views: Record<string, View> = null!;
let scripts: Record<string, string> = null!;
// An older version of the Clover Engine supported streaming suspense
// boundaries, but those were never used. Pages will wait until they
// are fully rendered before sending.
/**
 * Render the view registered under `id` and send it as an HTML response
 * through the hono context. The context is forwarded to the view inside
 * its props.
 */
export async function renderView(
  context: hono.Context,
  id: string,
  props: Record<string, unknown>,
) {
  return context.html(await renderViewToString(id, { context, ...props }));
}
/**
 * Render the view registered under `id` into a complete HTML document
 * string: resolved metadata in the head, the (optionally layout-wrapped)
 * component as the body, plus the view's inline CSS and scripts.
 */
export async function renderViewToString(
  id: string,
  props: Record<string, unknown>,
) {
  // Lazily load the generated view/script tables on first use.
  views ?? ({ views, scripts } = require("$views"));
  // The view contains pre-bundled CSS and scripts, but keeps the scripts
  // separate for run-time dynamic scripts. For example, the file viewer
  // includes the canvas for the current page, but only the current page.
  const {
    component,
    inlineCss,
    layout,
    meta: metadata,
  }: View = UNWRAP(views[id], `Missing view ${id}`);
  // -- metadata --
  // Meta may be a static object or a (possibly async) function of props;
  // kick it off now so it resolves while the body renders.
  const renderedMetaPromise = Promise.resolve(
    typeof metadata === "function" ? metadata(props) : metadata,
  ).then((m) => meta.renderMeta(m));
  // -- html --
  let page: engine.Element = [engine.kElement, component, props];
  if (layout) page = [engine.kElement, layout, { children: page }];
  const { text: body, addon: { sitegen } } = await engine.ssrAsync(page, {
    sitegen: sg.initRender(),
  });
  // -- join document and send --
  // Scripts registered during the render are resolved against the bundle
  // table and concatenated into a single inline script.
  return wrapDocument({
    body,
    head: await renderedMetaPromise,
    inlineCss,
    scripts: joinScripts(
      Array.from(
        sitegen.scripts,
        (id) => UNWRAP(scripts[id], `Missing script ${id}`),
      ),
    ),
  });
}
/**
 * Install the generated view and script tables, replacing the lazy
 * require("$views") path (used by the dev server / tests).
 */
export function provideViewData(v: typeof views, s: typeof scripts) {
  views = v;
  scripts = s;
}
/**
 * Concatenate bundled script sources into one inline script. Each source
 * is wrapped in its own block scope so top-level declarations cannot
 * collide; a single source is passed through untouched.
 */
export function joinScripts(scriptSources: string[]) {
  if (scriptSources.length === 0) return "";
  if (scriptSources.length === 1) return scriptSources[0];
  const blocks: string[] = [];
  for (const source of scriptSources) {
    blocks.push(`{${source}}`);
  }
  return blocks.join(";");
}
/**
 * Assemble a minimal HTML document from pre-rendered fragments. Empty
 * `inlineCss` / `scripts` strings omit their <style> / <script> tags
 * entirely.
 */
export function wrapDocument({
  body,
  head,
  inlineCss,
  scripts,
}: {
  head: string;
  body: string;
  inlineCss: string;
  scripts: string;
}) {
  const styleTag = inlineCss ? `<style>${inlineCss}</style>` : "";
  const scriptTag = scripts ? `<script>${scripts}</script>` : "";
  return "<!doctype html><html lang=en><head>" + head + styleTag +
    "</head><body>" + body + scriptTag + "</body></html>";
}
import * as meta from "./meta.ts";
import type * as hono from "#hono";
import * as engine from "../engine/ssr.ts";
import * as sg from "./sitegen.ts";

View file

@ -1,30 +0,0 @@
const db = new Map(
fs.readFileSync(path.join(import.meta.dirname, "mime.txt"), "utf8")
.split("\n").filter(Boolean).map((line) =>
line.split(/\s+/) as [string, string]
),
);
/**
* Accepts:
* - Full file path
* - Extension (with or without dot)
*/
export function contentTypeFor(file: string) {
if (file.includes("/") || file.includes("\\")) {
// Some file names are special cased.
switch (path.basename(file)) {
case "rss.xml":
return "application/rss+xml";
}
file = path.extname(file);
}
const dot = file.indexOf(".");
if (dot === -1) file = "." + file;
else if (dot > 0) file = file.slice(dot);
return db.get(file) ?? "application/octet-stream";
}
import * as fs from "./fs.ts";
import * as path from "node:path";

View file

@ -1,49 +0,0 @@
// Import this file with 'import * as sg from "#sitegen";'
export type ScriptId = string;
export interface SitegenRender {
scripts: Set<ScriptId>;
}
export function initRender(): SitegenRender {
return {
scripts: new Set(),
};
}
export function getRender() {
return ssr.getUserData<SitegenRender>("sitegen", () => {
throw new Error(
"This function can only be used in a page (static or view)",
);
});
}
/** Add a client-side script to the page. */
export function addScript(id: ScriptId) {
const srcFile: string = util.getCallSites()
.find((site) => !site.scriptName.startsWith(import.meta.dirname))!
.scriptName;
const filePath = hot.resolveFrom(srcFile, id);
if (
!filePath.endsWith(".client.ts") &&
!filePath.endsWith(".client.tsx")
) {
throw new Error("addScript must be a .client.ts or .client.tsx");
}
getRender().scripts.add(filePath);
}
export function Script({ src }: { src: ScriptId }) {
if (!src) throw new Error("Missing 'src' attribute");
addScript(src);
return null;
}
export interface Section {
root: string;
}
import * as ssr from "./engine/ssr.ts";
import * as util from "node:util";
import * as hot from "./hot.ts";

View file

@ -1,310 +0,0 @@
// Sitegen! Clover's static site generator, built with love.
export function main() {
return withSpinner({
text: "Recovering State",
successText: ({ elapsed }) =>
"sitegen! update in " + elapsed.toFixed(1) + "s",
failureText: () => "sitegen FAIL",
}, sitegen);
}
/**
* A filesystem object associated with some ID,
* such as a page's route to it's source file.
*/
interface FileItem {
id: string;
file: string;
}
async function sitegen(status: Spinner) {
const startTime = performance.now();
let root = path.resolve(import.meta.dirname, "../src");
const join = (...sub: string[]) => path.join(root, ...sub);
const incr = new Incremental();
// Sitegen reviews every defined section for resources to process
const sections: Section[] =
require(path.join(root, "sections.ts")).siteSections;
// Static files are compressed and served as-is.
// - "{section}/static/*.png"
let staticFiles: FileItem[] = [];
// Pages are rendered then served as static files.
// - "{section}/pages/*.marko"
let pages: FileItem[] = [];
// Views are dynamically rendered pages called via backend code.
// - "{section}/views/*.tsx"
let views: FileItem[] = [];
// Public scripts are bundled for the client as static assets under "/js/[...]"
// This is used for the file viewer's canvases.
// Note that '.client.ts' can be placed anywhere in the file structure.
// - "{section}/scripts/*.client.ts"
let scripts: FileItem[] = [];
// 'backend.ts'
const backendFiles = [];
// -- Scan for files --
status.text = "Scanning Project";
for (const section of sections) {
const { root: sectionRoot } = section;
const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
const rootPrefix = root === sectionRoot
? ""
: path.relative(root, sectionRoot) + "/";
const kinds = [
{
dir: sectionPath("pages"),
list: pages,
prefix: "/",
exclude: [".css", ".client.ts", ".client.tsx"],
},
{ dir: sectionPath("static"), list: staticFiles, prefix: "/", ext: true },
{ dir: sectionPath("scripts"), list: scripts, prefix: rootPrefix },
{
dir: sectionPath("views"),
list: views,
prefix: rootPrefix,
exclude: [".css", ".client.ts", ".client.tsx"],
},
];
for (const { dir, list, prefix, exclude = [], ext = false } of kinds) {
const items = fs.readDirRecOptionalSync(dir);
item: for (const item of items) {
if (item.isDirectory()) continue;
for (const e of exclude) {
if (item.name.endsWith(e)) continue item;
}
const file = path.relative(dir, item.parentPath + "/" + item.name);
const trim = ext
? file
: file.slice(0, -path.extname(file).length).replaceAll(".", "/");
let id = prefix + trim.replaceAll("\\", "/");
if (prefix === "/" && id.endsWith("/index")) {
id = id.slice(0, -"/index".length) || "/";
}
list.push({ id, file: path.join(item.parentPath, item.name) });
}
}
let backendFile = [
sectionPath("backend.ts"),
sectionPath("backend.tsx"),
].find((file) => fs.existsSync(file));
if (backendFile) backendFiles.push(backendFile);
}
scripts = scripts.filter(({ file }) => !file.match(/\.client\.[tj]sx?/));
const globalCssPath = join("global.css");
// TODO: invalidate incremental resources
// -- server side render --
status.text = "Building";
const cssOnce = new OnceMap();
const cssQueue = new Queue<[string, string[], css.Theme], string>({
name: "Bundle",
fn: ([, files, theme]) => css.bundleCssFiles(files, theme),
passive: true,
getItemText: ([id]) => id,
maxJobs: 2,
});
interface RenderResult {
body: string;
head: string;
inlineCss: string;
scriptFiles: string[];
item: FileItem;
}
const renderResults: RenderResult[] = [];
async function loadPageModule({ file }: FileItem) {
require(file);
}
async function renderPage(item: FileItem) {
// -- load and validate module --
let { default: Page, meta: metadata, theme: pageTheme, layout } = require(
item.file,
);
if (!Page) throw new Error("Page is missing a 'default' export.");
if (!metadata) {
throw new Error("Page is missing 'meta' export with a title.");
}
if (layout?.theme) pageTheme = layout.theme;
const theme = {
bg: "#fff",
fg: "#050505",
primary: "#2e7dab",
...pageTheme,
};
// -- metadata --
const renderedMetaPromise = Promise.resolve(
typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
).then((m) => meta.resolveAndRenderMetadata(m));
// -- css --
const cssImports = [globalCssPath, ...hot.getCssImports(item.file)];
const cssPromise = cssOnce.get(
cssImports.join(":") + JSON.stringify(theme),
() => cssQueue.add([item.id, cssImports, theme]),
);
// -- html --
const sitegenApi = sg.initRender();
const bodyPromise = await ssr.ssrAsync(<Page />, {
sitegen: sitegenApi,
});
const [body, inlineCss, renderedMeta] = await Promise.all([
bodyPromise,
cssPromise,
renderedMetaPromise,
]);
if (!renderedMeta.includes("<title>")) {
throw new Error(
"Page is missing 'meta.title'. " +
"All pages need a title tag.",
);
}
// The script content is not ready, allow another page to Render. The page
// contents will be rebuilt at the end. This is more convenient anyways
// because it means client scripts don't re-render the page.
renderResults.push({
body,
head: renderedMeta,
inlineCss,
scriptFiles: Array.from(sitegenApi.scripts),
item: item,
});
}
// This is done in two passes so that a page that throws during evaluation
// will report "Load Render Module" instead of "Render Static Page".
const spinnerFormat = status.format;
status.format = () => "";
const moduleLoadQueue = new Queue({
name: "Load Render Module",
fn: loadPageModule,
getItemText,
maxJobs: 1,
});
moduleLoadQueue.addMany(pages);
await moduleLoadQueue.done({ method: "stop" });
const pageQueue = new Queue({
name: "Render Static Page",
fn: renderPage,
getItemText,
maxJobs: 2,
});
pageQueue.addMany(pages);
await pageQueue.done({ method: "stop" });
status.format = spinnerFormat;
// -- bundle scripts --
const referencedScripts = Array.from(
new Set(renderResults.flatMap((r) => r.scriptFiles)),
);
const extraPublicScripts = scripts.map((entry) => entry.file);
const uniqueCount = new Set([
...referencedScripts,
...extraPublicScripts,
]).size;
status.text = `Bundle ${uniqueCount} Scripts`;
await bundle.bundleClientJavaScript(
referencedScripts,
extraPublicScripts,
incr,
);
// -- copy/compress static files --
async function doStaticFile(item: FileItem) {
const body = await fs.readFile(item.file);
incr.putAsset({
srcId: "static:" + item.file,
key: item.id,
body,
});
}
const staticQueue = new Queue({
name: "Load Static",
fn: doStaticFile,
getItemText,
maxJobs: 16,
});
status.format = () => "";
staticQueue.addMany(staticFiles);
await staticQueue.done({ method: "stop" });
status.format = spinnerFormat;
// -- concatenate static rendered pages --
status.text = `Concat ${renderResults.length} Pages`;
await Promise.all(
renderResults.map(
async ({ item: page, body, head, inlineCss, scriptFiles }) => {
const doc = wrapDocument({
body,
head,
inlineCss,
scripts: scriptFiles.map(
(id) =>
UNWRAP(
incr.out.script.get(
path.basename(id).replace(/\.client\.[jt]sx?$/, ""),
),
),
).map((x) => `{${x}}`).join("\n"),
});
incr.putAsset({
srcId: "page:" + page.file,
key: page.id,
body: doc,
headers: {
"Content-Type": "text/html",
},
});
},
),
);
status.format = () => "";
status.text = ``;
// This will wait for all compression jobs to finish, which up
// to this point have been left as dangling promises.
await incr.wait();
// Flush the site to disk.
status.format = spinnerFormat;
status.text = `Incremental Flush`;
incr.flush();
incr.serializeToDisk(); // Allows picking up this state again
return { elapsed: (performance.now() - startTime) / 1000 };
}
function getItemText({ file }: FileItem) {
return path.relative(hot.projectSrc, file).replaceAll("\\", "/");
}
function wrapDocument({
body,
head,
inlineCss,
scripts,
}: {
head: string;
body: string;
inlineCss: string;
scripts: string;
}) {
return `<!doctype html><head>${head}${
inlineCss ? `<style>${inlineCss}</style>` : ""
}</head><body>${body}${scripts ? `<script>${scripts}</script>` : ""}</body>`;
}
import type { Section } from "./sitegen-lib.ts";
import { OnceMap, Queue } from "./queue.ts";
import { Incremental } from "./incremental.ts";
import * as bundle from "./bundle.ts";
import * as css from "./css.ts";
import * as fs from "./fs.ts";
import { Spinner, withSpinner } from "@paperclover/console/Spinner";
import * as meta from "./meta/index.ts";
import * as ssr from "./engine/ssr.ts";
import * as sg from "./sitegen-lib.ts";
import * as hot from "./hot.ts";
import * as path from "node:path";

View file

@ -0,0 +1,4 @@
<div meow=null />
<div>
wait(${null})
</div>

View file

@ -0,0 +1,6 @@
import Component from './Component.marko';
<h1>web page</h1>
<if=!false>
<Component=null/>
</>

198
framework/watch.ts Normal file
View file

@ -0,0 +1,198 @@
// File watcher and live reloading site generator
const debounceMilliseconds = 25;
/**
 * Watch-mode entry point: performs a full catch-up build, then watches all
 * invalidation inputs, incrementally rebuilding and (re)starting the backend
 * server subprocess as files change.
 */
export async function main() {
  let subprocess: child_process.ChildProcess | null = null;
  // Catch up state by running a main build.
  const { incr } = await generate.main();
  // ...and watch the files that cause invals.
  const watch = new Watch(rebuild);
  watch.add(...incr.invals.keys());
  statusLine();
  // ... and then serve it!
  serve();

  // (Re)start the backend server subprocess, replacing any previous one.
  function serve() {
    if (subprocess) {
      subprocess.removeListener("close", onSubprocessClose);
      subprocess.kill();
    }
    subprocess = child_process.fork(".clover/out/server.js", [
      "--development",
    ], {
      stdio: "inherit",
    });
    subprocess.on("close", onSubprocessClose);
  }

  function onSubprocessClose(code: number | null, signal: string | null) {
    subprocess = null;
    const status = code != null ? `code ${code}` : `signal ${signal}`;
    console.error(`Backend process exited with ${status}`);
  }

  process.on("beforeExit", () => {
    subprocess?.removeListener("close", onSubprocessClose);
  });

  // Debounced change handler: stat changed files, rebuild incrementally,
  // prune dead watches, then restart or notify the server as needed.
  function rebuild(files: string[]) {
    files = files.map((file) => path.relative(hot.projectRoot, file));
    const changed: string[] = [];
    for (const file of files) {
      let mtimeMs: number | null = null;
      try {
        // NOTE(review): relative path — assumes cwd is the project root.
        mtimeMs = fs.statSync(file).mtimeMs;
      } catch (err: any) {
        // Deleted files pass mtimeMs = null to updateStat.
        if (err?.code !== "ENOENT") throw err;
      }
      if (incr.updateStat(file, mtimeMs)) changed.push(file);
    }
    if (changed.length === 0) {
      console.warn("Files were modified but the 'modify' time did not change.");
      return;
    }
    withSpinner<any, Awaited<ReturnType<typeof generate.sitegen>>>({
      text: "Rebuilding",
      successText: generate.successText,
      failureText: () => "sitegen FAIL",
    }, async (spinner) => {
      console.info("---");
      console.info(
        "Updated" +
          (changed.length === 1
            ? " " + changed[0]
            // Bug fix: join the array explicitly. Concatenating an array to
            // a string stringifies it with commas between entries.
            : changed.map((file) => "\n- " + file).join("")),
      );
      const result = await generate.sitegen(spinner, incr);
      incr.toDisk(); // Allows picking up this state again
      // Stop watching files that no longer participate in invalidation.
      for (const file of watch.files) {
        const relative = path.relative(hot.projectRoot, file);
        if (!incr.invals.has(relative)) watch.remove(file);
      }
      return result;
    }).then((result) => {
      // Restart the server if it was changed or not running.
      if (
        !subprocess ||
        result.inserted.some(({ kind }) => kind === "backendReplace")
      ) {
        serve();
      } else if (
        subprocess &&
        result.inserted.some(({ kind }) => kind === "asset")
      ) {
        // Server survives; tell it to reload assets in place.
        subprocess.send({ type: "clover.assets.reload" });
      }
      return result;
    }).catch((err) => {
      console.error(util.inspect(err));
    }).finally(statusLine);
  }

  // One-line summary printed after every build and on startup.
  function statusLine() {
    console.info(
      `Watching ${incr.invals.size} files \x1b[36m[last change: ${
        new Date().toLocaleTimeString()
      }]\x1b[39m`,
    );
  }
}
/**
 * Watches a set of individual files using a minimal number of recursive
 * fs.watch() handles (one per directory root), batching change
 * notifications behind a short debounce.
 */
class Watch {
  /** Absolute paths of all watched files. */
  files = new Set<string>();
  /** Files changed since the last debounce flush. */
  stale = new Set<string>();
  onChange: (files: string[]) => void;
  watchers: fs.FSWatcher[] = [];
  /** Has a trailing slash */
  roots: string[] = [];
  debounce: ReturnType<typeof setTimeout> | null = null;

  constructor(onChange: Watch["onChange"]) {
    this.onChange = onChange;
  }

  /** Begin watching `files`, creating new directory watchers as needed. */
  add(...files: string[]) {
    const { roots, watchers } = this;
    let newRoots: string[] = [];
    for (let file of files) {
      file = path.resolve(file);
      if (this.files.has(file)) continue;
      this.files.add(file);
      // Find an existing watcher
      if (roots.some((root) => file.startsWith(root))) continue;
      if (newRoots.some((root) => file.startsWith(root))) continue;
      newRoots.push(path.dirname(file) + path.sep);
    }
    if (newRoots.length === 0) return;
    // Filter out directories that are already specified
    newRoots = newRoots
      .sort((a, b) => a.length - b.length)
      .filter((dir, i, a) => {
        for (let j = 0; j < i; j++) if (dir.startsWith(a[j])) return false;
        return true;
      });
    // Append watches. `i` is captured before pushing so the shadow check
    // below only scans the pre-existing roots.
    let i = roots.length;
    for (const root of newRoots) {
      this.watchers.push(fs.watch(
        root,
        { recursive: true, encoding: "utf-8" },
        this.#handleEvent.bind(this, root),
      ));
      this.roots.push(root);
    }
    // If any new roots shadow over an old one, delete it!
    while (i > 0) {
      i -= 1;
      const root = roots[i];
      if (newRoots.some((newRoot) => root.startsWith(newRoot))) {
        watchers.splice(i, 1)[0].close();
        roots.splice(i, 1);
      }
    }
  }

  /** Stop watching `files`, closing directory watchers that cover nothing. */
  remove(...files: string[]) {
    for (const file of files) this.files.delete(path.resolve(file));
    // Find watches that are covering no files
    const { roots, watchers } = this;
    const existingFiles = Array.from(this.files);
    let i = roots.length;
    while (i > 0) {
      i -= 1;
      const root = roots[i];
      if (!existingFiles.some((file) => file.startsWith(root))) {
        watchers.splice(i, 1)[0].close();
        roots.splice(i, 1);
      }
    }
  }

  stop() {
    // Bug fix: cancel any pending debounce so onChange cannot fire after
    // the watcher has been stopped.
    if (this.debounce !== null) {
      clearTimeout(this.debounce);
      this.debounce = null;
    }
    for (const w of this.watchers) w.close();
  }

  #handleEvent(root: string, event: fs.WatchEventType, subPath: string | null) {
    if (!subPath) return;
    const file = path.join(root, subPath);
    // Ignore events for files we are not explicitly tracking.
    if (!this.files.has(file)) return;
    this.stale.add(file);
    const { debounce } = this;
    if (debounce !== null) clearTimeout(debounce);
    this.debounce = setTimeout(() => {
      this.debounce = null;
      this.onChange(Array.from(this.stale));
      this.stale.clear();
    }, debounceMilliseconds);
  }
}
import * as fs from "node:fs";
import { withSpinner } from "@paperclover/console/Spinner";
import * as generate from "./generate.ts";
import * as path from "node:path";
import * as util from "node:util";
import * as hot from "./hot.ts";
import * as child_process from "node:child_process";

2177
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -1,36 +1,45 @@
{
"private": true,
"type": "module",
"dependencies": {
"@hono/node-server": "^1.14.3",
"@mdx-js/mdx": "^3.1.0",
"@paperclover/console": "git+https://git.paperclover.net/clo/console.git",
"blurhash": "^2.0.5",
"codemirror": "^6.0.1",
"devalue": "^5.1.1",
"esbuild": "^0.25.5",
"hls.js": "^1.6.5",
"hono": "^4.7.11",
"marko": "^6.0.20",
"unique-names-generator": "^4.7.1"
"puppeteer": "^24.10.1",
"sharp": "^0.34.2",
"unique-names-generator": "^4.7.1",
"vscode-oniguruma": "^2.0.1",
"vscode-textmate": "^9.2.0"
},
"devDependencies": {
"@types/node": "^22.15.29",
"@types/node": "^24.0.10",
"typescript": "^5.8.3"
},
"imports": {
"#sitegen": "./framework/sitegen-lib.ts",
"#sqlite": "./framework/sqlite.ts",
"#backend": "./src/backend.ts",
"#debug": "./framework/debug.safe.ts",
"#sitegen": "./framework/lib/sitegen.ts",
"#sitegen/*": "./framework/lib/*.ts",
"#ssr": "./framework/engine/ssr.ts",
"#ssr/jsx-dev-runtime": "./framework/engine/jsx-runtime.ts",
"#ssr/jsx-runtime": "./framework/engine/jsx-runtime.ts",
"#ssr/marko": "./framework/engine/marko-runtime.ts",
"#marko/html": {
"development": "marko/debug/html",
"production": "marko/html",
"types": "marko/html"
"types": "marko/html",
"production": "marko/production",
"node": "marko/debug/html"
},
"#hono/platform": {
"#hono": "hono",
"#hono/conninfo": {
"bun": "hono/bun",
"deno": "hono/deno",
"node": "@hono/node-server",
"node": "@hono/node-server/conninfo",
"worker": "hono/cloudflare-workers"
}
}

114
readme.md Normal file
View file

@ -0,0 +1,114 @@
# clover sitegen framework
this repository contains clover's "sitegen" framework, which is a set of tools
that assist building websites. these tools power <https://paperclover.net>.
- **HTML "Server Side Rendering" engine written from scratch.** (~500 lines)
- A more practical JSX runtime (`class` instead of `className`, built-in
`clsx`, `html()` helper over `dangerouslySetInnerHTML` prop, etc).
- Integration with [Marko] for concisely written components.
- TODO: MDX-like compiler for content-heavy pages like blogs.
- Different languages can be used at the same time. Supports `async function`
components, `<Suspense />`, and custom extensions.
- **Incremental static site generator and build system.**
- Build entire production site at start, incremental updates when pages
change; Build system state survives coding sessions.
- The only difference in development and production mode is hidden source-maps
and stripped `console.debug` calls. The site you see locally is the same
site you see deployed.
- (TODO) Tests, Lints, and Type-checking is run alongside, and only re-runs
checks when the files change. For example, changing a component re-tests
only pages that use that component and re-lints only the changed file.
- **Integrated libraries for building complex, content heavy web sites.**
- Static asset serving with ETag and build-time compression.
- Dynamically rendered pages with static client. (`import "#sitegen/view"`)
- Databases with a typed SQLite wrapper. (`import "#sitegen/sqlite"`)
- TODO: Meta and Open Graph generation. (`export const meta`)
- TODO: Font subsetting tools to reduce bytes downloaded by fonts.
- **Built on the battle-tested Node.js runtime.**
None of these tools are complex or revolutionary. Rather, this project is the
sum of many years of experience managing content-heavy websites, and an
example of how other frameworks over-complicate things.
[Marko]: https://next.markojs.com
Included is `src`, which contains `paperclover.net`. Website highlights:
- [Question/Answer board, custom markdown parser and components][q+a].
- [File viewer with fast ui/ux + optimized media streaming][file].
- [Personal, friends-only blog with password protection][friends].
[q+a]: https://paperclover.net/q+a
[file]: https://paperclover.net/file
[friends]: https://paperclover.net/friends
## Development
minimum system requirements:
- a cpu with at least 1 core.
- random access memory.
- windows 7 or later, macos, or other operating system.
my development machine, for example, is a Dell Inspiron 7348 with a Core i7.
npm install
# production generation
node run generate
node .clover/out/server
# "development" watch mode
node run watch
<!-- `repl.js` will open a read-eval-print-loop where plugin state is cached (on my -->
<!-- 2014 dev laptop, startup time is 600-1000ms). every file in `framework` and -->
<!-- `src` besides `hot.ts` can be edited and quickly re-run. for example, to run -->
<!-- `framework/generate.ts`, you can type "generate" into the shell. since -->
<!-- top-level await is not supported (plugins are built on `require` as Node has -->
<!-- poor module support), CLIs can include a `main` function, which is executed -->
<!-- when the REPL runs it. -->
for unix systems, the provided `flake.nix` can be used with `nix develop` to
open a shell with all needed system dependencies.
## Deployment
There are two primary server components to be deployed: the web server and the
source of truth server. The latter is a singleton that runs on Clover's NAS,
which holds the full contents of the file storage. The web server pulls data
from the source of truth and renders web pages, and can be duplicated to
multiple cloud hosts without issue.
Deployment of the source of truth can be done with Docker Compose:
services:
backend:
container_name: backend
build:
# this uses loopback to hit the self-hosted git server
context: http://127.0.0.1:3000/clo/sitegen.git
dockerfile: src/source-of-truth.dockerfile
environment:
# configuration
- PORT=43200
- CLOVER_DB=/data
- CLOVER_FILE_RAW=/published
- CLOVER_FILE_DERIVED=/data/derived
- CLOVER_SOT_KEY=... # guards private/unreleased content
ports:
- '43200:43200'
restart: unless-stopped
volumes:
- /mnt/storage1/clover/Documents/Config/paperclover:/data
- /mnt/storage1/clover/Published:/published
Due to caching, one may need to manually purge images via
`docker image rm ix-clover-backend -f` when an update is desired.
TODO: deployment instructions for a web node
## Contributions
No contributions to `src` accepted, only `framework`.

12
repl.js
View file

@ -11,24 +11,24 @@ process.stderr.write("Loading...");
const { hot } = await import("./run.js"); // get plugins ready
const { errorAllWidgets } = hot.load("@paperclover/console/Widget");
process.stderr.write("\r" + " ".repeat("Loading...".length) + "\r");
hot.load("node:repl").start({
const repl = hot.load("node:repl").start({
prompt: "% ",
eval(code, _global, _id, done) {
evaluate(code)
.catch((err) => {
// TODO: improve @paperclover/console's ability to print AggregateError
// and errors with extra random properties
console.error(inspect(err));
console.error(util.inspect(err, false, 10, true));
})
.then((result) => done(null, result));
},
ignoreUndefined: true,
//completer,
});
repl.setupHistory(".clover/repl-history.txt", () => {});
setTimeout(() => {
hot.reloadRecursive("./framework/engine/ssr.ts");
hot.reloadRecursive("./framework/bundle.ts");
hot.reloadRecursive("./framework/generate.ts");
}, 100);
async function evaluate(code) {
@ -41,11 +41,11 @@ async function evaluate(code) {
if (code[0] === "=") {
try {
const result = await eval(code[1]);
console.log(inspect(result));
console.info(inspect(result));
} catch (err) {
if (err instanceof SyntaxError) {
const result = await eval("(async() => { return " + code + " })()");
console.log(inspect(result));
console.info(inspect(result));
} else {
throw err;
}

90
run.js
View file

@ -1,55 +1,87 @@
// This file allows using Node.js in combination with
// all available plugins. Usage: "node run <script>"
import * as path from "node:path";
import * as util from "node:util";
import * as zlib from "node:zlib";
import * as url from "node:url";
import process from "node:process";
// Disable experimental warnings (Type Stripping, etc)
{
const { emit: originalEmit } = process;
const warnings = ["ExperimentalWarning"];
process.emit = function (event, error) {
return event === "warning" && warnings.includes(error.name)
? false
: originalEmit.apply(process, arguments);
};
if (!zlib.zstdCompress) {
const brand = process.versions.bun
? `bun ${process.versions.bun}`
: process.versions.deno
? `deno ${process.versions.deno}`
: null;
globalThis.console.error(
`sitegen depends on a node.js v24. your runtime is missing feature\n` +
`this is node.js version ${process.version}${
brand ? ` (${brand})` : ""
}\n\n` +
`get node.js --> https://nodejs.org/en/download/current`,
);
process.exit(1);
}
// Disable experimental warnings (Type Stripping, etc)
const { emit: originalEmit } = process;
const warnings = ["ExperimentalWarning"];
process.emit = function (event, error) {
return event === "warning" && warnings.includes(error.name)
? false
: originalEmit.apply(process, arguments);
};
// Init hooks
const hot = await import("./framework/hot.ts");
await import("#debug");
const console = hot.load("@paperclover/console");
globalThis.console.log = console.info;
globalThis.console["log"] = console.info;
globalThis.console.info = console.info;
globalThis.console.warn = console.warn;
globalThis.console.error = console.error;
globalThis.console.debug = console.scoped("dbg");
globalThis.UNWRAP = (t, ...args) => {
if (t == null) {
throw new Error(
args.length > 0 ? util.format(...args) : "UNWRAP(" + t + ")",
);
}
return t;
};
globalThis.ASSERT = (t, ...args) => {
if (!t) {
throw new Error(
args.length > 0 ? util.format(...args) : "Assertion Failed",
);
}
};
// Load with hooks
if (process.argv[1].startsWith(import.meta.filename.slice(0, -".js".length))) {
if (process.argv.length == 2) {
console.error("usage: node run <script> [...args]");
process.exit(1);
}
const file = path.resolve(process.argv[2]);
let found;
for (const dir of ["./", "./src/", "./framework/"]) {
try {
found = hot.resolveFrom(import.meta.filename, dir + process.argv[2]);
break;
} catch (e) {
continue;
}
}
if (!found) {
console.error("Cannot find script: " + process.argv[2]);
process.exit(1);
}
process.argv = [process.argv[0], ...process.argv.slice(2)];
hot.load(file).main?.();
try {
const mod = await hot.load(found);
if (mod.main) mod.main();
else if (mod.default?.fetch) {
const protocol = "http";
const { serve } = hot.load("@hono/node-server");
serve({
fetch: mod.default.fetch,
}, ({ address, port }) => {
if (address === "::") address = "::1";
console.info(url.format({
protocol,
hostname: address,
port,
}));
});
}
} catch (e) {
console.error(util.inspect(e));
}
}
export { hot };

75
src/admin.ts Normal file
View file

@ -0,0 +1,75 @@
// Lifetime of the admin cookie, in seconds: one year.
const cookieAge = 365 * 24 * 60 * 60;
let lastKnownToken: string | null = null;
/**
 * Compares a presented token with the admin token stored on disk.
 * The last accepted token is cached to avoid re-reading the file.
 */
function compareToken(token: string) {
  if (token === lastKnownToken) return true;
  const fromDisk = fs.readFileSync(".clover/admin-token.txt", "utf8").trim();
  lastKnownToken = fromDisk;
  return token === fromDisk;
}
/** Gate everything under /admin behind the admin auth flow. */
export async function middleware(c: Context, next: Next) {
  return c.req.path.startsWith("/admin") ? adminInner(c, next) : next();
}
/**
 * Handles every route under /admin: login, logout, and the auth gate for
 * all other admin pages. Auth is an `admin-token` cookie checked against
 * the token stored on disk.
 */
export function adminInner(c: Context, next: Next) {
  const cookieToken = c.req.header("Cookie")?.match(/admin-token=([^;]+)/)?.[1];
  switch (c.req.path) {
    case "/admin/login": {
      const key = c.req.query("key");
      if (key) {
        // A valid key in the query string sets the cookie and redirects in.
        if (!compareToken(key)) {
          return serveAsset(c, "/admin/login/fail", 403);
        }
        return c.body(null, 303, {
          "Location": "/admin",
          "Set-Cookie":
            `admin-token=${key}; Path=/; HttpOnly; SameSite=Strict; Max-Age=${cookieAge}`,
        });
      }
      // Already authenticated via cookie? Skip the login page.
      if (cookieToken && compareToken(cookieToken)) {
        return c.redirect("/admin", 303);
      }
      // A failed POST shows the failure page; anything else, the form.
      return c.req.method === "POST"
        ? serveAsset(c, "/admin/login/fail", 403)
        : serveAsset(c, "/admin/login", 200);
    }
    case "/admin/logout":
      // Expire the cookie immediately and bounce to the login page.
      return c.body(null, 303, {
        "Location": "/admin/login",
        "Set-Cookie": `admin-token=; Path=/; HttpOnly; SameSite=Strict; Max-Age=0`,
      });
    default:
      if (cookieToken && compareToken(cookieToken)) {
        return next();
      }
      return c.redirect("/admin/login", 303);
  }
}
/** True when the request carries a valid admin-token cookie. */
export function hasAdminToken(c: Context) {
  const match = c.req.header("Cookie")?.match(/admin-token=([^;]+)/);
  const token = match?.[1];
  return token && compareToken(token);
}
/**
 * CLI helper: mints a fresh admin token, writes it to disk, and opens the
 * login URL (with the key pre-filled) in the platform's default browser.
 */
export async function main() {
  const key = crypto.randomUUID();
  await fs.writeMkdir(".clover/admin-token.txt", key);
  const openers: Record<string, string> = {
    win32: "start",
    darwin: "open",
  };
  const start = openers[process.platform] ?? "xdg-open";
  child_process.exec(`${start} http://[::1]:3000/admin/login?key=${key}`);
}
import * as fs from "#sitegen/fs";
import type { Context, Next } from "hono";
import { serveAsset } from "#sitegen/assets";
import * as child_process from "node:child_process";

41
src/backend.ts Normal file
View file

@ -0,0 +1,41 @@
// This is the main file for the backend
const app = new Hono();
const logHttp = scoped("http", { color: "magenta" });

// Middleware
app.use(trimTrailingSlash());
app.use(removeDuplicateSlashes);
// Only forward completed-request lines ("-->" prefix); strip the arrow.
app.use(logger((msg) => msg.startsWith("-->") && logHttp(msg.slice(4))));
app.use(admin.middleware);

// Backends
// NOTE(review): `require` here presumably resolves through the project's
// hot-module loader rather than Node's CJS require — confirm.
app.route("", require("./q+a/backend.ts").app);
app.route("", require("./file-viewer/backend.tsx").app);

// Asset middleware has least precedence
app.use(assets.middleware);

// Handlers
app.notFound(assets.notFound);

export default app;
/**
 * Permanently redirects URLs containing doubled slashes (e.g. "//a//b")
 * to the normalized path, preserving the query string.
 */
async function removeDuplicateSlashes(c: Context, next: Next) {
  const requestPath = c.req.path;
  if (!/\/\/+/.test(requestPath)) {
    await next();
    return;
  }
  const normalizedPath = requestPath.replace(/\/\/+/g, "/");
  const query = c.req.query();
  let queryString = "";
  if (Object.keys(query).length > 0) {
    queryString = "?" + new URLSearchParams(query).toString();
  }
  return c.redirect(normalizedPath + queryString, 301);
}
import { type Context, Hono, type Next } from "#hono";
import { logger } from "hono/logger";
import { trimTrailingSlash } from "hono/trailing-slash";
import * as assets from "#sitegen/assets";
import * as admin from "./admin.ts";
import { scoped } from "@paperclover/console";

View file

@ -0,0 +1,10 @@
export const blog: BlogMeta = {
  title: "Marko is the coziest HTML templating language",
  desc: "...todo...",
  date: "2025-07-08",
  draft: true,
};
// Bug fix: the formatter must receive the `blog` object declared above;
// `blob` is not defined in this module.
export const meta = formatBlogMeta(blog);
export * as layout from "@/blog/layout.tsx";

View file

@ -0,0 +1,290 @@
export const blog: BlogMeta = {
  title: "Marko is the coziest HTML templating language",
  desc: "...todo...",
  date: "2025-06-13",
  draft: true,
};
// Bug fix: the formatter must receive the `blog` object declared above;
// `blob` is not defined in this module.
export const meta = formatBlogMeta(blog);
export * as layout from "@/blog/layout.tsx";
I've recently been playing around with [Marko][1], and after adding limited support
for it in my website generator, [sitegen][2], I instantly fell in love with how
minimalistic it is in comparison to JSX, Astro components, and Svelte.
## Introduction
If JSX was taking HTML and shoving its syntax into JavaScript, Marko is shoving
JavaScript into HTML. Attributes are JavaScript expressions.
```marko
<div>
// `input` is like props, but given in the top-level scope
<time datetime=input.date.toISOString()>
// Interpolation with JS template string syntax
${formatTimeNicely(input.date)}
</time>
<div>
<a href=`/users/${input.user.id}`>${input.user.name}</a>
</div>
// Capital letter variables for imported components
<MarkdownContent message=input.message />
// Components also can be auto-imported by lowercase.
// This will look upwards for a `tags/` folder containing
// "custom-footer.marko", similar to how Node.js finds
// package names in all upwards `node_modules` folders.
<custom-footer />
</div>
// ESM `import` / `export` just work as expected.
// I prefer my imports at the end, to highlight the markup.
import MarkdownContent from "./MarkdownContent.marko";
import { formatTimeNicely } from "../date-helpers.ts";
```
Tags with the `value` attribute have a shorthand, which is used by the built-in
`<if>` for conditional rendering.
```marko
// Sugar for <input value="string" />
<input="string" />
// and it composes amazingly to the 'if' built-in
<if=input.user>
<UserProfile=input.user />
</if>
```
Tags can also return values into the scope for use in the template using `/`, such as `<id>` for unique ID generation. This is available to components that `<return=output/>`.
```
<id/uniqueId />
<input id=uniqueId type="checkbox" name="allow_trans_rights" />
<label for=uniqueId>click me!</>
// ^ oh, you can also omit the
// closing tag name if you want.
```
It's important that I started with the two forms of "Tag I/O": `=` for input
and `/` for output. With those building blocks, we introduce local variables
with `const`
```
<const/rendered = markdownToHtml(input.value) />
// This is how you insert raw HTML to the document
<inline-html=rendered />
// It supports all of the cozy destructuring syntax JS has
<const/{ id, name } = user />
```
Unlike JSX, when you pass content within a tag (`input.content` instead of
JSX's `children`), instead of it being a JSX element, it is actually a
function. This means that the `for` tag can render the content multiple times.
```
<ul>
<for from=1 to=10>
// Renders a new random number for each iteration.
<li>${Math.random()}</li>
</>
</ul>
```
Since `content` is a function, it can take arguments. This is done with `|`
```
<h1>my friends</h1>
<ul>
// I tend to omit the closing tag names for the built-in control
// flow tags, but I keep them for HTML tags. It's kinda like how
// in JavaScript you just write `}` to close your `if`s and loops.
//
// Anyways <for> also has 'of'
<for|item| of=user.friends>
<li class="friend">${item.name}</li>
</>
// They support the same syntax JavaScript function params allows,
// so you can have destructuring here too, and multiple params.
<for|{ name }, index| of=user.friends>
// By the way you can also use emmet-style class and ID shorthands.
<li.friend>My #${index + 1} friend is ${name}</li>
</>
</ul>
```
Instead of named slots, Marko has attribute tags. These are more powerful than
slots since they are functions, and can also act as sugar for more complicated
attributes.
```
<Layout title="Welcome">
<@header variant="big">
<h1>the next big thing</h1>
</@header>
<p>body text...</p>
</Layout>
// The `input` variable inside of <Layout /> is:
//
// {
// title: "Welcome",
// header: {
// content: /* function rendering "<h1>the next big thing</h1>" */,
// variant: "big",
// },
// content: /* function rendering "<p>body text</p>" */
// }
```
This layout could be implemented as such:
```marko
<main>
<if=input.header />
<const/{ ...headerProps, content }=input.header />
<header ...headerProps>
// Instead of assigning to a variable with a capital letter,
// template interpolation works on tag names. This can also
// be a string to render the native HTML tag of that kind.
<${content} />
</header>
<hr />
</>
<${input.content} />
</main>
```
The last syntax feature missing is calling a tag with parameters. That is done
just like a regular function call, with '('.
```
<Something(item, index) />
```
In fact, attributes can just be sugar over this syntax; _this technically isn't
true but it's close enough for the example_
```
<SpecialButton type="submit" class="red" />
// is equal to
<SpecialButton({ type: "submit", class: "red" }) />
```
All of the above is about how Marko's syntax works, and how it performs HTML
generation with components. Marko also allows interactive components, but an
explanation of that is beyond the scope of this page, mostly since I have not
used it. A brief example of it, modified from their documentation.
```marko
// Reactive variables with <let/> just work...
<let/basicCounter=0 />
<button onClick() { basicCounter += 1 }>${basicCounter}</button>
// ...but a counter is boring.
<let/todos=[
{ id: 0, text: "Learn Marko" },
{ id: 1, text: "Make a Website" },
]/>
// 'by' is like React JSX's "key" property, but it's optional.
<ul><for|todo, i| of=todos by=(todo => todo.id)>
<li.todo>
// this variable remains stable even if the list
// re-orders, because 'by' was specified.
<let/done=false/>
<label>
<span>${todo.text}</span>
// ':=' creates a two-way reactive binding,
// (it passes a callback for `checkedChanged`)
<input type="checkbox" checked:=done />
</label>
<button
title="delete"
disabled=!done
onClick() {
todos = todos.toSpliced(i, 1);
}
> &times; </button>
</li>
</></ul>
// Form example
<let/nextId=2/>
<form onSubmit(e) {
e.preventDefault();
todos = todos.concat({
id: nextId++,
// HTMLFormElement exposes all its named input
// elements as extra properties on the object.
text: e.target.text.value,
});
// And you can clear it with 'reset()'
e.target.reset();
}>
// We don't 'onChange' like a React loser. The form
// value can be read in the submit event like normal.
<input name="text" placeholder="Another Item">
<button type="submit">Add</button>
</form>
```
## Usage on `paperclover.net`
TODO: document a lot of feedback, how i embedded Marko
My website uses statically generated HTML. That is why I have not needed to use
reactive variables. My generator doesn't even try compiling components
client-side.
Here is the actual component used to render [questions on the clover q+a][/q+a].
```marko
// Renders a `Question` entry including its markdown body.
export interface Input {
question: Question;
admin?: boolean;
}
// 2024-12-31 05:00:00 EST
export const transitionDate = 1735639200000;
<const/{ question, admin } = input />
<const/{ id, date, text } = question/>
<${"e-"}
f=(date > transitionDate ? true : undefined)
id=admin ? `q${id}` : undefined
>
<if=admin>
<a
style="margin-right: 0.5rem"
href=`/admin/q+a/${id}`
>[EDIT]</a>
</>
<a>
<time
datetime=formatQuestionISOTimestamp(date)
>${formatQuestionTimestamp(date)}</time>
</a>
<CloverMarkdown ...{ text } />
</>
// this singleton script will make all the '<time>' tags clickable.
client import "./clickable-links.client.ts";
import type { Question } from "@/q+a/models/Question.ts";
import { formatQuestionTimestamp, formatQuestionISOTimestamp } from "@/q+a/format.ts";
import { CloverMarkdown } from "@/q+a/clover-markdown.tsx";
```
import { type BlogMeta, formatBlogMeta } from '@/blog/helpers.ts';

433
src/file-viewer/backend.tsx Normal file
View file

@ -0,0 +1,433 @@
export const app = new Hono();

/** JSON shape returned for directory listings when the client accepts JSON. */
interface APIDirectoryList {
  path: string;
  // Directory readme contents, or null when the directory has none.
  readme: string | null;
  files: APIFile[];
}
/** One entry in an APIDirectoryList. */
interface APIFile {
  basename: string;
  // True when the entry is itself a directory.
  dir: boolean;
  // Modification time in milliseconds since the Unix epoch (Date.getTime()).
  time: number;
  size: number;
  // Media duration for audio/video, null otherwise.
  // NOTE(review): units assumed to be seconds — confirm against MediaFile.
  duration: number | null;
}
/** True when the request carries the `cotyledon=agree` consent cookie. */
function checkCotyledonCookie(c: Context) {
  const header = c.req.header("Cookie");
  if (!header) return false;
  for (const pair of header.split("; ")) {
    const kv = pair.split("=");
    if (kv[0].trim() === "cotyledon" && kv[1].trim() === "agree") {
      return true;
    }
  }
  return false;
}
/**
 * True for the "/cotyledon" landing page and any archive path whose first
 * segment is a year from 2017 through 2024.
 */
function isCotyledonPath(path: string) {
  if (path === "/cotyledon") return true;
  const match = path.match(/^\/(\d{4})($|\/)/);
  if (!match) return false;
  const year = parseInt(match[1]);
  return year >= 2017 && year < 2025;
}
// Records the visitor's consent to view the cotyledon (pre-2025) archives
// by setting a session cookie; the body is intentionally empty.
app.post("/file/cotyledon", async (c) => {
  c.res = new Response(null, {
    status: 200,
    headers: {
      "Set-Cookie": "cotyledon=agree; Path=/",
    },
  });
});
// Main file route: serves directory listings (HTML or JSON), the file
// viewer UI, and raw file bodies with ETag, Range, and Content-Encoding
// negotiation.
app.get("/file/*", async (c, next) => {
  const ua = c.req.header("User-Agent")?.toLowerCase() ?? "";
  // "lofi" mode serves the simplified UI to ancient browsers.
  const lofi = ua.includes("msie");
  // Discord ignores 'robots.txt' which violates the license agreement.
  if (ua.includes("discordbot")) {
    return next();
  }
  // Strip the "/file" prefix; default to the root directory.
  let rawFilePath = c.req.path.slice(5) || "/";
  if (rawFilePath.endsWith("$partial")) {
    return getPartialPage(c, rawFilePath.slice(0, -"$partial".length));
  }
  // Pre-2025 "cotyledon" archives sit behind a consent speedbump.
  let hasCotyledonCookie = checkCotyledonCookie(c);
  if (isCotyledonPath(rawFilePath)) {
    if (!hasCotyledonCookie) {
      return serveAsset(c, "/file/cotyledon_speedbump", 403);
    } else if (rawFilePath === "/cotyledon") {
      return serveAsset(c, "/file/cotyledon_enterance", 200);
    }
  }
  // Trim trailing slashes, but keep "/" itself intact.
  while (rawFilePath.length > 1 && rawFilePath.endsWith("/")) {
    rawFilePath = rawFilePath.slice(0, -1);
  }
  const file = MediaFile.getByPath(rawFilePath);
  if (!file) {
    // perhaps a specific 404 page for media files?
    return next();
  }
  // Non-zero permission bits require friend authentication.
  const permissions = FilePermissions.getByPrefix(rawFilePath);
  if (permissions !== 0) {
    const friendAuthChallenge = requireFriendAuth(c);
    if (friendAuthChallenge) return friendAuthChallenge;
  }
  // File listings
  if (file.kind === MediaFileKind.directory) {
    if (c.req.header("Accept")?.includes("application/json")) {
      const json = {
        path: file.path,
        files: file.getPublicChildren().map((f) => ({
          basename: f.basename,
          dir: f.kind === MediaFileKind.directory,
          time: f.date.getTime(),
          size: f.size,
          duration: f.duration ? f.duration : null,
        })),
        readme: file.contents ? file.contents : null,
      } satisfies APIDirectoryList;
      return c.json(json);
    }
    c.res = await renderView(c, `file-viewer/${lofi ? "lofi" : "clofi"}`, {
      file,
      hasCotyledonCookie,
    });
    return;
  }
  // Redirect to directory list for regular files if client accepts HTML
  let viewMode = c.req.query("view");
  if (c.req.query("dl") !== undefined) {
    viewMode = "download";
  }
  if (
    viewMode == undefined &&
    c.req.header("Accept")?.includes("text/html") &&
    !lofi
  ) {
    prefetchFile(file.path);
    c.res = await renderView(c, "file-viewer/clofi", {
      file,
      hasCotyledonCookie,
    });
    return;
  }
  const download = viewMode === "download";
  const etag = file.hash;
  const filePath = file.path;
  const expectedSize = file.size;
  let encoding = decideEncoding(c.req.header("Accept-Encoding"));
  let sizeHeader = encoding === "raw"
    ? expectedSize
    // Size cannot be known because of compression modes
    : undefined;
  // Etag
  {
    const ifNoneMatch = c.req.header("If-None-Match");
    if (ifNoneMatch && etagMatches(etag, ifNoneMatch)) {
      c.res = new Response(null, {
        status: 304,
        statusText: "Not Modified",
        headers: fileHeaders(file, download, sizeHeader),
      });
      return;
    }
  }
  // Head
  if (c.req.method === "HEAD") {
    c.res = new Response(null, {
      headers: fileHeaders(file, download, sizeHeader),
    });
    return;
  }
  // Prevalidate range requests
  let rangeHeader = c.req.header("Range") ?? null;
  // Ranges are served uncompressed (see the note below), so force raw.
  if (rangeHeader) encoding = "raw";
  const ifRangeHeader = c.req.header("If-Range");
  if (ifRangeHeader && ifRangeOutdated(file, ifRangeHeader)) {
    // > If the condition is not fulfilled, the full resource is
    // > sent back with a 200 OK status.
    rangeHeader = null;
  }
  // Fetch the body, falling back to the uncompressed copy when the
  // requested compressed variant is missing.
  let foundFile;
  // Bug fix: `second` must persist across loop iterations. It was
  // previously re-declared (and reset to false) inside the loop, making
  // the "missing compression" warning unreachable.
  let second = false;
  while (true) {
    try {
      foundFile = await fetchFile(filePath, encoding);
      if (second) {
        console.warn(`File ${filePath} has missing compression: ${encoding}`);
      }
      break;
    } catch (error) {
      if (encoding !== "raw") {
        encoding = "raw";
        sizeHeader = file.size;
        second = true;
        continue;
      }
      // Bug fix: return status 500 instead of the implicit 200.
      return c.text(
        "internal server error: this file is present in the database but could not be fetched",
        500,
      );
    }
  }
  const [streamOrBuffer, actualEncoding, src] = foundFile;
  encoding = actualEncoding;
  // Range requests
  // https://developer.mozilla.org/en-US/docs/Web/HTTP/Range_requests
  // Compression is skipped because it's a confusing, but solvable problem.
  // See https://stackoverflow.com/questions/33947562/is-it-possible-to-send-http-response-using-gzip-and-byte-ranges-at-the-same-time
  if (rangeHeader) {
    const ranges = parseRange(rangeHeader, file.size);
    // TODO: multiple ranges
    if (ranges && ranges.length === 1) {
      return (c.res = handleRanges(ranges, file, streamOrBuffer, download));
    }
  }
  // Respond in a streaming fashion
  c.res = new Response(streamOrBuffer, {
    headers: {
      ...fileHeaders(file, download, sizeHeader),
      ...(encoding !== "raw" && {
        "Content-Encoding": encoding,
      }),
      "X-Cache": src,
    },
  });
});
app.get("/canvas/:script", async (c, next) => {
  const script = c.req.param("script");
  // Only render scripts that exist as compiled canvas bundles; anything
  // else falls through to the next route.
  if (hasAsset(`/js/canvas/${script}.js`)) {
    return renderView(c, "file-viewer/canvas", { script });
  }
  return next();
});
function decideEncoding(encodings: string | undefined): CompressionFormat {
if (encodings?.includes("zstd")) return "zstd";
if (encodings?.includes("gzip")) return "gzip";
return "raw";
}
/**
 * Common response headers for a served file.
 *
 * `size` is the byte length of the body actually sent: the file size for
 * raw responses, the range length for 206 responses, and `undefined` when
 * the final size is unknown (body compressed on the fly). In that last
 * case Content-Length is omitted entirely — previously the parameter
 * defaulted to `file.size`, so passing `undefined` emitted the
 * *uncompressed* size as Content-Length on compressed responses.
 */
function fileHeaders(
  file: MediaFile,
  download: boolean,
  size: number | undefined = file.size,
) {
  return {
    Vary: "Accept-Encoding, Accept",
    "Content-Type": contentTypeFor(file.path),
    ...(size !== undefined && { "Content-Length": size.toString() }),
    ETag: file.hash,
    "Last-Modified": file.date.toUTCString(),
    ...(download && {
      "Content-Disposition": `attachment; filename="${file.basename}"`,
    }),
  };
}
/**
 * Returns true when an If-Range precondition fails, i.e. the client's
 * stored representation no longer matches and the full file must be sent.
 */
function ifRangeOutdated(file: MediaFile, ifRangeHeader: string) {
  // A value starting with a quote is an entity tag; otherwise it is an
  // HTTP date.
  const isEtag = ifRangeHeader[0] === '"';
  if (isEtag) {
    const tag = ifRangeHeader.slice(1, -1);
    return tag !== file.hash;
  }
  return new Date(ifRangeHeader) < file.date;
}
/** The end is inclusive */
type Ranges = Array<[start: number, end: number]>;
/**
 * Parse an HTTP `Range` header into inclusive byte ranges.
 *
 * Returns null for anything invalid or unsatisfiable: a non-"bytes" unit,
 * malformed or non-numeric parts, inverted or negative ranges, or
 * overlapping ranges. Explicit ends past the last byte are clamped to
 * `fileSize - 1` per RFC 9110 §14.1.2. (Previously NaN and inverted
 * ranges were accepted, ends were never clamped, and a bare "bytes"
 * header threw.)
 */
function parseRange(rangeHeader: string, fileSize: number): Ranges | null {
  const [unit, ranges] = rangeHeader.split("=");
  if (unit !== "bytes" || ranges === undefined) return null;
  const result: Ranges = [];
  for (const range of ranges.split(",")) {
    const split = range.split("-");
    if (split.length !== 2) return null;
    const [start, end] = split;
    if (start === "" && end === "") return null;
    // "-N" is a suffix range (last N bytes); "N-" runs to end of file.
    const first = start === "" ? fileSize - +end : +start;
    const last = end === "" ? fileSize - 1 : Math.min(+end, fileSize - 1);
    if (!Number.isInteger(first) || !Number.isInteger(last)) return null;
    if (first < 0 || first > last) return null;
    result.push([first, last]);
  }
  // Validate that ranges do not intersect
  result.sort((a, b) => a[0] - b[0]);
  for (let i = 1; i < result.length; i++) {
    if (result[i][0] <= result[i - 1][1]) {
      return null;
    }
  }
  return result;
}
/**
 * Build a 206 Partial Content response for already-validated ranges.
 * Buffers can satisfy multiple ranges; streams only the first.
 */
function handleRanges(
  ranges: Ranges,
  file: MediaFile,
  streamOrBuffer: ReadableStream | Buffer,
  download: boolean,
): Response {
  // TODO: multiple ranges
  // Total bytes across all ranges; ends are inclusive.
  const rangeSize = ranges.reduce((a, b) => a + (b[1] - b[0] + 1), 0);
  const rangeBody = streamOrBuffer instanceof ReadableStream
    ? applySingleRangeToStream(streamOrBuffer, ranges)
    : applyRangesToBuffer(streamOrBuffer, ranges, rangeSize);
  // Content-Range reflects only the first range, which is fine while the
  // TODO above limits callers to a single range.
  return new Response(rangeBody, {
    status: 206,
    headers: {
      ...fileHeaders(file, download, rangeSize),
      "Content-Range": `bytes ${ranges[0][0]}-${ranges[0][1]}/${file.size}`,
    },
  });
}
/**
 * Concatenate the requested inclusive byte ranges of `buffer` into a
 * single contiguous array of exactly `rangeSize` bytes.
 */
function applyRangesToBuffer(
  buffer: Buffer,
  ranges: Ranges,
  rangeSize: number,
): Uint8Array {
  const out = new Uint8Array(rangeSize);
  let cursor = 0;
  for (const [from, to] of ranges) {
    out.set(buffer.subarray(from, to + 1), cursor);
    cursor += to - from + 1;
  }
  return out;
}
/**
 * Transform a whole-file stream into a stream of only `ranges[0]`
 * (inclusive byte offsets). Chunks before the range are skipped; the
 * overlapping portion of each chunk is sliced out and forwarded.
 */
function applySingleRangeToStream(
  stream: ReadableStream,
  ranges: Ranges,
): ReadableStream {
  let reader: ReadableStreamDefaultReader<Uint8Array>;
  let position = 0;
  const [start, end] = ranges[0];
  return new ReadableStream({
    async start(controller) {
      reader = stream.getReader();
      try {
        while (position <= end) {
          const { done, value } = await reader.read();
          if (done) {
            controller.close();
            return;
          }
          const buffer = new Uint8Array(value);
          const bufferStart = position;
          const bufferEnd = position + buffer.length - 1;
          position += buffer.length;
          if (bufferEnd < start) {
            continue;
          }
          if (bufferStart > end) {
            break;
          }
          const sendStart = Math.max(0, start - bufferStart);
          const sendEnd = Math.min(buffer.length - 1, end - bufferStart);
          if (sendStart <= sendEnd) {
            controller.enqueue(buffer.slice(sendStart, sendEnd + 1));
          }
        }
        controller.close();
        // Stop the upstream source once the range is satisfied; without
        // this the remainder of the file keeps streaming into a reader
        // nobody consumes (fix: resource leak).
        await reader.cancel().catch(() => {});
      } catch (error) {
        controller.error(error);
      } finally {
        reader.releaseLock();
      }
    },
    cancel() {
      // Propagate consumer cancellation upstream (fix: previously only
      // the lock was released, leaving the source running).
      void reader?.cancel().catch(() => {});
    },
  });
}
/**
 * Render the media-panel HTML fragment for a path; the client fetches
 * these partials during navigation. The root element is stripped because
 * the client creates it itself.
 */
function getPartialPage(c: Context, rawFilePath: string) {
  // Cotyledon paths are cookie-gated; visitors without the cookie get
  // the speedbump page instead of the listing.
  if (isCotyledonPath(rawFilePath)) {
    if (!checkCotyledonCookie(c)) {
      let root = Speedbump();
      // Remove the root element, it's created client side!
      root = root[2].children as ssr.Element;
      const html = ssr.ssrSync(root).text;
      c.header("X-Cotyledon", "true");
      return c.html(html);
    }
  }
  // NOTE(review): the lookups below use the path *before* the trailing
  // slash is trimmed — confirm getByPath/getByPrefix tolerate "/x/".
  const file = MediaFile.getByPath(rawFilePath);
  const permissions = FilePermissions.getByPrefix(rawFilePath);
  if (permissions !== 0) {
    const friendAuthChallenge = requireFriendAuth(c);
    if (friendAuthChallenge) return friendAuthChallenge;
  }
  if (rawFilePath.endsWith("/")) {
    rawFilePath = rawFilePath.slice(0, -1);
  }
  if (!file) {
    return c.json({ error: "File not found" }, 404);
  }
  let root = MediaPanel({
    file,
    isLast: true,
    activeFilename: null,
    // The cotyledon toggle is only shown on the root listing.
    hasCotyledonCookie: rawFilePath === "" && checkCotyledonCookie(c),
  });
  // Remove the root element, it's created client side!
  root = root[2].children as ssr.Element;
  const html = ssr.ssrSync(root).text;
  return c.html(html);
}
import { type Context, Hono } from "hono";
import * as ssr from "#ssr";
import { etagMatches, hasAsset, serveAsset } from "#sitegen/assets";
import { renderView } from "#sitegen/view";
import { contentTypeFor } from "#sitegen/mime";
import { requireFriendAuth } from "@/friend-auth.ts";
import { MediaFile, MediaFileKind } from "@/file-viewer/models/MediaFile.ts";
import { FilePermissions } from "@/file-viewer/models/FilePermissions.ts";
import { MediaPanel } from "@/file-viewer/views/clofi.tsx";
import { Speedbump } from "@/file-viewer/cotyledon.tsx";
import {
type CompressionFormat,
fetchFile,
prefetchFile,
} from "@/file-viewer/cache.ts";

View file

@ -0,0 +1,785 @@
// The file scanner incrementally updates an sqlite database with file
// stats. Additionally, it runs "processors" on files, which precompute
// expensive data such as running `ffprobe` on all media to get the
// duration.
//
// Processors are also used to derive compressed and optimized assets,
// which is how automatic JXL / AV1 encoding is done. Derived files are
// uploaded to the clover NAS to be pulled by VPS instances for hosting.
//
// This is the third iteration of the scanner, hence its name "scan3";
// Remember that any software you want to be maintainable and high
// quality cannot be written with AI.
// Canonical published tree on the NAS mount; public paths are relative to it.
const root = path.resolve("/Volumes/clover/Published");
// Scratch directory for derived assets awaiting rsync upload (see main()).
const workDir = path.resolve(".clover/derived");
/**
 * Incrementally re-index the published tree: discover changed files,
 * re-hash and upsert their metadata, run content processors with
 * dependency ordering, recompute directory rollups, and rsync any newly
 * derived assets to the NAS.
 */
export async function main() {
  const start = performance.now();
  const timerSpinner = new Spinner({
    text: () =>
      `paper clover's scan3 [${
        ((performance.now() - start) / 1000).toFixed(1)
      }s]`,
    fps: 10,
  });
  using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };

  // Stage 1: walk the tree. Directories enqueue their entries (and queue
  // vanished children as deletions); files that changed size or mtime move
  // on to the metadata queue.
  using qList = new async.Queue({
    name: "Discover Tree",
    async fn(absPath: string) {
      const stat = await fs.stat(absPath);
      const publicPath = toPublicPath(absPath);
      const mediaFile = MediaFile.getByPath(publicPath);
      if (stat.isDirectory()) {
        const items = await fs.readdir(absPath);
        qList.addMany(items.map((subPath) => path.join(absPath, subPath)));
        if (mediaFile) {
          // Indexed children that no longer exist on disk are queued with
          // `stat: null`, which qMeta treats as a deletion.
          const deleted = mediaFile.getChildren()
            .filter((child) => !items.includes(child.basename))
            .flatMap((child) =>
              child.kind === MediaFileKind.directory
                ? child.getRecursiveFileChildren()
                : child
            );
          qMeta.addMany(deleted.map((mediaFile) => ({
            absPath: path.join(root, mediaFile.path),
            publicPath: mediaFile.path,
            stat: null,
            mediaFile,
          })));
        }
        return;
      }
      // All processes must be performed again if there is no file.
      if (
        !mediaFile ||
        stat.size !== mediaFile.size ||
        stat.mtime.getTime() !== mediaFile.date.getTime()
      ) {
        qMeta.add({ absPath, publicPath, stat, mediaFile });
        return;
      }
      // If the scanners changed, it may mean more processes should be run.
      queueProcessors({ absPath, stat, mediaFile });
    },
    maxJobs: 24,
  });

  // Stage 2: hash changed files and upsert their database row, carrying
  // over previously computed fields until processors refresh them.
  using qMeta = new async.Queue({
    name: "Update Metadata",
    async fn({ absPath, publicPath, stat, mediaFile }: UpdateMetadataJob) {
      if (!stat) {
        // File was deleted.
        await runUndoProcessors(UNWRAP(mediaFile));
        return;
      }
      // TODO: run scrubLocationMetadata first
      const hash = await new Promise<string>((resolve, reject) => {
        const reader = fs.createReadStream(absPath);
        reader.on("error", reject);
        const hasher = crypto.createHash("sha1").setEncoding("hex");
        hasher.on("error", reject);
        hasher.on("readable", () => resolve(hasher.read()));
        reader.pipe(hasher);
      });
      let date = stat.mtime;
      // Heuristic: an mtime that jumped *forward* past the indexed date
      // within the last month is assumed corrupted; keep the indexed date.
      if (
        mediaFile &&
        mediaFile.date.getTime() < stat.mtime.getTime() &&
        (Date.now() - stat.mtime.getTime()) < monthMilliseconds
      ) {
        date = mediaFile.date;
        console.warn(
          `M-time on ${publicPath} was likely corrupted. ${
            formatDate(mediaFile.date)
          } -> ${formatDate(stat.mtime)}`,
        );
      }
      mediaFile = MediaFile.createFile({
        path: publicPath,
        date,
        hash,
        size: stat.size,
        duration: mediaFile?.duration ?? 0,
        dimensions: mediaFile?.dimensions ?? "",
        contents: mediaFile?.contents ?? "",
      });
      await queueProcessors({ absPath, stat, mediaFile });
    },
    getItemText: (job) =>
      job.publicPath.slice(1) + (job.stat ? "" : " (deleted)"),
    maxJobs: 10,
  });

  // Stage 3: run processors. Finishing a job releases the jobs that were
  // waiting on it via the `after`/`needs` counters.
  using qProcess = new async.Queue({
    name: "Process Contents",
    async fn(
      { absPath, stat, mediaFile, processor, index, after }: ProcessJob,
      spin,
    ) {
      await processor.run({ absPath, stat, mediaFile, spin });
      // Mark this processor's bit (bits 16+ are per-processor flags).
      mediaFile.setProcessed(mediaFile.processed | (1 << (16 + index)));
      for (const dependantJob of after) {
        ASSERT(
          dependantJob.needs > 0,
          `dependantJob.needs > 0, ${dependantJob.needs}`,
        );
        dependantJob.needs -= 1;
        if (dependantJob.needs === 0) qProcess.add(dependantJob);
      }
    },
    getItemText: ({ mediaFile, processor }) =>
      `${mediaFile.path.slice(1)} - ${processor.name}`,
    maxJobs: 4,
  });

  // Decode the persisted "id + two hash chars" processor entries.
  function decodeProcessors(input: string) {
    return input
      .split(";")
      .filter(Boolean)
      .map(([a, b, c]) => ({
        id: a,
        hash: (b.charCodeAt(0) << 8) + c.charCodeAt(0),
      }));
  }

  // Decide which processors apply to a file, migrate the processed
  // bitfield if the processor set/hashes changed, and queue what's left.
  async function queueProcessors(
    { absPath, stat, mediaFile }: Omit<ProcessFileArgs, "spin">,
  ) {
    const ext = mediaFile.extensionNonEmpty.toLowerCase();
    let possible = processors.filter((p) =>
      p.include ? p.include.has(ext) : !p.exclude?.has(ext)
    );
    if (possible.length === 0) return;
    // Low 16 bits of `processed` hold the combined processor hash; `| 1`
    // keeps it distinct from the initial 0 state.
    const hash = possible.reduce((a, b) => a ^ b.hash, 0) | 1;
    ASSERT(hash <= 0xFFFF, `${hash.toString(16)} has no bits above 16 set`);
    let processed = mediaFile.processed;
    // If the hash has changed, migrate the bitfield over.
    // This also runs when the processor hash is in it's initial 0 state.
    const order = decodeProcessors(mediaFile.processors);
    if ((processed & 0xFFFF) !== hash) {
      const previous = order.filter((_, i) =>
        (processed & (1 << (16 + i))) !== 0
      );
      processed = hash;
      for (const { id, hash } of previous) {
        const p = processors.find((p) => p.id === id);
        if (!p) continue;
        const index = possible.indexOf(p);
        if (index !== -1 && p.hash === hash) {
          // Same processor, same code: carry the "done" bit over.
          processed |= 1 << (16 + index);
        } else {
          // Processor removed or changed: roll back its side effects.
          if (p.undo) await p.undo(mediaFile);
        }
      }
      mediaFile.setProcessors(
        processed,
        possible.map((p) =>
          p.id + String.fromCharCode(p.hash >> 8, p.hash & 0xFF)
        ).join(";"),
      );
    } else {
      possible = order.map(({ id }) =>
        UNWRAP(possible.find((p) => p.id === id))
      );
    }
    // Queue needed processors.
    const jobs: ProcessJob[] = [];
    for (let i = 0, { length } = possible; i < length; i += 1) {
      if ((processed & (1 << (16 + i))) === 0) {
        const job: ProcessJob = {
          absPath,
          stat,
          mediaFile,
          processor: possible[i],
          index: i,
          after: [],
          needs: possible[i].depends.length,
        };
        jobs.push(job);
        if (job.needs === 0) qProcess.add(job);
      }
    }
    // Wire dependencies between queued jobs; dependencies that already
    // completed (not re-queued) just decrement the counter.
    for (const job of jobs) {
      for (const dependId of job.processor.depends) {
        const dependJob = jobs.find((j) => j.processor.id === dependId);
        if (dependJob) {
          dependJob.after.push(job);
        } else {
          ASSERT(job.needs > 0, `job.needs !== 0, ${job.needs}`);
          job.needs -= 1;
          if (job.needs === 0) qProcess.add(job);
        }
      }
    }
  }

  // Undo every processor that ran for a deleted file, then drop its row.
  async function runUndoProcessors(mediaFile: MediaFile) {
    const { processed } = mediaFile;
    const previous = decodeProcessors(mediaFile.processors)
      .filter((_, i) => (processed & (1 << (16 + i))) !== 0);
    for (const { id } of previous) {
      const p = processors.find((p) => p.id === id);
      if (!p) continue;
      if (p.undo) {
        await p.undo(mediaFile);
      }
    }
    mediaFile.delete();
  }

  // Add the root & recursively iterate!
  qList.add(root);
  await qList.done();
  await qMeta.done();
  await qProcess.done();

  // Update directory metadata, deepest-first so parents aggregate
  // already-refreshed children.
  const dirs = MediaFile.getDirectoriesToReindex()
    .sort((a, b) => b.path.length - a.path.length);
  for (const dir of dirs) {
    const children = dir.getChildren();
    // readme.txt
    const readmeContent = children.find((x) =>
      x.basename === "readme.txt"
    )?.contents ?? "";
    // dirsort
    let dirsort: string[] | null = null;
    const dirSortRaw =
      children.find((x) => x.basename === ".dirsort")?.contents ?? "";
    if (dirSortRaw) {
      dirsort = dirSortRaw.split("\n")
        .map((x) => x.trim())
        .filter(Boolean);
    }
    // Permissions
    if (children.some((x) => x.basename === ".friends")) {
      FilePermissions.setPermissions(dir.path, 1);
    } else {
      FilePermissions.setPermissions(dir.path, 0);
    }
    // Recursive stats.
    let totalSize = 0;
    let newestDate = new Date(0);
    let allHashes = "";
    for (const child of children) {
      totalSize += child.size;
      allHashes += child.hash;
      // Editing a readme should not bump the directory timestamp.
      // (fix: this previously compared against "/readme.txt", which can
      // never equal a basename — see the lookup above.)
      if (child.basename !== "readme.txt" && child.date > newestDate) {
        newestDate = child.date;
      }
    }
    const dirHash = crypto.createHash("sha1")
      .update(dir.path + allHashes)
      .digest("hex");
    MediaFile.markDirectoryProcessed({
      id: dir.id,
      timestamp: newestDate,
      contents: readmeContent,
      size: totalSize,
      hash: dirHash,
      dirsort,
    });
  }

  // Sync to remote
  if ((await fs.readdir(workDir)).length > 0) {
    await rsync.spawn({
      args: [
        "--links",
        "--recursive",
        "--times",
        "--partial",
        "--progress",
        "--remove-source-files",
        "--delay-updates",
        workDir + "/",
        "clo@zenith:/mnt/storage1/clover/Documents/Config/clover_file/derived/",
      ],
      title: "Uploading Derived Assets",
      cwd: process.cwd(),
    });
    await fs.removeEmptyDirectories(workDir);
  } else {
    console.info("No new derived assets");
  }
  console.info(
    "Updated file viewer index in \x1b[1m" +
      ((performance.now() - start) / 1000).toFixed(1) + "s\x1b[0m",
  );
  MediaFile.db.prepare("VACUUM").run();
  const { duration, count } = MediaFile.db.prepare<
    [],
    { count: number; duration: number }
  >(`
    select
      count(*) as count,
      sum(duration) as duration
    from media_files
  `).getNonNull();
  console.info();
  console.info(
    "Global Stats:\n" +
      `- File Count: \x1b[1m${count}\x1b[0m\n` +
      `- Canonical Size: \x1b[1m${formatSize(MediaFile.getByPath("/")!.size)}\x1b[0m\n` +
      `- Media Duration: \x1b[1m${formatDurationLong(duration)}\x1b[0m\n`,
  );
}
/** A content processor run against matching files during a scan. */
interface Process {
  name: string;
  // NOTE(review): several processors set `enable`, but no code in this
  // file checks it when filtering in queueProcessors — verify disabled
  // processors are actually skipped.
  enable?: boolean;
  /** Run only for these extensions (lowercase, with leading dot). */
  include?: Set<string>;
  /** Run for every extension except these. */
  exclude?: Set<string>;
  /** Names of processors that must complete first for the same file. */
  depends?: string[];
  /** Bump to force a rerun even when `run`'s source is unchanged. */
  version?: number;
  /* Perform an action. */
  run(args: ProcessFileArgs): Promise<void>;
  /* Should detect if `run` was never even run before before undoing state */
  undo?(mediaFile: MediaFile): Promise<void>;
}
const execFileRaw = util.promisify(child_process.execFile);
// Wrapper that rewrites the verbose "Command failed: <full command>" error
// into a short one-liner (binary name + exit code or signal).
const execFile: typeof execFileRaw = ((
  ...args: Parameters<typeof execFileRaw>
) =>
  execFileRaw(...args).catch((e: any) => {
    if (e?.message?.startsWith?.("Command failed")) {
      // Fold unsigned 32-bit exit codes back into signed form.
      if (e.code > (2 ** 31)) e.code |= 0;
      const code = e.signal ? `signal ${e.signal}` : `code ${e.code}`;
      e.message = `${e.cmd.split(" ")[0]} failed with ${code}`;
    }
    throw e;
  })) as any;
// Probe for the ffmpeg tool suite once at startup; a null result disables
// the processors that need the missing binary.
const ffprobeBin = testProgram("ffprobe", "--help");
const ffmpegBin = testProgram("ffmpeg", "--help");
// Flags shared by every ffmpeg invocation.
const ffmpegOptions = [
  "-hide_banner",
  "-loglevel",
  "warning",
];
// Stores the media duration (whole seconds, rounded up) using ffprobe.
const procDuration: Process = {
  name: "calculate duration",
  enable: ffprobeBin !== null,
  include: rules.extsDuration,
  async run({ absPath, mediaFile }) {
    const { stdout } = await execFile(ffprobeBin!, [
      "-v",
      "error",
      "-show_entries",
      "format=duration",
      "-of",
      "default=noprint_wrappers=1:nokey=1",
      absPath,
    ]);
    const duration = parseFloat(stdout.trim());
    if (Number.isNaN(duration)) {
      throw new Error("Could not extract duration from " + stdout);
    }
    mediaFile.setDuration(Math.ceil(duration));
  },
};
// NOTE: Never re-order the processors. Add new ones at the end.
// Stores "WIDTHxHEIGHT" for visual media. SVGs are parsed textually;
// everything else is probed with ffprobe.
const procDimensions: Process = {
  name: "calculate dimensions",
  enable: ffprobeBin != null,
  include: rules.extsDimensions,
  async run({ absPath, mediaFile }) {
    const ext = path.extname(absPath);
    let dimensions;
    if (ext === ".svg") {
      // Parse out of text data. Only bare integer width="N"/height="N"
      // attributes are recognized; other SVGs fall through to "".
      const content = await fs.readFile(absPath, "utf8");
      const widthMatch = content.match(/width="(\d+)"/);
      const heightMatch = content.match(/height="(\d+)"/);
      if (widthMatch && heightMatch) {
        dimensions = `${widthMatch[1]}x${heightMatch[1]}`;
      }
    } else {
      // Use ffprobe to observe streams. Use the probed binary path like
      // procDuration does (fix: this previously hard-coded "ffprobe",
      // bypassing the startup probe that `enable` is based on).
      const { stdout } = await execFile(ffprobeBin!, [
        "-v",
        "error",
        "-select_streams",
        "v:0",
        "-show_entries",
        "stream=width,height",
        "-of",
        "csv=s=x:p=0",
        absPath,
      ]);
      if (stdout.includes("x")) {
        dimensions = stdout.trim();
      }
    }
    // "" means unknown; storing it still marks the processor as done.
    mediaFile.setDimensions(dimensions ?? "");
  },
};
// Denormalizes small text files into the `contents` column so directory
// views (readme.txt, .dirsort) never touch the filesystem.
const procLoadTextContents: Process = {
  name: "load text content",
  include: rules.extsReadContents,
  async run({ absPath, mediaFile, stat }) {
    // Skip anything over 1 MB; large blobs do not belong in the DB.
    if (stat.size > 1_000_000) return;
    const text = await fs.readFile(absPath, "utf-8");
    // (fix: removed a leftover `console.log({ text })` debug statement
    // that dumped every file's contents to the console.)
    mediaFile.setContents(text);
  },
};
// Pre-renders syntax-highlighted HTML for source files into `contents`.
const procHighlightCode: Process = {
  name: "highlight source code",
  include: new Set(rules.extsCode.keys()),
  async run({ absPath, mediaFile, stat }) {
    const language = UNWRAP(
      rules.extsCode.get(path.extname(absPath).toLowerCase()),
    );
    // An issue is that .ts is an overloaded extension, shared between
    // 'transport stream' and 'typescript'.
    //
    // Filter used here is:
    // - more than 1mb
    // - invalid UTF-8
    if (stat.size > 1_000_000) return;
    let code;
    const buf = await fs.readFile(absPath);
    try {
      code = new TextDecoder("utf-8", { fatal: true }).decode(buf);
    } catch (error) {
      // Binary data (e.g. an actual MPEG transport stream): store "".
      mediaFile.setContents("");
      return;
    }
    const content = await highlight.highlightCode(code, language);
    mediaFile.setContents(content);
  },
};
// Encodes downscaled variants of each image (every catalog size below the
// source width, in each preset format) as refcounted derived assets.
const procImageSubsets: Process = {
  name: "encode image subsets",
  include: rules.extsImage,
  depends: ["calculate dimensions"],
  version: 2,
  async run({ absPath, mediaFile, spin }) {
    const { width, height } = UNWRAP(mediaFile.parseDimensions());
    const targetSizes = transcodeRules.imageSizes.filter((w) => w < width);
    const baseStatus = spin.text;
    // Each produced asset's disposer unrefs it. If an encode throws, the
    // stack disposes (rolls back) everything produced so far; on success
    // `stack.move()` below detaches them so the refs are kept.
    using stack = new DisposableStack();
    for (const size of targetSizes) {
      const { w, h } = resizeDimensions(width, height, size);
      for (const { ext, args } of transcodeRules.imagePresets) {
        spin.text = baseStatus +
          ` (${w}x${h}, ${ext.slice(1).toUpperCase()})`;
        stack.use(
          await produceAsset(
            `${mediaFile.hash}/${size}${ext}`,
            async (out) => {
              await fs.mkdir(path.dirname(out));
              await fs.rm(out, { force: true });
              await execFile(ffmpegBin!, [
                ...ffmpegOptions,
                "-i",
                absPath,
                "-vf",
                `scale=${w}:${h}:force_original_aspect_ratio=increase,crop=${w}:${h}`,
                ...args,
                out,
              ]);
              return [out];
            },
          ),
        );
      }
    }
    stack.move();
  },
  async undo(mediaFile) {
    const { width } = UNWRAP(mediaFile.parseDimensions());
    const targetSizes = transcodeRules.imageSizes.filter((w) => w < width);
    for (const size of targetSizes) {
      for (const { ext } of transcodeRules.imagePresets) {
        // NOTE(review): unproduceAsset is async but not awaited here —
        // confirm fire-and-forget unref is intended.
        unproduceAsset(`${mediaFile.hash}/${size}${ext}`);
      }
    }
  },
};
// Display label for the quality letter embedded in a video preset id
// (second character, see procVideos).
const qualityMap: Record<string, string> = {
  u: "ultra-high",
  h: "high",
  m: "medium",
  l: "low",
  d: "data-saving",
};
// One processor per video transcode preset; outputs are refcounted
// derived assets named after the preset id.
const procVideos = transcodeRules.videoFormats.map<Process>((preset) => ({
  name: `encode ${preset.codec} ${UNWRAP(qualityMap[preset.id[1]])}`,
  include: rules.extsVideo,
  enable: ffmpegBin != null,
  async run({ absPath, mediaFile, spin }) {
    // Clips shorter than ten seconds are served as-is.
    if ((mediaFile.duration ?? 0) < 10) return;
    await produceAsset(`${mediaFile.hash}/${preset.id}`, async (base) => {
      base = path.dirname(base);
      await fs.mkdir(base);
      // A sibling "<name>.json" may override the encoder inputs (e.g. a
      // higher-quality source file or a forced frame rate).
      let inputArgs = ["-i", absPath];
      try {
        const config = await fs.readJson<any>(
          path.join(
            path.dirname(absPath),
            path.basename(absPath, path.extname(absPath)) + ".json",
          ),
        );
        if (config.encoder && typeof config.encoder.videoSrc === "string") {
          const { videoSrc, audioSrc, rate } = config.encoder;
          inputArgs = [
            ...rate ? ["-r", String(rate)] : [],
            "-i",
            videoSrc,
            ...audioSrc ? ["-i", audioSrc] : [],
          ];
        }
      } catch (err: any) {
        // Missing config file is the normal case.
        if (err?.code !== "ENOENT") throw err;
      }
      const args = transcodeRules.getVideoArgs(
        preset,
        base,
        inputArgs,
      );
      try {
        // Bridge ffmpeg's progress reporting into this queue job's spinner.
        const fakeProgress = new Progress({ text: spin.text, spinner: null });
        fakeProgress.stop();
        spin.format = (now: number) => fakeProgress.format(now);
        // @ts-expect-error
        fakeProgress.redraw = () => spin.redraw();
        await ffmpeg.spawn({
          ffmpeg: ffmpegBin!,
          title: fakeProgress.text,
          progress: fakeProgress,
          args,
          cwd: base,
        });
        return await collectFiles();
      } catch (err) {
        // Best-effort removal of partial outputs before rethrowing.
        // (fix: the removals were previously fire-and-forget, so the
        // error could propagate before cleanup finished.)
        for (const file of await collectFiles()) {
          try {
            await fs.rm(file);
          } catch {}
        }
        throw err;
      }
      // Everything this preset wrote, identified by the id prefix.
      async function collectFiles(): Promise<string[]> {
        return (await fs.readdir(base))
          .filter((basename) => basename.startsWith(preset.id))
          .map((basename) => path.join(base, basename));
      }
    });
  },
}));
// Precompresses servable files (gzip + zstd) as refcounted derived assets;
// the web server picks these up via Accept-Encoding negotiation.
const procCompression = [
  { name: "gzip", fn: () => zlib.createGzip({ level: 9 }) },
  { name: "zstd", fn: () => zlib.createZstdCompress() },
].map(({ name, fn }) => ({
  name: `compress ${name}`,
  exclude: rules.extsPreCompressed,
  async run({ absPath, mediaFile }) {
    // Tiny files are not worth a compressed variant.
    if ((mediaFile.size ?? 0) < 10) return;
    await produceAsset(`${mediaFile.hash}/${name}`, async (base) => {
      // Await the async mkdir like every other processor does; the sync
      // variant blocked the event loop inside a queue worker.
      await fs.mkdir(path.dirname(base));
      await stream.promises.pipeline(
        fs.createReadStream(absPath),
        fn(),
        fs.createWriteStream(base),
      );
      return [base];
    });
  },
} satisfies Process as Process));
// Processor registry. Array order is significant: each processor's index
// is its bit position in `MediaFile.processed` (offset by 16) and its
// single-letter id — never re-order, only append (see note above).
const processors = [
  procDimensions,
  procDuration,
  procLoadTextContents,
  procHighlightCode,
  procImageSubsets,
  ...procVideos,
  ...procCompression,
]
  .map((process, id, all) => {
    // Compact ids 'a', 'b', ... for the persisted processors string.
    const strIndex = (id: number) =>
      String.fromCharCode("a".charCodeAt(0) + id);
    return {
      ...process as Process,
      id: strIndex(id),
      // Create a unique key.
      // (16-bit XOR fold of the SHA-1 of run()'s source plus `version`,
      // so any code change invalidates previously processed files.)
      hash: new Uint16Array(
        crypto.createHash("sha1")
          .update(
            process.run.toString() +
              (process.version ? String(process.version) : ""),
          )
          .digest().buffer,
      ).reduce((a, b) => a ^ b),
      // Resolve depends-on-names into depends-on-ids, with sanity checks.
      depends: (process.depends ?? []).map((depend) => {
        const index = all.findIndex((p) => p.name === depend);
        if (index === -1) throw new Error(`Cannot find depend '${depend}'`);
        if (index === id) throw new Error(`Cannot depend on self: '${depend}'`);
        return strIndex(index);
      }),
    };
  });
/** Scale (w, h) down to `desiredWidth`, keeping aspect ratio (floored). */
function resizeDimensions(w: number, h: number, desiredWidth: number) {
  ASSERT(desiredWidth < w, `${desiredWidth} < ${w}`);
  const scaledHeight = Math.floor((h / w) * desiredWidth);
  return { w: desiredWidth, h: scaledHeight };
}
/**
 * Refcounted derived-asset production. `builder` runs only for the first
 * reference (receives the on-disk prefix under workDir and returns the
 * files it wrote); the returned disposable releases the reference.
 */
async function produceAsset(
  key: string,
  builder: (prefix: string) => Promise<string[]>,
) {
  const asset = AssetRef.putOrIncrement(key);
  try {
    if (asset.refs === 1) {
      const paths = await builder(path.join(workDir, key));
      // Store workDir-relative, forward-slash paths.
      asset.addFiles(
        paths.map((file) =>
          path.relative(workDir, file)
            .replaceAll("\\", "/")
        ),
      );
    }
    return {
      [Symbol.dispose]: () => asset.unref(),
    };
  } catch (err: any) {
    // Tag the error with the asset key for debugging, then roll back the
    // reference taken above.
    if (err && typeof err === "object") err.assetKey = key;
    asset.unref();
    throw err;
  }
}
/** Release one reference to a derived asset, if it is tracked at all. */
async function unproduceAsset(key: string) {
  const ref = AssetRef.get(key);
  if (!ref) return;
  ref.unref();
  console.log(`unref ${key}`);
  // TODO: remove associated files from target
}
/** Work item for the "Update Metadata" queue. */
interface UpdateMetadataJob {
  absPath: string;
  publicPath: string;
  // null when the file was deleted from disk.
  stat: fs.Stats | null;
  // null when the file is not yet indexed.
  mediaFile: MediaFile | null;
}
/** Arguments given to Process.run for a single file. */
interface ProcessFileArgs {
  absPath: string;
  stat: fs.Stats;
  mediaFile: MediaFile;
  spin: Spinner;
}
/** Work item for the "Process Contents" queue. */
interface ProcessJob {
  absPath: string;
  stat: fs.Stats;
  mediaFile: MediaFile;
  processor: typeof processors[0];
  // Bit position of this processor within `MediaFile.processed` (+16).
  index: number;
  // Jobs blocked until this one completes.
  after: ProcessJob[];
  // Count of unfinished dependencies; the job is queued when it hits 0.
  needs: number;
}
/**
 * True when a directory entry should be excluded from the index.
 *
 * `.dirsort` and `.friends` are metadata dot files that the directory
 * pass in main() reads back out of the database, so they must be tracked.
 * (fix: the early returns previously said `true`, which skipped them —
 * contradicting the comment and starving the directory pass.)
 */
export function skipBasename(basename: string): boolean {
  // dot files must be incrementally tracked
  if (basename === ".dirsort") return false;
  if (basename === ".friends") return false;
  return (
    basename.startsWith(".") ||
    basename.startsWith("._") ||
    basename.startsWith(".tmp") ||
    basename === ".DS_Store" ||
    basename.toLowerCase() === "thumbs.db" ||
    basename.toLowerCase() === "desktop.ini"
  );
}
/** Map an absolute path under `root` to its site path ("/a/b"; root = "/"). */
export function toPublicPath(absPath: string) {
  ASSERT(path.isAbsolute(absPath), "non-absolute " + absPath);
  if (absPath === root) return "/";
  const relative = path.relative(root, absPath).replaceAll("\\", "/");
  return "/" + relative;
}
/**
 * Probe whether an executable is runnable; returns its name, or null when
 * missing/broken (with a console warning).
 *
 * Note: `spawnSync` does NOT throw when the executable is missing — it
 * reports the failure through `result.error` — so that field must be
 * checked explicitly. (fix: the previous try/catch alone never caught
 * anything, making this function always return `name`.)
 */
export function testProgram(name: string, helpArgument: string) {
  try {
    const result = child_process.spawnSync(name, [helpArgument]);
    if (result.error) throw result.error;
    return name;
  } catch (err) {
    console.warn(`Missing or corrupt executable '${name}'`);
  }
  return null;
}
// Window for the m-time corruption heuristic in the metadata queue.
const monthMilliseconds = 30 * 24 * 60 * 60 * 1000;
import { Progress } from "@paperclover/console/Progress";
import { Spinner } from "@paperclover/console/Spinner";
import * as async from "#sitegen/async";
import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as zlib from "node:zlib";
import * as child_process from "node:child_process";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as stream from "node:stream";
import { MediaFile, MediaFileKind } from "@/file-viewer/models/MediaFile.ts";
import { AssetRef } from "@/file-viewer/models/AssetRef.ts";
import { FilePermissions } from "@/file-viewer/models/FilePermissions.ts";
import {
formatDate,
formatDurationLong,
formatSize,
} from "@/file-viewer/format.ts";
import * as rules from "@/file-viewer/rules.ts";
import * as highlight from "@/file-viewer/highlight.ts";
import * as ffmpeg from "@/file-viewer/ffmpeg.ts";
import * as rsync from "@/file-viewer/rsync.ts";
import * as transcodeRules from "@/file-viewer/transcode-rules.ts";

418
src/file-viewer/cache.ts Normal file
View file

@ -0,0 +1,418 @@
import { Agent, get } from "node:https";
import * as fs from "node:fs";
import * as path from "node:path";
import { Buffer } from "node:buffer";
import type { ClientRequest } from "node:http";
import LRUCache from "lru-cache";
import { open } from "node:fs/promises";
import { createHash } from "node:crypto";
import { scoped } from "@paperclover/console";
import { escapeUri } from "./format.ts";
declare const Deno: any;
// NAS server holding the canonical copies of all published files.
const sourceOfTruth = "https://nas.paperclover.net:43250";
// Self-signed certificate used to trust the NAS's TLS endpoint.
const caCert = fs.readFileSync("src/file-viewer/cert.pem");
const diskCacheRoot = path.join(import.meta.dirname, "../.clover/filecache/");
const diskCacheMaxSize = 14 * 1024 * 1024 * 1024; // 14GB
const ramCacheMaxSize = 1 * 1024 * 1024 * 1024; // 1GB
// In-flight downloads by cache key, so concurrent requests for the same
// file tee one stream instead of fetching twice.
const loadInProgress = new Map<
  string,
  Promise<{ stream: ReadableStream }> | { stream: ReadableStream }
>();
// Disk cache serializes the access times
const diskCacheState: Record<string, [size: number, lastAccess: number]> =
  loadDiskCacheState();
// LRU of on-disk cache entries; values are file sizes in bytes. The
// dispose/onInsert hooks mirror evictions into `diskCacheState`.
// NOTE(review): `onInsert` is not an option in current `lru-cache`
// releases — confirm the pinned version supports it.
const diskCache = new LRUCache<string, number>({
  maxSize: diskCacheMaxSize,
  ttl: 0,
  sizeCalculation: (value) => value,
  dispose: (_, key) => {
    delete diskCacheState[key];
  },
  onInsert: (size, key) => {
    diskCacheState[key] = [size, Date.now()];
  },
});
// LRU of fully-downloaded file bodies held in memory.
const ramCache = new LRUCache<string, Buffer>({
  maxSize: ramCacheMaxSize,
  ttl: 0,
  sizeCalculation: (value) => value.byteLength,
});
// Debounce handle for persisting diskCacheState (see saveDiskCacheStateLater).
let diskCacheFlush: NodeJS.Timeout | undefined;
{
  // Initialize the disk cache by validating all files exist, and then
  // inserting them in last to start order. State is repaired pessimistically.
  const toDelete = new Set(Object.keys(diskCacheState));
  fs.mkdirSync(diskCacheRoot, { recursive: true });
  for (
    const file of fs.readdirSync(diskCacheRoot, {
      recursive: true,
      encoding: "utf-8",
    })
  ) {
    // Cache entries are 40-char hex SHA-1 names; skip everything else
    // (e.g. state.json).
    const key = file.split("/").pop()!;
    if (key.length !== 40) continue;
    const entry = diskCacheState[key];
    if (!entry) {
      // File on disk with no recorded state: remove it.
      fs.rmSync(path.join(diskCacheRoot, file), {
        recursive: true,
        force: true,
      });
      delete diskCacheState[key];
      continue;
    }
    toDelete.delete(key);
  }
  // State entries whose backing file is gone.
  for (const key of toDelete) {
    delete diskCacheState[key];
  }
  saveDiskCacheState();
  // NOTE(review): this sorts most-recent first and inserts in that order,
  // which makes the most recently used entries the LRU's *oldest* —
  // confirm the intended insertion order.
  const sorted = Object.keys(diskCacheState).sort((a, b) =>
    diskCacheState[b][1] - diskCacheState[a][1]
  );
  for (const key of sorted) {
    diskCache.set(key, diskCacheState[key][0]);
  }
}
/** Where a fetched file body came from, for logging/X-Cache. */
export type CacheSource = "ram" | "disk" | "miss" | "lan" | "flight";
export type CompressionFormat = "gzip" | "zstd" | "raw";
// URL path prefix on the source-of-truth server for each format.
const compressionFormatMap = {
  gzip: "gz",
  zstd: "zstd",
  raw: "file",
} as const;
const log = scoped("file_cache");
// When the NAS volume is mounted locally (dev machines), files are read
// straight from it instead of over the network.
const lanMount = "/Volumes/clover/Published";
const hasLanMount = fs.existsSync(lanMount);
/**
* Fetches a file with the given compression format.
* Uncompressed files are never persisted to disk.
*
* Returns a promise to either:
* - Buffer: the data is from RAM cache
* - ReadableStream: the data is being streamed in from disk/server
*
* Additionally, returns a string indicating the source of the data, for debugging.
*
* Callers must be able to consume both output types.
*/
export async function fetchFile(
  pathname: string,
  format: CompressionFormat = "raw",
): Promise<
  [Buffer | ReadableStream, encoding: CompressionFormat, src: CacheSource]
> {
  // 1. Ram cache
  const cacheKey = hashKey(`${pathname}:${format}`);
  const ramCacheHit = ramCache.get(cacheKey);
  if (ramCacheHit) {
    log(`ram hit: ${format}${pathname}`);
    return [ramCacheHit, format, "ram"];
  }
  // 2. Tee an existing loading stream.
  const inProgress = loadInProgress.get(cacheKey);
  if (inProgress) {
    const stream = await inProgress;
    // One half goes to this caller, the other is stored back for the
    // next caller to tee again.
    const [stream1, stream2] = stream.stream.tee();
    loadInProgress.set(cacheKey, { stream: stream2 });
    log(`in-flight copy: ${format}${pathname}`);
    return [stream1, format, "flight"];
  }
  // 3. Disk cache + Load into ram cache.
  // (Uncompressed files are never written to disk, so only compressed
  // formats can hit here.)
  if (format !== "raw") {
    const diskCacheHit = diskCache.get(cacheKey);
    if (diskCacheHit) {
      // Refresh the persisted access time for LRU ordering.
      diskCacheState[cacheKey] = [diskCacheHit, Date.now()];
      saveDiskCacheStateLater();
      log(`disk hit: ${format}/${pathname}`);
      return [
        startInProgress(
          cacheKey,
          new ReadableStream({
            start: async (controller) => {
              const stream = fs.createReadStream(
                path.join(diskCacheRoot, cacheKey),
              );
              // Stream to the caller while accumulating the whole body
              // for promotion into the RAM cache.
              const chunks: Buffer[] = [];
              stream.on("data", (chunk) => {
                controller.enqueue(chunk);
                chunks.push(chunk as Buffer);
              });
              stream.on("end", () => {
                controller.close();
                ramCache.set(cacheKey, Buffer.concat(chunks));
                finishInProgress(cacheKey);
              });
              stream.on("error", (error) => {
                controller.error(error);
              });
            },
          }),
        ),
        format,
        "disk",
      ];
    }
  }
  // 4. Lan Mount (access files that prod may not have)
  if (hasLanMount) {
    log(`lan hit: ${format}/${pathname}`);
    return [
      startInProgress(
        cacheKey,
        new ReadableStream({
          start: async (controller) => {
            const stream = fs.createReadStream(
              path.join(lanMount, pathname),
            );
            const chunks: Buffer[] = [];
            stream.on("data", (chunk) => {
              controller.enqueue(chunk);
              chunks.push(chunk as Buffer);
            });
            stream.on("end", () => {
              controller.close();
              ramCache.set(cacheKey, Buffer.concat(chunks));
              finishInProgress(cacheKey);
            });
            stream.on("error", (error) => {
              controller.error(error);
            });
          },
        }),
      ),
      // The mount holds originals only, so the encoding is always raw.
      "raw",
      "lan",
    ];
  }
  // 5. Fetch from the source-of-truth server.
  const url = `${compressionFormatMap[format]}${escapeUri(pathname)}`;
  log(`miss: ${format}${pathname}`);
  const response = await startInProgress(cacheKey, fetchFileUncached(url));
  const [stream1, stream2] = response.tee();
  // Fire-and-forget: persist the second half into the ram/disk caches.
  handleDownload(cacheKey, format, stream2);
  return [stream1, format, "miss"];
}
export async function prefetchFile(
pathname: string,
format: CompressionFormat = "zstd",
) {
const cacheKey = hashKey(`${pathname}:${format}`);
const ramCacheHit = ramCache.get(cacheKey);
if (ramCacheHit) {
return;
}
if (hasLanMount) return;
const url = `${compressionFormatMap[format]}${pathname}`;
log(`prefetch: ${format}${pathname}`);
const stream2 = await startInProgress(cacheKey, fetchFileUncached(url));
handleDownload(cacheKey, format, stream2);
}
// Consumes the second branch of a tee'd download stream: buffers it into
// RAM, and (for compressed formats) persists it to the disk cache.
// Always clears the in-progress marker, even on failure, so a later
// request can retry instead of attaching to a dead entry forever.
async function handleDownload(
  cacheKey: string,
  format: CompressionFormat,
  stream2: ReadableStream,
) {
  const chunks: Buffer[] = [];
  try {
    if (format !== "raw") {
      // Compressed artifacts are written through to the disk cache while
      // they are buffered for the RAM cache.
      const file = await open(path.join(diskCacheRoot, cacheKey), "w");
      try {
        for await (const chunk of stream2) {
          await file.write(chunk);
          chunks.push(chunk);
        }
      } finally {
        // close() returns a promise; await it so flush errors surface
        // here instead of becoming an unhandled rejection.
        await file.close();
      }
    } else {
      for await (const chunk of stream2) {
        chunks.push(chunk);
      }
    }
    const final = Buffer.concat(chunks);
    chunks.length = 0;
    ramCache.set(cacheKey, final);
    if (format !== "raw") {
      diskCache.set(cacheKey, final.byteLength);
    }
  } finally {
    finishInProgress(cacheKey);
  }
}
// Derive a stable cache key: the hex-encoded SHA-1 of `key`.
function hashKey(key: string): string {
  const hasher = createHash("sha1");
  hasher.update(key);
  return hasher.digest("hex");
}
// Registers `cacheKey` as an in-flight download and returns a stream for
// the current caller, while a tee'd twin is stored in `loadInProgress`
// so concurrent requests for the same key can attach to it.
// Accepts either a ReadableStream or a promise for one; in the promise
// case the map temporarily holds a promise that resolves to the tee'd
// stream once the fetch response arrives.
// NOTE(review): if `promise` rejects, only the returned stream rejects —
// `reject2` is never called, so the map entry's promise never settles
// and is never removed here; waiters on it could hang until
// finishInProgress runs from elsewhere. Confirm this is intended.
function startInProgress<T extends Promise<ReadableStream> | ReadableStream>(
  cacheKey: string,
  promise: T,
): T {
  if (promise instanceof Promise) {
    let resolve2: (stream: { stream: ReadableStream }) => void;
    let reject2: (error: Error) => void;
    const stream2Promise = new Promise<{ stream: ReadableStream }>(
      (resolve, reject) => {
        resolve2 = resolve;
        reject2 = reject;
      },
    );
    const stream1Promise = new Promise<ReadableStream>((resolve, reject) => {
      promise.then((stream) => {
        // Split the stream: one branch for the caller, one for the map.
        const [stream1, stream2] = stream.tee();
        const stream2Obj = { stream: stream2 };
        resolve2(stream2Obj);
        // Replace the placeholder promise with the resolved tee.
        loadInProgress.set(cacheKey, stream2Obj);
        resolve(stream1);
      }, reject);
    });
    loadInProgress.set(cacheKey, stream2Promise);
    return stream1Promise as T;
  } else {
    const [stream1, stream2] = promise.tee();
    loadInProgress.set(cacheKey, { stream: stream2 });
    return stream1 as T;
  }
}
// Load the persisted disk-cache index, mapping cache key to entry size
// and last-access time. Any failure (missing, unreadable, or corrupt
// state file) yields a fresh empty index.
function loadDiskCacheState(): Record<
  string,
  [size: number, lastAccess: number]
> {
  try {
    const raw = fs.readFileSync(path.join(diskCacheRoot, "state.json"), "utf-8");
    // NOTE(review): parsed JSON is trusted as-is; assumes the file was
    // written by saveDiskCacheState — no schema validation here.
    return JSON.parse(raw);
  } catch {
    return {};
  }
}
// Debounced save of the disk-cache index: schedules one flush a minute
// out unless a flush is already pending.
// NOTE(review): `diskCacheFlush` is never cleared after the timer fires,
// so once the first flush happens this becomes a permanent no-op for
// the rest of the process lifetime; the "exit" hook still persists the
// final state. Confirm whether the callback should reset the handle.
function saveDiskCacheStateLater() {
  if (diskCacheFlush) {
    return;
  }
  diskCacheFlush = setTimeout(() => {
    saveDiskCacheState();
  }, 60_000) as NodeJS.Timeout;
  // unref (when available) lets the process exit with a flush pending.
  if (diskCacheFlush.unref) {
    diskCacheFlush.unref();
  }
}
// Flush the disk-cache index synchronously on process exit so entry
// sizes and access times survive restarts.
process.on("exit", () => {
  saveDiskCacheState();
});
// Synchronously persist the disk-cache index next to the cached files.
function saveDiskCacheState() {
  const statePath = path.join(diskCacheRoot, "state.json");
  fs.writeFileSync(statePath, JSON.stringify(diskCacheState));
}
// Drop the in-flight marker for `cacheKey` so future lookups re-resolve
// from the caches (or start a fresh download).
function finishInProgress(cacheKey: string) {
  loadInProgress.delete(cacheKey);
}
// Self signed certificate must be trusted to be able to request the above URL.
//
// Unfortunately, Bun and Deno are both not node.js compatible, so those two
// runtimes need fallback implementations. The fallback implementations call
// fetch with the `agent` value as the RequestInit. Since `fetch` decompresses
// the body for you, it must be disabled.
//
// On plain Node.js this is an http(s) Agent configured with the CA, and
// it is passed to `get` in fetchFileNode below.
const agent: any = typeof Bun !== "undefined"
  ? {
    // Bun has two non-standard fetch extensions
    decompress: false,
    tls: {
      ca: caCert,
    },
  }
  // TODO: https://github.com/denoland/deno/issues/12291
  // : typeof Deno !== "undefined"
  // ? {
  //   // Deno configures through the non-standard `client` extension
  //   client: Deno.createHttpClient({
  //     caCerts: [caCert.toString()],
  //   }),
  // }
  // Node.js supports node:http
  : new Agent({
    ca: caCert,
  });
// Fetch `pathname` from the source-of-truth server using node:http(s),
// exposing the response body as a web ReadableStream.
// Rejects on connection errors or any non-200 status.
function fetchFileNode(pathname: string): Promise<ReadableStream> {
  return new Promise((resolve, reject) => {
    const request: ClientRequest = get(`${sourceOfTruth}/${pathname}`, {
      agent,
    });
    request.on("response", (response) => {
      if (response.statusCode !== 200) {
        reject(new Error(`Failed to fetch ${pathname}`));
        // Drain the unused body so the underlying socket is released
        // instead of leaking while the response sits paused.
        response.resume();
        return;
      }
      const stream = new ReadableStream({
        start(controller) {
          // NOTE(review): no backpressure — data events are enqueued as
          // fast as they arrive regardless of the consumer's speed.
          response.on("data", (chunk) => {
            controller.enqueue(chunk);
          });
          response.on("end", () => {
            controller.close();
          });
          response.on("error", (error) => {
            controller.error(error);
            // reject() after resolve() is a no-op; harmless here.
            reject(error);
          });
        },
      });
      resolve(stream);
    });
    request.on("error", (error) => {
      reject(error);
    });
  });
}
// fetch()-based fallback for Bun/Deno; `agent` doubles as the
// RequestInit carrying the runtime-specific TLS configuration.
async function fetchFileDenoBun(pathname: string): Promise<ReadableStream> {
  const response = await fetch(`${sourceOfTruth}/${pathname}`, agent);
  if (!response.ok) {
    throw new Error(`Failed to fetch ${pathname}`);
  }
  return response.body!;
}
// Runtime dispatch: Bun/Deno take the fetch-based implementation, plain
// Node.js uses node:http(s) with the custom Agent above.
const fetchFileUncached =
  typeof Bun !== "undefined" || typeof Deno !== "undefined"
    ? fetchFileDenoBun
    : fetchFileNode;
// Collect a ReadableStream into a single Buffer. A Buffer argument is
// returned unchanged (same instance, no copy).
export async function toBuffer(
  stream: ReadableStream | Buffer,
): Promise<Buffer> {
  if (!(stream instanceof ReadableStream)) {
    return stream;
  }
  const collected: Buffer[] = [];
  for await (const part of stream) {
    collected.push(part);
  }
  return Buffer.concat(collected);
}

23
src/file-viewer/cert.pem Normal file
View file

@ -0,0 +1,23 @@
-----BEGIN CERTIFICATE-----
MIIDxTCCAq2gAwIBAgIUBaaOXVkkE+6yarNyvzofETb+WLEwDQYJKoZIhvcNAQEL
BQAwdzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVN0YXRlMQ0wCwYDVQQHDARDaXR5
MRUwEwYDVQQKDAxNZWRpYSBTZXJ2ZXIxDzANBgNVBAMMBnplbml0aDEhMB8GCSqG
SIb3DQEJARYSbWVAcGFwZXJjbG92ZXIubmV0MB4XDTI1MDQyNzIxNTU0MFoXDTM1
MDQyNTIxNTU0MFowdzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVN0YXRlMQ0wCwYD
VQQHDARDaXR5MRUwEwYDVQQKDAxNZWRpYSBTZXJ2ZXIxDzANBgNVBAMMBnplbml0
aDEhMB8GCSqGSIb3DQEJARYSbWVAcGFwZXJjbG92ZXIubmV0MIIBIjANBgkqhkiG
9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv7lLwx8XwsuTeaIxTsHDL+Lx7eblsJ0XylVm
0/iIJS1Mrq6Be9St6vDWK/BWqqAn+MdqzSfLMy8EKazuHKtbTm2vlUIkjw28SoWP
6cRSCLx4hFGbF4tmRO+Bo+/4PpHPnheeolkjJ+CLO87tZ752D9JzjVND+WIj1QO+
bm+JBIi1TFREPh22/fSZBRpaRgqHcUEhICaiXaufvxQ6eihQfGSe00I7zRzGgnMl
51xjzkKkXd+r/FwTykd8ScJN25FMVDLsfJR59//geAZXYS25gQ4YL6R8u7ijidlS
IoDG8N+Fzw7W4yI+y8fIN4W1x/HsjiQ665CuWY3TMYo98OaGwwIDAQABo0kwRzAm
BgNVHREEHzAdggZ6ZW5pdGiCE25hcy5wYXBlcmNsb3Zlci5uZXQwHQYDVR0OBBYE
FDXkgNsMYZv1Pr+95RCCk7eHACGOMA0GCSqGSIb3DQEBCwUAA4IBAQB6942odKyD
TudifxRXbvcVe9LxSd7NimxRZzM5wTgA5KkxQT4CBM2wEPH/7e7Q/8scB9HbH2uP
f2vixoCM+Z3BWiYHFFk+1pf2myUdiFV2BC9g80txEerRkGLc18V6CdYNJ9wNPkiO
LW/RzXfEv+sqhaXh8dA46Ruz6SAbmscTMMYW4e9VYR+1p4Sm5UpTxrHzeg21YJKn
ud8kO1r7RhVgUGzkAzNaIMiBuJqGGdD5yV7Ng5C/DlJ9AAeYu1diM5LkIKjf+/8M
t/3l4eXS3Lda6+21rDvmfoK4Za6CAhcwgXIpqiRixE2MQNsxZ2XiJBVQHPrh8xYk
L5fq8KTGFwtd
-----END CERTIFICATE-----

View file

@ -0,0 +1,398 @@
// WARNING
// -------
// This file contains spoilers for COTYLEDON
// Consider reading through the entire archive before picking apart this
// code, as this contains the beginning AND the ending sections, which
// contains very precise storytelling. You've been warned...
//
// --> https://paperclover.net/file/cotyledon <--
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
// SPEEDBUMP
// Interstitial panel shown before entering the cotyledon archive: a
// canvas backdrop, a joke clover "captcha" (hidden until scripts reveal
// it via #captcha), and the entry buttons. Behavior is wired elsewhere
// through the #enter/#enter2 ids and the data-canvas hook.
export function Speedbump() {
  return (
    <div class="panel last">
      <div className="header">
        an interlude
      </div>
      <div className="content file-view file-view-text speedbump">
        <canvas
          style="linear-gradient(45deg, #111318, #181f20)"
          data-canvas="cotyledon"
        >
        </canvas>
        <header>
          <h1>cotyledon</h1>
        </header>
        <div id="captcha" style="display: none;">
          <p style="max-width:480px">
            please prove you're not a robot by selecting all of the images with
            four-leaf clovers, until there are only regular clovers.
            <noscript>
              this will require javascript enabled on your computer to verify
              the mouse clicks.
            </noscript>
          </p>
          <div className="enter-container">
            <div className="image-grid">
              <button>
                <img src="/captcha/1.jpeg" alt="a four-leaf clover" />
              </button>
              <button>
                <img src="/captcha/2.jpeg" alt="a four-leaf clover" />
              </button>
              <button>
                <img src="/captcha/3.jpeg" alt="a four-leaf clover" />
              </button>
              <button>
                <img src="/captcha/4.jpeg" alt="a four-leaf clover" />
              </button>
              <button>
                <img src="/captcha/5.jpeg" alt="a four-leaf clover" />
              </button>
              <button>
                <img src="/captcha/6.jpeg" alt="a four-leaf clover" />
              </button>
              <button>
                <img src="/captcha/7.jpeg" alt="a four-leaf clover" />
              </button>
              <button>
                <img src="/captcha/8.jpeg" alt="a four-leaf clover" />
              </button>
              <button>
                <img src="/captcha/9.jpeg" alt="a four-leaf clover" />
              </button>
            </div>
          </div>
          <div class="enter-container">
            <button id="enter2">all done</button>
          </div>
        </div>
        <div id="first">
          <p>
            this place is sacred, but dangerous. i have to keep visitors to an
            absolute minimum; you'll get dust on all the artifacts.
          </p>
          <p>
            by entering our museum, you agree not to use your camera. flash off
            isn't enough; the bits and bytes are alergic even to a camera's
            sensor
          </p>
          <p style="font-size:0.9rem;">
            (in english: please do not store downloads after you're done viewing
            them)
          </p>
          <div class="enter-container">
            <button id="enter">break my boundaries</button>
          </div>
        </div>
      </div>
    </div>
  );
}
// OPENING
// Opening/readme panel for the archive: suggests a reading order, gives
// the blanket content warning, and explains how file dates were
// reconstructed. Body text is intentional prose — keep verbatim.
export function Readme() {
  return (
    <div class="panel last">
      <div className="header">
        cotyledon
      </div>
      <div className="content file-view file-view-text">
        <div style="max-width: 71ch;padding:3rem;font-family:rmo,monospace">
          <p style="margin-top:0">
            welcome to the archive. if this is your first time here, i recommend
            starting in '<a href="/file/2017">2017</a>' and going
            chronologically from there. however, there is truly no wrong way to
            explore.
          </p>
          <p>
            note that there is a blanket trigger warning for everything in this
            archive: while there is nothing visually offensive, some portions of
            the text and emotions conveyed through this may hit extremely hard.
            you are warned.
          </p>
          <p>
            all file dates are real. at least as real as i could figure out.
            when i moved data across drives over my years, i accidentally had a
            few points where i stamped over all the dates with the day that
            moved the files. even fucked it up a final time in february 2025,
            while in the process of unfucking things.
          </p>
          <p>
            thankfully, my past self knew i'd want to assemble this kind of
            site, and because of that they were crazy about storing the dates of
            things inside of html, json/yaml files, and even in fucking
            databases. i'm glad it was all stored though, but jeez what a nerd.
          </p>
          <p>
            a few files were touched up for privacy, or otherwise re-encoded.
            some of them i added extra metadata.
          </p>
          <p>
            from the bottom of my heart: i hope you enjoy. it has been a
            nightmare putting this all together. technically and emotionally
            speaking. i'm glad we can put this all behind us, mark it as
            completed, and get started with the good shit.
          </p>
          <p>
            love,<br />clo
          </p>
          <br />
          <p>
            start here -&gt; <a href="/file/2017">2017</a>
          </p>
        </div>
      </div>
    </div>
  );
}
// TRUE ENDING. Written in Apple Notes.
// Final panel of the archive: a long personal letter, the uncensored
// journal entry, and the closing "fin." link. The body text is the
// artwork itself — reproduce verbatim, do not edit for grammar.
export function ForEveryone() {
  return (
    <>
      <div class="for_everyone">
        <p>today is my 21st birthday. april 30th, 2025.</p>
        <p>it's been nearly six months starting hormones.</p>
        <p>sometimes i feel great,</p>
        <p>sometimes i get dysphoria.</p>
        <p>with the walls around me gone</p>
        <p>that shit hits way harder than it did before.</p>
        <p>ugh..</p>
        <p>i'm glad the pain i felt is now explained,</p>
        <p>but now rendered in high definition.</p>
        <p>the smallest strands of hair on my face and belly act</p>
        <p>as sharpened nails to pierce my soul.</p>
        <p></p>
        <p>it's all a pathway to better days; the sun had risen.</p>
        <p>one little step at a time for both of us.</p>
        <p>today i quit my job. free falling, it feels so weird.</p>
        <p>like sky diving.</p>
        <p>the only thing i feel is cold wind.</p>
        <p>the only thing i see is everything,</p>
        <p>and it's beautiful.</p>
        <p>i have a month of falling before the parachute activates,</p>
        <p>gonna spend as much time of it on art as i can.</p>
        <p>that was, after all, my life plan:</p>
        <p>i wanted to make art, all the time,</p>
        <p>for everyone.</p>
        <p></p>
        <p>then you see what happened</p>
        <p>to the world and the internet.</p>
        <p>i never really got to live through that golden age,</p>
        <p>it probably sucked back then too.</p>
        <p>but now the big sites definitely stopped being fun.</p>
        <p>they slide their cold hands up my body</p>
        <p>and feel me around. it's unwelcoming, and</p>
        <p>inconsiderate to how sensitive my skin is.</p>
        <p>i'm so fucking glad i broke up with YouTube</p>
        <p>and their devilish friends.</p>
        <p>my NAS is at 5 / 24 TB</p>
        <p>and probably wont fill for the next decade.</p>
        <p></p>
        <p>it took 2 months for me to notice my body changed.</p>
        <p>that day was really nice, but it hurt a lot.</p>
        <p>a sharp, satisfying pain in my chest gave me life.</p>
        <p>learned new instincts for my arms</p>
        <p>so they'd stop poking my new shape.</p>
        <p>when i look at my face</p>
        <p>it's like a different person.</p>
        <p>she was the same as before, but completely new.</p>
        <p>something changed</p>
        <p>or i'm now used to seeing what makes me smile.</p>
        <p>regardless, whatever i see in the mirror, i smile.</p>
        <p>and, i don't hear that old name much anymore</p>
        <p>aside from nightmares. and you'll never repeat it, ok?</p>
        <p>okay.</p>
        <p></p>
        <p>been playing 'new canaan' by 'bill wurtz' on loop</p>
        <p>in the background.</p>
        <p>it kinda just feels right.</p>
        <p>especially when that verse near the end comes on.</p>
        <p></p>
        <p>more people have been allowed to visit me.</p>
        <p>my apartment used to be just for me,</p>
        <p>but the more i felt like a person</p>
        <p>the more i felt like having others over.</p>
        <p>still have to decorate and clean it a little,</p>
        <p>but it isn't a job to do alone.</p>
        <p>we dragged a giant a rug across the city one day,</p>
        <p>and it felt was like anything was possible.</p>
        <p>sometimes i have ten people visit in a day,</p>
        <p>or sometimes i focus my little eyes on just one.</p>
        <p>i never really know what i want to do</p>
        <p>until the time actually comes.</p>
        <p></p>
        {/* FILIP */}
        <p>i think about the times i was by the water with you.</p>
        <p>the sun setting warmly, icy air fell on our shoulders.</p>
        {/* NATALIE */}
        <p>and how we walked up to the top of that hill,</p>
        <p>you picked up and disposed a nail on the ground,</p>
        <p>walking the city thru places i've never been.</p>
        {/* BEN */}
        <p>or hiking through the park talking about compilers,</p>
        <p>tiring me out until i'd fall asleep in your arms.</p>
        {/* ELENA */}
        <p>and the way you held on to my hand as i woke up,</p>
        <p>noticing how i was trying to hide nightmare's tears.</p>
        <p></p>
        {/* HIGH SCHOOL */}
        <p>i remember we were yelling lyrics loudly,</p>
        <p>out of key yet cheered on because it was fun.</p>
        {/* ADVAITH/NATALIE */}
        <p>and when we all toured the big corporate office,</p>
        {/* AYU/HARRIS */}
        <p>then snuck in to some startup's office after hours;</p>
        <p>i don't remember what movie we watched.</p>
        {/* COLLEGE, DAY 1 IN EV's ROOM */}
        <p>i remember laying on the bunk bed,</p>
        <p>while the rest played a card game.</p>
        {/* MEGHAN/MORE */}
        <p>with us all laying on the rug, staring at the TV</p>
        <p>
          as the ending twist to {/* SEVERANCE */ "that show"} was revealed.
        </p>
        <p></p>
        <p>all the moments i cherish,</p>
        <p>i love because it was always me.</p>
        <p>i didn't have to pretend,</p>
        <p>even if i didn't know who i was at the time.</p>
        <p>you all were there. for me.</p>
        <p></p>
        <p>i don't want to pretend any more</p>
        <p>i want to be myself. for everyone.</p>
        <p></p>
        <p>oh, the song ended. i thought it was on loop?</p>
        <p>it's late... can hear the crickets...</p>
        <p>and i can almost see the moon... mmmm...</p>
        <p>...nah, too much light pollution.</p>
        <p></p>
        <p>one day. one day.</p>
        <p></p>
        <p class="normal">
          before i go, i want to show the uncensored version of "journal about a
          girl", because i can trust you at least. keep in mind, i think you're
          one of the first people to ever see this.
        </p>
      </div>
      <div class="for_everyone" style="max-width:80ch;">
        <blockquote>
          <p>journal - 2024-09-14</p>
          <p>
            been at HackMIT today on behalf of the company. it's fun. me and
            zack were running around looking for people that might be good
            hires. he had this magic arbitrary criteria to tell "oh this person
            is probably cracked let's talk to them" and we go to the first one.
            they were a nerd, perfect. they seemed to be extremely talented with
            some extreme software projects.<br />
            okay.. oof... its still clouding my mind<br />
            i cant shake that feeling away
          </p>
          <p>hold on...</p>
          <p>
            at some point they open one of their profiles to navigate to some
            code, and it displays for a couple of seconds: "pronouns: she/they".
            i don't actually know anything about this person, but it was my
            perception that she is trans. their appearance, physique, and age
            felt similar to me, which tends makes people think you are male.
          </p>
          <p>
            but... she was having fun being herself. being a legend of identity
            and of her skill in computer science. winning the physics major.
            making cool shit at the hackathon, and probably in life. my
            perception of her was the exact essence of who i myself wanted to
            be. i was jealous of her life.
          </p>
          <p>
            i tried hard to avoid a breakdown. success. but i was feeling
            distant. the next hour or so was disorienting, trying not to think
            about it too hard. i think there was one possibly interesting person
            we talked to. i don't remember any of the other conversations. they
            were not important. but i couldn't think through them regardless.
          </p>
          <p>
            later, i decided to read some of her code. i either have a huge
            dislike towards the Rust programming language and/or it was not high
            quality code. welp, so just is a person studying. my perception was
            just a perception, inaccurate but impacting. i know i need to become
            myself, whoever that is. otherwise, i'm just going to feel this shit
            at higher doses. i think about this every day, and the amount of
            time i feel being consumed by these problems only grows.
          </p>
          <p>
            getting through it all is a lonely feeling. not because no one is
            around, but because i am isolated emotionally. i know other people
            hit these feelings, but we all are too afraid to speak up, and it's
            all lonely.
          </p>
          <p>
            waiting on a reply from someone from healthcare. it'll be slow, but
            it will be okay.
          </p>
        </blockquote>
      </div>
      <div class="for_everyone">
        <p class="normal">
          i've learned that even when i feel alone, it doesn't have to feel
          lonely. i know it's hard, dear. i know it's scary. but i promise it's
          possible. we're all in this together. struggling together. sacrificing
          together. we dedicate our lives to each you, and our art for everyone.
        </p>
        <p
          class="normal"
          style="font-size:2rem;color:#9C91FF;font-family:times,serif;font-style:italic"
        >
          and then we knew,<br />
          just like paper airplanes: that we could fly...
        </p>
        <br />
        <p class="normal">
          <a
            href="/"
            style="text-decoration:underline;text-underline-offset:0.2em;"
          >
            fin.
          </a>
        </p>
      </div>
    </>
  );
}
// Tells the file viewer to render this component with the "text" layout.
ForEveryone.class = "text";

165
src/file-viewer/ffmpeg.ts Normal file
View file

@ -0,0 +1,165 @@
// Utilities for spawning ffmpeg and consuming its output as a `Progress`
// A headless parser is available with `Parse`
// One parsed line of ffmpeg stderr:
// - "ignore": startup noise, header dumps, HLS segment notices
// - "log": a line worth surfacing, with a severity guessed from wording
// - "progress": a "frame=..." stats line; `totalFrames` is estimated
//   from the input duration and fps, and `rest` carries any other
//   key=value pairs found on the line.
export type Line =
  | { kind: "ignore" }
  | { kind: "log"; level: "info" | "warn" | "error"; message: string }
  | {
    kind: "progress";
    frame: number;
    totalFrames: number;
    speed: string | null;
    fps: number | null;
    rest: Record<string, string>;
  };
// Flags prepended to every ffmpeg invocation: suppress the banner and
// force periodic "frame=" stats lines for `Parse` to consume.
export const defaultExtraOptions = [
  "-hide_banner",
  "-stats",
];
// Options for `spawn`.
export interface SpawnOptions {
  // Arguments passed to ffmpeg after `defaultExtraOptions`.
  args: string[];
  // Label shown in the progress bar and in the success message.
  title: string;
  // Path to the ffmpeg binary; defaults to "ffmpeg" on $PATH.
  ffmpeg?: string;
  // Reuse an existing Progress bar instead of creating a new one.
  progress?: Progress;
  // Working directory for the ffmpeg process.
  cwd: string;
}
// Spawn ffmpeg with `args` (after `defaultExtraOptions`) and render its
// parsed stderr stats into a Progress bar labelled `title`.
// Resolves once the process closes.
// NOTE(review): on a non-zero exit the Error is *returned*, not thrown,
// so `await spawn(...)` yields the Error object — confirm callers check
// the resolved value.
export async function spawn(options: SpawnOptions) {
  const { ffmpeg = "ffmpeg", args, title, cwd } = options;
  // SVT_LOG lowers the SVT-AV1 encoder's log verbosity so its banner
  // doesn't pollute the parsed stderr — presumably 2 = errors/warnings
  // only; TODO confirm against SVT-AV1 docs.
  const proc = child_process.spawn(ffmpeg, [...defaultExtraOptions, ...args], {
    stdio: ["ignore", "inherit", "pipe"],
    env: { ...process.env, SVT_LOG: "2" },
    cwd,
  });
  const parser = new Parse();
  const bar = options.progress ?? new Progress({ text: title });
  let running = true;
  // ffmpeg writes stats to stderr; feed it to the parser line-by-line.
  const splitter = readline.createInterface({ input: proc.stderr });
  splitter.on("line", (line) => {
    const result = parser.onLine(line);
    if (result.kind === "ignore") {
      return;
    } else if (result.kind === "log") {
      console[result.level](result.message);
    } else if (result.kind === "progress") {
      // Drop progress updates that race in after the process closed.
      if (!running) return;
      const { frame, totalFrames, fps, speed } = result;
      bar.value = frame;
      bar.total = totalFrames;
      const extras = [
        `${fps} fps`,
        speed,
        parser.hlsFile,
      ].filter(Boolean).join(", ");
      bar.text = `${title} ${frame}/${totalFrames} ${
        extras.length > 0 ? `(${extras})` : ""
      }`;
    } else result satisfies never;
  });
  const [code, signal] = await events.once(proc, "close");
  running = false;
  if (code !== 0) {
    const fmt = code ? `code ${code}` : `signal ${signal}`;
    const e: any = new Error(`ffmpeg failed with ${fmt}`);
    e.args = [ffmpeg, ...args].join(" ");
    e.code = code;
    e.signal = signal;
    bar.error(e.message);
    return e;
  }
  bar.success(title);
}
// Headless, stateful parser for ffmpeg stderr.
// While `parsingStart` is true it consumes the startup header dump,
// recording the longest input "Duration:" and the first stream fps so
// that the first "frame=" line can fix an estimated total frame count
// for progress display.
export class Parse {
  // True until the first "frame=" progress line has been seen.
  parsingStart = true;
  // Non-null while skipping an indented Input/Output/"Stream mapping"
  // section; "inp" sections are additionally mined for duration/fps.
  inIndentedIgnore: null | "out" | "inp" | "other" = null;
  // Longest input duration seen so far, in seconds.
  durationTime = 0;
  // fps reported by the first input stream, if any.
  targetFps: number | null = null;
  // Basename of the last completed (non-.tmp) HLS segment written.
  hlsFile: string | null = null;
  // Estimated total frames, fixed when the first progress line arrives.
  durationFrames = 0;
  onLine(line: string): Line {
    line = line.trimEnd();
    if (/^frame=/.test(line)) {
      if (this.parsingStart) {
        // First stats line: the header phase is over; freeze the total
        // frame estimate (fps falls back to 25 if none was reported).
        this.parsingStart = false;
        this.durationFrames = Math.ceil(
          (this.targetFps ?? 25) * this.durationTime,
        );
      }
      // Split "frame= 123 fps= 30 ..." into key/value pairs.
      const parts = Object.fromEntries(
        [...line.matchAll(/\b([a-z0-9]+)=\s*([^ ]+)(?= |$)/ig)].map((
          [, k, v],
        ) => [k, v]),
      );
      const { frame, fps, speed, ...rest } = parts;
      return {
        kind: "progress",
        frame: Number(frame),
        totalFrames: this.durationFrames,
        fps: Number(fps),
        speed,
        rest,
      };
    }
    if (this.parsingStart) {
      if (this.inIndentedIgnore) {
        if (line.startsWith(" ") || line.startsWith("\t")) {
          line = line.trimStart();
          if (this.inIndentedIgnore === "inp") {
            // "Duration: HH:MM:SS.ss" — keep the longest input duration.
            const match = line.match(/^Duration: (\d+):(\d+):(\d+\.\d+)/);
            if (match) {
              const [h, m, s] = match.slice(1).map((x) => Number(x));
              this.durationTime = Math.max(
                this.durationTime,
                h * 60 * 60 + m * 60 + s,
              );
            }
            // "Stream ..., N fps" — the first reported fps wins.
            if (!this.targetFps) {
              const match = line.match(/^Stream.*, (\d+) fps/);
              if (match) this.targetFps = Number(match[1]);
            }
          }
          return { kind: "ignore" };
        }
        // A dedented line ends the ignored section.
        this.inIndentedIgnore = null;
      }
      if (line === "Press [q] to stop, [?] for help") {
        return { kind: "ignore" };
      }
      if (line === "Stream mapping:") {
        this.inIndentedIgnore = "other";
        return { kind: "ignore" };
      }
      if (line.startsWith("Output #") || line.startsWith("Input #")) {
        // "Inp"/"Out" (lowercased) tags which section kind follows.
        this.inIndentedIgnore = line.slice(0, 3).toLowerCase() as "inp" | "out";
        return { kind: "ignore" };
      }
    }
    // Track HLS segment writes; ".tmp" files are not yet finished.
    const hlsMatch = line.match(/^\[hls @ .*Opening '(.+)' for writing/);
    if (hlsMatch) {
      if (!hlsMatch[1].endsWith(".tmp")) {
        this.hlsFile = path.basename(hlsMatch[1]);
      }
      return { kind: "ignore" };
    }
    // Anything else is a log line; guess severity from its wording.
    let level: Extract<Line, { kind: "log" }>["level"] = "info";
    if (line.toLowerCase().includes("err")) level = "error";
    else if (line.toLowerCase().includes("warn")) level = "warn";
    return { kind: "log", level, message: line };
  }
}
import * as child_process from "node:child_process";
import * as readline from "node:readline";
import * as process from "node:process";
import events from "node:events";
import * as path from "node:path";
import { Progress } from "@paperclover/console/Progress";

288
src/file-viewer/format.ts Normal file
View file

@ -0,0 +1,288 @@
// Site domain used for autolink detection in text views.
const findDomain = "paperclover.net";
// Human-readable byte count: whole bytes below 1 KB, otherwise one
// decimal place in the largest fitting binary unit up to GB.
export function formatSize(bytes: number) {
  const KB = 1024;
  const MB = KB * 1024;
  const GB = MB * 1024;
  if (bytes < KB) return `${bytes} bytes`;
  if (bytes < MB) return `${(bytes / KB).toFixed(1)} KB`;
  if (bytes < GB) return `${(bytes / MB).toFixed(1)} MB`;
  return `${(bytes / GB).toFixed(1)} GB`;
}
// export function formatDateDefined(date: Date) {
// // YYYY-MM-DD, format in PST timezone
// return date.toLocaleDateString("sv", { timeZone: "America/Los_Angeles" });
// }
//
// export function formatShortDate(date: Date) {
// // YY-MM-DD, format in PST timezone
// return formatDate(date).slice(2);
// }
// Format a duration in seconds as "M:SS".
// Fractional input is floored so e.g. 125.7 renders as "2:05" instead of
// the malformed "2:5.7" the old modulo-only code produced.
export function formatDuration(seconds: number) {
  const whole = Math.floor(seconds);
  const minutes = Math.floor(whole / 60);
  const remainingSeconds = whole % 60;
  return `${minutes}:${remainingSeconds.toString().padStart(2, "0")}`;
}
// Format a duration in seconds as "H:MM:SS" (hours unpadded).
// Fractional input is floored, matching formatDuration.
export function formatDurationLong(seconds: number) {
  const whole = Math.floor(seconds);
  const hours = Math.floor(whole / 3600);
  const minutes = Math.floor((whole % 3600) / 60);
  const remainingSeconds = whole % 60;
  return `${hours}:${minutes.toString().padStart(2, "0")}:${remainingSeconds.toString().padStart(2, "0")}`;
}
// Percent-encode a URI path but re-allow characters that are safe and
// common in file paths, keeping the result human-readable.
export function escapeUri(uri: string) {
  let out = encodeURIComponent(uri);
  const reallow: [RegExp, string][] = [
    [/%2F/gi, "/"],
    [/%3A/gi, ":"],
    [/%2B/gi, "+"],
    [/%40/gi, "@"],
    [/%2D/gi, "-"],
    [/%5F/gi, "_"],
    [/%2E/gi, "."],
    [/%2C/gi, ","],
  ];
  for (const [pattern, literal] of reallow) {
    out = out.replace(pattern, literal);
  }
  return out;
}
// Returns escaped HTML
// Features:
// - autolink detection
// - via \bpaperclover.net/[a-zA-Z0-9_\.+-]+
// - via \b/file/[a-zA-Z0-9_\.+-]+
// - via \bhttps://...
// - via name of a sibling file's basename
// - reformat (c) into ©
//
// This formatter was written with AI. Then manually fixed since AI does not work.
// Returns escaped HTML with autolinked URLs and sibling-file references.
// Supported forms: http(s) URLs, protocol-less paperclover.net paths,
// /file/ paths, ./relative paths, and bare sibling basenames; "(c)" is
// reformatted to "©".
export function highlightLinksInTextView(
  text: string,
  siblingFiles: MediaFile[] = [],
) {
  // Lookup of sibling basenames for the final linkification pass.
  const siblingLookup = Object.fromEntries(
    siblingFiles
      .filter((f) => f.basename !== "readme.txt")
      .map((f) => [f.basename, f]),
  );
  // First escape the HTML to prevent XSS
  let processedText = escapeHtml(text);
  // Replace (c) with ©
  processedText = processedText.replace(/\(c\)/gi, "©");
  // All URL forms are matched in a single pass so links never nest.
  const urlRegex = new RegExp(
    "(" +
      // Group 1: https:// or http:// URLs
      "\\bhttps?:\\/\\/[a-zA-Z0-9_\\.\\-]+\\.[a-zA-Z0-9_\\.\\-]+[a-zA-Z0-9_\\.\\-\\/\\?=&%+#]*" +
      "|" +
      // Group 2: domain URLs without protocol. A single "/" separates
      // the domain from the path (the previous pattern required "//",
      // which never matched real text like "paperclover.net/questions").
      findDomain +
      "\\/[a-zA-Z0-9_\\.\\+\\-]+" +
      "|" +
      // Group 3: /file/ URLs
      "\\/file\\/[a-zA-Z0-9_\\.\\+\\-\\/]+" +
      ")\\b" +
      "|" +
      // Group 4: ./ relative paths (not word-bounded)
      "(?<=\\s|^)\\.\\/[\\w\\-\\.]+",
    "g",
  );
  processedText = processedText.replace(urlRegex, (match: string) => {
    // Case 1: https:// or http:// URLs
    if (match.startsWith("http")) {
      if (match.includes(findDomain)) {
        // Internal: strip the origin, then collapse duplicate slashes.
        return `<a href="${
          match
            .replace(/https?:\/\/paperclover\.net\/+/, "/")
            .replace(/\/\/+/g, "/")
        }">${match}</a>`;
      }
      // External: collapse duplicate slashes in the path only — the
      // (?<!:) guard keeps the protocol's "//" intact (the old code
      // rewrote "https://" into "https:/", producing broken hrefs).
      return `<a href="${
        match.replace(/(?<!:)\/\/+/g, "/")
      }" target="_blank" rel="noopener noreferrer">${match}</a>`;
    }
    // Case 2: domain URLs without protocol
    if (match.startsWith(findDomain)) {
      return `<a href="${
        match.replace(findDomain + "/", "/").replace(/\/\/+/g, "/")
      }">${match}</a>`;
    }
    // Case 3: /file/ URLs
    if (match.startsWith("/file/")) {
      return `<a href="${match}">${match}</a>`;
    }
    // Case 4: ./ relative paths — only link when the target is a known
    // sibling; otherwise leave the text alone instead of emitting the
    // broken "$(unknown)" link the old fallback produced.
    if (match.startsWith("./")) {
      const filename = match.substring(2);
      const siblingFile = siblingFiles.find((f) => f.basename === filename);
      if (siblingFile) {
        return `<a href="/file/${siblingFile.path}">${match}</a>`;
      }
    }
    return match;
  });
  // Link bare sibling basenames, but only inside text segments — the
  // split alternates text (even indices) and tags (odd indices), so
  // existing <a> tags are never nested.
  if (siblingFiles.length > 0) {
    const escapedBasenames = siblingFiles.map((f) =>
      f.basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
    );
    const pattern = new RegExp(`\\b(${escapedBasenames.join("|")})\\b`, "g");
    const parts = processedText.split(/(<[^>]*>)/);
    for (let i = 0; i < parts.length; i += 2) {
      parts[i] = parts[i].replace(pattern, (match: string) => {
        const file = siblingLookup[match];
        if (file) {
          return `<a href="/file/${
            file.path.replace(/^\//, "").replace(/\/\/+/g, "/")
          }">${match}</a>`;
        }
        return match;
      });
    }
    processedText = parts.join("");
  }
  return processedText;
}
// Render a "#mode=convo" text file as chat-style HTML.
// Header lines "#X=Y" declare a one-character speaker alias X rendered
// with CSS class "s-Y"; lines starting with a declared alias plus a
// space belong to that speaker, everything else belongs to "me".
// "[IMG:/file/...]" becomes an inline image; a bare "---" from "me"
// becomes a horizontal rule. Consecutive lines from the same speaker
// are grouped into one paragraph <div>.
export function highlightConvo(text: string) {
  text = text.replace(/^#mode=convo\n/, "");
  const lines = text.split("\n");
  const paras: { speaker: string | null; lines: string[] }[] = [];
  let currentPara: string[] = [];
  let currentSpeaker: string | null = null;
  const speakers: Record<string, string> = {};
  const getSpeaker = (s: string) => {
    if (s[1] === " " && speakers[s[0]]) {
      return s[0];
    }
    return null;
  };
  for (const line of lines) {
    let trimmed = line.trim();
    if (line.startsWith("#")) {
      // Parse "#X=Y" speaker declarations; silently skip any other "#"
      // line instead of crashing on the unchecked match (old behavior
      // threw a TypeError on e.g. "# comment").
      const decl = trimmed.match(/^#(.)=(.*)$/);
      if (decl) {
        const [, speaker, color] = decl;
        speakers[speaker] = color;
      }
      continue;
    }
    if (trimmed === "") {
      continue;
    }
    let speaker = getSpeaker(trimmed);
    if (speaker) {
      trimmed = trimmed.substring(speaker.length).trimStart();
      speaker = speakers[speaker];
    } else {
      speaker = "me";
    }
    trimmed = trimmed.replace(
      /\[IMG:(\/file\/[^\]]+)\]/g,
      '<img src="$1" alt="attachment" class="convo-img" width="300" />',
    );
    if (trimmed === "---" && speaker === "me") {
      trimmed = "<hr/>";
    }
    if (speaker === currentSpeaker) {
      currentPara.push(trimmed);
    } else {
      // Speaker changed: flush the previous paragraph, start a new one.
      if (currentPara.length > 0) {
        paras.push({
          speaker: currentSpeaker,
          lines: currentPara,
        });
      }
      currentPara = [trimmed];
      currentSpeaker = speaker;
    }
  }
  if (currentPara.length > 0) {
    paras.push({
      speaker: currentSpeaker,
      lines: currentPara,
    });
  }
  return paras
    .map(({ speaker, lines }) => {
      return `<div class="s-${speaker}">${
        lines
          .map((line) => `<div class="line">${line}</div>`)
          .join("\n")
      }</div>`;
    })
    .join("\n");
}
// Render text line-by-line as HTML, coloring "#"-prefixed lines with the
// site's primary color. Non-comment lines are right-trimmed; blank lines
// become &nbsp; so they keep their height.
export function highlightHashComments(text: string) {
  const renderLine = (line: string) =>
    line.startsWith("#")
      ? `<div style="color: var(--primary);">${line}</div>`
      : `<div>${line.trimEnd() || "&nbsp;"}</div>`;
  return text.split("\n").map(renderLine).join("\n");
}
// Sentinel dates: values before `unknownDate` mean "date fully unknown";
// values before `unknownDateWithKnownYear` encode only a known year as a
// small day offset from the epoch (see formatDate).
const unknownDate = new Date("1970-01-03");
const unknownDateWithKnownYear = new Date("1970-02-20");
// Format a file date as "MM.DD.YY", with placeholder forms for files
// whose date (or everything but the year) is unknown.
export function formatDate(dateTime: Date) {
  if (dateTime < unknownDate) {
    return "??.??.??";
  }
  if (dateTime < unknownDateWithKnownYear) {
    // Epoch-day offset encodes the two-digit year, starting at 21.
    const year = 21 + Math.floor(dateTime.getTime() / 86400000);
    return `xx.xx.${year}`;
  }
  const month = (dateTime.getMonth() + 1).toString().padStart(2, "0");
  const day = dateTime.getDate().toString().padStart(2, "0");
  const year = dateTime.getFullYear().toString().slice(2);
  return `${month}.${day}.${year}`;
}
import type { MediaFile } from "@/file-viewer/models/MediaFile.ts";
import { escapeHtml } from "#ssr";

File diff suppressed because it is too large Load diff

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,268 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>fileTypes</key>
<array>
<string>patch</string>
<string>diff</string>
<string>rej</string>
</array>
<key>firstLineMatch</key>
<string>(?x)^
(===\ modified\ file
|==== \s* // .+ \s - \s .+ \s+ ====
|Index:\
|---\ [^%\n]
|\*\*\*.*\d{4}\s*$
|\d+(,\d+)* (a|d|c) \d+(,\d+)* $
|diff\ --git\
|commit\ [0-9a-f]{40}$
)</string>
<key>keyEquivalent</key>
<string>^~D</string>
<key>name</key>
<string>Diff</string>
<key>patterns</key>
<array>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.separator.diff</string>
</dict>
</dict>
<key>match</key>
<string>^((\*{15})|(={67})|(-{3}))$\n?</string>
<key>name</key>
<string>meta.separator.diff</string>
</dict>
<dict>
<key>match</key>
<string>^\d+(,\d+)*(a|d|c)\d+(,\d+)*$\n?</string>
<key>name</key>
<string>meta.diff.range.normal</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>meta.toc-list.line-number.diff</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
</dict>
<key>match</key>
<string>^(@@)\s*(.+?)\s*(@@.*)($\n?)?</string>
<key>name</key>
<string>meta.diff.range.unified</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
<key>6</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
<key>7</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
</dict>
<key>match</key>
<string>^(((\-{3}) .+ (\-{4}))|((\*{3}) .+ (\*{4})))$\n?</string>
<key>name</key>
<string>meta.diff.range.context</string>
</dict>
<dict>
<key>match</key>
<string>^diff --git a/.*$\n?</string>
<key>name</key>
<string>meta.diff.header.git</string>
</dict>
<dict>
<key>match</key>
<string>^diff (-|\S+\s+\S+).*$\n?</string>
<key>name</key>
<string>meta.diff.header.command</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.definition.from-file.diff</string>
</dict>
<key>6</key>
<dict>
<key>name</key>
<string>punctuation.definition.from-file.diff</string>
</dict>
<key>7</key>
<dict>
<key>name</key>
<string>punctuation.definition.from-file.diff</string>
</dict>
</dict>
<key>match</key>
<string>(^(((-{3}) .+)|((\*{3}) .+))$\n?|^(={4}) .+(?= - ))</string>
<key>name</key>
<string>meta.diff.header.from-file</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.definition.to-file.diff</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.to-file.diff</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.definition.to-file.diff</string>
</dict>
</dict>
<key>match</key>
<string>(^(\+{3}) .+$\n?| (-) .* (={4})$\n?)</string>
<key>name</key>
<string>meta.diff.header.to-file</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.inserted.diff</string>
</dict>
<key>6</key>
<dict>
<key>name</key>
<string>punctuation.definition.inserted.diff</string>
</dict>
</dict>
<key>match</key>
<string>^(((&gt;)( .*)?)|((\+).*))$\n?</string>
<key>name</key>
<string>markup.inserted.diff</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.changed.diff</string>
</dict>
</dict>
<key>match</key>
<string>^(!).*$\n?</string>
<key>name</key>
<string>markup.changed.diff</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.deleted.diff</string>
</dict>
<key>6</key>
<dict>
<key>name</key>
<string>punctuation.definition.deleted.diff</string>
</dict>
</dict>
<key>match</key>
<string>^(((&lt;)( .*)?)|((-).*))$\n?</string>
<key>name</key>
<string>markup.deleted.diff</string>
</dict>
<dict>
<key>begin</key>
<string>^(#)</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.diff</string>
</dict>
</dict>
<key>comment</key>
<string>Git produces unified diffs with embedded comments"</string>
<key>end</key>
<string>\n</string>
<key>name</key>
<string>comment.line.number-sign.diff</string>
</dict>
<dict>
<key>match</key>
<string>^index [0-9a-f]{7,40}\.\.[0-9a-f]{7,40}.*$\n?</string>
<key>name</key>
<string>meta.diff.index.git</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.separator.key-value.diff</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>meta.toc-list.file-name.diff</string>
</dict>
</dict>
<key>match</key>
<string>^Index(:) (.+)$\n?</string>
<key>name</key>
<string>meta.diff.index</string>
</dict>
<dict>
<key>match</key>
<string>^Only in .*: .*$\n?</string>
<key>name</key>
<string>meta.diff.only-in</string>
</dict>
</array>
<key>scopeName</key>
<string>source.diff</string>
<key>uuid</key>
<string>7E848FF4-708E-11D9-97B4-0011242E4184</string>
</dict>
</plist>

View file

@@ -0,0 +1,169 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>uuid</key>
<string>E07EC438-7B75-4437-8AA1-DA94C1E6EACC</string>
<key>patterns</key>
<array>
<dict>
<key>name</key>
<string>keyword.command.dosbatch</string>
<key>match</key>
<string>\b(?i)(?:append|assoc|at|attrib|break|cacls|cd|chcp|chdir|chkdsk|chkntfs|cls|cmd|color|comp|compact|convert|copy|date|del|dir|diskcomp|diskcopy|doskey|echo|endlocal|erase|fc|find|findstr|format|ftype|graftabl|help|keyb|label|md|mkdir|mode|more|move|path|pause|popd|print|prompt|pushd|rd|recover|ren|rename|replace|restore|rmdir|set|setlocal|shift|sort|start|subst|time|title|tree|type|ver|verify|vol|xcopy)\b</string>
</dict>
<dict>
<key>name</key>
<string>keyword.control.statement.dosbatch</string>
<key>match</key>
<string>\b(?i)(?:goto|call|exit)\b</string>
</dict>
<dict>
<key>name</key>
<string>keyword.control.conditional.if.dosbatch</string>
<key>match</key>
<string>\b(?i)if\s+((not)\s+)(exist|defined|errorlevel|cmdextversion)\b</string>
</dict>
<dict>
<key>name</key>
<string>keyword.control.conditional.dosbatch</string>
<key>match</key>
<string>\b(?i)(?:if|else)\b</string>
</dict>
<dict>
<key>name</key>
<string>keyword.control.repeat.dosbatch</string>
<key>match</key>
<string>\b(?i)for\b</string>
</dict>
<dict>
<key>name</key>
<string>keyword.operator.dosbatch</string>
<key>match</key>
<string>\b(?:EQU|NEQ|LSS|LEQ|GTR|GEQ)\b</string>
</dict>
<dict>
<key>name</key>
<string>comment.line.rem.dosbatch</string>
<key>match</key>
<string>\b(?i)rem(?:$|\s.*$)</string>
</dict>
<dict>
<key>name</key>
<string>comment.line.colons.dosbatch</string>
<key>match</key>
<string>\s*:\s*:.*$</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.parameter.function.begin.shell</string>
</dict>
</dict>
<key>name</key>
<string>variable.parameter.function.dosbatch</string>
<key>match</key>
<string>(?i)(%)(~(?:f|d|p|n|x|s|a|t|z|\$[^:]*:)*)?\d</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.parameter.loop.begin.shell</string>
</dict>
</dict>
<key>name</key>
<string>variable.parameter.loop.dosbatch</string>
<key>match</key>
<string>(?i)(%%)(~(?:f|d|p|n|x|s|a|t|z|\$[^:]*:)*)?[a-z]</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.parsetime.begin.shell</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>variable.other.parsetime.end.shell</string>
</dict>
</dict>
<key>name</key>
<string>variable.other.parsetime.dosbatch</string>
<key>match</key>
<string>(%)[^%]+(%)</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.delayed.begin.shell</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>variable.other.delayed.end.shell</string>
</dict>
</dict>
<key>name</key>
<string>variable.other.delayed.dosbatch</string>
<key>match</key>
<string>(!)[^!]+(!)</string>
</dict>
<dict>
<key>begin</key>
<string>"</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.shell</string>
</dict>
</dict>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.shell</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.double.dosbatch</string>
<key>end</key>
<string>"|$</string>
</dict>
<dict>
<key>name</key>
<string>keyword.operator.pipe.dosbatch</string>
<key>match</key>
<string>[|]</string>
</dict>
<dict>
<key>name</key>
<string>keyword.operator.redirect.shell</string>
<key>match</key>
<string>&amp;&gt;|\d*&gt;&amp;\d*|\d*(&gt;&gt;|&gt;|&lt;)|\d*&lt;&amp;|\d*&lt;&gt;</string>
</dict>
</array>
<key>name</key>
<string>Batch File</string>
<key>scopeName</key>
<string>source.dosbatch</string>
<key>fileTypes</key>
<array>
<string>bat</string>
</array>
</dict>
</plist>

View file

@@ -0,0 +1,386 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>fileTypes</key>
<array>
<string>json</string>
<string>sublime-settings</string>
<string>sublime-menu</string>
<string>sublime-keymap</string>
<string>sublime-mousemap</string>
<string>sublime-theme</string>
<string>sublime-build</string>
<string>sublime-project</string>
<string>sublime-completions</string>
</array>
<key>foldingStartMarker</key>
<string>(?x) # turn on extended mode
^ # a line beginning with
\s* # some optional space
[{\[] # the start of an object or array
(?! # but not followed by
.* # whatever
[}\]] # and the close of an object or array
,? # an optional comma
\s* # some optional space
$ # at the end of the line
)
| # ...or...
[{\[] # the start of an object or array
\s* # some optional space
$ # at the end of the line</string>
<key>foldingStopMarker</key>
<string>(?x) # turn on extended mode
^ # a line beginning with
\s* # some optional space
[}\]] # and the close of an object or array</string>
<key>keyEquivalent</key>
<string>^~J</string>
<key>name</key>
<string>JSON (Javascript Next)</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#value</string>
</dict>
</array>
<key>repository</key>
<dict>
<key>array</key>
<dict>
<key>begin</key>
<string>\[</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.array.begin.json</string>
</dict>
</dict>
<key>end</key>
<string>\]</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.array.end.json</string>
</dict>
</dict>
<key>name</key>
<string>meta.structure.array.json</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#value</string>
</dict>
<dict>
<key>match</key>
<string>,</string>
<key>name</key>
<string>punctuation.separator.array.json</string>
</dict>
<dict>
<key>match</key>
<string>[^\s\]]</string>
<key>name</key>
<string>invalid.illegal.expected-array-separator.json</string>
</dict>
</array>
</dict>
<key>comments</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>/\*\*(?!/)</string>
<key>captures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.json</string>
</dict>
</dict>
<key>end</key>
<string>\*/</string>
<key>name</key>
<string>comment.block.documentation.json</string>
</dict>
<dict>
<key>begin</key>
<string>/\*</string>
<key>captures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.json</string>
</dict>
</dict>
<key>end</key>
<string>\*/</string>
<key>name</key>
<string>comment.block.json</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.json</string>
</dict>
</dict>
<key>match</key>
<string>(//).*$\n?</string>
<key>name</key>
<string>comment.line.double-slash.js</string>
</dict>
</array>
</dict>
<key>constant</key>
<dict>
<key>match</key>
<string>\b(?:true|false|null)\b</string>
<key>name</key>
<string>constant.language.json</string>
</dict>
<key>number</key>
<dict>
<key>match</key>
<string>(?x) # turn on extended mode
-? # an optional minus
(?:
0 # a zero
| # ...or...
[1-9] # a 1-9 character
\d* # followed by zero or more digits
)
(?:
(?:
\. # a period
\d+ # followed by one or more digits
)?
(?:
[eE] # an e character
[+-]? # followed by an option +/-
\d+ # followed by one or more digits
)? # make exponent optional
)? # make decimal portion optional</string>
<key>name</key>
<string>constant.numeric.json</string>
</dict>
<key>object</key>
<dict>
<key>begin</key>
<string>\{</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.dictionary.begin.json</string>
</dict>
</dict>
<key>end</key>
<string>\}</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.dictionary.end.json</string>
</dict>
</dict>
<key>name</key>
<string>meta.structure.dictionary.json</string>
<key>patterns</key>
<array>
<dict>
<key>comment</key>
<string>the JSON object key</string>
<key>include</key>
<string>#objectkey</string>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
<dict>
<key>begin</key>
<string>:</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.separator.dictionary.key-value.json</string>
</dict>
</dict>
<key>end</key>
<string>(,)|(?=\})</string>
<key>endCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.separator.dictionary.pair.json</string>
</dict>
</dict>
<key>name</key>
<string>meta.structure.dictionary.value.json</string>
<key>patterns</key>
<array>
<dict>
<key>comment</key>
<string>the JSON object value</string>
<key>include</key>
<string>#value</string>
</dict>
<dict>
<key>match</key>
<string>[^\s,]</string>
<key>name</key>
<string>invalid.illegal.expected-dictionary-separator.json</string>
</dict>
</array>
</dict>
<dict>
<key>match</key>
<string>[^\s\}]</string>
<key>name</key>
<string>invalid.illegal.expected-dictionary-separator.json</string>
</dict>
</array>
</dict>
<key>string</key>
<dict>
<key>begin</key>
<string>"</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.json</string>
</dict>
</dict>
<key>end</key>
<string>"</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.json</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.double.json</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#stringcontent</string>
</dict>
</array>
</dict>
<key>objectkey</key>
<dict>
<key>begin</key>
<string>"</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.support.type.property-name.begin.json</string>
</dict>
</dict>
<key>end</key>
<string>"</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.support.type.property-name.end.json</string>
</dict>
</dict>
<key>name</key>
<string>string.json support.type.property-name.json</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#stringcontent</string>
</dict>
</array>
</dict>
<key>stringcontent</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>(?x) # turn on extended mode
\\ # a literal backslash
(?: # ...followed by...
["\\/bfnrt] # one of these characters
| # ...or...
u # a u
[0-9a-fA-F]{4}) # and four hex digits</string>
<key>name</key>
<string>constant.character.escape.json</string>
</dict>
<dict>
<key>match</key>
<string>\\.</string>
<key>name</key>
<string>invalid.illegal.unrecognized-string-escape.json</string>
</dict>
</array>
</dict>
<key>value</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#constant</string>
</dict>
<dict>
<key>include</key>
<string>#number</string>
</dict>
<dict>
<key>include</key>
<string>#string</string>
</dict>
<dict>
<key>include</key>
<string>#array</string>
</dict>
<dict>
<key>include</key>
<string>#object</string>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
</array>
</dict>
</dict>
<key>scopeName</key>
<string>source.json</string>
<key>uuid</key>
<string>8f97457b-516e-48ce-83c7-08ae12fb327a</string>
</dict>
</plist>

File diff suppressed because it is too large Load diff

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@@ -0,0 +1,736 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>fileTypes</key>
<array>
<string>toml</string>
</array>
<key>keyEquivalent</key>
<string>^~T</string>
<key>name</key>
<string>TOML</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
<dict>
<key>include</key>
<string>#groups</string>
</dict>
<dict>
<key>include</key>
<string>#key_pair</string>
</dict>
<dict>
<key>include</key>
<string>#invalid</string>
</dict>
</array>
<key>repository</key>
<dict>
<key>comments</key>
<dict>
<key>begin</key>
<string>(^[ \t]+)?(?=#)</string>
<key>beginCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.whitespace.comment.leading.toml</string>
</dict>
</dict>
<key>end</key>
<string>(?!\G)</string>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>#</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.toml</string>
</dict>
</dict>
<key>end</key>
<string>\n</string>
<key>name</key>
<string>comment.line.number-sign.toml</string>
</dict>
</array>
</dict>
<key>groups</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.section.begin.toml</string>
</dict>
<key>2</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>[^\s.]+</string>
<key>name</key>
<string>entity.name.section.toml</string>
</dict>
</array>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.section.begin.toml</string>
</dict>
</dict>
<key>match</key>
<string>^\s*(\[)([^\[\]]*)(\])</string>
<key>name</key>
<string>meta.group.toml</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.section.begin.toml</string>
</dict>
<key>2</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>[^\s.]+</string>
<key>name</key>
<string>entity.name.section.toml</string>
</dict>
</array>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.section.begin.toml</string>
</dict>
</dict>
<key>match</key>
<string>^\s*(\[\[)([^\[\]]*)(\]\])</string>
<key>name</key>
<string>meta.group.double.toml</string>
</dict>
</array>
</dict>
<key>invalid</key>
<dict>
<key>match</key>
<string>\S+(\s*(?=\S))?</string>
<key>name</key>
<string>invalid.illegal.not-allowed-here.toml</string>
</dict>
<key>key_pair</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>([A-Za-z0-9_-]+)\s*(=)\s*</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.key.toml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.separator.key-value.toml</string>
</dict>
</dict>
<key>end</key>
<string>(?&lt;=\S)(?&lt;!=)|$</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#primatives</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>((")(.*?)("))\s*(=)\s*</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.key.toml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.begin.toml</string>
</dict>
<key>3</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\\([btnfr"\\]|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})</string>
<key>name</key>
<string>constant.character.escape.toml</string>
</dict>
<dict>
<key>match</key>
<string>\\[^btnfr"\\]</string>
<key>name</key>
<string>invalid.illegal.escape.toml</string>
</dict>
<dict>
<key>match</key>
<string>"</string>
<key>name</key>
<string>invalid.illegal.not-allowed-here.toml</string>
</dict>
</array>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.end.toml</string>
</dict>
<key>5</key>
<dict>
<key>name</key>
<string>punctuation.separator.key-value.toml</string>
</dict>
</dict>
<key>end</key>
<string>(?&lt;=\S)(?&lt;!=)|$</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#primatives</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>((')([^']*)('))\s*(=)\s*</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.key.toml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.begin.toml</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.end.toml</string>
</dict>
<key>5</key>
<dict>
<key>name</key>
<string>punctuation.separator.key-value.toml</string>
</dict>
</dict>
<key>end</key>
<string>(?&lt;=\S)(?&lt;!=)|$</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#primatives</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>(?x)
(
(
(?:
[A-Za-z0-9_-]+ # Bare key
| " (?:[^"\\]|\\.)* " # Double quoted key
                          | ' [^']* '          # Single quoted key
)
(?:
\s* \. \s* # Dot
| (?= \s* =) # or look-ahead for equals
)
){2,} # Ensure at least one dot
)
\s*(=)\s*
</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.key.toml</string>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\.</string>
<key>name</key>
<string>punctuation.separator.variable.toml</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.begin.toml</string>
</dict>
<key>2</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\\([btnfr"\\]|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})</string>
<key>name</key>
<string>constant.character.escape.toml</string>
</dict>
<dict>
<key>match</key>
<string>\\[^btnfr"\\]</string>
<key>name</key>
<string>invalid.illegal.escape.toml</string>
</dict>
</array>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.end.toml</string>
</dict>
</dict>
<key>match</key>
<string>(")((?:[^"\\]|\\.)*)(")</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.begin.toml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.end.toml</string>
</dict>
</dict>
<key>match</key>
<string>(')[^']*(')</string>
</dict>
</array>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.separator.key-value.toml</string>
</dict>
</dict>
<key>comment</key>
<string>Dotted key</string>
<key>end</key>
<string>(?&lt;=\S)(?&lt;!=)|$</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#primatives</string>
</dict>
</array>
</dict>
</array>
</dict>
<key>primatives</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>\G"""</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>"{3,5}</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.triple.double.toml</string>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\\([btnfr"\\]|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})</string>
<key>name</key>
<string>constant.character.escape.toml</string>
</dict>
<dict>
<key>match</key>
<string>\\[^btnfr"\\\n]</string>
<key>name</key>
<string>invalid.illegal.escape.toml</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>\G"</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>"</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.double.toml</string>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\\([btnfr"\\]|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})</string>
<key>name</key>
<string>constant.character.escape.toml</string>
</dict>
<dict>
<key>match</key>
<string>\\[^btnfr"\\]</string>
<key>name</key>
<string>invalid.illegal.escape.toml</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>\G'''</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>'{3,5}</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.triple.single.toml</string>
</dict>
<dict>
<key>begin</key>
<string>\G'</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>'</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.single.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G(?x)
[0-9]{4}
-
(0[1-9]|1[012])
-
(?!00|3[2-9])[0-3][0-9]
(
[Tt ]
(?!2[5-9])[0-2][0-9]
:
[0-5][0-9]
:
(?!6[1-9])[0-6][0-9]
(\.[0-9]+)?
(
Z
| [+-](?!2[5-9])[0-2][0-9]:[0-5][0-9]
)?
)?
</string>
<key>name</key>
<string>constant.other.date.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G(?x)
(?!2[5-9])[0-2][0-9]
:
[0-5][0-9]
:
(?!6[1-9])[0-6][0-9]
(\.[0-9]+)?
</string>
<key>name</key>
<string>constant.other.time.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G(true|false)</string>
<key>name</key>
<string>constant.language.boolean.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G0x\h(\h|_\h)*</string>
<key>name</key>
<string>constant.numeric.hex.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G0o[0-7]([0-7]|_[0-7])*</string>
<key>name</key>
<string>constant.numeric.octal.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G0b[01]([01]|_[01])*</string>
<key>name</key>
<string>constant.numeric.binary.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G[+-]?(inf|nan)</string>
<key>name</key>
<string>constant.numeric.toml</string>
</dict>
<dict>
<key>match</key>
<string>(?x)
\G
(
[+-]?
(
0
| ([1-9](([0-9]|_[0-9])+)?)
)
)
(?=[.eE])
(
\.
([0-9](([0-9]|_[0-9])+)?)
)?
(
[eE]
([+-]?[0-9](([0-9]|_[0-9])+)?)
)?
</string>
<key>name</key>
<string>constant.numeric.float.toml</string>
</dict>
<dict>
<key>match</key>
<string>(?x)
\G
(
[+-]?
(
0
| ([1-9](([0-9]|_[0-9])+)?)
)
)
</string>
<key>name</key>
<string>constant.numeric.integer.toml</string>
</dict>
<dict>
<key>begin</key>
<string>\G\[</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.array.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>\]</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.array.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>meta.array.toml</string>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>(?=["'']|[+-]?[0-9]|[+-]?(inf|nan)|true|false|\[|\{)</string>
<key>end</key>
<string>,|(?=])</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.separator.array.toml</string>
</dict>
</dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#primatives</string>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
<dict>
<key>include</key>
<string>#invalid</string>
</dict>
</array>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
<dict>
<key>include</key>
<string>#invalid</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>\G\{</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.inline-table.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>\}</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.inline-table.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>meta.inline-table.toml</string>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>(?=\S)</string>
<key>end</key>
<string>,|(?=})</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.separator.inline-table.toml</string>
</dict>
</dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#key_pair</string>
</dict>
</array>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
</array>
</dict>
</array>
</dict>
</dict>
<key>scopeName</key>
<string>source.toml</string>
<key>uuid</key>
<string>7DEF2EDB-5BB7-4DD2-9E78-3541A26B7923</string>
</dict>
</plist>

File diff suppressed because it is too large Load diff

File diff suppressed because it is too large Load diff

View file

@@ -0,0 +1,573 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>fileTypes</key>
<array>
<string>xml</string>
<string>xsd</string>
<string>tld</string>
<string>jsp</string>
<string>pt</string>
<string>cpt</string>
<string>dtml</string>
<string>rss</string>
<string>opml</string>
</array>
<key>keyEquivalent</key>
<string>^~X</string>
<key>name</key>
<string>XML</string>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>(&lt;\?)\s*([-_a-zA-Z0-9]+)</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.tag.xml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>entity.name.tag.xml</string>
</dict>
</dict>
<key>end</key>
<string>(\?&gt;)</string>
<key>name</key>
<string>meta.tag.metadata.processing.xml</string>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string> ([a-zA-Z-]+)</string>
<key>name</key>
<string>entity.other.attribute-name.xml</string>
</dict>
<dict>
<key>include</key>
<string>#doublequotedString</string>
</dict>
<dict>
<key>include</key>
<string>#singlequotedString</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>(&lt;!)(DOCTYPE)\s+([:a-zA-Z_][:a-zA-Z0-9_.-]*)</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.tag.xml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>entity.name.tag.xml</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>entity.other.attribute-name.documentroot.xml</string>
</dict>
</dict>
<key>end</key>
<string>\s*(&gt;)</string>
<key>name</key>
<string>meta.tag.metadata.doctype.xml</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#internalSubset</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>&lt;[!%]--</string>
<key>captures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.xml</string>
</dict>
</dict>
<key>end</key>
<string>--%?&gt;</string>
<key>name</key>
<string>comment.block.xml</string>
</dict>
<dict>
<key>begin</key>
<string>(&lt;)((?:([-_a-zA-Z0-9]+)((:)))?([-_a-zA-Z0-9:]+))(?=(\s[^&gt;]*)?&gt;&lt;/\2&gt;)</string>
<key>beginCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.tag.xml</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>entity.name.tag.namespace.xml</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>entity.name.tag.xml</string>
</dict>
<key>5</key>
<dict>
<key>name</key>
<string>punctuation.separator.namespace.xml</string>
</dict>
<key>6</key>
<dict>
<key>name</key>
<string>entity.name.tag.localname.xml</string>
</dict>
</dict>
<key>end</key>
<string>(&gt;(&lt;))/(?:([-_a-zA-Z0-9]+)((:)))?([-_a-zA-Z0-9:]+)(&gt;)</string>
<key>endCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.tag.xml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>meta.scope.between-tag-pair.xml</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>entity.name.tag.namespace.xml</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>entity.name.tag.xml</string>
</dict>
<key>5</key>
<dict>
<key>name</key>
<string>punctuation.separator.namespace.xml</string>
</dict>
<key>6</key>
<dict>
<key>name</key>
<string>entity.name.tag.localname.xml</string>
</dict>
<key>7</key>
<dict>
<key>name</key>
<string>punctuation.definition.tag.xml</string>
</dict>
</dict>
<key>name</key>
<string>meta.tag.no-content.xml</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#tagStuff</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>(&lt;/?)(?:([-_a-zA-Z0-9]+)((:)))?([-_a-zA-Z0-9:]+)</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.tag.xml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>entity.name.tag.namespace.xml</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>entity.name.tag.xml</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.separator.namespace.xml</string>
</dict>
<key>5</key>
<dict>
<key>name</key>
<string>entity.name.tag.localname.xml</string>
</dict>
</dict>
<key>end</key>
<string>(/?&gt;)</string>
<key>name</key>
<string>meta.tag.xml</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#tagStuff</string>
</dict>
</array>
</dict>
<dict>
<key>include</key>
<string>#entity</string>
</dict>
<dict>
<key>include</key>
<string>#bare-ampersand</string>
</dict>
<dict>
<key>begin</key>
<string>&lt;%@</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.section.embedded.begin.xml</string>
</dict>
</dict>
<key>end</key>
<string>%&gt;</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.section.embedded.end.xml</string>
</dict>
</dict>
<key>name</key>
<string>source.java-props.embedded.xml</string>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>page|include|taglib</string>
<key>name</key>
<string>keyword.other.page-props.xml</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>&lt;%[!=]?(?!--)</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.section.embedded.begin.xml</string>
</dict>
</dict>
<key>end</key>
<string>(?!--)%&gt;</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.section.embedded.end.xml</string>
</dict>
</dict>
<key>name</key>
<string>source.java.embedded.xml</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>source.java</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>&lt;!\[CDATA\[</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.xml</string>
</dict>
</dict>
<key>end</key>
<string>]]&gt;</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.xml</string>
</dict>
</dict>
<key>name</key>
<string>string.unquoted.cdata.xml</string>
</dict>
</array>
<key>repository</key>
<dict>
<key>EntityDecl</key>
<dict>
<key>begin</key>
<string>(&lt;!)(ENTITY)\s+(%\s+)?([:a-zA-Z_][:a-zA-Z0-9_.-]*)(\s+(?:SYSTEM|PUBLIC)\s+)?</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.tag.xml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>keyword.other.entity.xml</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.entity.xml</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>variable.language.entity.xml</string>
</dict>
<key>5</key>
<dict>
<key>name</key>
<string>keyword.other.entitytype.xml</string>
</dict>
</dict>
<key>end</key>
<string>(&gt;)</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#doublequotedString</string>
</dict>
<dict>
<key>include</key>
<string>#singlequotedString</string>
</dict>
</array>
</dict>
<key>bare-ampersand</key>
<dict>
<key>match</key>
<string>&amp;</string>
<key>name</key>
<string>invalid.illegal.bad-ampersand.xml</string>
</dict>
<key>doublequotedString</key>
<dict>
<key>begin</key>
<string>"</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.xml</string>
</dict>
</dict>
<key>end</key>
<string>"</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.xml</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.double.xml</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#entity</string>
</dict>
<dict>
<key>include</key>
<string>#bare-ampersand</string>
</dict>
</array>
</dict>
<key>entity</key>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.constant.xml</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.constant.xml</string>
</dict>
</dict>
<key>match</key>
<string>(&amp;)([:a-zA-Z_][:a-zA-Z0-9_.-]*|#[0-9]+|#x[0-9a-fA-F]+)(;)</string>
<key>name</key>
<string>constant.character.entity.xml</string>
</dict>
<key>internalSubset</key>
<dict>
<key>begin</key>
<string>(\[)</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.constant.xml</string>
</dict>
</dict>
<key>end</key>
<string>(\])</string>
<key>name</key>
<string>meta.internalsubset.xml</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#EntityDecl</string>
</dict>
<dict>
<key>include</key>
<string>#parameterEntity</string>
</dict>
</array>
</dict>
<key>parameterEntity</key>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.constant.xml</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.constant.xml</string>
</dict>
</dict>
<key>match</key>
<string>(%)([:a-zA-Z_][:a-zA-Z0-9_.-]*)(;)</string>
<key>name</key>
<string>constant.character.parameter-entity.xml</string>
</dict>
<key>singlequotedString</key>
<dict>
<key>begin</key>
<string>'</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.xml</string>
</dict>
</dict>
<key>end</key>
<string>'</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.xml</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.single.xml</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#entity</string>
</dict>
<dict>
<key>include</key>
<string>#bare-ampersand</string>
</dict>
</array>
</dict>
<key>tagStuff</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>entity.other.attribute-name.namespace.xml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>entity.other.attribute-name.xml</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.separator.namespace.xml</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>entity.other.attribute-name.localname.xml</string>
</dict>
</dict>
<key>match</key>
<string> (?:([-_a-zA-Z0-9]+)((:)))?([-_a-zA-Z0-9]+)=</string>
</dict>
<dict>
<key>include</key>
<string>#doublequotedString</string>
</dict>
<dict>
<key>include</key>
<string>#singlequotedString</string>
</dict>
</array>
</dict>
</dict>
<key>scopeName</key>
<string>text.xml</string>
<key>uuid</key>
<string>D3C4E6DA-6B1C-11D9-8CC2-000D93589AF6</string>
</dict>
</plist>

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,846 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>fileTypes</key>
<array>
<string>zig</string>
</array>
<key>keyEquivalent</key>
<string>^~Z</string>
<key>name</key>
<string>Zig</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#dummy_main</string>
</dict>
</array>
<key>repository</key>
<dict>
<key>block</key>
<dict>
<key>begin</key>
<string>([a-zA-Z_][\w.]*|@\".+\")?\s*(\{)</string>
<key>beginCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>storage.type.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.section.braces.begin.zig</string>
</dict>
</dict>
<key>end</key>
<string>(\})</string>
<key>endCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.section.braces.end.zig</string>
</dict>
</dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#dummy_main</string>
</dict>
</array>
</dict>
<key>character_escapes</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\\n</string>
<key>name</key>
<string>constant.character.escape.newline.zig</string>
</dict>
<dict>
<key>match</key>
<string>\\r</string>
<key>name</key>
<string>constant.character.escape.carrigereturn.zig</string>
</dict>
<dict>
<key>match</key>
<string>\\t</string>
<key>name</key>
<string>constant.character.escape.tabulator.zig</string>
</dict>
<dict>
<key>match</key>
<string>\\\\</string>
<key>name</key>
<string>constant.character.escape.backslash.zig</string>
</dict>
<dict>
<key>match</key>
<string>\\'</string>
<key>name</key>
<string>constant.character.escape.single-quote.zig</string>
</dict>
<dict>
<key>match</key>
<string>\\\"</string>
<key>name</key>
<string>constant.character.escape.double-quote.zig</string>
</dict>
<dict>
<key>match</key>
<string>\\x[a-fA-F\d]{2}</string>
<key>name</key>
<string>constant.character.escape.hexidecimal.zig</string>
</dict>
<dict>
<key>match</key>
<string>\\u\{[a-fA-F\d]{1,6}\}</string>
<key>name</key>
<string>constant.character.escape.hexidecimal.zig</string>
</dict>
</array>
</dict>
<key>comments</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>///</string>
<key>end</key>
<string>$\n?</string>
<key>name</key>
<string>comment.line.documentation.zig</string>
</dict>
<dict>
<key>begin</key>
<string>//[^/]\s*TODO</string>
<key>end</key>
<string>$\n?</string>
<key>name</key>
<string>comment.line.todo.zig</string>
</dict>
<dict>
<key>begin</key>
<string>//[^/]*</string>
<key>end</key>
<string>$\n?</string>
<key>name</key>
<string>comment.line.zig</string>
</dict>
</array>
</dict>
<key>constants</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\b(null|undefined|true|false)\b</string>
<key>name</key>
<string>constant.language.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(?&lt;!\.)(-?[\d_]+)(?!\.)\b</string>
<key>name</key>
<string>constant.numeric.integer.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(?&lt;!\.)(0x[a-fA-F\d_]+)(?!\.)\b</string>
<key>name</key>
<string>constant.numeric.integer.hexadecimal.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(?&lt;!\.)(0o[0-7_]+)(?!\.)\b</string>
<key>name</key>
<string>constant.numeric.integer.octal.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(?&lt;!\.)(0b[01_]+)(?!\.)\b</string>
<key>name</key>
<string>constant.numeric.integer.binary.zig</string>
</dict>
<dict>
<key>match</key>
<string>(?&lt;!\.)(-?\b[\d_]+(?:\.[\d_]+)?(?:[eE][+-]?[\d_]+)?)(?!\.)\b</string>
<key>name</key>
<string>constant.numeric.float.zig</string>
</dict>
<dict>
<key>match</key>
<string>(?&lt;!\.)(-?\b0x[a-fA-F\d_]+(?:\.[a-fA-F\d_]+)?[pP]?(?:[+-]?[\d_]+)?)(?!\.)\b</string>
<key>name</key>
<string>constant.numeric.float.hexadecimal.zig</string>
</dict>
</array>
</dict>
<key>container_decl</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\b(?!\d)([a-zA-Z_]\w*|@\".+\")?(?=\s*=\s*(?:extern|packed)?\b\s*(?:union)\s*[(\{])</string>
<key>name</key>
<string>entity.name.union.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(?!\d)([a-zA-Z_]\w*|@\".+\")?(?=\s*=\s*(?:extern|packed)?\b\s*(?:struct)\s*[(\{])</string>
<key>name</key>
<string>entity.name.struct.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(?!\d)([a-zA-Z_]\w*|@\".+\")?(?=\s*=\s*(?:extern|packed)?\b\s*(?:enum)\s*[(\{])</string>
<key>name</key>
<string>entity.name.enum.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(?!\d)([a-zA-Z_]\w*|@\".+\")?(?=\s*=\s*(?:error)\s*[(\{])</string>
<key>name</key>
<string>entity.name.error.zig</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>storage.type.error.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.accessor.zig</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>entity.name.error.zig</string>
</dict>
</dict>
<key>match</key>
<string>\b(error)(\.)([a-zA-Z_]\w*|@\".+\")</string>
</dict>
</array>
</dict>
<key>dummy_main</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#label</string>
</dict>
<dict>
<key>include</key>
<string>#function_type</string>
</dict>
<dict>
<key>include</key>
<string>#punctuation</string>
</dict>
<dict>
<key>include</key>
<string>#storage_modifier</string>
</dict>
<dict>
<key>include</key>
<string>#container_decl</string>
</dict>
<dict>
<key>include</key>
<string>#constants</string>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
<dict>
<key>include</key>
<string>#strings</string>
</dict>
<dict>
<key>include</key>
<string>#storage</string>
</dict>
<dict>
<key>include</key>
<string>#keywords</string>
</dict>
<dict>
<key>include</key>
<string>#operators</string>
</dict>
<dict>
<key>include</key>
<string>#support</string>
</dict>
<dict>
<key>include</key>
<string>#field_decl</string>
</dict>
<dict>
<key>include</key>
<string>#block</string>
</dict>
<dict>
<key>include</key>
<string>#function_def</string>
</dict>
<dict>
<key>include</key>
<string>#function_call</string>
</dict>
<dict>
<key>include</key>
<string>#enum_literal</string>
</dict>
</array>
</dict>
<key>enum_literal</key>
<dict>
<key>match</key>
<string>(?&lt;!\w|\)|\?|\}|\]|\*)(\.(?:[a-zA-Z_]\w*\b|@\"[^\"]*\"))(?!\(|\s*=[^=&gt;])</string>
<key>name</key>
<string>constant.language.enum</string>
</dict>
<key>field_decl</key>
<dict>
<key>begin</key>
<string>([a-zA-Z_]\w*|@\".+\")\s*(:)\s*</string>
<key>beginCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.member.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.separator.zig</string>
</dict>
</dict>
<key>end</key>
<string>([a-zA-Z_][\w.]*|@\".+\")?\s*(?:(,)|(=)|$)</string>
<key>endCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>storage.type.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.separator.zig</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>keyword.operator.assignment.zig</string>
</dict>
</dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#dummy_main</string>
</dict>
</array>
</dict>
<key>function_call</key>
<dict>
<key>match</key>
<string>(?&lt;!fn)\b([a-zA-Z_]\w*|@\".+\")(?=\s*\()</string>
<key>name</key>
<string>variable.function.zig</string>
</dict>
<key>function_def</key>
<dict>
<key>begin</key>
<string>(?&lt;=fn)\s+([a-zA-Z_]\w*|@\".+\")(\()</string>
<key>beginCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>entity.name.function</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.section.parens.begin.zig</string>
</dict>
</dict>
<key>end</key>
<string>(?&lt;=\)[^\)])\s*([a-zA-Z_][\w.]*|@\".+\")?(!)?\s*(?:([a-zA-Z_][\w.]*|@\".+\")\b(?!\s*\())?</string>
<key>endCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>storage.type.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>keyword.operator.zig</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>storage.type.zig</string>
</dict>
</dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#label</string>
</dict>
<dict>
<key>include</key>
<string>#param_list</string>
</dict>
<dict>
<key>match</key>
<string>([a-zA-Z_][\w.]*|@\".+\")</string>
<key>name</key>
<string>storage.type.zig</string>
</dict>
<dict>
<key>include</key>
<string>#dummy_main</string>
</dict>
</array>
</dict>
<key>function_type</key>
<dict>
<key>begin</key>
<string>\b(fn)\s*(\()</string>
<key>beginCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>storage.type.function.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.section.parens.begin.zig</string>
</dict>
</dict>
<key>contentName</key>
<string>meta.function.parameters.zig</string>
<key>end</key>
<string>(?&lt;=\)|\})\s*([a-zA-Z_][\w.]*|@\".+\")?\s*(!)?\s*([a-zA-Z_][\w.]*|@\".+\")</string>
<key>endCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>storage.type.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>keyword.operator.zig</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>storage.type.zig</string>
</dict>
</dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#label</string>
</dict>
<dict>
<key>include</key>
<string>#param_list</string>
</dict>
<dict>
<key>match</key>
<string>([a-zA-Z_][\w.]*|@\".+\")</string>
<key>name</key>
<string>storage.type.zig</string>
</dict>
<dict>
<key>include</key>
<string>#dummy_main</string>
</dict>
</array>
</dict>
<key>keywords</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\b(while|for|break|return|continue|asm|defer|errdefer|unreachable)\b</string>
<key>name</key>
<string>keyword.control.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(async|await|suspend|nosuspend|resume)\b</string>
<key>name</key>
<string>keyword.control.async.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(if|else|switch|try|catch|orelse)\b</string>
<key>name</key>
<string>keyword.control.conditional.zig</string>
</dict>
<dict>
<key>match</key>
<string>(?&lt;!\w)(@import|@cImport|@cInclude)\b</string>
<key>name</key>
<string>keyword.control.import.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(usingnamespace)\b</string>
<key>name</key>
<string>keyword.other.usingnamespace.zig</string>
</dict>
</array>
</dict>
<key>label</key>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>keyword.control.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>entity.name.label.zig</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>entity.name.label.zig</string>
</dict>
</dict>
<key>match</key>
<string>\b(break|continue)\s*:\s*([a-zA-Z_]\w*|@\".+\")\b|\b(?!\d)([a-zA-Z_]\w*|@\".+\")\b(?=\s*:\s*(?:\{|while\b))</string>
</dict>
<key>operators</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\b!\b</string>
<key>name</key>
<string>keyword.operator.zig</string>
</dict>
<dict>
<key>match</key>
<string>(==|(?:!|&gt;|&lt;)=?)</string>
<key>name</key>
<string>keyword.operator.logical.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(and|or)\b</string>
<key>name</key>
<string>keyword.operator.word.zig</string>
</dict>
<dict>
<key>match</key>
<string>((?:(?:\+|-|\*)\%?|/|%|&lt;&lt;|&gt;&gt;|&amp;|\|(?=[^\|])|\^)?=)</string>
<key>name</key>
<string>keyword.operator.assignment.zig</string>
</dict>
<dict>
<key>match</key>
<string>((?:\+|-|\*)\%?|/(?!/)|%)</string>
<key>name</key>
<string>keyword.operator.arithmetic.zig</string>
</dict>
<dict>
<key>match</key>
<string>(&lt;&lt;|&gt;&gt;|&amp;(?=[a-zA-Z_]|@\")|\|(?=[^\|])|\^|~)</string>
<key>name</key>
<string>keyword.operator.bitwise.zig</string>
</dict>
<dict>
<key>match</key>
<string>(\+\+|\*\*|-&gt;|\.\?|\.\*|&amp;(?=[a-zA-Z_]|@\")|\?|\|\||\.{2,3})</string>
<key>name</key>
<string>keyword.operator.other.zig</string>
</dict>
</array>
</dict>
<key>param_list</key>
<dict>
<key>begin</key>
<string>([a-zA-Z_]\w*|@\".+\")\s*(:)\s*</string>
<key>beginCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.parameter.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.separator.zig</string>
</dict>
</dict>
<key>end</key>
<string>([a-zA-Z_][\w.]*|@\".+\")?\s*(?:(,)|(\)))</string>
<key>endCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>storage.type.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.separator.zig</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.section.parens.end.zig</string>
</dict>
</dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#dummy_main</string>
</dict>
<dict>
<key>match</key>
<string>([a-zA-Z_][\w.]*|@\".+\")</string>
<key>name</key>
<string>storage.type.zig</string>
</dict>
</array>
</dict>
<key>punctuation</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>,</string>
<key>name</key>
<string>punctuation.separator.zig</string>
</dict>
<dict>
<key>match</key>
<string>;</string>
<key>name</key>
<string>punctuation.terminator.zig</string>
</dict>
<dict>
<key>match</key>
<string>(\()</string>
<key>name</key>
<string>punctuation.section.parens.begin.zig</string>
</dict>
<dict>
<key>match</key>
<string>(\))</string>
<key>name</key>
<string>punctuation.section.parens.end.zig</string>
</dict>
</array>
</dict>
<key>storage</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\b(bool|void|noreturn|type|anyerror|anytype)\b</string>
<key>name</key>
<string>storage.type.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(?&lt;!\.)([iu]\d+|[iu]size|comptime_int)\b</string>
<key>name</key>
<string>storage.type.integer.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(f16|f32|f64|f128|comptime_float)\b</string>
<key>name</key>
<string>storage.type.float.zig</string>
</dict>
<dict>
<key>match</key>
<string>\b(c_short|c_ushort|c_int|c_uint|c_long|c_ulong|c_longlong|c_ulonglong|c_longdouble|c_void)\b</string>
<key>name</key>
<string>storage.type.c_compat.zig</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>storage.type.zig</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>keyword.operator.zig</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>storage.type.zig</string>
</dict>
</dict>
<key>match</key>
<string>\b(anyframe)\b\s*(-&gt;)?\s*(?:([a-zA-Z_][\w.]*|@\".+\")\b(?!\s*\())?</string>
</dict>
<dict>
<key>match</key>
<string>\bfn\b</string>
<key>name</key>
<string>storage.type.function.zig</string>
</dict>
<dict>
<key>match</key>
<string>\btest\b</string>
<key>name</key>
<string>storage.type.test.zig</string>
</dict>
<dict>
<key>match</key>
<string>\bstruct\b</string>
<key>name</key>
<string>storage.type.struct.zig</string>
</dict>
<dict>
<key>match</key>
<string>\benum\b</string>
<key>name</key>
<string>storage.type.enum.zig</string>
</dict>
<dict>
<key>match</key>
<string>\bunion\b</string>
<key>name</key>
<string>storage.type.union.zig</string>
</dict>
<dict>
<key>match</key>
<string>\berror\b</string>
<key>name</key>
<string>storage.type.error.zig</string>
</dict>
</array>
</dict>
<key>storage_modifier</key>
<dict>
<key>match</key>
<string>\b(const|var|extern|packed|export|pub|noalias|inline|noinline|comptime|volatile|align|linksection|threadlocal|allowzero)\b</string>
<key>name</key>
<string>storage.modifier.zig</string>
</dict>
<key>strings</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>\'</string>
<key>end</key>
<string>\'</string>
<key>name</key>
<string>string.quoted.single.zig</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#character_escapes</string>
</dict>
<dict>
<key>match</key>
<string>\\[^\'][^\']*?</string>
<key>name</key>
<string>invalid.illegal.character.zig</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>c?\"</string>
<key>end</key>
<string>\"</string>
<key>name</key>
<string>string.quoted.double.zig</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#character_escapes</string>
</dict>
<dict>
<key>match</key>
<string>\\[^\'][^\']*?</string>
<key>name</key>
<string>invalid.illegal.character.zig</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>c?\\\\</string>
<key>end</key>
<string>$\n?</string>
<key>name</key>
<string>string.quoted.other.zig</string>
</dict>
</array>
</dict>
<key>support</key>
<dict>
<key>match</key>
<string>(?&lt;!\w)@[^\"\d][a-zA-Z_]\w*\b</string>
<key>name</key>
<string>support.function.zig</string>
</dict>
</dict>
<key>scopeName</key>
<string>source.zig</string>
<key>uuid</key>
<string>06C2FF99-3080-441A-9019-460C51E93116</string>
</dict>
</plist>

View file

@ -0,0 +1,200 @@
// Every language with a TextMate grammar checked in under
// `highlight-grammar/<lang>.plist`; the extension doubles as the lookup key.
const languages = [
  "ts",
  "tsx",
  "zig",
  "json",
  "css",
  "astro",
  "mdx",
  "lua",
  "shell",
  "dosbatch",
  "powershell",
  "yaml",
  "toml",
  "xml",
  "python",
  "php",
  "diff",
] as const;
// Grammars whose scope name does not follow the `source.<lang>` convention
// (see `highlightCode`, which falls back to `source.` + language).
const altScopes: Record<string, string> = {
  astro: "text.html.astro",
  xml: "text.xml",
  php: "text.html.php",
};
export type Language = (typeof languages)[number];
// Maps TextMate scope-name prefixes to the site's CSS class names.
// Checked in order (first match wins); an entry with a third element only
// applies to that language. A `null` class means "emit unstyled text".
const scopes = [
  // CSS
  ["punctuation.definition.keyword", "keyword", "css"],
  ["entity.name.tag.css", "class", "css"],
  ["meta.selector.css", "method", "css"],
  ["entity.other.attribute-name.class.css", "builtin", "css"],
  ["punctuation.definition.entity", "builtin", "css"],
  ["variable.css", "parameter", "css"],
  // JSON
  ["support.type.property-name.json", "variable", "json"],
  ["constant.numeric", "method", "json"],
  ["constant", "class", "json"],
  // Lua
  ["entity.name", "class", "lua"],
  // Diff
  ["punctuation.definition.deleted", "variable", "diff"],
  ["markup.deleted", "variable", "diff"],
  ["punctuation.definition.inserted", "method", "diff"],
  ["markup.inserted", "method", "diff"],
  ["meta.diff.range", "string", "diff"],
  ["punctuation.definition.range", "string", "diff"],
  ["meta.toc-list-line.number", "keyword", "diff"],
  ["meta.diff", "comment", "diff"],
  // General
  ["meta.object-literal.key", "property"],
  ["comment", "comment"],
  ["string", "string"],
  ["storage", "keyword"],
  ["keyword", "keyword"],
  ["variable.parameter", "parameter"],
  ["entity.name.function", "method"],
  ["support.type.primitive", "builtin"],
  ["entity.name.type", "class"],
  ["support.type", "class"],
  ["support.class", "class"],
  ["constant.language", "builtin"],
  ["constant", "constant"],
  ["support.constant", "constant"],
  ["meta.parameters", "parameter"],
  ["support.function", "method"],
  ["variable", "variable"],
  ["punctuation", null],
  ["meta.function-call", "method"],
] as const;
/** Arguments for `highlightLines`. */
interface HighlightLinesOptions {
  /** Source code already split into lines (no trailing newlines). */
  lines: string[];
  grammar: textmate.IGrammar;
  /** Tokenizer state carried across lines; `textmate.INITIAL` for a fresh file. */
  state: textmate.StateStack;
  language: Language;
}
/**
 * Resolve a token's TextMate scope list to one of the site's CSS classes.
 * Entries in `scopes` are tried in order; rules with a language tag only
 * apply to that language. Returns `null` when the token is unstyled.
 * (Fixes the misspelled `langugage` parameter name.)
 */
export function getStyle(scopesToCheck: string[], language: Language) {
  for (const scope of scopes) {
    // Skip rules pinned to a different language.
    if (scope[2] && scope[2] !== language) continue;
    const find = scopesToCheck.find((s) => s.startsWith(scope[0]));
    if (find) {
      return scope[1];
    }
  }
  return null;
}
/**
 * Tokenize each line with the grammar and emit HTML, wrapping consecutive
 * tokens that share a style in a single `<span class='...'>`.
 * Returns the final tokenizer state so callers could continue a stream.
 */
function highlightLines({
  lines,
  grammar,
  state,
  language,
}: HighlightLinesOptions) {
  let out = "";
  let openStyle: string | null = null;
  for (const line of lines) {
    const result = grammar.tokenizeLine(line, state);
    if (result.stoppedEarly) {
      throw new Error("TODO: Tokenization stopped early?");
    }
    state = result.ruleStack;
    for (const token of result.tokens) {
      const text = line.slice(token.startIndex, token.endIndex);
      if (text.trim().length === 0) {
        // Whitespace-only tokens are emitted verbatim without touching the
        // currently open span, so adjacent styled runs stay merged.
        out += ssr.escapeHtml(text);
        continue;
      }
      const style = getStyle(token.scopes, language);
      if (style !== openStyle) {
        if (openStyle) out += "</span>";
        if (style) out += `<span class='${style}'>`;
      }
      out += ssr.escapeHtml(text);
      openStyle = style;
    }
    out += "\n";
  }
  // Close the span left open by the last styled token, if any.
  if (openStyle) out += "</span>";
  return { state, html: out };
}
/**
 * Lazily build the shared TextMate registry: load the oniguruma WASM engine
 * once, then serve grammars out of `highlight-grammar/<lang>.plist` on demand.
 */
export const getRegistry = async.once(async () => {
  await oniguruma.loadWASM(
    await fs.readFile(require.resolve("vscode-oniguruma/release/onig.wasm")),
  );
  // Map a requested scope (e.g. "source.zig") back to a known language by
  // its `.lang` suffix; unknown scopes yield no grammar.
  const loadGrammar = async (scopeName: string) => {
    const lang = languages.find((l) => scopeName.endsWith(`.${l}`));
    if (!lang) return null;
    const plist = await fs.readFile(
      path.join(import.meta.dirname, `highlight-grammar/${lang}.plist`),
      "utf-8",
    );
    return textmate.parseRawGrammar(plist);
  };
  return new textmate.Registry({
    onigLib: Promise.resolve({
      createOnigScanner: (patterns) => new oniguruma.OnigScanner(patterns),
      createOnigString: (s) => new oniguruma.OnigString(s),
    }),
    loadGrammar,
  });
});
/**
 * Highlight `code` in the given language and return the HTML string.
 * @throws when no grammar is registered for the language.
 */
export async function highlightCode(code: string, language: Language) {
  const scope = altScopes[language] ?? `source.${language}`;
  const registry = await getRegistry();
  const grammar = await registry.loadGrammar(scope);
  if (grammar == null) {
    throw new Error(`No grammar found for language: ${language}`);
  }
  const { html } = highlightLines({
    lines: code.split("\n"),
    grammar,
    state: textmate.INITIAL,
    language,
  });
  return html;
}
/**
 * Validation entry point: check that every supported language has a grammar
 * plist on disk and can tokenize a short sample without throwing.
 */
export async function main() {
  // validate exts
  for (const ext of languages) {
    const grammarPath = path.join(
      import.meta.dirname,
      `highlight-grammar/${ext}.plist`,
    );
    if (!fs.existsSync(grammarPath)) {
      console.error(`Missing grammar for ${ext}`);
      // Without the plist, highlightCode below would throw and abort the
      // whole scan; skip so every missing grammar gets reported in one run.
      continue;
    }
    // Sanity check
    await highlightCode("wwwwwwwwwwwaaaaaaaaaaaaaaaa", ext);
  }
}
import * as async from "#sitegen/async";
import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as oniguruma from "vscode-oniguruma";
import * as textmate from "vscode-textmate";
import * as ssr from "#ssr";

View file

@ -0,0 +1,73 @@
const db = getDb("cache.sqlite");
// Reference counts for derived assets (keyed by content hash), plus the
// derived files recorded against each ref; file rows are removed by the
// ON DELETE CASCADE when a ref row is garbage-collected.
db.table(
  "asset_refs",
  /* SQL */ `
  create table if not exists asset_refs (
    id integer primary key autoincrement,
    key text not null UNIQUE,
    refs integer not null
  );
  create table if not exists asset_ref_files (
    file text not null,
    id integer not null,
    foreign key (id) references asset_refs(id) ON DELETE CASCADE
  );
  create index asset_ref_files_id on asset_ref_files(id);
`,
);
/**
 * Uncompressed files are read directly from the media store root. Derived
 * assets like compressed files, optimized images, and streamable video are
 * stored in the `derived` folder. After scanning, the derived assets are
 * uploaded into the store (storage1/clofi-derived dataset on NAS). Since
 * multiple files can share the same hash, the number of references is
 * tracked, and the derived content is only produced once. This means if a
 * file is deleted, it should only decrement a reference count; deleting it
 * once all references are removed.
 */
export class AssetRef {
  /** Auto-increment row id. */
  id!: number;
  /** Key which was referenced. */
  key!: string;
  /** Live reference count; the row is deleted once this reaches zero. */
  refs!: number;
  /**
   * Drop one reference to this key and garbage-collect unreferenced rows.
   * @returns true when at least one unreferenced row was deleted.
   *   (Previously the comparison was computed and discarded — a dead
   *   expression; the result is now returned so callers can react.)
   */
  unref(): boolean {
    decrementQuery.run(this.key);
    return deleteUnreferencedQuery.run().changes > 0;
  }
  /** Record derived files produced under this ref. */
  addFiles(files: string[]) {
    for (const file of files) {
      addFileQuery.run({ id: this.id, file });
    }
  }
  /** Fetch the ref row for `key`, if any. */
  static get(key: string) {
    return getQuery.get(key);
  }
  /** Insert `key` with refs=1, or bump its count, and return the row. */
  static putOrIncrement(key: string) {
    putOrIncrementQuery.get(key);
    return UNWRAP(AssetRef.get(key));
  }
}
// Fetch a single ref row by key, hydrated as an AssetRef instance.
const getQuery = db.prepare<[key: string]>(/* SQL */ `
  select * from asset_refs where key = ?;
`).as(AssetRef);
// Upsert: the first reference inserts with refs=1; later ones bump the count.
const putOrIncrementQuery = db.prepare<[key: string]>(/* SQL */ `
  insert into asset_refs (key, refs) values (?, 1)
  on conflict(key) do update set refs = refs + 1;
`);
// Decrement one reference, clamped so the count never goes negative.
const decrementQuery = db.prepare<[key: string]>(/* SQL */ `
  update asset_refs set refs = refs - 1 where key = ? and refs > 0;
`);
// Garbage-collect every fully-unreferenced row (cascades to asset_ref_files).
const deleteUnreferencedQuery = db.prepare(/* SQL */ `
  delete from asset_refs where refs <= 0;
`);
const addFileQuery = db.prepare<[{ id: number; file: string }]>(/* SQL */ `
  insert into asset_ref_files (id, file) values ($id, $file);
`);
import { getDb } from "#sitegen/sqlite";

View file

@ -0,0 +1,59 @@
const db = getDb("cache.sqlite");
// Path-prefix permission rules: a row applies to every path that begins
// with `prefix`; absence of a matching row means permission bits of 0.
db.table(
  "permissions",
  /* SQL */ `
  CREATE TABLE IF NOT EXISTS permissions (
    prefix TEXT PRIMARY KEY,
    allow INTEGER NOT NULL
  );
`,
);
/** Static accessors over the `permissions` path-prefix rule table. */
export class FilePermissions {
  prefix!: string;
  /** Currently set to 1 always */
  allow!: number;
  // -- static ops --
  /** Permission bits from the longest stored prefix covering `filePath` (0 if none). */
  static getByPrefix(filePath: string): number {
    const row = getByPrefixQuery.get(filePath);
    return row?.allow ?? 0;
  }
  /** Permission bits from an exact-path rule only; no prefix matching. */
  static getExact(filePath: string): number {
    const row = getExactQuery.get(filePath);
    return row?.allow ?? 0;
  }
  /** Upsert the rule for `prefix`; a zero `allow` removes the rule instead. */
  static setPermissions(prefix: string, allow: number) {
    if (!allow) {
      deleteQuery.run(prefix);
      return;
    }
    insertQuery.run({ prefix, allow });
  }
}
// Longest-prefix match: the bound path is matched against every stored
// prefix via GLOB, and the most specific (longest) rule wins.
const getByPrefixQuery = db.prepare<
  [prefix: string],
  Pick<FilePermissions, "allow">
>(/* SQL */ `
  SELECT allow
  FROM permissions
  WHERE ? GLOB prefix || '*'
  ORDER BY LENGTH(prefix) DESC
  LIMIT 1;
`);
// Exact-path lookup; no prefix semantics.
const getExactQuery = db.prepare<
  [file: string],
  Pick<FilePermissions, "allow">
>(/* SQL */ `
  SELECT allow FROM permissions WHERE ? == prefix
`);
const insertQuery = db.prepare<[{ prefix: string; allow: number }]>(/* SQL */ `
  REPLACE INTO permissions(prefix, allow) VALUES($prefix, $allow);
`);
const deleteQuery = db.prepare<[file: string]>(/* SQL */ `
  DELETE FROM permissions WHERE prefix = ?;
`);
import { getDb } from "#sitegen/sqlite";

View file

@ -0,0 +1,460 @@
const db = getDb("cache.sqlite");
// Index of everything in the media store: one row per file or directory.
// The secondary indexes back the lookups used by the queries in this module
// (by path, by parent, recursive file children, and "needs processing" scans).
db.table(
  "media_files",
  /* SQL */ `
  create table media_files (
    id integer primary key autoincrement,
    parent_id integer,
    path text unique,
    kind integer not null,
    timestamp integer not null,
    timestamp_updated integer not null default current_timestamp,
    hash text not null,
    size integer not null,
    duration integer not null default 0,
    dimensions text not null default "",
    contents text not null,
    dirsort text,
    processed integer not null,
    processors text not null default "",
    foreign key (parent_id) references media_files(id)
  );
  -- index for quickly looking up files by path
  create index media_files_path on media_files (path);
  -- index for quickly looking up children
  create index media_files_parent_id on media_files (parent_id);
  -- index for quickly looking up recursive file children
  create index media_files_file_children on media_files (kind, path);
  -- index for finding directories that need to be processed
  create index media_files_directory_processed on media_files (kind, processed);
`,
);
/**
 * Row discriminator stored in `media_files.kind`. The numeric values are
 * persisted in the database, so they must not be renumbered.
 */
export enum MediaFileKind {
  directory = 0,
  file = 1,
}
/**
 * Active-record style wrapper over one `media_files` row. Instances are
 * hydrated by the prepared queries in this module (via `.as(MediaFile)`),
 * so fields mirror the table columns exactly.
 */
export class MediaFile {
  id!: number;
  // Row id of the containing directory; null only for the root row.
  parent_id!: number | null;
  /**
   * Has leading slash, does not have `/file` prefix.
   * @example "/2025/waterfalls/waterfalls.mp3"
   */
  path!: string;
  kind!: MediaFileKind;
  // Raw epoch-millisecond columns; exposed through the date getters below.
  private timestamp!: number;
  private timestamp_updated!: number;
  /** for mp3/mp4 files, measured in seconds */
  duration?: number;
  /** for images and videos, the dimensions. Two numbers split by `x` */
  dimensions?: string;
  /**
   * sha1 of
   * - files: the contents
   * - directories: the JSON array of strings + the content of `readme.txt`
   * this is used
   * - to inform changes in caching mechanisms (etag, page render cache)
   * - as a filename for compressed files (.clover/compressed/<hash>.{gz,zstd})
   */
  hash!: string;
  /**
   * Depends on the file kind.
   *
   * - For directories, this is the contents of `readme.txt`, if it exists.
   * - Otherwise, it is an empty string.
   */
  contents!: string;
  /**
   * For directories, if this is set, it is a JSON-encoded array of the explicit
   * sorting order. Derived off of `.dirsort` files.
   */
  dirsort!: string | null;
  /** in bytes */
  size!: number;
  /**
   * 0 - not processed
   * non-zero - processed
   *
   * file: a bit-field of the processors.
   * directory: this is for re-indexing contents
   */
  processed!: number;
  processors!: string;
  // -- instance ops --
  /** File timestamp as a Date (from the raw millisecond column). */
  get date() {
    return new Date(this.timestamp);
  }
  /** Time the row itself was last written, as a Date. */
  get lastUpdateDate() {
    return new Date(this.timestamp_updated);
  }
  /** Parse the `WxH` dimension string into numbers; null when unset. */
  parseDimensions() {
    const dimensions = this.dimensions;
    if (!dimensions) return null;
    const [width, height] = dimensions.split("x").map(Number);
    return { width, height };
  }
  get basename() {
    return path.basename(this.path);
  }
  get basenameWithoutExt() {
    return path.basename(this.path, path.extname(this.path));
  }
  get extension() {
    return path.extname(this.path);
  }
  // For extensionless files (e.g. "Makefile"), fall back to the whole
  // basename instead of an empty string.
  get extensionNonEmpty() {
    const { basename } = this;
    const ext = path.extname(basename);
    if (ext === "") return basename;
    return ext;
  }
  /** Direct children, hiding dotfiles. */
  getChildren() {
    return MediaFile.getChildren(this.id)
      .filter((file) => !file.basename.startsWith("."));
  }
  /**
   * Direct children filtered by permissions: when this directory carries no
   * prefix rule, drop any child that has its own exact-path rule.
   */
  getPublicChildren() {
    const children = MediaFile.getChildren(this.id);
    if (FilePermissions.getByPrefix(this.path) == 0) {
      return children.filter(({ path }) => FilePermissions.getExact(path) == 0);
    }
    return children;
  }
  /** Containing directory row, or null at (or above) the root. */
  getParent() {
    const dirPath = this.path;
    if (dirPath === "/") return null;
    const parentPath = path.dirname(dirPath);
    if (parentPath === dirPath) return null;
    const result = MediaFile.getByPath(parentPath);
    if (!result) return null;
    ASSERT(result.kind === MediaFileKind.directory);
    return result;
  }
  // The following setters write through to the database and then update the
  // in-memory field so the instance stays consistent with the row.
  setProcessed(processed: number) {
    setProcessedQuery.run({ id: this.id, processed });
    this.processed = processed;
  }
  setProcessors(processed: number, processors: string) {
    setProcessorsQuery.run({ id: this.id, processed, processors });
    this.processed = processed;
    this.processors = processors;
  }
  setDuration(duration: number) {
    setDurationQuery.run({ id: this.id, duration });
    this.duration = duration;
  }
  setDimensions(dimensions: string) {
    setDimensionsQuery.run({ id: this.id, dimensions });
    this.dimensions = dimensions;
  }
  setContents(contents: string) {
    setContentsQuery.run({ id: this.id, contents });
    this.contents = contents;
  }
  /** All file (non-directory) rows anywhere below this directory. */
  getRecursiveFileChildren() {
    if (this.kind !== MediaFileKind.directory) return [];
    return getChildrenFilesRecursiveQuery.array(this.path + "/");
  }
  delete() {
    deleteCascadeQuery.run({ id: this.id });
  }
  // -- static ops --
  /**
   * Look up a row by path. For "/" with an empty database, synthesizes a
   * placeholder root so callers always have a directory to render.
   */
  static getByPath(filePath: string): MediaFile | null {
    const result = getByPathQuery.get(filePath);
    if (result) return result;
    if (filePath === "/") {
      return Object.assign(new MediaFile(), {
        id: 0,
        parent_id: null,
        path: "/",
        kind: MediaFileKind.directory,
        timestamp: 0,
        timestamp_updated: Date.now(),
        hash: "0".repeat(40),
        contents: "the file scanner has not been run yet",
        dirsort: null,
        size: 0,
        processed: 1,
      });
    }
    return null;
  }
  /**
   * Insert or update a file row (see the CreateFile interface and the
   * upsert in createFileQuery); parent directories are created on demand.
   */
  static createFile({
    path: filePath,
    date,
    hash,
    size,
    duration,
    dimensions,
    contents,
  }: CreateFile) {
    ASSERT(
      !filePath.includes("\\") && filePath.startsWith("/"),
      `Invalid path: ${filePath}`,
    );
    return createFileQuery.getNonNull({
      path: filePath,
      parentId: MediaFile.getOrPutDirectoryId(path.dirname(filePath)),
      timestamp: date.getTime(),
      timestampUpdated: Date.now(),
      hash,
      size,
      duration,
      dimensions,
      contents,
    });
  }
  /**
   * Resolve a directory path to its row id, creating any missing ancestor
   * directories (mkdir -p semantics) along the way.
   */
  static getOrPutDirectoryId(filePath: string) {
    ASSERT(
      !filePath.includes("\\") && filePath.startsWith("/"),
      `Invalid path: ${filePath}`,
    );
    filePath = path.normalize(filePath);
    const row = getDirectoryIdQuery.get(filePath);
    if (row) return row.id;
    let current = filePath;
    let parts = [];
    let parentId: null | number = null;
    if (filePath === "/") {
      return createDirectoryQuery.getNonNull({
        path: filePath,
        parentId,
      }).id;
    }
    // walk up the path until we find a directory that exists
    do {
      parts.unshift(path.basename(current));
      current = path.dirname(current);
      parentId = getDirectoryIdQuery.get(current)?.id ?? null;
    } while (parentId == undefined && current !== "/");
    if (parentId == undefined) {
      parentId = createDirectoryQuery.getNonNull({
        path: current,
        parentId,
      }).id;
    }
    // walk back down the path, creating directories as needed
    for (const part of parts) {
      current = path.join(current, part);
      ASSERT(parentId != undefined);
      parentId = createDirectoryQuery.getNonNull({
        path: current,
        parentId,
      }).id;
    }
    return parentId;
  }
  /** Write post-scan metadata for a directory (see MarkDirectoryProcessed). */
  static markDirectoryProcessed({
    id,
    timestamp,
    contents,
    size,
    hash,
    dirsort,
  }: MarkDirectoryProcessed) {
    markDirectoryProcessedQuery.get({
      id,
      timestamp: timestamp.getTime(),
      contents,
      // Empty string (not null) marks "no explicit sort order".
      dirsort: dirsort ? JSON.stringify(dirsort) : "",
      hash,
      size,
    });
  }
  static setProcessed(id: number, processed: number) {
    setProcessedQuery.run({ id, processed });
  }
  /** Ensure a directory row exists for `dirPath` and refresh its metadata. */
  static createOrUpdateDirectory(dirPath: string) {
    const id = MediaFile.getOrPutDirectoryId(dirPath);
    return updateDirectoryQuery.get(id);
  }
  static getChildren(id: number) {
    return getChildrenQuery.array(id);
  }
  static getDirectoriesToReindex() {
    return getDirectoriesToReindexQuery.array();
  }
  // Expose the underlying handle for callers that need raw transactions.
  static db = db;
}
// Create a `file` entry with a given path, date, file hash, size, and duration
// If the file already exists, update the date and duration.
// If the file exists and the hash is different, sets `compress` to 0.
interface CreateFile {
  path: string;
  date: Date;
  hash: string;
  size: number;
  duration: number;
  // "WxH" string, or empty for media without dimensions.
  dimensions: string;
  contents: string;
}
// Set the `processed` flag true and update the metadata for a directory
export interface MarkDirectoryProcessed {
  id: number;
  timestamp: Date;
  contents: string;
  size: number;
  hash: string;
  // Explicit child ordering; null when no `.dirsort` file exists.
  dirsort: null | string[];
}
/** Shape of a parsed `.dirsort` configuration. */
export interface DirConfig {
  /** Overridden sorting */
  sort: string[];
}
// -- queries --
// Get a directory ID by path, creating it if it doesn't exist
const createDirectoryQuery = db.prepare<
  [{ path: string; parentId: number | null }],
  { id: number }
>(
  /* SQL */ `
  insert into media_files (
    path, parent_id, kind, timestamp, hash, size,
    duration, dimensions, contents, dirsort, processed)
  values (
    $path, $parentId, ${MediaFileKind.directory}, 0, '', 0,
    0, '', '', '', 0)
  returning id;
`,
);
// Look up a directory row id by exact path.
const getDirectoryIdQuery = db.prepare<[string], { id: number }>(/* SQL */ `
  SELECT id FROM media_files WHERE path = ? AND kind = ${MediaFileKind.directory};
`);
// Upsert a file row. On path conflict the metadata is refreshed, and a hash
// change resets `processed` to 0 so the file is re-processed.
const createFileQuery = db.prepare<[{
  path: string;
  parentId: number;
  timestamp: number;
  timestampUpdated: number;
  hash: string;
  size: number;
  duration: number;
  dimensions: string;
  contents: string;
}], void>(/* SQL */ `
  insert into media_files (
    path, parent_id, kind, timestamp, timestamp_updated, hash,
    size, duration, dimensions, contents, processed)
  values (
    $path, $parentId, ${MediaFileKind.file}, $timestamp, $timestampUpdated,
    $hash, $size, $duration, $dimensions, $contents, 0)
  on conflict(path) do update set
    timestamp = excluded.timestamp,
    timestamp_updated = excluded.timestamp_updated,
    duration = excluded.duration,
    size = excluded.size,
    contents = excluded.contents,
    processed = case
      when media_files.hash != excluded.hash then 0
      else media_files.processed
    end
  returning *;
`).as(MediaFile);
// Overwrite the processed flag for one row.
const setProcessedQuery = db.prepare<[{
  id: number;
  processed: number;
}]>(/* SQL */ `
  update media_files set processed = $processed where id = $id;
`);
// Overwrite the processed flag and processors list for one row.
const setProcessorsQuery = db.prepare<[{
  id: number;
  processed: number;
  processors: string;
}]>(/* SQL */ `
  update media_files set
    processed = $processed,
    processors = $processors
  where id = $id;
`);
// Single-column metadata updates.
const setDurationQuery = db.prepare<[{
  id: number;
  duration: number;
}]>(/* SQL */ `
  update media_files set duration = $duration where id = $id;
`);
const setDimensionsQuery = db.prepare<[{
  id: number;
  dimensions: string;
}]>(/* SQL */ `
  update media_files set dimensions = $dimensions where id = $id;
`);
const setContentsQuery = db.prepare<[{
  id: number;
  contents: string;
}]>(/* SQL */ `
  update media_files set contents = $contents where id = $id;
`);
// Fetch a row (file or directory) by exact path.
const getByPathQuery = db.prepare<[string]>(/* SQL */ `
  select * from media_files where path = ?;
`).as(MediaFile);
// Mark a directory processed and refresh its rollup metadata.
const markDirectoryProcessedQuery = db.prepare<[{
  timestamp: number;
  contents: string;
  dirsort: string;
  hash: string;
  size: number;
  id: number;
}]>(/* SQL */ `
  update media_files set
    processed = 1,
    timestamp = $timestamp,
    contents = $contents,
    dirsort = $dirsort,
    hash = $hash,
    size = $size
  where id = $id;
`);
// Flag a directory for re-processing.
const updateDirectoryQuery = db.prepare<[id: number]>(/* SQL */ `
  update media_files set processed = 0 where id = ?;
`);
// Direct children of a row.
const getChildrenQuery = db.prepare<[id: number]>(/* SQL */ `
  select * from media_files where parent_id = ?;
`).as(MediaFile);
// All file rows under a path prefix (recursive via LIKE prefix match).
const getChildrenFilesRecursiveQuery = db.prepare<[dir: string]>(/* SQL */ `
  select * from media_files
  where path like ? || '%'
  and kind = ${MediaFileKind.file}
`).as(MediaFile);
// Delete a row and walk upward, deleting ancestors that would be left with
// no other children.
const deleteCascadeQuery = db.prepare<[{ id: number }]>(/* SQL */ `
  with recursive items as (
    select id, parent_id from media_files where id = $id
    union all
    select p.id, p.parent_id
    from media_files p
    join items c on p.id = c.parent_id
    where p.parent_id is not null
    and not exists (
      select 1 from media_files child
      where child.parent_id = p.id
      and child.id <> c.id
    )
  )
  delete from media_files
  where id in (select id from items)
`);
const getDirectoriesToReindexQuery = db.prepare(`
  with recursive directory_chain as (
    -- base case
    select id, parent_id, path from media_files
    where kind = 0 and processed = 0
    -- recurse to find all parents so that size/hash can be updated
    union
    select m.id, m.parent_id, m.path
    from media_files m
    inner join directory_chain d on m.id = d.parent_id
  )
  select distinct id, parent_id, path
  from directory_chain
  order by path;
`).as(MediaFile);
import { getDb } from "#sitegen/sqlite";
import * as path from "node:path/posix";
import { FilePermissions } from "./FilePermissions.ts";

View file

@ -0,0 +1,34 @@
import { MediaFile } from "../models/MediaFile.ts";
import { MediaPanel } from "../views/clofi.tsx";
import { addScript } from "#sitegen";
// Cotyledon-era page palette (purple background, gold accent).
export const theme = {
  bg: "#312652",
  fg: "#f0f0ff",
  primary: "#fabe32",
};
// Page title shown for the file-viewer 404 page.
export const meta = { title: "file not found" };
// 404 page for the file viewer: renders the root media panel alongside a
// "file does not exist" notice. Normalized the two stray `className`
// attributes to `class`, matching the attribute style used everywhere else
// in these pages.
export default function CotyledonPage() {
  addScript("../scripts/canvas_cotyledon.client.ts");
  return (
    <div class="files ctld ctld-sb">
      <MediaPanel
        file={MediaFile.getByPath("/")!}
        isLast={false}
        activeFilename={null}
        hasCotyledonCookie={false}
      />
      <div class="panel last">
        <div class="header"></div>
        <div class="content file-view notfound">
          <p>this file does not exist ...</p>
          <p>
            <a href="/file">return</a>
          </p>
        </div>
      </div>
    </div>
  );
}

View file

@ -0,0 +1,27 @@
import { MediaFile } from "@/file-viewer/models/MediaFile.ts";
import { addScript } from "#sitegen";
import { Readme } from "@/file-viewer/cotyledon.tsx";
import { MediaPanel } from "../views/clofi.tsx";
// Cotyledon-era page palette (purple background, gold accent).
export const theme = {
  bg: "#312652",
  fg: "#f0f0ff",
  primary: "#fabe32",
};
// Page title for the "living room" cotyledon page.
export const meta = { title: "living room" };
// "Living room" cotyledon page: root media panel with the Readme pane,
// rendered as if the cotyledon cookie is already set.
export default function CotyledonPage() {
  addScript("../scripts/canvas_cotyledon.client.ts");
  const root = MediaFile.getByPath("/");
  return (
    <div class="files ctld ctld-et">
      <MediaPanel
        file={root!}
        isLast={false}
        activeFilename={null}
        hasCotyledonCookie={true}
      />
      <Readme />
    </div>
  );
}

View file

@ -0,0 +1,27 @@
import { MediaFile } from "../models/MediaFile.ts";
import { MediaPanel } from "../views/clofi.tsx";
import { addScript } from "#sitegen";
import { Speedbump } from "../cotyledon.tsx";
// Cotyledon-era page palette (purple background, gold accent).
export const theme = {
  bg: "#312652",
  fg: "#f0f0ff",
  primary: "#fabe32",
};
// Page title for the "front door" cotyledon entry page.
export const meta = { title: "the front door" };
// "Front door" cotyledon page: root media panel plus the Speedbump pane,
// shown before the visitor has the cotyledon cookie.
export default function CotyledonPage() {
  addScript("../scripts/canvas_cotyledon.client.ts");
  const root = MediaFile.getByPath("/");
  return (
    <div class="files ctld ctld-sb">
      <MediaPanel
        file={root!}
        isLast={false}
        activeFilename={null}
        hasCotyledonCookie={false}
      />
      <Speedbump />
    </div>
  );
}

View file

@ -0,0 +1,9 @@
// Redirects for media paths that moved: keys are old request paths,
// values are the new canonical locations.
export const mediaRedirects: Record<string, string> = {
  "/q+a/172533.png": "/q+a/172533.jpg",
  "/q+a/2021-12-05_smooth.mp4": "/2019/smooth.mp4",
  // NOTE(review): the next two targets point at different directories
  // ("g-missing" vs "g-is-missing") — confirm both spellings are intentional.
  "/q+a/temp_2022-08-17-19-43-32.m4a":
    "/2023/g-missing/fragments/2022-08-17-19-43-32.m4a",
  "/q+a/2023-02-09_20-5814i.png":
    "/2023/g-is-missing/fragments/2023-02-09_20-5814i.png",
  "/2024/waterfalls/": "/2025/waterfalls/",
};

184
src/file-viewer/rsync.ts Normal file
View file

@ -0,0 +1,184 @@
// Utilities for spawning rsync and consuming its output as a `Progress`
// A headless parser is available with `Parse`
// One parsed line of rsync output.
export type Line =
  | { kind: "ignore" } // noise we deliberately drop
  | { kind: "log"; level: "info" | "warn" | "error"; message: string }
  | { kind: "count"; files: number } // incremental file-count line
  | {
    kind: "progress";
    currentFile: string; // last file name seen before this progress line
    bytesTransferred: number;
    percentage: number;
    timeElapsed: string;
    transferNumber: number; // rsync's xfr# counter
    filesToCheck: number; // rsync's to-chk numerator
    totalFiles: number; // rsync's to-chk denominator
    speed: string | null; // e.g. "460.06kB/s", null when absent
  };
// Always passed to rsync so that parseable progress output is emitted.
export const defaultExtraOptions = [
  "--progress",
];
export interface SpawnOptions {
  args: string[]; // rsync arguments (appended after defaultExtraOptions)
  title: string; // label shown on the progress bar / success line
  rsync?: string; // rsync binary, defaults to "rsync" on PATH
  progress?: Progress; // reuse an existing progress bar instead of creating one
  cwd: string; // working directory for the child process
}
// Spawn rsync, stream its stdout/stderr line-by-line through `Parse`, and
// reflect the parsed state onto a `Progress` bar until the process closes.
export async function spawn(options: SpawnOptions) {
  const { rsync = "rsync", args, title, cwd } = options;
  const proc = child_process.spawn(rsync, [...defaultExtraOptions, ...args], {
    stdio: ["ignore", "pipe", "pipe"],
    cwd,
  });
  const parser = new Parse();
  const bar = options.progress ?? new Progress({ text: title });
  // Once the process has closed we stop mutating the bar, since late lines
  // may still arrive from the readline splitters.
  let running = true;
  const stdoutSplitter = readline.createInterface({ input: proc.stdout });
  const stderrSplitter = readline.createInterface({ input: proc.stderr });
  const handleLine = (line: string) => {
    const result = parser.onLine(line);
    if (result.kind === "ignore") {
      return;
    } else if (result.kind === "log") {
      console[result.level](result.message);
    } else if (result.kind === "count") {
      if (!running) return;
      bar.text = `${result.files} files...`;
    } else if (result.kind === "progress") {
      if (!running) return;
      const {
        transferNumber,
        bytesTransferred,
        totalFiles,
        filesToCheck,
        currentFile,
        speed,
      } = result;
      bar.value = transferNumber;
      bar.total = totalFiles;
      // Build the parenthesized detail string; omit empty parts.
      const extras = [
        formatSize(bytesTransferred),
        (totalFiles > filesToCheck)
          ? `${totalFiles - filesToCheck} unchecked`
          : null,
        speed,
      ].filter(Boolean).join(", ");
      // Truncate long file names so the bar stays one line.
      const fileName = currentFile.length > 20
        ? `${currentFile.slice(0, 3)}..${currentFile.slice(-15)}`
        : currentFile;
      bar.text = `[${transferNumber}/${totalFiles}] ${fileName} ${
        extras.length > 0 ? `(${extras})` : ""
      }`;
    } else result satisfies never;
  };
  stdoutSplitter.on("line", handleLine);
  stderrSplitter.on("line", handleLine);
  // events.once resolves with the "close" listener arguments: [code, signal].
  const [code, signal] = await events.once(proc, "close");
  running = false;
  if (code !== 0) {
    const fmt = code ? `code ${code}` : `signal ${signal}`;
    // NOTE(review): the error is RETURNED, not thrown — callers receive an
    // Error as the resolved value on failure; confirm call sites check this.
    const e: any = new Error(`rsync failed with ${fmt}`);
    e.args = [rsync, ...args].join(" ");
    e.code = code;
    e.signal = signal;
    bar.error(e.message);
    return e;
  }
  bar.success(title);
}
// Stateful, headless parser for rsync `--progress` output. Feed it one line
// at a time; it classifies each line and tracks the transfer counters.
//
// Fixes over the previous revision:
// - Progress lines without the optional "(xfr#N, to-chk=A/B)" suffix used to
//   set `currentTransfer` to NaN via `Number(undefined)`; counters are now
//   only updated when their capture groups matched.
// - The file-count regex used unescaped dots (`files...` matched "files" plus
//   any three characters); the dots are now escaped.
export class Parse {
  totalFiles = 0;
  currentTransfer = 0;
  toCheck = 0;
  onLine(line: string): Line {
    line = line.trimEnd();
    // Parse progress lines like:
    //  20c83c16735608fc3de4aac61e36770d7774e0c6/au26.m4s
    //  238,377 100% 460.06kB/s 0:00:00 (xfr#557, to-chk=194111/194690)
    const progressMatch = line.match(
      /^\s+([\d,]+)\s+(\d+)%\s+(\S+)\s+(?:(\S+)\s+)?(?:\(xfr#(\d+), to-chk=(\d+)\/(\d+)\))?/,
    );
    if (progressMatch) {
      const [
        ,
        bytesStr,
        percentageStr,
        speed,
        timeElapsed,
        transferStr,
        toCheckStr,
        totalStr,
      ] = progressMatch;
      // The "(xfr#N, to-chk=A/B)" section is optional; keep the last known
      // counters when it is absent instead of degrading to NaN.
      if (transferStr) this.currentTransfer = Number(transferStr);
      if (toCheckStr) this.toCheck = Number(toCheckStr);
      if (totalStr) this.totalFiles = Number(totalStr);
      return {
        kind: "progress",
        currentFile: this.lastSeenFile || "",
        bytesTransferred: Number(bytesStr.replaceAll(",", "")),
        percentage: Number(percentageStr),
        timeElapsed,
        transferNumber: this.currentTransfer,
        filesToCheck: this.toCheck,
        totalFiles: this.totalFiles,
        speed: speed || null,
      };
    }
    // Skip common rsync info lines; any other unindented line is assumed to
    // be a file name and remembered for the next progress line.
    if (!line.startsWith(" ") && !line.startsWith("rsync")) {
      if (
        line.startsWith("sending incremental file list") ||
        line.startsWith("sent ") ||
        line.startsWith("total size is ") ||
        line.includes("speedup is ") ||
        line.startsWith("building file list")
      ) {
        return { kind: "ignore" };
      }
      if (line.trim().length > 0) {
        this.lastSeenFile = line;
      }
      return { kind: "ignore" };
    }
    // Incremental count lines, e.g. " 1234 files..." (literal dots).
    if (line.startsWith(" ")) {
      const match = line.match(/ (\d+) files\.\.\./);
      if (match) {
        return { kind: "count", files: Number(match[1]) };
      }
    }
    // Everything else is logged; keyword-sniff the severity.
    const lower = line.toLowerCase();
    if (lower.includes("error") || lower.includes("failed")) {
      return { kind: "log", level: "error", message: line };
    }
    if (lower.includes("warning") || lower.includes("skipping")) {
      return { kind: "log", level: "warn", message: line };
    }
    return { kind: "log", level: "info", message: line };
  }
  // Most recent bare file-name line; becomes `currentFile` on progress lines.
  private lastSeenFile: string | null = null;
}
import * as child_process from "node:child_process";
import * as readline from "node:readline";
import events from "node:events";
import { Progress } from "@paperclover/console/Progress";
import { formatSize } from "@/file-viewer/format.ts";

157
src/file-viewer/rules.ts Normal file
View file

@ -0,0 +1,157 @@
// -- file extension rules --
/** Extensions that must have EXIF/etc data stripped */
export const extScrubExif = new Set([
  ".jpg",
  ".jpeg",
  ".png",
  ".mov",
  ".mp4",
  ".m4a",
]);
/** Extensions that rendered syntax-highlighted code, mapped to the
 * highlighter language id used for them. */
export const extsCode = new Map<string, highlight.Language>(Object.entries({
  ".json": "json",
  ".toml": "toml",
  ".ts": "ts",
  ".js": "ts",
  ".tsx": "tsx",
  ".jsx": "tsx",
  ".css": "css",
  ".py": "python",
  ".lua": "lua",
  ".sh": "shell",
  ".bat": "dosbatch",
  ".ps1": "powershell",
  ".cmd": "dosbatch",
  ".yaml": "yaml",
  ".yml": "yaml",
  ".zig": "zig",
  ".astro": "astro",
  ".mdx": "mdx",
  ".xml": "xml",
  ".jsonc": "json",
  ".php": "php",
  ".patch": "diff",
  ".diff": "diff",
}));
/** These files show an audio embed. */
export const extsAudio = new Set([
  ".mp3",
  ".flac",
  ".wav",
  ".ogg",
  ".m4a",
]);
/** These files show a video embed. */
export const extsVideo = new Set([
  ".mp4",
  ".mkv",
  ".webm",
  ".avi",
  ".mov",
]);
/** These files show an image embed */
export const extsImage = new Set([
  ".jpg",
  ".jpeg",
  ".png",
  ".webp",
  ".avif",
  ".heic",
]);
/** These files show an image embed, but aren't optimized */
export const extsImageLike = new Set([
  ...extsImage,
  ".svg",
  ".gif",
]);
/** These files populate `duration` using `ffprobe` */
export const extsDuration = new Set([...extsAudio, ...extsVideo]);
/** These files populate `dimensions` using `ffprobe` */
export const extsDimensions = new Set([...extsImage, ...extsVideo]);
/** These files read file contents into `contents`, as-is */
export const extsReadContents = new Set([".txt", ".chat", ".dirsort"]);
/** Archive container formats. */
export const extsArchive = new Set([
  ".zip",
  ".rar",
  ".7z",
  ".tar",
  ".gz",
  ".bz2",
  ".xz",
]);
/**
 * Formats which are already compression formats, meaning a pass
 * through zstd would offer little to negative benefits
 */
export const extsPreCompressed = new Set([
  ...extsAudio,
  ...extsVideo,
  ...extsImageLike,
  ...extsArchive,
  ".docx",
  ".xlsx",
  ".pptx",
  ".psd",
  ".sketch",
  ".ai",
  ".3ds",
  ".fbx",
  ".blend",
  ".dng",
]);
// SVG came in via extsImageLike but is plain text, which compresses well.
extsPreCompressed.delete(".svg");
/**
 * Pick the icon name for a file-listing entry.
 * Precedence: directory state, special-cased paths/names, exact extension
 * matches, then broad extension categories; "file" is the fallback.
 */
export function fileIcon(
  file: Pick<MediaFile, "kind" | "basename" | "path">,
  dirOpen?: boolean,
) {
  // Directories get an open/closed icon regardless of their name.
  if (file.kind === MediaFileKind.directory) {
    return dirOpen ? "dir-open" : "dir";
  }
  // -- special cases --
  if (file.path === "/2024/for everyone") return "snow";
  // -- basename cases --
  if (file.basename === "readme.txt") return "readme";
  // -- extension cases (exact matches) --
  const ext = path.extname(file.basename).toLowerCase();
  const exact: Record<string, string> = {
    ".comp": "fusion",
    ".fuse": "fusion",
    ".setting": "fusion",
    ".json": "json",
    ".toml": "json",
    ".yaml": "json",
    ".yml": "json",
    ".blend": "blend",
    ".chat": "chat",
    ".html": "webpage",
    ".lnk": "link",
    ".txt": "text",
    ".md": "text",
  };
  const hit = exact[ext];
  if (hit) return hit;
  // -- extension categories, in priority order --
  if (extsVideo.has(ext)) return "video";
  if (extsAudio.has(ext)) return "audio";
  if (extsImage.has(ext)) return "image";
  if (extsArchive.has(ext)) return "archive";
  if (extsCode.has(ext)) return "code";
  return "file";
}
// -- viewer rules --
// Maps a top-level directory path to a canvas id — presumably consumed by
// the viewer to select a `canvas_<year>` background script; verify at the
// call site (not visible in this file region).
const pathToCanvas = new Map<string, string>(Object.entries({
  "/2017": "2017",
  "/2018": "2018",
  "/2019": "2019",
  "/2020": "2020",
  "/2021": "2021",
  "/2022": "2022",
  "/2023": "2023",
  "/2024": "2024",
}));
import type * as highlight from "./highlight.ts";
import { MediaFile, MediaFileKind } from "@/file-viewer/models/MediaFile.ts";
import * as path from "node:path";

View file

@ -0,0 +1,233 @@
// Vibe coded with AI
// Background canvas for the 2017 era: two slowly drifting, slightly rotated
// checkerboards whose tile size pulses on offset sine waves. Returns a
// cleanup function that stops the animation and detaches listeners.
(globalThis as any).canvas_2017 = function (canvas: HTMLCanvasElement) {
  const isStandalone = canvas.getAttribute("data-standalone") === "true";
  // Configuration interface for the checkerboard effect
  interface CheckerboardConfig {
    fps: number; // frames per second
    color1: string; // first checkerboard color
    color2: string; // second checkerboard color
    opacity: number; // opacity of each checkerboard (0-1)
    // Speeds are applied as `speed * dt` with dt in milliseconds,
    // so they are pixels per millisecond (not per second).
    speedX1: number; // horizontal speed of first checkerboard
    speedY1: number; // vertical speed of first checkerboard
    speedX2: number; // horizontal speed of second checkerboard
    speedY2: number; // vertical speed of second checkerboard
    baseTileSize: number; // base size of checkerboard tiles
    sizeVariation: number; // maximum variation in tile size (pixels)
    sineFrequency1: number; // frequency of first sine wave for size variation
    sineFrequency2: number; // frequency of second sine wave for size variation
    sineOffset: number; // offset between the two sine waves (radians)
    rotation: number; // rotation in degrees for the first pattern
    rotation2: number; // rotation in degrees for the second pattern
  }
  // Default configuration
  const config: CheckerboardConfig = {
    fps: 30,
    color1: "#1A1C17",
    color2: "#1A1C17",
    opacity: 0.3,
    speedX1: -0.02, // moving left slowly
    speedY1: -0.01, // moving up slowly
    speedX2: -0.015, // moving left (slightly slower)
    speedY2: 0.012, // moving down slowly
    baseTileSize: 200,
    sizeVariation: 1.5,
    sineFrequency1: 0.0005,
    sineFrequency2: 0.0008,
    sineOffset: Math.PI / 2, // 90 degrees offset
    rotation: 2, // 2 degree rotation
    rotation2: -2, // -2 degree rotation
  };
  // Get the canvas context
  const ctx = canvas.getContext("2d");
  if (!ctx) {
    console.error("Could not get canvas context");
    return () => {};
  }
  // Background: solid when standalone, transparent when embedded in a page.
  if (isStandalone) {
    canvas.style.backgroundColor = "#737D60";
  } else {
    canvas.style.backgroundColor = "transparent";
  }
  // Variables to track position and animation
  let width = canvas.width;
  let height = canvas.height;
  let animationFrameId: number;
  let lastFrameTime = 0;
  const frameInterval = 1000 / config.fps;
  // Position offsets for the two checkerboards (centered)
  let offset1X = 0;
  let offset1Y = 0;
  let offset2X = 0;
  let offset2Y = 0;
  // Time variable for sine wave calculation
  let time = 0;
  // Convert rotation to radians
  const rotationRad = (config.rotation * Math.PI) / 180;
  const rotationRad2 = (config.rotation2 * Math.PI) / 180;
  // Update canvas dimensions when resized
  const updateDimensions = () => {
    width = canvas.width = canvas.clientWidth;
    height = canvas.height = canvas.clientHeight;
  };
  // Calculate the diagonal length of the canvas (to ensure rotation covers corners)
  const calculateDiagonal = () => {
    return Math.sqrt(width * width + height * height);
  };
  // Draw a single checkerboard pattern scaled from center with rotation
  const drawCheckerboard = (
    offsetX: number,
    offsetY: number,
    tileSize: number,
    color1: string,
    color2: string,
    opacity: number,
    rotationRad: number,
  ) => {
    ctx.globalAlpha = opacity;
    // Get the center of the viewport
    const centerX = width / 2;
    const centerY = height / 2;
    // Save the current transformation state
    ctx.save();
    // Move the origin to the canvas center and rotate the pattern around it
    ctx.translate(centerX, centerY);
    ctx.rotate(rotationRad);
    // Calculate the number of tiles needed to cover the rotated canvas
    // We need to use the diagonal length to ensure we cover the corners when rotated
    const diagonal = calculateDiagonal();
    const tilesX = Math.ceil(diagonal / tileSize) + 6; // Added extra tiles for rotation
    const tilesY = Math.ceil(diagonal / tileSize) + 6;
    // Calculate how many tiles fit from center to edge (in each direction)
    const halfTilesX = Math.ceil(tilesX / 2);
    const halfTilesY = Math.ceil(tilesY / 2);
    // Adjust the offset to be relative to the center
    // The modulo ensures the pattern repeats smoothly even with scaling
    const adjustedOffsetX = offsetX % (tileSize * 2);
    const adjustedOffsetY = offsetY % (tileSize * 2);
    // Draw the checker pattern, centered on the viewport
    for (let y = -halfTilesY; y <= halfTilesY; y++) {
      for (let x = -halfTilesX; x <= halfTilesX; x++) {
        // Determine if this tile should be colored (creating checker pattern)
        // We add a large number to ensure (x+y) is always positive for the modulo
        if ((x + y + 1000) % 2 === 0) {
          ctx.fillStyle = color1;
        } else {
          ctx.fillStyle = color2;
        }
        // Calculate the position of this tile relative to the center
        // The adjusted offset creates the movement effect
        const posX = (x * tileSize) + adjustedOffsetX;
        const posY = (y * tileSize) + adjustedOffsetY;
        // Draw the tile
        ctx.fillRect(
          posX - tileSize / 2,
          posY - tileSize / 2,
          tileSize,
          tileSize,
        );
      }
    }
    // Restore the transformation state
    ctx.restore();
  };
  // Animation loop
  const animate = (currentTime: number) => {
    animationFrameId = requestAnimationFrame(animate);
    // Control frame rate
    if (currentTime - lastFrameTime < frameInterval) {
      return;
    }
    // Calculate the time elapsed since the last frame (milliseconds)
    const dt = currentTime - lastFrameTime;
    lastFrameTime = currentTime;
    // Increment time for sine wave calculation
    time += dt;
    // Update the position offsets based on speed and elapsed time
    offset1X += config.speedX1 * dt;
    offset1Y += config.speedY1 * dt;
    offset2X += config.speedX2 * dt;
    offset2Y += config.speedY2 * dt;
    // Calculate the tile sizes using sine waves
    const tileSize1 = config.baseTileSize +
      Math.sin(time * config.sineFrequency1) * config.sizeVariation;
    const tileSize2 = config.baseTileSize +
      Math.sin(time * config.sineFrequency2 + config.sineOffset) *
        config.sizeVariation;
    // Clear canvas
    ctx.clearRect(0, 0, width, height);
    // Draw the two checkerboards
    drawCheckerboard(
      offset1X,
      offset1Y,
      tileSize1,
      config.color1,
      "transparent",
      config.opacity,
      rotationRad,
    );
    drawCheckerboard(
      offset2X,
      offset2Y,
      tileSize2,
      config.color2,
      "transparent",
      config.opacity,
      rotationRad2,
    );
    // Reset global alpha
    ctx.globalAlpha = 1.0;
  };
  // Initialize the animation
  const init = () => {
    // Set up resize handler
    window.addEventListener("resize", updateDimensions);
    // Initial setup
    updateDimensions();
    // Start animation
    lastFrameTime = performance.now();
    animationFrameId = requestAnimationFrame(animate);
  };
  // Start the animation
  init();
  // Return cleanup function
  return () => {
    window.removeEventListener("resize", updateDimensions);
    cancelAnimationFrame(animationFrameId);
  };
};

View file

@ -0,0 +1,431 @@
// This canvas is based on the maze generation algo in Tanks. This was
// originally written in C++ as a single function in 2018, and was ported to TS
// by Chloe in 2025 for the cotyledon canvas.
//
// The main difference is that this version is a visualization, rather than the
// practical function. Instead of taking a millisecond, only 5 steps are
// performed per second, visualizing the whole ordeal. It also isn't a playable
// game, obviously.
//
// Ported with love because I care about my old self
// She deserves the world, but instead gave it to me.
(globalThis as any).canvas_2018 = function (canvas: HTMLCanvasElement) {
const isStandalone = canvas.getAttribute("data-standalone") === "true";
if (isStandalone) {
canvas.style.backgroundColor = "#27201E";
}
interface Cell {
down: boolean;
right: boolean;
visited: boolean;
cell_flash: number;
down_flash: number;
right_flash: number;
}
interface Pos {
x: number;
y: number;
/** Where the wall is relative to x, y. */
dir: "left" | "right" | "up" | "down";
}
interface Maze {
grid: Grid;
cursor: { x: number; y: number };
lastTick: number;
/* Pixels */
transform: number;
newCellsToVisit: Pos[];
randomWallBag: Cell[];
randomWallTarget: number;
renderOffset: { x: number; y: number };
done: boolean;
}
const hex = (color: number[]) =>
"#" + color.map((c) => c.toString(16).padStart(2, "0")).join("");
let cellSize: number;
let borderThickness: number;
const cellFlashModifier = isStandalone ? 0.4 : 0.2;
const color = isStandalone ? "#170d0b" : "#231C1A";
const bg = [0x27, 0x20, 0x1E];
const wallFlashColor = [0xFF, 0xA8, 0x7A];
const cellFlashColor = "#FFA87A";
const updateTime = 1000 / 7;
const randomWallBreakInterval = [6, 12]; // every 10 to 18 walls.
function randomBetween(min: number, max: number) {
return Math.round(
Math.random() * (max - min),
) + min;
}
function randomOf<T>(array: T[]): T {
return array[randomBetween(0, array.length - 1)];
}
function randomWallTarget() {
return randomBetween(
randomWallBreakInterval[0],
randomWallBreakInterval[1],
);
}
// Originally, this used a 2-dimensional array. However, I wanted to make sure
// that the grid could be infinitely sized. This grid constructs new cells on
// demand, as needed.
class Grid {
cells = new Map<number, Cell>();
cell({ x, y }: { x: number; y: number }) {
const k = ((x | 0) << 16) + (y | 0);
const { cells } = this;
let existing = this.cells.get(k);
if (!existing) {
existing = {
cell_flash: 0,
down: true,
down_flash: 0,
right: true,
right_flash: 0,
visited: false,
};
cells.set(k, existing);
}
return existing;
}
forAll(
renderOffset: { x: number; y: number },
width: number,
height: number,
cb: (cell: Cell, pos: { x: number; y: number }) => void,
) {
const { x: offsetX, y: offsetY } = renderOffset;
const startX = Math.floor(-offsetX / cellSize);
const startY = Math.floor(-offsetY / cellSize);
const endX = Math.ceil((width - offsetX) / cellSize);
const endY = Math.ceil((height - offsetY) / cellSize);
for (let x = startX; x <= endX; x++) {
for (let y = startY; y <= endY; y++) {
const cellX = offsetX + x * cellSize;
const cellY = offsetY + y * cellSize;
cb(this.cell({ x, y }), { x: cellX, y: cellY });
}
}
}
}
const ctx = canvas.getContext("2d")!;
if (!ctx) {
console.error("Could not get canvas context");
return () => {};
}
let width: number, height: number;
const updateDimensions = () => {
width = canvas.width = canvas.offsetWidth;
height = canvas.height = canvas.offsetHeight;
cellSize = 100;
borderThickness = 8;
};
updateDimensions();
setTimeout(() => {
updateDimensions();
}, 10);
let maze = initMaze();
let nextMaze: Maze | null = null;
let completeFade = 0;
function initMaze(): Maze {
return {
grid: new Grid(),
transform: 0,
cursor: {
x: randomBetween(0, Math.ceil(width / cellSize)),
y: randomBetween(0, Math.ceil(height / cellSize)),
},
lastTick: performance.now(),
randomWallBag: [],
randomWallTarget: randomWallTarget(),
newCellsToVisit: [],
renderOffset: { x: 0, y: 0 },
done: false,
};
}
function isOnScreen(maze: Maze, x: number, y: number) {
const { x: offsetX, y: offsetY } = maze.renderOffset;
const cellX = offsetX + x * cellSize;
const cellY = offsetY + y * cellSize;
return (
cellX + cellSize > 0 &&
cellX < width &&
cellY + cellSize > 0 &&
cellY < height
);
}
function tick(maze: Maze, other?: Maze) {
if (maze.done) return;
// The original maze algorithm broke down 4%-8% of random right facing
// walls, and 4%-8% of down facing walls. It did this at the end.
// To make this visual more interesting, two random walls will be broken
// down every 12-25 cell visits. This way, the main trail is always running.
if (maze.randomWallBag.length > maze.randomWallTarget) {
const down: Cell = randomOf(maze.randomWallBag);
const right: Cell = randomOf(maze.randomWallBag);
maze.randomWallBag.forEach((cell) =>
cell.cell_flash = Math.min(cell.cell_flash + 0.2, 1)
);
down.cell_flash = 1;
down.down = false;
down.down_flash = 1;
right.cell_flash = 1;
right.right = false;
right.right_flash = 1;
maze.randomWallBag = [];
maze.randomWallTarget = randomWallTarget();
return;
}
// The main algorithm was simple: Have a cursor position, and move it in a
// random direction that it had not seen before. Once it had run out of
// options, branch off of a previous location. Only visit each cell once.
//
// In this visualization, cells that are too far offscreen are softly
// treated as "visited", which is how the simulation always stays in frame.
const current = maze.grid.cell(maze.cursor);
current.visited = true;
current.cell_flash = 1;
maze.randomWallBag.push(current);
const adjacent = ([
{ x: maze.cursor.x + 1, y: maze.cursor.y, dir: "left" },
{ x: maze.cursor.x - 1, y: maze.cursor.y, dir: "right" },
{ x: maze.cursor.x, y: maze.cursor.y + 1, dir: "up" },
{ x: maze.cursor.x, y: maze.cursor.y - 1, dir: "down" },
] as Pos[]).filter((pos) =>
isOnScreen(maze, pos.x, pos.y) &&
maze.grid.cell(pos).visited === false
);
if (adjacent.length === 0) {
// move cursor to a random cell that has not been visited.
const cells = maze.newCellsToVisit.filter((pos) =>
isOnScreen(maze, pos.x, pos.y) &&
maze.grid.cell(pos).visited === false
);
if (cells.length === 0) {
maze.done = true;
return;
}
const continuePos = randomOf(cells);
breakWall(maze, continuePos, other);
maze.cursor = { x: continuePos.x, y: continuePos.y };
return;
}
// break a random wall
const toBreak = randomOf(adjacent);
breakWall(maze, toBreak, other);
maze.cursor = { x: toBreak.x, y: toBreak.y };
// add the other directions to the new cells to visit.
maze.newCellsToVisit.push(
...adjacent.filter((pos) => pos.dir !== toBreak.dir),
);
}
function breakWall(maze: Maze, pos: Pos, other?: Maze) {
if (pos.dir === "right") {
const cell = maze.grid.cell(pos);
cell.right = false;
cell.right_flash = 1;
if (other) cell.right = false;
} else if (pos.dir === "down") {
const cell = maze.grid.cell(pos);
cell.down = false;
cell.down_flash = 1;
if (other) cell.down = false;
} else if (pos.dir === "left") {
const cell = maze.grid.cell({ x: pos.x - 1, y: pos.y });
cell.right = false;
cell.right_flash = 1;
if (other) cell.right = false;
} else if (pos.dir === "up") {
const cell = maze.grid.cell({ x: pos.x, y: pos.y - 1 });
cell.down = false;
cell.down_flash = 1;
if (other) cell.down = false;
}
}
function renderOffset(maze: Maze) {
return { x: maze.transform, y: maze.transform };
}
let animationFrameId: number;
let last = performance.now();
let dt: number = 0;
function renderMazeBorders(maze: Maze, opacity: number) {
ctx.globalAlpha = opacity;
maze.grid.forAll(
maze.renderOffset,
width,
height,
(cell, { x: cellX, y: cellY }) => {
// Walls
if (cell.right) {
ctx.fillStyle = color;
ctx.fillRect(
cellX + cellSize - borderThickness / 2,
cellY - borderThickness / 2,
borderThickness,
cellSize + borderThickness,
);
}
if (cell.down) {
ctx.fillStyle = color;
ctx.fillRect(
cellX - borderThickness / 2,
cellY + cellSize - borderThickness / 2,
cellSize + borderThickness,
borderThickness,
);
}
},
);
ctx.globalAlpha = 1;
}
function renderCellFlash(maze: Maze) {
maze.grid.forAll(
maze.renderOffset,
width,
height,
(cell, { x: cellX, y: cellY }) => {
// Cell flash to show visiting path.
if (cell.cell_flash > 0) {
cell.cell_flash = Math.max(0, cell.cell_flash - dt / 1000);
ctx.fillStyle = cellFlashColor;
ctx.globalAlpha = cell.cell_flash * cellFlashModifier;
ctx.fillRect(cellX, cellY, cellSize, cellSize);
ctx.globalAlpha = 1;
}
},
);
}
function renderBorderFlash(maze: Maze) {
maze.grid.forAll(
maze.renderOffset,
width,
height,
(cell, { x: cellX, y: cellY }) => {
if (cell.right_flash == 0 && cell.down_flash == 0) {
return;
}
// Walls
const cellFlash = cell.cell_flash * cellFlashModifier;
if (cell.right_flash > 0) {
cell.right_flash = Math.max(0, cell.right_flash - dt / 500);
ctx.fillStyle = interpolateColor(
bg,
wallFlashColor,
Math.max(cell.right_flash, cellFlash),
);
if (cellFlash > cell.right_flash) {
ctx.globalAlpha = cell.right_flash / cellFlash;
}
ctx.fillRect(
cellX + cellSize - borderThickness / 2,
cellY + borderThickness / 2,
borderThickness,
cellSize - borderThickness,
);
ctx.globalAlpha = 1;
}
if (cell.down_flash > 0) {
if (cellFlash > cell.down_flash) {
ctx.globalAlpha = cell.down_flash / cellFlash;
}
cell.down_flash = Math.max(0, cell.down_flash - dt / 500);
ctx.fillStyle = interpolateColor(
bg,
wallFlashColor,
Math.max(cell.down_flash, cellFlash),
);
ctx.fillRect(
cellX + borderThickness / 2,
cellY + cellSize - borderThickness / 2,
cellSize - borderThickness,
borderThickness,
);
ctx.globalAlpha = 1;
}
},
);
}
function render() {
const now = performance.now();
dt = now - last;
maze.transform += dt * 0.005;
maze.renderOffset = renderOffset(maze);
if (!maze.done) {
if (now - maze.lastTick >= updateTime) {
tick(maze);
maze.lastTick = now;
if (maze.done) {
nextMaze = initMaze();
nextMaze.transform = (maze.transform % cellSize) - dt * 0.005;
nextMaze.lastTick = now;
completeFade = 0;
}
}
}
if (nextMaze) {
nextMaze.transform += dt * 0.005;
nextMaze.renderOffset = renderOffset(nextMaze);
if (!nextMaze.done && now - nextMaze.lastTick >= updateTime) {
tick(nextMaze, maze);
nextMaze.lastTick = now;
}
}
last = now;
ctx.clearRect(0, 0, width, height);
renderCellFlash(maze);
if (nextMaze) renderCellFlash(nextMaze);
renderMazeBorders(maze, 1);
if (nextMaze) {
renderMazeBorders(nextMaze, completeFade);
completeFade += dt / 3000;
if (completeFade >= 1) {
maze = nextMaze;
nextMaze = null;
}
}
renderBorderFlash(maze);
if (nextMaze) {
renderCellFlash(nextMaze);
renderBorderFlash(nextMaze);
}
animationFrameId = requestAnimationFrame(render);
}
// Blend each channel of `start` toward `end` by factor `t` (0 = start,
// 1 = end), rounding to integers, and encode the result with hex().
function interpolateColor(start: number[], end: number[], t: number) {
  const blended: number[] = [];
  for (let i = 0; i < start.length; i++) {
    const from = start[i];
    const to = end[i];
    blended.push(Math.round(from + (to - from) * t));
  }
  return hex(blended);
}
// Wire up resize handling, kick off the animation loop, and hand back a
// cleanup function for the caller to dispose the effect.
window.addEventListener("resize", updateDimensions);
animationFrameId = requestAnimationFrame(render);
// cleanup function
return () => {
  window.removeEventListener("resize", updateDimensions);
  cancelAnimationFrame(animationFrameId);
};
};

View file

@ -0,0 +1,213 @@
// Ported from CanvasAPI, allegedly written on 2019-08-26.
(globalThis as any).canvas_2019 = function (canvas: HTMLCanvasElement) {
const isStandalone = canvas.getAttribute("data-standalone") === "true";
if (isStandalone) {
canvas.parentElement!.style.backgroundColor = "#121013";
}
// Canvas.tsx
// Minimal 2D-canvas render-loop base class. Subclasses implement render()
// and get start/stop control plus per-frame delta timing.
abstract class CanvasAPI {
  canvas: HTMLCanvasElement;
  ctx: CanvasRenderingContext2D;
  width = 0;
  height = 0;
  // NOTE(review): _disposed is checked in startRenderLoop but nothing in
  // this class ever sets it to true — confirm whether a dispose() was lost.
  private _disposed = false;
  private _running = false;
  private _last = 0; // timestamp of the previous frame
  constructor(canvas: HTMLCanvasElement) {
    this.canvas = canvas;
    // Match the backing store to the element's CSS size.
    this.width = canvas.width = canvas.clientWidth;
    this.height = canvas.height = canvas.clientHeight;
    const ctx = this.canvas.getContext("2d");
    if (!ctx) {
      throw new Error("Canvas2D Not Supported!");
    }
    this.ctx = ctx;
  }
  // Stop after the next frame check; the queued frame exits early.
  stopRenderLoop() {
    this._running = false;
  }
  startRenderLoop() {
    if (this._disposed) return;
    this._running = true;
    this._last = performance.now();
    requestAnimationFrame(this._renderLoop);
  }
  // Arrow property so `this` stays bound when passed to rAF. `delta` is
  // the rAF timestamp; render() receives the elapsed ms since last frame.
  private _renderLoop = (delta: number) => {
    if (!this._running) return;
    this.render(delta - this._last);
    this._last = delta;
    requestAnimationFrame(this._renderLoop);
  };
  abstract render(delta: number): void;
}
// VaultBackground.ts
// Append one color stop to `grad` at offset `gradStop`; the stop's alpha
// is the dot's opacity scaled by the stop-specific `gradOpacity`.
function addGSHelper(
  grad: CanvasGradient,
  color: string,
  dotOpacity: number,
  gradStop: number,
  gradOpacity: number,
) {
  const alpha = dotOpacity * gradOpacity;
  const rgba = `rgba(${color},${alpha})`;
  grad.addColorStop(gradStop, rgba);
}
// Uniform random value in [target - dist, target + dist).
function randAround(target: number, dist: number) {
  const unit = Math.random() * 2 - 1; // uniform in [-1, 1)
  return target + unit * dist;
}
// One floating light. Position is normalized (roughly 0..1, scaled by the
// scene at draw time); dots spawn near the bottom and drift upward along a
// chain of integrated velocity terms, fading in, then out, then recycling.
class Dot {
  x = Math.random() * 1.1 - 0.05; // slight horizontal overscan
  y = Math.random() / 4 + 0.9; // spawn near the bottom of the view
  size = Math.random() * 200 + 50; // gradient radius in px
  opacity = 0;
  opacityRandom = Math.random() / 3 + 0.3; // per-dot brightness scale
  fadeInOpacity = 1; // reset to 0 externally for the initial fade-in
  // "r, g, b" channel string with per-dot variance around a base tint.
  color = `${randAround(217, 30)}, ${randAround(170, 30)}, ${
    randAround(255, 20)
  }`;
  life = 0;
  // Motion chain: each term is integrated into the one above it every tick
  // (ySpeed_4 -> ySpeed_3 -> ySpeed_2 -> ySpeed_1 -> y).
  ySpeed_1 = 0;
  ySpeed_2 = -0.0000063;
  ySpeed_3 = 0.000000016;
  ySpeed_4 = 0.000000000009;
  seed = Math.random(); // phase for the horizontal sway in render()
  delete = false;
  // Advance one tick: opacity ramps up while life < 115, holds, fades out
  // past 300, and the dot recycles after 450.
  // NOTE(review): the `init` parameter is unused in this body.
  update(init: boolean) {
    this.life += 0.8;
    if (this.life < 115) {
      this.opacity = this.life / 230;
    } else if (this.life > 450) {
      this.delete = true;
    } else if (this.life > 300) {
      this.opacity = (150 + 300 - this.life) / 300;
    }
    // Integrate the motion chain, then apply vertical velocity.
    this.ySpeed_3 += this.ySpeed_4;
    this.ySpeed_2 += this.ySpeed_3;
    this.ySpeed_1 += this.ySpeed_2;
    this.y += this.ySpeed_1 * 0.5;
    this.size -= 0.08;
    if (this.delete) {
      // Recycle in place: overwrite every field with a fresh Dot's values.
      Object.assign(this, new Dot());
    }
  }
  // Draw as a soft radial gradient, swaying horizontally over time and
  // offset by the scene's current shake.
  render(scene: VaultBackground) {
    const ctx = scene.ctx;
    if (this.fadeInOpacity < 1) {
      this.fadeInOpacity += 0.0075;
    }
    const finalX = this.x +
      Math.sin(this.seed * Math.PI * 2 + Date.now() / 15000) * 0.2;
    // Treat the layout as at least 700px wide so narrow screens keep the
    // same dot spread, centered horizontally.
    const drawX = scene.shakeX +
      finalX * Math.max(700, scene.width) -
      (Math.max(700, scene.width) - scene.width) / 2;
    const drawY = scene.shakeY + (this.y * 1.5 - 0.5) * scene.height;
    const opacity = this.opacity * this.opacityRandom * this.fadeInOpacity;
    const grad = ctx.createRadialGradient(
      drawX,
      drawY,
      0,
      drawX,
      drawY,
      this.size,
    );
    // Five stops approximate a soft falloff to fully transparent.
    addGSHelper(grad, this.color, opacity, 0, 1);
    addGSHelper(grad, this.color, opacity, 0.8, 0.7);
    addGSHelper(grad, this.color, opacity, 0.87, 0.5);
    addGSHelper(grad, this.color, opacity, 0.93, 0.3);
    addGSHelper(grad, this.color, opacity, 1, 0);
    ctx.fillStyle = grad;
    ctx.fillRect(
      drawX - this.size,
      drawY - this.size,
      this.size * 2,
      this.size * 2,
    );
  }
}
// Scene: a set of drifting dots plus a decaying screen-shake that can also
// translate a DOM element along with the canvas content.
class VaultBackground extends CanvasAPI {
  private items = new Set<Dot>();
  private shakeVar = 0; // shake intensity, decays toward 0
  private dom?: HTMLElement; // element currently being shaken, if any
  shakeX = 0;
  shakeY = 0;
  constructor(canvas: HTMLCanvasElement) {
    super(canvas);
    // Warm-up: add a dot every 7th iteration and step the whole set each
    // time, so the field starts mid-animation instead of all-fresh dots.
    for (let i = 0; i < 450; i++) {
      if (i % 7 === 0) {
        this.items.add(new Dot());
      }
      this.items.forEach((x) => x.update(true));
    }
    // Restart the fade-in for the first visible frame.
    this.items.forEach((x) => x.fadeInOpacity = 0);
  }
  render(): void {
    this.ctx.clearRect(0, 0, this.width, this.height);
    this.items.forEach((x) => (x.update(false), x.render(this)));
    if (this.shakeVar >= 0.0001) {
      // Decay; the quadratic term damps faster while intensity is high.
      this.shakeVar *= 0.97 - 0.22 * this.shakeVar;
      if (this.shakeVar >= 0.0001) {
        this.shakeX = (Math.random() * 2 - 1) * this.shakeVar * 65;
        this.shakeY = (Math.random() * 2 - 1) * this.shakeVar * 65;
        if (this.dom) {
          this.dom.style.transform =
            `translate(${this.shakeX}px,${this.shakeY}px)`;
        }
      } else {
        // Shake finished: reset offsets and release the DOM element.
        this.shakeX = 0;
        this.shakeY = 0;
        if (this.dom) this.dom.style.removeProperty("transform");
        this.dom = undefined;
      }
    }
  }
  // Start a full-strength shake; defaults to shaking document.body.
  shake(dom?: HTMLElement | null) {
    this.dom = dom || document.body;
    this.shakeVar = 1;
  }
}
// Binding code: build the scene, expose it on globalThis.vault (presumably
// so other page scripts can trigger shakes — confirm against callers), and
// return a cleanup function.
let bg = new VaultBackground(canvas);
bg.startRenderLoop();
canvas.style.opacity = "0.2"; // render the whole effect dimmed
function onResize() {
  // Keep both the canvas backing store and the scene's cached size in sync.
  bg.width = canvas.width = canvas.clientWidth;
  bg.height = canvas.height = canvas.clientHeight;
}
window.addEventListener("resize", onResize);
onResize();
(globalThis as any).vault = bg;
return () => {
  bg.stopRenderLoop();
  window.removeEventListener("resize", onResize);
};
};

View file

@ -0,0 +1,197 @@
// Vibe coded with AI. Heavily tuned.
(globalThis as any).canvas_2020 = function (canvas: HTMLCanvasElement) {
const isStandalone = canvas.getAttribute("data-standalone") === "true";
// Rain effect with slanted lines
// Configuration interface for the rain effect
interface RainConfig {
fps: number; // frames per second
color: string; // color of rain particles
angle: number; // angle in degrees
particleDensity: number; // particles per 10000 pixels of canvas area
speed: number; // speed of particles (pixels per frame)
lineWidth: number; // thickness of rain lines
lineLength: number; // length of rain lines
}
// Rain particle interface
interface RainParticle {
x: number; // x position
y: number; // y position
}
// Default configuration
const config: RainConfig = {
fps: 16,
color: isStandalone ? "#00FEFB99" : "#081F24",
angle: -18,
particleDensity: 1,
speed: 400,
lineWidth: 8,
lineLength: 100,
};
// Get the canvas context
const ctx = canvas.getContext("2d");
if (!ctx) {
console.error("Could not get canvas context");
return () => {};
}
// Make canvas transparent
if (isStandalone) {
canvas.style.backgroundColor = "#0F252B";
} else {
canvas.style.backgroundColor = "transparent";
}
// Calculate canvas dimensions and update when resized
let width = canvas.width;
let height = canvas.height;
let particles: RainParticle[] = [];
let animationFrameId: number;
let lastFrameTime = 0;
const frameInterval = 1000 / config.fps;
// Calculate angle in radians
const angleRad = (config.angle * Math.PI) / 180;
// Update canvas dimensions and resize the particle pool when the element
// is resized. Also used for initial setup.
const updateDimensions = () => {
  width = canvas.width = canvas.offsetWidth;
  height = canvas.height = canvas.offsetHeight;
  // Calculate the canvas area in pixels
  const canvasArea = width * height;
  // Target count scales with area: particleDensity per 10000 px².
  const targetParticleCount = Math.floor(
    (canvasArea / 10000) * config.particleDensity,
  );
  // Horizontal overshoot caused by the slanted fall, plus one line length,
  // so lines can enter/exit from off-screen without popping.
  const buffer = Math.abs(height * Math.tan(angleRad)) + config.lineLength;
  // Grow or shrink the pool to match the new target.
  if (particles.length < targetParticleCount) {
    // Add more particles if needed
    for (let i = particles.length; i < targetParticleCount; i++) {
      particles.push(createParticle(true, buffer));
    }
  } else if (particles.length > targetParticleCount) {
    // Remove excess particles
    particles = particles.slice(0, targetParticleCount);
  }
};
// Spawn one rain particle. With `initialDistribution` set (startup) the
// particle may appear anywhere on the canvas; otherwise it starts above the
// top edge with vertical jitter. `buffer` widens the horizontal range so
// slanted lines can enter from off-screen.
const createParticle = (
  initialDistribution = false,
  buffer: number,
): RainParticle => {
  const x = Math.random() * (width + buffer * 2) - buffer;
  let y: number;
  if (initialDistribution) {
    const verticalSpan = height + config.lineLength * 2;
    y = Math.random() * verticalSpan - config.lineLength;
  } else {
    const jitter = Math.random() * config.lineLength * 20;
    y = -config.lineLength - jitter;
  }
  return { x, y };
};
// Advance every particle along the slant; recycle any that fall below the
// canvas or drift past the horizontal buffer.
const updateParticles = () => {
  // Horizontal overshoot caused by the slant, plus one line length.
  const buffer = Math.abs(height * Math.tan(angleRad)) + config.lineLength;
  // Per-frame displacement is the same for every particle; hoist it.
  const stepX = Math.sin(angleRad) * config.speed;
  const stepY = Math.cos(angleRad) * config.speed;
  particles.forEach((p, i) => {
    p.x += stepX;
    p.y += stepY;
    const offBottom = p.y > height + config.lineLength;
    const offSide = p.x < -buffer || p.x > width + buffer;
    if (offBottom || offSide) {
      // Replace rather than mutate so the reset logic stays in one place.
      particles[i] = createParticle(false, buffer);
    }
  });
};
// Repaint all rain lines as a single stroked path.
const drawParticles = () => {
  ctx.clearRect(0, 0, width, height);
  ctx.strokeStyle = config.color;
  ctx.lineWidth = config.lineWidth;
  ctx.lineCap = "square";
  // Every line has the same direction vector; compute it once.
  const dx = Math.sin(angleRad) * config.lineLength;
  const dy = Math.cos(angleRad) * config.lineLength;
  ctx.beginPath();
  for (const p of particles) {
    // Skip segments far off-screen; they cannot contribute pixels.
    const visible = p.y >= -config.lineLength * 2 &&
      p.y <= height + config.lineLength;
    if (!visible) continue;
    ctx.moveTo(p.x, p.y);
    ctx.lineTo(p.x + dx, p.y + dy);
  }
  ctx.stroke();
};
// Frame callback, throttled to config.fps by skipping frames that arrive
// before the minimum interval has elapsed.
const animate = (currentTime: number) => {
  // Queue the next frame first so skipped frames still keep the loop alive.
  animationFrameId = requestAnimationFrame(animate);
  const elapsed = currentTime - lastFrameTime;
  if (elapsed < frameInterval) return;
  lastFrameTime = currentTime;
  updateParticles();
  drawParticles();
};
// Initialize the animation: wire resize handling, size the canvas (which
// also seeds the particle pool), and start the frame loop.
const init = () => {
  // Set up resize handler
  window.addEventListener("resize", updateDimensions);
  // Initial setup
  updateDimensions();
  // Start animation
  lastFrameTime = performance.now();
  animationFrameId = requestAnimationFrame(animate);
};
// Start the animation
init();
// Return cleanup function
return () => {
window.removeEventListener("resize", updateDimensions);
cancelAnimationFrame(animationFrameId);
};
};

View file

@ -0,0 +1,790 @@
// Initially vibe coded with AI; the tuning that makes it feel natural was
// done partly by hand and partly with AI. This implementation is not very
// performant and might get revisited, but it runs mostly-fine, mostly in
// Chromium.
//
// The parts that need improvement are how particles are computed. Those
// nested loops take way too long. 2d Canvas is fine for rendering. A
// good chance moving computation to WASM and rendering to JS would help.
(globalThis as any).canvas_2021 = function (canvas: HTMLCanvasElement) {
const isStandalone = canvas.getAttribute("data-standalone") === "true";
// Constants for simulation
const PARTICLE_RADIUS = 4.5;
const PARTICLE_DENSITY = 0.004; // Particles per pixel
const MIN_SPEED = 0.05;
const MAX_SPEED = 6.0;
const FRICTION = 0.96;
const REPULSION_STRENGTH = 0.1;
const REPULSION_RADIUS = 50;
const FORCE_RADIUS = 400; // Increased radius
const FORCE_STRENGTH = 0.25;
const FORCE_FALLOFF_EXPONENT = 3; // Higher value = sharper falloff
const FORCE_SPACING = 10; // Pixels between force points
const MIN_FORCE_STRENGTH = 0.05; // Minimum force strength for very slow movements
const MAX_FORCE_STRENGTH = 0.4; // Maximum force strength for fast movements
const MIN_SPEED_THRESHOLD = 1; // Movement speed (px/frame) that produces minimum force
const MAX_SPEED_THRESHOLD = 20; // Movement speed that produces maximum force
const OVERSCAN_PIXELS = 250;
const CELL_SIZE = REPULSION_RADIUS; // For spatial hashing
let globalOpacity = 0;
if (isStandalone) {
canvas.style.backgroundColor = "#301D02";
} else {
canvas.style.backgroundColor = "transparent";
}
// Interfaces
interface Particle {
x: number;
y: number;
vx: number;
vy: number;
charge: number; // 0 to 1, affecting color
}
interface Force {
x: number;
y: number;
dx: number;
dy: number;
strength: number;
radius: number;
createdAt: number;
}
interface SpatialHash {
[key: string]: Particle[];
}
// State
let first = true;
let particles: Particle[] = [];
let forces: Force[] = [];
let width = canvas.width;
let height = canvas.height;
let targetParticleCount = 0;
let spatialHash: SpatialHash = {};
let ctx: CanvasRenderingContext2D | null = null;
let animationId: number | null = null;
let isRunning = false;
// Mouse tracking
let lastMousePosition: { x: number; y: number } | null = null;
// Track position of the last created force
let lastForcePosition: { x: number; y: number } | null = null;
// Keep track of previous canvas dimensions for resize logic
let previousWidth = 0;
let previousHeight = 0;
// Initialize: grab the 2D context, size the canvas (which also seeds
// targetParticleCount), attach listeners, and start the simulation.
function init(): void {
  ctx = canvas.getContext("2d");
  if (!ctx) return; // silently bail when 2D context is unavailable
  // Set canvas to full size
  resizeCanvas();
  // Event listeners
  window.addEventListener("resize", resizeCanvas);
  document.addEventListener("mousemove", handleMouseMove);
  // Start animation immediately
  start();
}
// Tear down everything init() set up and drop all simulation state.
function cleanup(): void {
  // Stop the animation
  stop();
  // Remove event listeners
  window.removeEventListener("resize", resizeCanvas);
  document.removeEventListener("mousemove", handleMouseMove);
  // Clear arrays so no stale state survives a re-init
  particles = [];
  forces = [];
  spatialHash = {};
  lastMousePosition = null;
  lastForcePosition = null;
}
// Resize canvas to the window and adjust the particle target. On growth,
// new particles are placed only in the newly exposed area (except the very
// first call, which fills everything).
function resizeCanvas(): void {
  // Store previous dimensions (used by addParticles' new-area filter)
  previousWidth = width;
  previousHeight = height;
  // Update to new dimensions
  width = window.innerWidth;
  height = window.innerHeight;
  canvas.width = width;
  canvas.height = height;
  const oldTargetCount = targetParticleCount;
  targetParticleCount = Math.floor(width * height * PARTICLE_DENSITY);
  // Adjust particle count
  if (targetParticleCount > oldTargetCount) {
    // Add more particles if needed, but only in newly available space
    addParticles(targetParticleCount - oldTargetCount, !first);
    first = false;
  }
  // Note: Removal of excess particles happens naturally during update
}
// Handle mouse movement: convert pointer motion into a trail of force
// points spaced FORCE_SPACING px apart along the path since the previous
// force, with strength scaled by movement speed.
function handleMouseMove(e: MouseEvent): void {
  const rect = canvas.getBoundingClientRect();
  const currentX = e.clientX - rect.left;
  const currentY = e.clientY - rect.top;
  // Initialize positions if this is the first movement
  if (!lastMousePosition || !lastForcePosition) {
    lastMousePosition = { x: currentX, y: currentY };
    lastForcePosition = { x: currentX, y: currentY };
    return;
  }
  // Store current mouse position
  const mouseX = currentX;
  const mouseY = currentY;
  // Calculate vector from last mouse position to current
  const dx = mouseX - lastMousePosition.x;
  const dy = mouseY - lastMousePosition.y;
  const distMoved = Math.sqrt(dx * dx + dy * dy);
  // Skip if essentially no movement (avoids numerical issues)
  if (distMoved < 0.1) {
    return;
  }
  // Get the vector from the last force to the current mouse position
  const forceDx = mouseX - lastForcePosition.x;
  const forceDy = mouseY - lastForcePosition.y;
  const forceDistance = Math.sqrt(forceDx * forceDx + forceDy * forceDy);
  // Only create forces if we've moved far enough from the last force
  if (forceDistance >= FORCE_SPACING) {
    // Unit direction from last force to current mouse.
    // NOTE(review): dirX/dirY are never reassigned; `const` would do.
    let dirX = forceDx / forceDistance;
    let dirY = forceDy / forceDistance;
    // Calculate how many force points to create
    const numPoints = Math.floor(forceDistance / FORCE_SPACING);
    // Calculate movement speed based on the recent movement
    const movementSpeed = distMoved; // Simple approximation of speed
    // Scale force strength based on movement speed
    let speedFactor;
    if (movementSpeed <= MIN_SPEED_THRESHOLD) {
      speedFactor = MIN_FORCE_STRENGTH;
    } else if (movementSpeed >= MAX_SPEED_THRESHOLD) {
      speedFactor = MAX_FORCE_STRENGTH;
    } else {
      // Linear interpolation between min and max
      const t = (movementSpeed - MIN_SPEED_THRESHOLD) /
        (MAX_SPEED_THRESHOLD - MIN_SPEED_THRESHOLD);
      speedFactor = MIN_FORCE_STRENGTH +
        t * (MAX_FORCE_STRENGTH - MIN_FORCE_STRENGTH);
    }
    // Track the position of the last created force point.
    let currentForceX = lastForcePosition.x;
    let currentForceY = lastForcePosition.y;
    // Create evenly spaced force points along the path from last force to current mouse
    for (let i = 0; i < numPoints; i++) {
      // Calculate position for this force point
      const t = (i + 1) / numPoints;
      const fx = lastForcePosition.x + forceDx * t;
      const fy = lastForcePosition.y + forceDy * t;
      // Create force at this position with the direction vector
      createForce(fx, fy, dirX, dirY, speedFactor);
      // Update the last force position to this new force
      currentForceX = fx;
      currentForceY = fy;
    }
    // Update the last force position
    lastForcePosition = { x: currentForceX, y: currentForceY };
  }
  // Always update the last mouse position
  lastMousePosition = { x: mouseX, y: mouseY };
}
// Create a new force
function createForce(
x: number,
y: number,
dx: number,
dy: number,
strength = FORCE_STRENGTH,
): void {
forces.push({
x,
y,
dx,
dy,
strength,
radius: 1,
createdAt: Date.now(),
});
}
// Add `count` particles on a uniform grid of free cells (optionally only in
// the area newly exposed by a resize). Falls back to a tighter grid when
// there are not enough free cells.
function addParticles(count: number, inNewAreaOnly: boolean = false): void {
  // Determine available space
  const minX = -OVERSCAN_PIXELS;
  const maxX = width + OVERSCAN_PIXELS;
  const minY = -OVERSCAN_PIXELS;
  const maxY = height + OVERSCAN_PIXELS;
  // Use a grid system that guarantees uniform spacing of particles
  const gridSpacing = REPULSION_RADIUS * 0.8; // Slightly less than repulsion radius
  const gridWidth = Math.ceil((maxX - minX) / gridSpacing);
  const gridHeight = Math.ceil((maxY - minY) / gridSpacing);
  // Track which grid cells are already occupied
  const occupiedCells: Set<string> = new Set();
  // Mark cells occupied by existing particles
  for (const particle of particles) {
    const cellX = Math.floor((particle.x - minX) / gridSpacing);
    const cellY = Math.floor((particle.y - minY) / gridSpacing);
    // Ensure cell coordinates are within valid range
    if (cellX >= 0 && cellX < gridWidth && cellY >= 0 && cellY < gridHeight) {
      occupiedCells.add(`${cellX},${cellY}`);
    }
  }
  // Create arrays of all cells and filter by placement strategy
  const allGridCells: { x: number; y: number }[] = [];
  for (let cellY = 0; cellY < gridHeight; cellY++) {
    for (let cellX = 0; cellX < gridWidth; cellX++) {
      const cellKey = `${cellX},${cellY}`;
      if (!occupiedCells.has(cellKey)) {
        const posX = minX + (cellX + 0.5) * gridSpacing;
        const posY = minY + (cellY + 0.5) * gridSpacing;
        // For new area only placement, filter to expanded areas
        if (inNewAreaOnly && previousWidth > 0 && previousHeight > 0) {
          const expandedRight = width > previousWidth;
          const expandedBottom = height > previousHeight;
          const inNewRightArea = expandedRight && posX >= previousWidth &&
            posX <= width;
          const inNewBottomArea = expandedBottom && posY >= previousHeight &&
            posY <= height;
          if (inNewRightArea || inNewBottomArea) {
            allGridCells.push({ x: cellX, y: cellY });
          }
        } else if (!inNewAreaOnly) {
          // Standard placement - add all valid cells
          allGridCells.push({ x: cellX, y: cellY });
        }
      }
    }
  }
  if (allGridCells.length == 0) {
    throw new Error("No cells available to place particles");
  }
  // We now have all grid cells that match our placement criteria
  // If we need more particles than we have available cells, we need to adjust
  // gridSpacing to fit more cells into the same space
  if (count > allGridCells.length) {
    // Proportionally reduce the grid spacing to fit the required number of particles
    const scaleFactor = Math.sqrt(allGridCells.length / count);
    const newGridSpacing = gridSpacing * scaleFactor;
    // Existing particles are kept; delegate to the custom-spacing variant,
    // which rebuilds the cell grid with the tighter spacing.
    return addParticlesWithCustomSpacing(
      count,
      inNewAreaOnly,
      newGridSpacing,
    );
  }
  // Shuffle the available cells for random selection
  shuffleArray(allGridCells);
  // Take the number of cells we need
  const cellsToUse = Math.min(count, allGridCells.length);
  const selectedCells = allGridCells.slice(0, cellsToUse);
  // Create particles in selected cells
  for (const cell of selectedCells) {
    // Add jitter within the cell for natural look
    const jitterX = (Math.random() - 0.5) * gridSpacing * 0.8;
    const jitterY = (Math.random() - 0.5) * gridSpacing * 0.8;
    // Calculate final position
    const x = minX + (cell.x + 0.5) * gridSpacing + jitterX;
    const y = minY + (cell.y + 0.5) * gridSpacing + jitterY;
    // Create a particle at this position
    particles.push(createParticle(x, y));
  }
}
// Like addParticles() but with a caller-supplied grid spacing. Unlike
// addParticles(), this variant does not retry: if fewer free cells exist
// than `count`, it silently places fewer particles.
function addParticlesWithCustomSpacing(
  count: number,
  inNewAreaOnly: boolean,
  gridSpacing: number,
): void {
  if (gridSpacing == 0) throw new Error("Grid spacing is 0");
  // Determine available space
  const minX = -OVERSCAN_PIXELS;
  const maxX = width + OVERSCAN_PIXELS;
  const minY = -OVERSCAN_PIXELS;
  const maxY = height + OVERSCAN_PIXELS;
  // Create grid using the custom spacing
  const gridWidth = Math.ceil((maxX - minX) / gridSpacing);
  const gridHeight = Math.ceil((maxY - minY) / gridSpacing);
  // Track which grid cells are already occupied
  const occupiedCells: Set<string> = new Set();
  // Mark cells occupied by existing particles
  for (const particle of particles) {
    const cellX = Math.floor((particle.x - minX) / gridSpacing);
    const cellY = Math.floor((particle.y - minY) / gridSpacing);
    // Ensure cell coordinates are within valid range
    if (cellX >= 0 && cellX < gridWidth && cellY >= 0 && cellY < gridHeight) {
      occupiedCells.add(`${cellX},${cellY}`);
    }
  }
  // Create arrays of all cells and filter by placement strategy
  const allGridCells: { x: number; y: number }[] = [];
  for (let cellY = 0; cellY < gridHeight; cellY++) {
    for (let cellX = 0; cellX < gridWidth; cellX++) {
      const cellKey = `${cellX},${cellY}`;
      if (!occupiedCells.has(cellKey)) {
        const posX = minX + (cellX + 0.5) * gridSpacing;
        const posY = minY + (cellY + 0.5) * gridSpacing;
        // For new area only placement, filter to expanded areas
        if (inNewAreaOnly && previousWidth > 0 && previousHeight > 0) {
          const expandedRight = width > previousWidth;
          const expandedBottom = height > previousHeight;
          const inNewRightArea = expandedRight && posX >= previousWidth &&
            posX <= width;
          const inNewBottomArea = expandedBottom && posY >= previousHeight &&
            posY <= height;
          if (inNewRightArea || inNewBottomArea) {
            allGridCells.push({ x: cellX, y: cellY });
          }
        } else if (!inNewAreaOnly) {
          // Standard placement - add all valid cells
          allGridCells.push({ x: cellX, y: cellY });
        }
      }
    }
  }
  // Shuffle the available cells for random distribution
  shuffleArray(allGridCells);
  // Take the number of cells we need (or all if we have fewer)
  const cellsToUse = Math.min(count, allGridCells.length);
  // Create particles in selected cells
  for (let i = 0; i < cellsToUse; i++) {
    const cell = allGridCells[i];
    // Add jitter within the cell
    const jitterX = (Math.random() - 0.5) * gridSpacing * 0.8;
    const jitterY = (Math.random() - 0.5) * gridSpacing * 0.8;
    // Calculate final position
    const x = minX + (cell.x + 0.5) * gridSpacing + jitterX;
    const y = minY + (cell.y + 0.5) * gridSpacing + jitterY;
    // Create a particle at this position
    particles.push(createParticle(x, y));
  }
}
// In-place Fisher-Yates shuffle: uniformly permutes `array`.
function shuffleArray<T>(array: T[]): void {
  for (let i = array.length - 1; i > 0; i--) {
    const j = Math.floor(Math.random() * (i + 1));
    const held = array[i];
    array[i] = array[j];
    array[j] = held;
  }
}
// Build a particle near (x, y) with ±2px jitter on each axis, zero
// velocity, and no charge.
function createParticle(x: number, y: number): Particle {
  const jitterX = Math.random() * 4 - 2;
  const jitterY = Math.random() * 4 - 2;
  return {
    x: x + jitterX,
    y: y + jitterY,
    vx: 0,
    vy: 0,
    charge: 0,
  };
}
// Spawn a replacement particle on a random edge of the overscanned bounds,
// used to keep the population at targetParticleCount as particles leave.
function createParticleOnEdge(): Particle {
  // Overscan bounds with fixed pixel size
  const minX = -OVERSCAN_PIXELS;
  const maxX = width + OVERSCAN_PIXELS;
  const minY = -OVERSCAN_PIXELS;
  const maxY = height + OVERSCAN_PIXELS;
  let x: number, y: number;
  // Place on one of the edges
  const edge = Math.floor(Math.random() * 4);
  switch (edge) {
    case 0: // Top
      x = minX + Math.random() * (maxX - minX);
      y = minY;
      break;
    case 1: // Right
      x = maxX;
      y = minY + Math.random() * (maxY - minY);
      break;
    case 2: // Bottom
      x = minX + Math.random() * (maxX - minX);
      y = maxY;
      break;
    case 3: // Left
      x = minX;
      y = minY + Math.random() * (maxY - minY);
      break;
    default:
      // Unreachable (edge is always 0-3); safe fallback anywhere in bounds.
      x = minX + Math.random() * (maxX - minX);
      y = minY + Math.random() * (maxY - minY);
  }
  return createParticle(x, y);
}
// Spatial hashing: map a world position to its cell key ("cx,cy").
function getHashKey(x: number, y: number): string {
  const cx = Math.floor(x / CELL_SIZE);
  const cy = Math.floor(y / CELL_SIZE);
  return `${cx},${cy}`;
}
// Insert a particle into its spatial-hash bucket, creating it on demand.
function addToSpatialHash(particle: Particle): void {
  const key = getHashKey(particle.x, particle.y);
  const bucket = spatialHash[key];
  if (bucket) {
    bucket.push(particle);
  } else {
    spatialHash[key] = [particle];
  }
}
function updateSpatialHash(): void {
// Clear previous hash
spatialHash = {};
// Add all particles to hash
for (const particle of particles) {
addToSpatialHash(particle);
}
}
// Collect all particles from every hash cell overlapping the square that
// bounds a circle of `radius` around (x, y). May include particles slightly
// outside the radius; callers re-check exact distance.
function getNearbyParticles(
  x: number,
  y: number,
  radius: number,
): Particle[] {
  const found: Particle[] = [];
  const span = Math.ceil(radius / CELL_SIZE);
  const homeX = Math.floor(x / CELL_SIZE);
  const homeY = Math.floor(y / CELL_SIZE);
  for (let gx = homeX - span; gx <= homeX + span; gx++) {
    for (let gy = homeY - span; gy <= homeY + span; gy++) {
      const bucket = spatialHash[`${gx},${gy}`];
      if (bucket) {
        found.push(...bucket);
      }
    }
  }
  return found;
}
// Main update function: one simulation step. Decays and culls forces,
// applies force and repulsion impulses to every particle, integrates
// motion, and rebuilds the particle list to hold targetParticleCount.
function update(): void {
  const now = Date.now();
  // Fixed pixel overscan
  const minX = -OVERSCAN_PIXELS;
  const maxX = width + OVERSCAN_PIXELS;
  const minY = -OVERSCAN_PIXELS;
  const maxY = height + OVERSCAN_PIXELS;
  // Update spatial hash
  updateSpatialHash();
  // Cap the force list at the 40 most recent, then decay and cull.
  if (forces.length > 40) {
    forces = forces.slice(-40);
  }
  forces = forces.filter((force) => {
    force.strength *= 0.95;
    force.radius *= 0.95;
    return force.strength > 0.001;
  });
  // Update particles
  const newParticles: Particle[] = [];
  for (const particle of particles) {
    // Apply forces
    for (const force of forces) {
      const dx = particle.x - force.x;
      const dy = particle.y - force.y;
      const distSq = dx * dx + dy * dy;
      const radius = force.radius * FORCE_RADIUS;
      if (distSq < radius * radius) {
        const dist = Math.sqrt(distSq);
        // Exponential falloff - much more concentrated at center
        // (1 - x/R)^n where n controls how sharp the falloff is
        const normalizedDist = dist / radius;
        const factor = Math.pow(1 - normalizedDist, FORCE_FALLOFF_EXPONENT);
        // Calculate force line projection for directional effect
        // This makes particles along the force's path experience stronger effect
        const dotProduct = (dx * -force.dx) + (dy * -force.dy);
        const projectionFactor = Math.max(0, dotProduct / dist);
        // Apply the combined factors - stronger directional bias
        const finalFactor = factor * force.strength *
          (0.1 + 0.9 * projectionFactor);
        particle.vx += force.dx * finalFactor;
        particle.vy += force.dy * finalFactor;
        // charge (drives brightness in render) only for the first 100ms
        if ((now - force.createdAt) < 100) {
          particle.charge = Math.min(
            1,
            particle.charge + (finalFactor * finalFactor) * 0.2,
          );
        }
      }
    }
    // Apply repulsion from nearby particles
    const nearby = getNearbyParticles(
      particle.x,
      particle.y,
      REPULSION_RADIUS,
    );
    for (const other of nearby) {
      if (other === particle) continue;
      const dx = particle.x - other.x;
      const dy = particle.y - other.y;
      const distSq = dx * dx + dy * dy;
      if (distSq < REPULSION_RADIUS * REPULSION_RADIUS && distSq > 0) {
        const dist = Math.sqrt(distSq);
        const factor = REPULSION_STRENGTH * (1 - dist / REPULSION_RADIUS);
        const fx = dx / dist * factor;
        const fy = dy / dist * factor;
        particle.vx += fx;
        particle.vy += fy;
      }
    }
    // Apply friction
    particle.vx *= FRICTION;
    particle.vy *= FRICTION;
    // Ensure minimum speed
    const speed = Math.sqrt(
      particle.vx * particle.vx + particle.vy * particle.vy,
    );
    if (speed < MIN_SPEED && speed > 0) {
      const scale = MIN_SPEED / speed;
      particle.vx *= scale;
      particle.vy *= scale;
    }
    // Cap at maximum speed. `speed` is the pre-boost value, which is fine:
    // a particle below MIN_SPEED cannot also exceed MAX_SPEED.
    if (speed > MAX_SPEED) {
      const scale = MAX_SPEED / speed;
      particle.vx *= scale;
      particle.vy *= scale;
    }
    // Update position
    particle.x += particle.vx;
    particle.y += particle.vy;
    // Decrease charge
    particle.charge *= 0.99;
    // Check if particle is within extended bounds
    if (
      particle.x >= minX && particle.x <= maxX &&
      particle.y >= minY && particle.y <= maxY
    ) {
      // Drop off-screen (but in-overscan) particles once the quota is met.
      if (
        (particle.x < 0 || particle.x > width ||
          particle.y < 0 || particle.y > height) &&
        newParticles.length >= targetParticleCount
      ) {
        continue;
      }
      newParticles.push(particle);
    } else {
      // Out of bounds, respawn if needed
      if (newParticles.length < targetParticleCount) {
        newParticles.push(createParticleOnEdge());
      }
    }
  }
  // Add more particles if needed
  while (newParticles.length < targetParticleCount) {
    newParticles.push(createParticleOnEdge());
  }
  particles = newParticles;
}
// Render function. Per-particle alpha scales with its charge plus a base
// level; standalone pages draw brighter than embedded ones.
const mul = isStandalone ? 0.9 : 0.5;
const add = isStandalone ? 0.1 : 0.03;
function render(): void {
  if (!ctx) return;
  // Clear canvas
  ctx.clearRect(0, 0, width, height);
  // Draw particles
  for (const particle of particles) {
    // Only draw if within canvas bounds (plus a small margin)
    if (
      particle.x >= -PARTICLE_RADIUS &&
      particle.x <= width + PARTICLE_RADIUS &&
      particle.y >= -PARTICLE_RADIUS && particle.y <= height + PARTICLE_RADIUS
    ) {
      ctx.beginPath();
      ctx.arc(particle.x, particle.y, PARTICLE_RADIUS, 0, Math.PI * 2);
      // Alpha (not hue) encodes the charge; globalOpacity is the fade-in.
      ctx.fillStyle = "#FFCB1F";
      ctx.globalAlpha = (particle.charge * mul + add) * globalOpacity;
      ctx.fill();
    }
  }
  // // Debug: Draw forces and falloff visualization
  // if (ctx) {
  //   for (const force of forces) {
  //     const R = force.radius * FORCE_RADIUS;
  //     // Draw force point
  //     ctx.beginPath();
  //     ctx.arc(force.x, force.y, 5, 0, Math.PI * 2);
  //     ctx.fillStyle = 'rgba(255, 0, 0, 0.5)';
  //     ctx.fill();
  //     // Draw force direction
  //     ctx.beginPath();
  //     ctx.moveTo(force.x, force.y);
  //     ctx.lineTo(force.x + force.dx * 20, force.y + force.dy * 20);
  //     ctx.strokeStyle = 'red';
  //     ctx.stroke();
  //     // Visualize the falloff curve with rings
  //     for (let i = 0; i <= 10; i++) {
  //       const radius = (R * i) / 10;
  //       const normalizedDist = radius / R;
  //       const intensity = Math.pow(1 - normalizedDist, FORCE_FALLOFF_EXPONENT);
  //       ctx.beginPath();
  //       ctx.arc(force.x, force.y, radius, 0, Math.PI * 2);
  //       ctx.strokeStyle = `rgba(255, 0, 0, ${intensity * 0.2})`;
  //       ctx.stroke();
  //     }
  //   }
  // }
}
// Animation loop: fade the whole effect in, step the simulation, repaint,
// and re-queue while the simulation is running.
// (Removed an unused `let r = Math.random()` that was never read.)
function animate(): void {
  globalOpacity = Math.min(1, globalOpacity + 0.03);
  update();
  render();
  if (isRunning) {
    animationId = requestAnimationFrame(animate);
  }
}
// Start/stop functions
// Begin the simulation: rebuild the particle field at the grid-spaced
// layout and enter the animation loop. No-op while already running.
function start(): void {
  if (isRunning) return;
  // Calculate target particle count based on canvas size
  targetParticleCount = Math.floor(width * height * PARTICLE_DENSITY);
  // Clear any existing particles and create new ones with proper spacing
  particles = [];
  addParticles(targetParticleCount);
  isRunning = true;
  animate();
}
// Halt the loop and cancel any queued animation frame.
function stop(): void {
  isRunning = false;
  if (animationId === null) return;
  cancelAnimationFrame(animationId);
  animationId = null;
}
init();
return cleanup;
};

View file

@ -0,0 +1,161 @@
// Written by AI. Options tuned with AI.
(globalThis as any).canvas_2022 = function (canvas: HTMLCanvasElement) {
const isStandalone = canvas.getAttribute("data-standalone") === "true";
// Configuration for the grid of rotating squares
const config = {
  gridRotation: 20, // Overall grid rotation in degrees
  squareSize: 20, // Size of each square
  spacing: 100, // Distance between square centers
  moveSpeedX: 0.01, // Horizontal movement speed (pixels per second)
  moveSpeedY: 0.01, // Vertical movement speed (pixels per second)
  squareColor: "#00220A", // Color of the squares
  squareOpacity: 1, // Opacity of the squares
  // Function to determine square rotation based on its coordinates and time
  // Can be adjusted for different patterns
  rotationFunction: (x: number, y: number, time: number): number => {
    // Spatial wave only — the `time` parameter is accepted for future
    // patterns but currently unused.
    return Math.sin(x * 0.05) * Math.cos(y * 0.05) * 180;
  },
};
// Convert grid rotation to radians
const gridRotationRad = (config.gridRotation * Math.PI) / 180;
// Get the canvas context
const ctx = canvas.getContext("2d");
if (!ctx) {
console.error("Could not get canvas context");
return () => {};
}
// Make canvas transparent
if (isStandalone) {
canvas.style.backgroundColor = "#154226";
} else {
canvas.style.backgroundColor = "transparent";
}
// Animation variables
let width = canvas.width;
let height = canvas.height;
let offsetX = 0;
let offsetY = 0;
let time = 0;
let animationFrameId: number;
let lastTime = 0;
// Update canvas dimensions when resized
const updateDimensions = () => {
width = canvas.width = canvas.clientWidth;
height = canvas.height = canvas.clientHeight;
};
// Calculate the diagonal length of the canvas (to ensure rotation covers corners)
const calculateDiagonal = () => {
return Math.sqrt(width * width + height * height);
};
// Draw a single square with rotation
const drawSquare = (x: number, y: number, size: number, rotation: number) => {
ctx.save();
// Move to the center of the square position, rotate, then draw
ctx.translate(x, y);
ctx.rotate((rotation * Math.PI) / 180); // Convert rotation degrees to radians
// Draw square centered at position
ctx.fillRect(-size / 2, -size / 2, size, size);
ctx.restore();
};
// Draw the entire grid of squares
const drawGrid = () => {
ctx.clearRect(0, 0, width, height);
// Set drawing properties
ctx.fillStyle = config.squareColor;
ctx.globalAlpha = config.squareOpacity;
// Save the current transformation state
ctx.save();
// Move to the center of the canvas, rotate the grid, then move back
const centerX = width / 2;
const centerY = height / 2;
ctx.translate(centerX, centerY);
ctx.rotate(gridRotationRad);
// Calculate how much of the grid to draw based on canvas size
const diagonal = calculateDiagonal();
const gridSize = Math.ceil(diagonal / config.spacing) + 2;
// Adjust for offset to create movement
const adjustedOffsetX = offsetX % config.spacing;
const adjustedOffsetY = offsetY % config.spacing;
// Draw grid with enough squares to cover the rotated canvas
const halfGrid = Math.ceil(gridSize / 2);
for (let y = -halfGrid; y <= halfGrid; y++) {
for (let x = -halfGrid; x <= halfGrid; x++) {
// Calculate actual position with offset
const posX = x * config.spacing + adjustedOffsetX;
const posY = y * config.spacing + adjustedOffsetY;
// Calculate square rotation based on its position and time
const squareRotation = config.rotationFunction(posX, posY, time);
// Draw the square
drawSquare(posX, posY, config.squareSize, squareRotation);
}
}
// Restore the transformation state
ctx.restore();
// Reset global alpha
ctx.globalAlpha = 1.0;
};
// Animation loop
const animate = (currentTime: number) => {
animationFrameId = requestAnimationFrame(animate);
// Calculate time elapsed since last frame
const elapsed = currentTime - lastTime;
lastTime = currentTime;
// Update time variable for rotation function
time += elapsed;
// Update position offsets for movement
offsetX += config.moveSpeedX * elapsed;
offsetY += config.moveSpeedY * elapsed;
// Draw the grid
drawGrid();
};
// Initialize the animation
const init = () => {
// Set up resize handler
window.addEventListener("resize", updateDimensions);
// Initial setup
updateDimensions();
// Start animation
animationFrameId = requestAnimationFrame(animate);
};
// Start the animation
init();
// Return cleanup function
return () => {
window.removeEventListener("resize", updateDimensions);
cancelAnimationFrame(animationFrameId);
};
};

View file

@ -0,0 +1,199 @@
// Partially vibe coded. A lot of manually tuning with the heart scale function
// and how the mouse interacts with the hearts.
// Rows of hearts scrolling in alternating directions; hearts near the cursor
// are pushed away and scaled up. Returns a cleanup function that detaches all
// listeners and cancels the frame loop.
(globalThis as any).canvas_2023 = function (canvas: HTMLCanvasElement) {
  const isStandalone = canvas.getAttribute("data-standalone") === "true";
  const config = {
    heartBaseSize: 50,
    heartMaxSize: 100,
    spacing: 150,
    rowSpeed: 0.1,
    heartColor: "#FF90D9",
    heartOpacity: isStandalone ? 0.5 : 0.04,
    mouseInfluenceRadius: 1000,
    // Scale multiplier for a heart `distance` px from the cursor: 1 at or
    // beyond `radius`, ramping toward heartMaxSize/heartBaseSize at 0.
    heartScaleFunction: (distance: number, radius: number): number => {
      if (distance > radius) return 1;
      const normalizedDistance = distance / radius;
      const scaleFactor = 1 +
        (1 - normalizedDistance) *
          (config.heartMaxSize / config.heartBaseSize - 1);
      return 1 + (scaleFactor - 1) * Math.pow(1 - normalizedDistance, 2);
    },
  };
  const heart = new Path2D(
    "M23.9451 45.3973L20.8672 42.6493C16.9551 39.0174 13.7054 35.8927 11.1181 33.275C8.53056 30.6574 6.46731 28.286 4.92839 26.1608C3.38946 24.0356 2.31772 22.1028 1.71314 20.3624C1.10856 18.6219 0.806274 16.8705 0.806274 15.1081C0.806274 11.4718 2.03118 8.42016 4.481 5.95312C6.93118 3.48608 9.93831 2.25256 13.5024 2.25256C15.5649 2.25256 17.482 2.70142 19.2536 3.59912C21.0255 4.49682 22.5893 5.80674 23.9451 7.52887C25.484 5.73346 27.1059 4.40522 28.8108 3.54416C30.5161 2.6831 32.3751 2.25256 34.3877 2.25256C38.0141 2.25256 41.0551 3.48663 43.5108 5.95477C45.9661 8.42291 47.1938 11.4758 47.1938 15.1136C47.1938 16.8712 46.8823 18.6115 46.2594 20.3343C45.6365 22.0568 44.5648 23.9807 43.0442 26.1059C41.5236 28.231 39.4721 30.6136 36.8896 33.2536C34.3068 35.8936 31.0362 39.0255 27.0779 42.6493L23.9451 45.3973ZM23.9176 38.802C27.6088 35.431 30.6339 32.5547 32.9928 30.173C35.3518 27.7913 37.2091 25.7211 38.5648 23.9624C39.9205 22.2036 40.864 20.6137 41.3953 19.1928C41.9266 17.7715 42.1923 16.4101 42.1923 15.1086C42.1923 12.8768 41.4529 11.0098 39.974 9.50748C38.4952 8.0052 36.6461 7.25406 34.4268 7.25406C32.631 7.25406 30.9572 7.6811 29.4055 8.87193C27.8537 10.0628 25.5389 13.0434 25.5389 13.0434L23.9451 15.3299L22.3512 13.0434C22.3512 13.0434 20.0643 10.2311 18.4638 9.04031C16.8634 7.84948 15.2194 7.25406 13.4991 7.25406C11.2929 7.25406 9.46857 7.98816 8.02602 9.45637C6.58383 10.9246 5.86273 12.8162 5.86273 15.1311C5.86273 16.4784 6.13644 17.8679 6.68386 19.2994C7.23127 20.731 8.18394 22.3333 9.54185 24.1064C10.8998 25.879 12.7329 27.9562 15.0413 30.3379C17.3497 32.7196 20.3084 35.5409 23.9176 38.802Z",
  );
  const ctx = canvas.getContext("2d");
  if (!ctx) {
    console.error("Could not get canvas context");
    return () => {};
  }
  if (isStandalone) {
    canvas.style.backgroundColor = "#2F1C21";
  } else {
    canvas.style.backgroundColor = "transparent";
  }
  let width = canvas.width;
  let height = canvas.height;
  let animationFrameId: number;
  let lastFrameTime = 0;
  let mouseX = width / 2;
  let mouseY = height / 2;
  let offset = config.spacing / 2;
  const updateDimensions = () => {
    width = canvas.width = canvas.clientWidth;
    height = canvas.height = canvas.clientHeight;
    mouseX = width / 2;
    mouseY = height / 2;
  };
  // Stamp the heart path at (x, y); the path is authored on a ~30px grid.
  const drawHeart = (x: number, y: number, size: number) => {
    const scale = size / 30;
    ctx.save();
    ctx.translate(x, y);
    ctx.scale(scale, scale);
    ctx.fillStyle = config.heartColor;
    ctx.fill(heart);
    ctx.restore();
  };
  // Mouse "push" bump parameters: peak c=400px from the cursor, height h=40px.
  const c = 400;
  const h = 40;
  const k = solveForK(c, h);
  const drawHeartGrid = () => {
    ctx.clearRect(0, 0, width, height);
    ctx.globalAlpha = config.heartOpacity;
    const numRows = Math.ceil(height / config.spacing) + 1;
    for (let row = 0; row < numRows; row++) {
      // Alternate rows scroll in opposite directions.
      const direction = row % 2 === 0 ? 1 : -1;
      const rowOffset = (offset * direction) % config.spacing;
      const posYInit = row * config.spacing + config.spacing / 2;
      for (
        let posXInit = -config.spacing + rowOffset;
        posXInit < width + config.spacing;
        posXInit += config.spacing
      ) {
        const dx = (posXInit + config.heartBaseSize / 2) - mouseX;
        const dy = (posYInit + config.heartBaseSize / 2) - mouseY;
        const distance = Math.sqrt(dx * dx + dy * dy);
        // Push hearts radially away from the cursor (x1 horizontally,
        // x2 vertically — tuned by hand).
        const pushIntensity = asymmetricBump(distance, h, c, k, 0.00002);
        const pushAngle = Math.atan2(dy, dx);
        const pushDistanceX = pushIntensity * Math.cos(pushAngle);
        const pushDistanceY = pushIntensity * Math.sin(pushAngle);
        const posX = posXInit + pushDistanceX * 1;
        const posY = posYInit + pushDistanceY * 2;
        const scaleFactor = config.heartScaleFunction(
          distance,
          config.mouseInfluenceRadius,
        );
        const heartSize = config.heartBaseSize * scaleFactor;
        // Cull hearts fully outside the viewport.
        if (
          posX > -config.heartMaxSize &&
          posX < width + config.heartMaxSize &&
          posY > -config.heartMaxSize &&
          posY < height + config.heartMaxSize
        ) {
          drawHeart(posX - heartSize / 2, posY - heartSize / 2, heartSize);
        }
      }
    }
    ctx.globalAlpha = 1.0;
  };
  // Solve for k in f(x)=h*e^{(-k*(x-c)^{2})} such that f(0) = 0.45.
  // (0, 0) is unsolvable but 0.45px will round down to 0.
  //
  // solution: -\frac{\ln\left(\frac{0.45}{h}\right)}{c^{2}}
  //
  // FIX: the second parameter was previously declared as `k` and never used;
  // the body silently read the enclosing `h` by closure and only worked
  // because the lone call site passes `h`. Renamed to `h` so the function
  // actually uses its argument (result unchanged).
  function solveForK(c: number, h: number) {
    return -Math.log(0.45 / h) / (c * c);
  }
  // Gaussian-like bump of height h peaking at x=c, with a different falloff
  // constant on each side of the peak.
  function asymmetricBump(
    x: number,
    h: number,
    c: number,
    leftK: number,
    rightK: number,
  ) {
    const k = (x <= c) ? leftK : rightK;
    return h * Math.exp(-k * Math.pow(x - c, 2));
  }
  const updateOffset = (elapsed: number) => {
    offset += config.rowSpeed * elapsed;
    // Keep offset bounded so float precision holds over long sessions.
    if (offset > 1000000) {
      offset -= 1000000;
    }
  };
  const animate = (currentTime: number) => {
    animationFrameId = requestAnimationFrame(animate);
    const elapsed = currentTime - lastFrameTime;
    lastFrameTime = currentTime;
    updateOffset(elapsed * 0.05);
    drawHeartGrid();
  };
  const handleMouseMove = (event: MouseEvent) => {
    const rect = canvas.getBoundingClientRect();
    mouseX = event.clientX - rect.left;
    mouseY = event.clientY - rect.top;
  };
  const handleTouchMove = (event: TouchEvent) => {
    if (event.touches.length > 0) {
      event.preventDefault();
      const rect = canvas.getBoundingClientRect();
      mouseX = event.touches[0].clientX - rect.left;
      mouseY = event.touches[0].clientY - rect.top;
    }
  };
  const init = () => {
    window.addEventListener("resize", updateDimensions);
    document.addEventListener("mousemove", handleMouseMove);
    document.addEventListener("touchmove", handleTouchMove, { passive: false });
    updateDimensions();
    lastFrameTime = performance.now();
    animationFrameId = requestAnimationFrame(animate);
  };
  init();
  return () => {
    window.removeEventListener("resize", updateDimensions);
    document.removeEventListener("mousemove", handleMouseMove);
    document.removeEventListener("touchmove", handleTouchMove);
    cancelAnimationFrame(animationFrameId);
  };
};

View file

@ -0,0 +1,251 @@
// Vibe coded with AI, manually tuned randomness shader + opacity.
// Animated pink noise rendered at 1/3 resolution via WebGL, throttled to
// 24 FPS with setTimeout scheduling. Returns a cleanup function that stops
// the loop and releases every GL object this function created.
(globalThis as any).canvas_2024 = function (canvas: HTMLCanvasElement) {
  const isStandalone = canvas.getAttribute("data-standalone") === "true";
  if (isStandalone) {
    canvas.parentElement!.style.backgroundColor = "black";
  }
  const gl = canvas.getContext("webgl", {
    alpha: true,
    premultipliedAlpha: false,
  });
  if (!gl) {
    console.error("WebGL not supported");
    return () => {};
  }
  canvas.style.imageRendering = "pixelated";
  canvas.style.opacity = isStandalone ? "0.3" : "0.15";
  // Resize canvas to match display size (divided by 3 for chunky pixels)
  const resize = () => {
    const displayWidth = Math.floor(
      (canvas.clientWidth || window.innerWidth) / 3,
    );
    const displayHeight = Math.floor(
      (canvas.clientHeight || window.innerHeight) / 3,
    );
    if (canvas.width !== displayWidth || canvas.height !== displayHeight) {
      canvas.width = displayWidth;
      canvas.height = displayHeight;
      gl.viewport(0, 0, canvas.width, canvas.height);
    }
  };
  resize();
  // Vertex shader (just passes coordinates)
  const vertexShaderSource = `
    attribute vec2 a_position;
    void main() {
      gl_Position = vec4(a_position, 0.0, 1.0);
    }
  `;
  // Fragment shader creates random noise with higher opacity to ensure visibility
  const fragmentShaderSource = `
    precision mediump float;
    uniform float u_time;
    float noise1(float seed1,float seed2){
      return(
        fract(seed1+12.34567*
          fract(100.*(abs(seed1*0.91)+seed2+94.68)*
            fract((abs(seed2*0.41)+45.46)*
              fract((abs(seed2)+757.21)*
                fract(seed1*0.0171))))))
        * 1.0038 - 0.00185;
    }
    float n(float seed1, float seed2, float seed3){
      float buff1 = abs(seed1+100.81) + 1000.3;
      float buff2 = abs(seed2+100.45) + 1000.2;
      float buff3 = abs(noise1(seed1, seed2)+seed3) + 1000.1;
      buff1 = (buff3*fract(buff2*fract(buff1*fract(buff2*0.146))));
      buff2 = (buff2*fract(buff2*fract(buff1+buff2*fract(buff3*0.52))));
      buff1 = noise1(buff1, buff2);
      return(buff1);
    }
    void main() {
      float noise = n(gl_FragCoord.x, gl_FragCoord.y, u_time);
      gl_FragColor = vec4(1.0, 0.7, 0.7, 0.8*noise);
    }
  `;
  // Create and compile shaders
  const vertexShader = createShader(gl, gl.VERTEX_SHADER, vertexShaderSource);
  const fragmentShader = createShader(
    gl,
    gl.FRAGMENT_SHADER,
    fragmentShaderSource,
  );
  // Check if shader creation failed
  if (!vertexShader || !fragmentShader) {
    console.error("Failed to create shaders");
    // FIX: release whichever shader DID compile before bailing out
    // (previously leaked on this path).
    if (vertexShader) gl.deleteShader(vertexShader);
    if (fragmentShader) gl.deleteShader(fragmentShader);
    return () => {};
  }
  // Create program and link shaders
  const program = createProgram(gl, vertexShader, fragmentShader);
  // Check if program creation failed
  if (!program) {
    console.error("Failed to create program");
    // FIX: release the compiled shaders on the failure path too.
    gl.deleteShader(vertexShader);
    gl.deleteShader(fragmentShader);
    return () => {};
  }
  // Get attribute and uniform locations
  const positionAttributeLocation = gl.getAttribLocation(program, "a_position");
  const timeUniformLocation = gl.getUniformLocation(program, "u_time");
  // Create a position buffer for a rectangle covering the entire canvas
  const positionBuffer = gl.createBuffer();
  if (!positionBuffer) {
    console.error("Failed to create position buffer");
    return () => {};
  }
  gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
  // Rectangle that covers the entire clip space
  const positions = [
    -1.0,
    -1.0, // bottom left
    1.0,
    -1.0, // bottom right
    -1.0,
    1.0, // top left
    1.0,
    1.0, // top right
  ];
  gl.bufferData(gl.ARRAY_BUFFER, new Float32Array(positions), gl.STATIC_DRAW);
  // Set up blending
  gl.enable(gl.BLEND);
  gl.blendFunc(gl.SRC_ALPHA, gl.ONE_MINUS_SRC_ALPHA);
  // Fixed 24 FPS timing
  const FPS = 24;
  const FRAME_TIME = 1000 / FPS; // ms per frame
  // Handle animation
  let animationTimerId: number;
  const startTime = Date.now();
  let lastFrameTime = 0;
  const render = () => {
    // Get current time
    const currentTime = Date.now();
    const deltaTime = currentTime - lastFrameTime;
    // Skip frame if it's too early (maintain 24 FPS)
    if (deltaTime < FRAME_TIME) {
      animationTimerId = window.setTimeout(render, 0); // Check again ASAP but yield to browser
      return;
    }
    // Update last frame time, accounting for any drift
    lastFrameTime = currentTime - (deltaTime % FRAME_TIME);
    // Resize canvas if needed
    resize();
    // Calculate elapsed time in seconds for animation
    const elapsedTime = (currentTime - startTime) / 1000;
    // Clear the canvas with transparent black
    gl.clearColor(0, 0, 0, 0);
    gl.clear(gl.COLOR_BUFFER_BIT);
    // Use our shader program
    gl.useProgram(program);
    // Set up the position attribute
    gl.enableVertexAttribArray(positionAttributeLocation);
    gl.bindBuffer(gl.ARRAY_BUFFER, positionBuffer);
    gl.vertexAttribPointer(
      positionAttributeLocation,
      2, // 2 components per vertex
      gl.FLOAT, // data type
      false, // normalize
      0, // stride (0 = compute from size and type)
      0, // offset
    );
    // Update time uniform for animation
    gl.uniform1f(timeUniformLocation, elapsedTime);
    // Draw the rectangle (2 triangles)
    gl.drawArrays(gl.TRIANGLE_STRIP, 0, 4);
    // Schedule next frame (aiming for 24 FPS)
    const timeToNextFrame = Math.max(
      0,
      FRAME_TIME - (Date.now() - currentTime),
    );
    animationTimerId = window.setTimeout(render, timeToNextFrame);
  };
  // Helper function to create shaders
  function createShader(
    gl: WebGLRenderingContext,
    type: number,
    source: string,
  ): WebGLShader | null {
    const shader = gl.createShader(type);
    if (!shader) {
      console.error("Failed to create shader object");
      return null;
    }
    gl.shaderSource(shader, source);
    gl.compileShader(shader);
    if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
      console.error("Shader compilation error:", gl.getShaderInfoLog(shader));
      gl.deleteShader(shader);
      return null;
    }
    return shader;
  }
  // Helper function to create program and link shaders
  function createProgram(
    gl: WebGLRenderingContext,
    vertexShader: WebGLShader,
    fragmentShader: WebGLShader,
  ): WebGLProgram | null {
    const program = gl.createProgram();
    if (!program) {
      console.error("Failed to create program object");
      return null;
    }
    gl.attachShader(program, vertexShader);
    gl.attachShader(program, fragmentShader);
    gl.linkProgram(program);
    if (!gl.getProgramParameter(program, gl.LINK_STATUS)) {
      console.error("Program linking error:", gl.getProgramInfoLog(program));
      // FIX: failed programs were previously leaked; delete before returning.
      gl.deleteProgram(program);
      return null;
    }
    return program;
  }
  // Start the rendering with initial timestamp
  lastFrameTime = Date.now();
  render();
  // Return cleanup function
  return () => {
    clearTimeout(animationTimerId);
    if (program) gl.deleteProgram(program);
    if (vertexShader) gl.deleteShader(vertexShader);
    if (fragmentShader) gl.deleteShader(fragmentShader);
    if (positionBuffer) gl.deleteBuffer(positionBuffer);
  };
};

View file

@ -0,0 +1,363 @@
// This canvas was written partially by AI
// @ts-ignore
globalThis.canvas_cotyledon = function (
canvas: HTMLCanvasElement,
panel: HTMLElement,
) {
let running = true;
const ctx = canvas.getContext("2d");
function resizeCanvas() {
canvas.width = canvas.clientWidth;
canvas.height = canvas.clientHeight;
}
resizeCanvas();
window.addEventListener("resize", resizeCanvas);
const clover = new Path2D(
"M18.9845 34.4839C20.4004 34.5218 21.8336 34.6883 23.2644 34.9578C20.1378 31.095 18.4268 27.1546 18.0555 23.2959C17.321 15.6622 21.9022 9.36595 28.8908 5.78535C34.6355 2.84212 40.258 2.98454 44.2809 5.96879C45.6605 6.99221 46.7683 8.2886 47.5877 9.78593C48.3054 8.50307 49.134 7.26623 50.0858 6.17951C51.8368 4.18037 54.1947 2.47127 57.2294 2.15019C60.2768 1.82766 63.467 2.9608 66.7548 5.52299C70.9834 8.81811 73.084 12.8864 73.5996 17.2135C74.1044 21.4504 73.0711 25.7433 71.4155 29.6117C70.6566 31.3849 69.7488 33.1106 68.7557 34.7506C70.3664 33.9983 72.0168 33.3376 73.6816 32.8312C77.2262 31.7528 81.0258 31.3024 84.8151 32.2149C88.6451 33.1371 92.2246 35.3946 95.3823 39.3157C98.4534 43.1293 99.9219 46.6818 99.997 49.9677C100.073 53.3033 98.7051 55.9829 96.8652 57.9789C95.0586 59.9387 92.7653 61.2872 90.7505 62.1315C90.692 62.1561 90.6334 62.1802 90.5746 62.2042L90.4465 62.256C91.4852 63.7304 92.4724 65.5955 93.0127 67.6979C93.5916 69.9509 93.6669 72.5285 92.674 75.1356C91.679 77.7482 89.7006 80.1559 86.5767 82.2161C86.5556 82.23 86.5342 82.2438 86.5126 82.2571C84.1333 83.7267 81.5504 84.7197 78.6932 84.9352C75.832 85.151 72.8634 84.5742 69.7337 83.1522C64.7667 80.8953 59.274 76.4525 52.8745 69.3645C52.8789 70.1568 52.8844 70.9254 52.9004 71.6677C52.9643 74.6226 53.1868 77.4534 54.0666 80.6265C55.2259 84.503 57.2821 88.4684 60.9561 92.3161C61.644 93.0366 61.8512 94.0908 61.4872 95.018L60.9919 96.2799C60.6464 97.16 59.8435 97.778 58.9041 97.8865C57.9647 97.9952 57.042 97.5769 56.5047 96.7985C52.5406 91.0574 50.3441 86.3289 49.1491 82.0434C48.0155 78.2319 47.6244 74.4579 47.5085 71.0024C45.418 73.6873 42.8696 76.4687 40.0618 78.9101C34.3517 83.8756 26.6803 88.1931 19.142 85.9955C15.5301 84.9425 12.8635 83.2751 11.0848 81.1179C9.2952 78.9474 8.5557 76.4627 8.4981 74.0631C8.43961 71.6256 9.07998 69.225 10.075 67.1703C7.76333 66.828 5.38011 65.9682 3.47071 64.2327C-0.339092 60.7699 -1.2199 54.8876 1.86982 46.4552C3.47011 42.0878 5.90372 38.9798 8.98328 37.0179C12.0444 
35.0677 15.5215 34.3912 18.9845 34.4839Z",
);
// Background
const base = [0x14, 0x1a, 0x19];
let blobTextureCanvas: OffscreenCanvas;
let blobTextureCtx: OffscreenCanvasRenderingContext2D;
const blobSize = 1000; // Size of the noise texture
{
const blobTexture = new ImageData(blobSize, blobSize);
const data = blobTexture.data;
let x = 0;
let y = 0;
for (let i = 0; i < data.length; i += 4) {
if (x >= blobSize) {
x = 0;
y++;
}
const noiseX = Math.sin(x * .2 + y * .1) * 0.03;
const noiseY = Math.cos(y * .6 + x * .2) * 0.05;
const centerX = blobSize / 2;
const centerY = blobSize / 2;
const dx = (x + noiseX) - centerX;
const dy = (y + noiseY) - centerY;
const distanceFromCenterRaw = (dx * dx + dy * dy) ** 0.5;
const maxDistance = blobSize / 2;
const distanceFromCenter = Math.min(
1,
distanceFromCenterRaw / maxDistance,
);
const noiseValue = (1 - 0.5 * Math.sin(x * 0.02 - y * 0.04)) *
(1 - 0.5 * Math.cos(x * 0.03 + y * 0.04)) * 0.3;
const gradient = (1 - distanceFromCenter) *
(0.95 - distanceFromCenter * 0.4);
const finalValue = Math.max(
0,
Math.min(1, gradient * (Math.random() * 0.3 + 0.85 + noiseValue)),
);
data[i] = 121;
data[i + 1] = 219;
data[i + 2] = 160;
data[i + 3] = Math.floor(finalValue * 255.99); // Alpha
x++;
}
blobTextureCanvas = new OffscreenCanvas(blobSize, blobSize);
blobTextureCtx = blobTextureCanvas.getContext("2d")!;
blobTextureCtx.putImageData(blobTexture, 0, 0);
}
class CotyledonParticle {
x: number;
y: number;
size: number;
velocityX: number;
velocityY: number;
rotation: number;
rotationSpeed: number;
color: string;
constructor(positioning: "random" | "edge") {
this.size = Math.random() * 0.1 + 0.6;
if (positioning === "edge") {
const edge = Math.floor(Math.random() * 5);
if (edge === 0 || edge === 1) {
// Right edge
this.x = 1.05;
this.y = Math.random();
this.velocityX = -Math.random() * 0.1 - 0.2;
this.velocityY = 0;
} else if (edge === 2 || edge === 3) {
// Top edge
this.x = Math.random();
this.y = -0.05;
this.velocityX = -Math.random() * 0.1 - 0.1;
this.velocityY = -Math.random() * 0.2 - 0.05;
} else {
// Bottom edge
this.x = Math.random() * 0.5 + 0.5;
this.y = 1.05;
this.velocityX = Math.random() * 0.1 + 0.1;
this.velocityY = -Math.random() * 0.3 - 0.1;
}
} else {
let tries = 0;
do {
this.x = Math.random();
this.y = Math.random();
this.velocityX = -Math.random() * 0.05 - 0.1;
this.velocityY = -Math.random() * 0.2 + 0.1;
} while (this.tooCloseToAnyOtherParticle() && (tries++ < 10));
}
this.rotation = Math.random() * Math.PI * 2;
this.rotationSpeed = (Math.random() * 0.003 - 0.0015) *
(Math.random() > 0.5 ? 1 : -1);
const opacity = Math.random() * 0.4 + 0.2;
this.color = `rgba(${
base.map((x) => x + Math.floor(x * opacity)).join(",")
}, 1)`;
}
tooCloseToAnyOtherParticle() {
for (let i = 0; i < cotyledonParticles.length; i++) {
const otherParticle = cotyledonParticles[i];
const distance = Math.sqrt(
(this.x - otherParticle.x) ** 2 + (this.y - otherParticle.y) ** 2,
);
if (distance < 0.1) {
return true;
}
}
}
update() {
if (this.velocityY < 0.01) {
this.velocityY += 0.00025;
}
this.velocityX -= 0.0001;
this.x += this.velocityX / 1300;
this.y += this.velocityY / 1000;
this.rotation += this.rotationSpeed;
return this.x < -0.05 || (this.y > 1 && this.velocityY < 0);
}
draw() {
if (!ctx) return;
ctx.save();
ctx.translate(this.x * canvas.width, this.y * canvas.height);
ctx.rotate(this.rotation);
ctx.scale(this.size, this.size);
ctx.translate(-50, -50);
ctx.fillStyle = this.color;
ctx.fill(clover);
ctx.restore();
}
}
class BlobParticle {
x: number;
y: number;
opacity: number;
state: 0 | 1 | 2;
stateTime: number;
stayDuration: number;
innerColor: string;
rot: number = Math.random() * Math.PI * 2;
constructor() {
this.x = Math.random() * 0.6 + 0.2;
this.y = Math.random() * 0.6 + 0.2;
this.opacity = 0; // Start fully transparent
this.state = 0;
this.stateTime = 0;
this.stayDuration = Math.random() * 10000 + 5000; // Random stay duration between 5-15 seconds
const colorMultiplier = Math.random() * 0.5 + 1.5; // 0.5-1.0 multiplier
const colorValues = base.map((x) => Math.floor(x * colorMultiplier));
this.innerColor = `rgba(${colorValues.join(",")}, 1)`;
}
update(deltaTime: number) {
this.stateTime += deltaTime;
if (this.state === 0) {
this.opacity = Math.min(1, this.stateTime / 15000);
if (this.stateTime >= 15000) {
this.state = 1;
this.stateTime = 0;
}
} else if (this.state === 1) {
if (this.stateTime >= this.stayDuration) {
this.state = 2;
this.stateTime = 0;
}
} else if (this.state === 2) {
this.opacity = Math.max(0, 1 - (this.stateTime / 15000));
if (this.stateTime >= 15000) {
return true;
}
}
return false;
}
draw() {
if (!ctx) return;
const screenX = this.x * canvas.width;
const screenY = this.y * canvas.height;
const screenSize = (Math.min(canvas.width, canvas.height) * 2) / blobSize;
ctx.save();
ctx.translate(screenX - screenSize * 0.5, screenY - screenSize * 0.5);
ctx.scale(screenSize, screenSize);
ctx.rotate(this.rot);
ctx.globalAlpha = this.opacity * 0.2;
ctx.globalCompositeOperation = "overlay";
ctx.drawImage(blobTextureCanvas, 0, 0);
ctx.restore();
}
}
const cotyledonParticles: CotyledonParticle[] = [];
const blobParticles: BlobParticle[] = [];
let blobParticleTop: BlobParticle = new BlobParticle();
for (let i = 0; i < 80; i++) {
cotyledonParticles.push(new CotyledonParticle("random"));
}
for (let i = 0; i < 9; i++) {
const blobParticle = new BlobParticle();
if (i < 4) {
blobParticle.state = 1;
blobParticle.opacity = 1;
blobParticle.stayDuration = Math.random() * 10000;
} else {
blobParticle.state = i < 7 ? 2 : 0;
blobParticle.stateTime = Math.random() * 15000;
}
if (i > 0) {
do {
blobParticle.x = Math.random();
blobParticle.y = Math.random();
} while (
blobParticles.some((p) =>
Math.sqrt((p.x - blobParticle.x) ** 2 + (p.y - blobParticle.y) ** 2) <
0.1
)
);
}
blobParticles.push(blobParticle);
}
let lastTime = performance.now();
function animate(currentTime: number) {
if (!running) return;
if (!ctx) return;
const deltaTime = currentTime - lastTime;
lastTime = currentTime;
ctx.clearRect(0, 0, canvas.width, canvas.height);
for (let i = blobParticles.length - 1; i >= 0; i--) {
const shouldRemove = blobParticles[i].update(deltaTime);
if (shouldRemove) {
blobParticles[i] = new BlobParticle();
} else {
blobParticles[i].draw();
}
}
for (let i = cotyledonParticles.length - 1; i >= 0; i--) {
const shouldRemove = cotyledonParticles[i].update();
if (shouldRemove) {
cotyledonParticles[i] = new CotyledonParticle("edge");
} else {
cotyledonParticles[i].draw();
}
}
if (blobParticleTop.update(deltaTime)) {
blobParticleTop = new BlobParticle();
}
blobParticleTop.draw();
requestAnimationFrame(animate);
}
let clickedButton = false;
const enterButton = panel.querySelector("button#enter")!;
enterButton.addEventListener("click", () => {
if (clickedButton) return;
clickedButton = true;
const first = panel.querySelector("#first")! as HTMLElement;
const second = panel.querySelector("#captcha")! as HTMLElement;
first.style.transition = second.style.transition = "opacity 1s ease-in-out";
first.style.opacity = "0";
second.style.opacity = "0";
setTimeout(() => {
first.style.display = "none";
second.style.display = "block";
setTimeout(() => {
second.style.opacity = "1";
document.getElementById("enter2")?.addEventListener("click", () => {
second.style.opacity = "0";
let p = fetch("/file/cotyledon", {
method: "POST",
body: "I AGREE",
});
setTimeout(() => {
p.then(() => {
location.reload();
});
}, 1000);
});
}, 10);
}, 1000);
});
const imageButtons = panel.querySelectorAll(".image-grid button")!;
imageButtons.forEach((button) => {
let canClick = true;
button.addEventListener("click", () => {
if (!canClick) return;
canClick = false;
const image = button.querySelector("img")!;
image.style.transition = "opacity 0.05s linear";
image.style.opacity = "0";
setTimeout(() => {
image.style.transition = "opacity 2s linear";
let newNum;
do {
newNum = Math.floor(Math.random() * 18); // 0-17 inclusive
} while (
document.querySelector(`img[src="/captcha/image/${newNum}.jpeg"]`)
);
image.setAttribute("src", `/captcha/image/${newNum}.jpeg`);
setTimeout(() => {
image.style.opacity = "0.75";
canClick = true;
}, 50);
}, 300);
});
});
// Start animation
animate(performance.now());
return () => {
window.removeEventListener("resize", resizeCanvas);
running = false;
};
};

58
src/file-viewer/sort.ts Normal file
View file

@ -0,0 +1,58 @@
/**
 * Partition a root directory's public children into titled sections for the
 * file-viewer index.
 *
 * - `years`:      entries whose basename starts with a 4-digit year >= 2025,
 *                 sorted newest-first
 * - `categories`: entries without a leading year, sorted alphabetically
 * - `cotyledon`:  entries with a leading year <= 2024, sorted newest-first,
 *                 and only included when the visitor has the cotyledon cookie
 *
 * `readme.txt` is extracted separately and never appears in a section.
 *
 * @param dir root directory whose public children are being listed
 * @param hasCotyledonCookie whether the visitor has opted into pre-2025 content
 * @returns the readme file (or null) plus the ordered section list
 */
export function splitRootDirFiles(dir: MediaFile, hasCotyledonCookie: boolean) {
  const children = dir.getPublicChildren();
  let readme: MediaFile | null = null;
  const groups = {
    // years 2025 and onwards
    years: [] as MediaFile[],
    // named categories
    categories: [] as MediaFile[],
    // years 2017 to 2024
    cotyledon: [] as MediaFile[],
  };
  // Section title colors, keyed by group name.
  const colorMap: Record<string, string> = {
    years: "#a2ff91",
    categories: "#9c91ff",
    cotyledon: "#ff91ca",
  };
  for (const child of children) {
    const basename = child.basename;
    if (basename === "readme.txt") {
      readme = child;
      continue;
    }
    const year = basename.match(/^(\d{4})/);
    if (year) {
      const n = parseInt(year[1], 10);
      if (n >= 2025) {
        groups.years.push(child);
      } else {
        groups.cotyledon.push(child);
      }
    } else {
      groups.categories.push(child);
    }
  }
  const sections: { key: string; titleColor: string; files: MediaFile[] }[] =
    [];
  for (const [key, files] of Object.entries(groups)) {
    // Pre-2025 content is hidden unless the visitor opted in.
    if (key === "cotyledon" && !hasCotyledonCookie) {
      continue;
    }
    // Year-based groups count down (newest first); categories sort A-Z.
    if (key === "years" || key === "cotyledon") {
      files.sort((a, b) => {
        return b.basename.localeCompare(a.basename);
      });
    } else {
      files.sort((a, b) => {
        return a.basename.localeCompare(b.basename);
      });
    }
    sections.push({ key, titleColor: colorMap[key], files });
  }
  return { readme, sections };
}
import { MediaFile } from "./models/MediaFile.ts";

Binary file not shown.

After

Width:  |  Height:  |  Size: 14 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 9.7 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 10 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.5 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 6.8 KiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 8.7 KiB

Some files were not shown because too many files have changed in this diff Show more