Compare commits

...

35 commits

Author SHA1 Message Date
clo
9795249ab4 Update run.js 2025-07-15 13:45:19 -07:00
cb12014ecb tidy 2025-07-10 00:48:39 -07:00
8a3a36f0c2 sot: fix deployment 2025-07-09 23:45:21 -07:00
c5db92203c use port from env in servers 2025-07-09 23:22:46 -07:00
47215df902 sot: fix missing mime.txt 2025-07-09 22:45:20 -07:00
8c72184d19 setup source of truth server 2025-07-09 22:34:07 -07:00
caf4da47e0 add "source of truth" server to replace the old cache url 2025-07-08 23:10:41 -07:00
7ef08faaec finish scan3 2025-07-08 20:48:30 -07:00
4c2a4f7578 start the markdown parser 2025-07-08 01:09:55 -07:00
ea5f2bc325 format 2025-07-07 20:58:02 -07:00
f1b1c650ce initial run of scan3 on sandwich 2025-07-07 09:42:04 -07:00
502786b689 actually run ffmpeg fr fr 2025-07-03 10:34:19 -07:00
2320091125 move discovered ffmpeg presets 2025-07-03 01:22:59 -07:00
8d1dc0d825 start av1 encoding ??? 2025-06-27 22:35:03 -07:00
4f89374ee0 stuff for file view 2025-06-27 19:40:19 -07:00
71a072b0be file viewer work 2025-06-22 14:38:36 -07:00
a367dfdb29 get clo file viewer running 2025-06-21 16:04:57 -07:00
c7dfbe1090 throw in the file viewer 2025-06-15 23:42:10 -07:00
7f5011bace finish q+a 2025-06-15 13:11:21 -07:00
db244583d7 work on porting paperclover.net and also some tests 2025-06-15 11:35:28 -07:00
c5113954a8 experiment: streaming suspense implementation 2025-06-15 01:26:38 -07:00
clo
50d245569c Update readme.md 2025-06-13 15:41:55 -07:00
a7220a7e74 update readme 2025-06-13 00:29:30 -07:00
a41569983f incremental sitegen dev server! 2025-06-13 00:13:22 -07:00
d5ef829f01 fine grained incremental rebuilding 2025-06-11 00:17:58 -07:00
15a4600c48 clean up watching 2025-06-10 22:29:12 -07:00
925366e79e add a file watcher, live rebuild.
this is only verified functional on windows 7
2025-06-10 20:06:32 -07:00
c8b5e91251 almost implement views 2025-06-10 01:13:59 -07:00
a1d17a5d61 stuff 2025-06-09 21:13:51 -07:00
399ccec226 incremental generator 2025-06-09 00:12:41 -07:00
92ddecc37e more organize 2025-06-08 17:31:03 -07:00
2767bf4455 add readme 2025-06-08 17:00:07 -07:00
0c5db556f1 primative backend support 2025-06-08 15:12:04 -07:00
46a67453a1 add content type library 2025-06-08 12:38:25 -07:00
7242c6eb89 fix all type errors 2025-06-07 17:01:34 -07:00
204 changed files with 66289 additions and 6394 deletions

4
.dockerignore Normal file
View file

@ -0,0 +1,4 @@
.clover
.env
node_modules

1
.gitignore vendored
View file

@ -1,3 +1,4 @@
.clover
.env
node_modules

View file

@ -1,6 +1,6 @@
{
"lint": {
"exclude": ["framework/meta"], // OLD
"exclude": ["src"], // OLD
"rules": {
"exclude": [
"no-explicit-any" // TODO

61
flake.lock Normal file
View file

@ -0,0 +1,61 @@
{
"nodes": {
"nixpkgs": {
"locked": {
"lastModified": 1751271578,
"narHash": "sha256-P/SQmKDu06x8yv7i0s8bvnnuJYkxVGBWLWHaU+tt4YY=",
"owner": "NixOS",
"repo": "nixpkgs",
"rev": "3016b4b15d13f3089db8a41ef937b13a9e33a8df",
"type": "github"
},
"original": {
"owner": "NixOS",
"ref": "nixos-unstable",
"repo": "nixpkgs",
"type": "github"
}
},
"root": {
"inputs": {
"nixpkgs": "nixpkgs",
"utils": "utils"
}
},
"systems": {
"locked": {
"lastModified": 1681028828,
"narHash": "sha256-Vy1rq5AaRuLzOxct8nz4T6wlgyUR7zLU309k9mBC768=",
"owner": "nix-systems",
"repo": "default",
"rev": "da67096a3b9bf56a91d16901293e51ba5b49a27e",
"type": "github"
},
"original": {
"owner": "nix-systems",
"repo": "default",
"type": "github"
}
},
"utils": {
"inputs": {
"systems": "systems"
},
"locked": {
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
"owner": "numtide",
"repo": "flake-utils",
"type": "github"
}
}
},
"root": "root",
"version": 7
}

29
flake.nix Normal file
View file

@ -0,0 +1,29 @@
{
inputs = {
nixpkgs.url = "github:NixOS/nixpkgs/nixos-unstable";
utils.url = "github:numtide/flake-utils";
};
outputs =
{ nixpkgs, utils, ... }:
utils.lib.eachDefaultSystem (
system:
let
pkgs = nixpkgs.legacyPackages.${system};
in
{
devShells.default = pkgs.mkShell {
buildInputs = [
pkgs.nodejs_24 # runtime
pkgs.deno # formatter
(pkgs.ffmpeg.override {
withOpus = true;
withSvtav1 = true;
withJxl = true;
withWebp = true;
})
pkgs.rsync
];
};
}
);
}

View file

@ -0,0 +1,42 @@
import "@paperclover/console/inject";
import "#debug";
const app = require(globalThis.CLOVER_SERVER_ENTRY ?? process.argv[2]);
const protocol = "http";
const server = serve({
fetch: app.default.fetch,
port: Number(process.env.PORT ?? 3000),
}, ({ address, port }) => {
if (address === "::") address = "::1";
console.info(url.format({
protocol,
hostname: address,
port,
}));
});
process.on("SIGINT", () => {
server.close();
process.exit(0);
});
process.on("SIGTERM", () => {
server.close((err) => {
if (err) {
console.error(err);
process.exit(1);
}
process.exit(0);
});
});
declare global {
/* Control via --define:globalThis.CLOVER_SERVER_ENTRY="..." */
var CLOVER_SERVER_ENTRY: string;
}
import url from "node:url";
import { serve } from "@hono/node-server";
import process from "node:process";

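Illustrative sketch, not part of the diff: the entry above loads its app from the CLOVER_SERVER_ENTRY define (falling back to argv[2]), per the `declare global` comment. A build script might bake that value in roughly like this; the entry and output paths are placeholders, not values from this changeset.

import * as esbuild from "esbuild";

await esbuild.build({
  entryPoints: ["framework/backend/entry-node.ts"], // placeholder path
  bundle: true,
  platform: "node",
  format: "esm",
  outfile: "dist/server.js",
  define: {
    // Equivalent to --define:globalThis.CLOVER_SERVER_ENTRY="..."
    "globalThis.CLOVER_SERVER_ENTRY": JSON.stringify("./backend.js"), // placeholder
  },
});
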
View file

@ -0,0 +1,4 @@
import "@paperclover/console/inject";
export default app;
import app from "#backend";

View file

@ -1,9 +1,4 @@
// This file implements client-side bundling, mostly wrapping esbuild.
import process from "node:process";
const plugins: esbuild.Plugin[] = [
// There are currently no plugins needed by 'paperclover.net'
];
export async function bundleClientJavaScript(
referencedScripts: string[],
extraPublicScripts: string[],
@ -12,7 +7,7 @@ export async function bundleClientJavaScript(
) {
const entryPoints = [
...new Set([
...referencedScripts,
...referencedScripts.map((file) => path.resolve(hot.projectSrc, file)),
...extraPublicScripts,
]),
];
@ -22,23 +17,39 @@ export async function bundleClientJavaScript(
if (invalidFiles.length > 0) {
const cwd = process.cwd();
throw new Error(
"All client-side scripts should be named like '.client.ts'. Exceptions: " +
invalidFiles.map((x) => path.join(cwd, x)).join(","),
"All client-side scripts should be named like '.client.ts'. Exceptions: \n" +
invalidFiles.map((x) => path.join(cwd, x)).join("\n"),
);
}
const clientPlugins: esbuild.Plugin[] = [
projectRelativeResolution(),
markoViaBuildCache(incr),
];
const bundle = await esbuild.build({
assetNames: "/asset/[hash]",
bundle: true,
chunkNames: "/js/c.[hash]",
entryNames: "/js/[name]",
assetNames: "/asset/[hash]",
entryPoints,
format: "esm",
jsx: "automatic",
jsxDev: dev,
jsxImportSource: "#ssr",
logLevel: "silent",
metafile: true,
minify: !dev,
outdir: "/out!",
plugins,
splitting: true,
outdir: "out!",
plugins: clientPlugins,
write: false,
define: {
"ASSERT": "console.assert",
MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
},
}).catch((err: any) => {
err.message = `Client ${err.message}`;
throw err;
});
if (bundle.errors.length || bundle.warnings.length) {
throw new AggregateError(
@ -46,40 +57,264 @@ export async function bundleClientJavaScript(
"JS bundle failed",
);
}
incr.invalidate("bundle-script");
const publicScriptRoutes = extraPublicScripts.map((file) =>
path.basename(file).replace(/\.client\.[tj]sx?/, "")
"/js/" +
path.relative(hot.projectSrc, file).replaceAll("\\", "/").replace(
/\.client\.[tj]sx?/,
".js",
)
);
const promises: Promise<unknown>[] = [];
// TODO: add a shared build hash to entrypoints, derived from all the chunk hashes.
for (const file of bundle.outputFiles) {
const { metafile, outputFiles } = bundle;
const promises: Promise<void>[] = [];
for (const file of outputFiles) {
const { text } = file;
let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
const text = file.text;
const { inputs } = UNWRAP(metafile.outputs["out!" + route]);
const sources = Object.keys(inputs)
.filter((x) => !x.startsWith("<define:"));
// Register non-chunks as script entries.
const chunk = route.startsWith("/js/c.");
if (!chunk) {
route = route.replace(".client.js", ".js");
const key = hot.getScriptId(path.resolve(sources[sources.length - 1]));
route = "/js/" + key.replace(/\.client\.tsx?/, ".js");
incr.put({
srcId: "bundle-script",
type: "script",
key: route.slice("/js/".length, -".js".length),
sources,
kind: "script",
key,
value: text,
});
}
// Register chunks and public scripts as assets.
if (chunk || publicScriptRoutes.includes(route)) {
promises.push(incr.putAsset({
srcId: "bundle-script",
sources,
key: route,
body: text,
}));
}
}
if (promises.length > 0) {
await Promise.all(promises);
}
await Promise.all(promises);
}
export type ServerPlatform = "node" | "passthru";
export async function bundleServerJavaScript(
incr: Incremental,
platform: ServerPlatform = "node",
) {
if (incr.hasArtifact("backendBundle", platform)) return;
// Comment
const magicWord = "C_" + crypto.randomUUID().replaceAll("-", "_");
const viewSource = [
...Array.from(
incr.out.viewMetadata,
([, view], i) => `import * as view${i} from ${JSON.stringify(view.file)}`,
),
`const styles = ${magicWord}[-2]`,
`export const scripts = ${magicWord}[-1]`,
"export const views = {",
...Array.from(incr.out.viewMetadata, ([key, view], i) =>
[
` ${JSON.stringify(key)}: {`,
` component: view${i}.default,`,
// ` meta: ${
// view.staticMeta ? JSON.stringify(view.staticMeta) : `view${i}.meta`
// },`,
` meta: view${i}.meta,`,
` layout: ${view.hasLayout ? `view${i}.layout?.default` : "null"},`,
` inlineCss: styles[${magicWord}[${i}]]`,
` },`,
].join("\n")),
"}",
].join("\n");
// -- plugins --
const serverPlugins: esbuild.Plugin[] = [
virtualFiles({
"$views": viewSource,
}),
projectRelativeResolution(),
markoViaBuildCache(incr),
{
name: "replace client references",
setup(b) {
b.onLoad({ filter: /\.tsx?$/ }, async ({ path: file }) => ({
contents:
hot.resolveClientRefs(await fs.readFile(file, "utf-8"), file).code,
loader: path.extname(file).slice(1) as esbuild.Loader,
}));
},
},
{
name: "mark css external",
setup(b) {
b.onResolve(
{ filter: /\.css$/ },
() => ({ path: ".", namespace: "dropped" }),
);
b.onLoad(
{ filter: /./, namespace: "dropped" },
() => ({ contents: "" }),
);
},
},
];
const pkg = await fs.readJson("package.json") as {
dependencies: Record<string, string>;
};
const { metafile, outputFiles } = await esbuild.build({
bundle: true,
chunkNames: "c.[hash]",
entryNames: "server",
entryPoints: [
path.join(import.meta.dirname, "backend/entry-" + platform + ".ts"),
],
platform: "node",
format: "esm",
minify: false,
outdir: "out!",
plugins: serverPlugins,
splitting: true,
logLevel: "silent",
write: false,
metafile: true,
jsx: "automatic",
jsxImportSource: "#ssr",
jsxDev: false,
define: {
MIME_INLINE_DATA: JSON.stringify(mime.rawEntriesText),
},
external: Object.keys(pkg.dependencies)
.filter((x) => !x.startsWith("@paperclover")),
});
const files: Record<string, Buffer> = {};
let fileWithMagicWord: string | null = null;
for (const output of outputFiles) {
const basename = output.path.replace(/^.*?!/, "");
const key = "out!" + basename.replaceAll("\\", "/");
// If this contains the generated "$views" file, then
// mark this file as the one for replacement. Because
// `splitting` is `true`, esbuild will not emit this
// file in more than one chunk.
if (metafile.outputs[key].inputs["framework/lib/view.ts"]) {
fileWithMagicWord = basename;
}
files[basename] = Buffer.from(output.contents);
}
incr.put({
kind: "backendBundle",
key: platform,
value: {
magicWord,
files,
fileWithMagicWord,
},
sources: Object.keys(metafile.inputs).filter((x) =>
!x.includes("<define:") &&
!x.startsWith("vfs:") &&
!x.startsWith("dropped:") &&
!x.includes("node_modules")
),
});
}
export async function finalizeServerJavaScript(
incr: Incremental,
platform: ServerPlatform,
) {
if (incr.hasArtifact("backendReplace", platform)) return;
const {
files,
fileWithMagicWord,
magicWord,
} = UNWRAP(incr.getArtifact("backendBundle", platform));
if (!fileWithMagicWord) return;
// Only the reachable resources need to be inserted into the bundle.
const viewScriptsList = new Set(
Array.from(incr.out.viewMetadata.values())
.flatMap((view) => view.clientRefs),
);
const viewStyleKeys = Array.from(incr.out.viewMetadata.values())
.map((view) => css.styleKey(view.cssImports, view.theme));
const viewCssBundles = viewStyleKeys
.map((key) => UNWRAP(incr.out.style.get(key), "Style key: " + key));
// Deduplicate styles
const styleList = Array.from(new Set(viewCssBundles));
// Replace the magic word
let text = files[fileWithMagicWord].toString("utf-8");
text = text.replace(
new RegExp(magicWord + "\\[(-?\\d+)\\]", "gs"),
(_, i) => {
i = Number(i);
// Inline the styling data
if (i === -2) {
return JSON.stringify(styleList.map((cssText) => cssText));
}
// Inline the script data
if (i === -1) {
return JSON.stringify(Object.fromEntries(incr.out.script));
}
// Reference an index into `styleList`
return `${styleList.indexOf(viewCssBundles[i])}`;
},
);
incr.put({
kind: "backendReplace",
key: platform,
sources: [
// Backend input code (includes view code)
...incr.sourcesFor("backendBundle", platform),
// Script
...Array.from(viewScriptsList)
.flatMap((key) => incr.sourcesFor("script", hot.getScriptId(key))),
// Style
...viewStyleKeys.flatMap((key) => incr.sourcesFor("style", key)),
],
value: Buffer.from(text),
});
}
function markoViaBuildCache(incr: Incremental): esbuild.Plugin {
return {
name: "marko via build cache",
setup(b) {
b.onLoad(
{ filter: /\.marko$/ },
async ({ path: file }) => {
const key = path.relative(hot.projectRoot, file)
.replaceAll("\\", "/");
const cacheEntry = incr.out.serverMarko.get(key);
if (!cacheEntry) {
if (!fs.existsSync(file)) {
console.log(`File does not exist: ${file}`);
}
throw new Error("Marko file not in cache: " + file);
}
return ({
loader: "ts",
contents: cacheEntry.src,
resolveDir: path.dirname(file),
});
},
);
},
};
}
import * as path from "node:path";
import * as esbuild from "esbuild";
import * as path from "node:path";
import process from "node:process";
import * as hot from "./hot.ts";
import { projectRelativeResolution, virtualFiles } from "./esbuild-support.ts";
import { Incremental } from "./incremental.ts";
import * as css from "./css.ts";
import * as fs from "#sitegen/fs";
import * as mime from "#sitegen/mime";

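Illustrative example, not part of the diff: the public-script route mapping used by bundleClientJavaScript above, shown standalone. The project root and file name are hypothetical; the real root comes from hot.projectSrc.

import * as path from "node:path";

const projectSrc = "/repo/src"; // assumption standing in for hot.projectSrc
const file = "/repo/src/file-viewer/canvas.client.ts"; // hypothetical public script

const route = "/js/" +
  path.relative(projectSrc, file).replaceAll("\\", "/")
    .replace(/\.client\.[tj]sx?/, ".js");
// route === "/js/file-viewer/canvas.js"
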
View file

@ -5,6 +5,12 @@ export interface Theme {
h1?: string;
}
export const defaultTheme: Theme = {
bg: "#ffffff",
fg: "#050505",
primary: "#2e7dab",
};
export function stringifyTheme(theme: Theme) {
return [
":root {",
@ -26,33 +32,42 @@ export function preprocess(css: string, theme: Theme): string {
return css.replace(
regex,
(_, line) =>
line.replace(regex2, (_: string, varName: string) => theme[varName]) +
line.replace(
regex2,
(_: string, varName: string) => theme[varName as keyof Theme],
) +
";" + line.slice(1),
);
}
export interface Output {
text: string;
sources: string[];
}
export function styleKey(
cssImports: string[],
theme: Theme,
) {
cssImports = cssImports
.map((file) =>
(path.isAbsolute(file) ? path.relative(hot.projectSrc, file) : file)
.replaceAll("\\", "/")
)
.sort();
return cssImports.join(":") + ":" +
Object.entries(theme).map(([k, v]) => `${k}=${v}`);
}
export async function bundleCssFiles(
cssImports: string[],
theme: Theme,
dev: boolean = false,
): Promise<string> {
): Promise<Output> {
cssImports = cssImports.map((file) => path.resolve(hot.projectSrc, file));
const plugin = {
name: "clover",
name: "clover css",
setup(b) {
b.onResolve(
{ filter: /^\$input\$$/ },
() => ({ path: ".", namespace: "input" }),
);
b.onLoad(
{ filter: /./, namespace: "input" },
() => ({
loader: "css",
contents:
cssImports.map((path) => `@import url(${JSON.stringify(path)});`)
.join("\n") + stringifyTheme(theme),
resolveDir: ".",
}),
);
b.onLoad(
{ filter: /\.css$/ },
async ({ path: file }) => ({
@ -61,17 +76,29 @@ export async function bundleCssFiles(
}),
);
},
} satisfies Plugin;
} satisfies esbuild.Plugin;
const build = await esbuild.build({
bundle: true,
entryPoints: ["$input$"],
write: false,
external: ["*.woff2"],
target: ["ie11"],
plugins: [plugin],
external: ["*.woff2", "*.ttf", "*.png", "*.jpeg"],
metafile: true,
minify: !dev,
plugins: [
virtualFiles({
"$input$": {
contents: cssImports.map((path) =>
`@import url(${JSON.stringify(path)});`
)
.join("\n") + stringifyTheme(theme),
loader: "css",
},
}),
plugin,
],
target: ["ie11"],
write: false,
});
const { errors, warnings, outputFiles } = build;
const { errors, warnings, outputFiles, metafile } = build;
if (errors.length > 0) {
throw new AggregateError(errors, "CSS Build Failed");
}
@ -79,9 +106,15 @@ export async function bundleCssFiles(
throw new AggregateError(warnings, "CSS Build Failed");
}
if (outputFiles.length > 1) throw new Error("Too many output files");
return outputFiles[0].text;
return {
text: outputFiles[0].text,
sources: Object.keys(metafile.outputs["$input$.css"].inputs)
.filter((x) => !x.startsWith("vfs:")),
};
}
import type { Plugin } from "esbuild";
import * as esbuild from "esbuild";
import * as fs from "./fs.ts";
import * as fs from "#sitegen/fs";
import * as hot from "./hot.ts";
import * as path from "node:path";
import { virtualFiles } from "./esbuild-support.ts";

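Illustrative example, not part of the diff: what styleKey above produces for a hypothetical page, using the defaultTheme values from this file. Imports are sorted and made project-relative before joining, then the theme entries are appended.

import { styleKey } from "./css.ts"; // import path assumed

const key = styleKey(
  ["global.css", "file-viewer/canvas.css"], // hypothetical imports
  { bg: "#ffffff", fg: "#050505", primary: "#2e7dab" },
);
// key === "file-viewer/canvas.css:global.css:bg=#ffffff,fg=#050505,primary=#2e7dab"
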
17
framework/debug.safe.ts Normal file
View file

@ -0,0 +1,17 @@
globalThis.UNWRAP = (t, ...args) => {
if (t == null) {
throw new Error(
args.length > 0 ? util.format(...args) : "UNWRAP(" + t + ")",
);
}
return t;
};
globalThis.ASSERT = (t, ...args) => {
if (!t) {
throw new Error(
args.length > 0 ? util.format(...args) : "Assertion Failed",
);
}
};
import * as util from "node:util";

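Illustrative usage, not part of the diff, of the UNWRAP and ASSERT globals installed above (extra arguments are formatted with util.format). The map and keys are hypothetical.

const styles = new Map<string, string>([["style:main", "body{margin:0}"]]);

// Returns the value, or throws the formatted message instead of
// silently propagating undefined.
const cssText = UNWRAP(styles.get("style:main"), "Missing style key: %s", "style:main");

// Throws "Assertion Failed" (or the formatted message) when the condition is falsy.
ASSERT(cssText.length > 0, "style %s is empty", "style:main");
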
View file

@ -1,2 +1,4 @@
declare function UNWRAP<T>(value: T | null | undefined): T;
declare function ASSERT(value: unknown, ...log: unknown[]): asserts value;
declare function UNWRAP<T>(value: T | null | undefined, ...log: unknown[]): T;
declare function ASSERT(value: unknown, ...log: unknown[]): asserts value;
type Timer = ReturnType<typeof setTimeout>;

View file

@ -1,32 +1,54 @@
export const Fragment = ({ children }: { children: engine.Node[] }) => children;
export function jsx(
type: string | engine.Component,
props: Record<string, unknown>,
): engine.Element {
if (typeof type !== "function" && typeof type !== "string") {
throw new Error("Invalid component type: " + engine.inspect(type));
}
return [engine.kElement, type, props];
}
export function jsxDEV(
type: string | engine.Component,
props: Record<string, unknown>,
// Unused with the clover engine
_key: string,
// Unused with the clover engine
_isStaticChildren: boolean,
// Unused with the clover engine
_source: unknown,
): engine.Element {
if (typeof type !== "function" && typeof type !== "string") {
throw new Error("Invalid component type: " + engine.inspect(type));
}
return [engine.kElement, type, props];
}
// jsxs
export { jsx as jsxs };
import * as engine from "./ssr.ts";
export const Fragment = ({ children }: { children: engine.Node[] }) => children;
export function jsx(
type: string | engine.Component,
props: Record<string, unknown>,
): engine.Element {
if (typeof type !== "function" && typeof type !== "string") {
throw new Error("Invalid component type: " + engine.inspect(type));
}
return [engine.kElement, type, props];
}
export function jsxDEV(
type: string | engine.Component,
props: Record<string, unknown>,
// Unused with the clover engine
_key: string,
// Unused with the clover engine
_isStaticChildren: boolean,
source: engine.SrcLoc,
): engine.Element {
const { fileName, lineNumber, columnNumber } = source;
// Assert the component type is valid to render.
if (typeof type !== "function" && typeof type !== "string") {
throw new Error(
`Invalid component type at ${fileName}:${lineNumber}:${columnNumber}: ` +
engine.inspect(type) +
". Clover SSR element must be a function or string",
);
}
// Construct an `ssr.Element`
return [engine.kElement, type, props, "", source];
}
// jsxs
export { jsx as jsxs };
declare global {
namespace JSX {
interface IntrinsicElements {
[name: string]: Record<string, unknown>;
}
interface ElementChildrenAttribute {
children: Node;
}
type Element = engine.Element;
type ElementType = keyof IntrinsicElements | engine.Component;
type ElementClass = ReturnType<engine.Component>;
}
}
import * as engine from "./ssr.ts";

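Illustrative example, not part of the diff: with jsxImportSource "#ssr", a JSX expression compiles into the runtime above and evaluates to a plain element tuple that the engine renders. The import path is assumed.

import { jsx } from "./jsx-runtime.ts"; // path assumed; normally resolved via "#ssr"
import * as engine from "./ssr.ts";

// <a href="/">home</a> compiles (production mode) to roughly:
const element = jsx("a", { href: "/", children: "home" });
// element === [engine.kElement, "a", { href: "/", children: "home" }]

console.log(engine.ssrSync(element).text); // "<a href=/>home</a>"
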
View file

@ -1,125 +1,147 @@
// This file is used to integrate Marko into the Clover Engine and Sitegen
// To use, replace the "marko/html" import with this file.
export * from "#marko/html";
interface BodyContentObject {
[x: PropertyKey]: unknown;
content: ServerRenderer;
}
export const createTemplate = (
templateId: string,
renderer: ServerRenderer,
) => {
const { render } = marko.createTemplate(templateId, renderer);
function wrap(props: Record<string, unknown>, n: number) {
// Marko components
const cloverAsyncMarker = { isAsync: false };
let r: engine.Render | undefined = undefined;
try {
r = engine.getCurrentRender();
} catch {}
// Support using Marko outside of Clover SSR
if (r) {
const markoResult = render.call(renderer, {
...props,
$global: { clover: r, cloverAsyncMarker },
});
if (cloverAsyncMarker.isAsync) {
return markoResult.then(engine.html);
}
const rr = markoResult.toString();
return engine.html(rr);
} else {
return renderer(props, n);
}
}
wrap.render = render;
wrap.unwrapped = renderer;
return wrap;
};
export const dynamicTag = (
scopeId: number,
accessor: Accessor,
tag: unknown | string | ServerRenderer | BodyContentObject,
inputOrArgs: unknown,
content?: (() => void) | 0,
inputIsArgs?: 1,
serializeReason?: 1 | 0,
) => {
marko.dynamicTag;
if (typeof tag === "function") {
clover: {
const unwrapped = (tag as any).unwrapped;
if (unwrapped) {
tag = unwrapped;
break clover;
}
let r: engine.Render;
try {
r = engine.getCurrentRender();
if (!r) throw 0;
} catch {
r = marko.$global().clover as engine.Render;
}
if (!r) throw new Error("No Clover Render Active");
const subRender = engine.initRender(r.async !== -1, r.addon);
const resolved = engine.resolveNode(subRender, [
engine.kElement,
tag,
inputOrArgs,
]);
if (subRender.async > 0) {
const marker = marko.$global().cloverAsyncMarker;
marker.isAsync = true;
// Wait for async work to finish
const { resolve, reject, promise } = Promise.withResolvers<string>();
subRender.asyncDone = () => {
const rejections = subRender.rejections;
if (!rejections) return resolve(engine.renderNode(resolved));
(r.rejections ??= []).push(...rejections);
return reject(new Error("Render had errors"));
};
marko.fork(
scopeId,
accessor,
promise,
(string: string) => marko.write(string),
0,
);
} else {
marko.write(engine.renderNode(resolved));
}
return;
}
}
return marko.dynamicTag(
scopeId,
accessor,
tag,
inputOrArgs,
content,
inputIsArgs,
serializeReason,
);
};
export function fork(
scopeId: string,
accessor: Accessor,
promise: Promise<unknown>,
callback: (data: unknown) => void,
serializeMarker?: 0 | 1,
) {
const marker = marko.$global().cloverAsyncMarker;
marker.isAsync = true;
marko.fork(scopeId, accessor, promise, callback, serializeMarker);
}
import * as engine from "./ssr.ts";
import type { ServerRenderer } from "marko/html/template";
import { type Accessor } from "marko/common/types";
import * as marko from "#marko/html";
// This file is used to integrate Marko into the Clover Engine and Sitegen
// To use, replace the "marko/html" import with this file.
export * from "#marko/html";
interface BodyContentObject {
[x: PropertyKey]: unknown;
content: ServerRenderer;
}
export const createTemplate = (
templateId: string,
renderer: ServerRenderer,
) => {
const { render } = marko.createTemplate(templateId, renderer);
function wrap(props: Record<string, unknown>, n: number) {
// Marko Custom Tags
const cloverAsyncMarker = { isAsync: false };
let r: engine.Render | undefined = undefined;
try {
r = engine.getCurrentRender();
} catch {}
// Support using Marko outside of Clover SSR
if (r) {
engine.setCurrentRender(null);
const markoResult = render.call(renderer, {
...props,
$global: { clover: r, cloverAsyncMarker },
});
if (cloverAsyncMarker.isAsync) {
return markoResult.then(engine.html);
}
const rr = markoResult.toString();
return engine.html(rr);
} else {
return renderer(props, n);
}
}
wrap.render = render;
wrap.unwrapped = renderer;
return wrap;
};
export const dynamicTag = (
scopeId: number,
accessor: Accessor,
tag: unknown | string | ServerRenderer | BodyContentObject,
inputOrArgs: unknown,
content?: (() => void) | 0,
inputIsArgs?: 1,
serializeReason?: 1 | 0,
) => {
if (typeof tag === "function") {
clover: {
const unwrapped = (tag as any).unwrapped;
if (unwrapped) {
tag = unwrapped;
break clover;
}
let r: engine.Render;
try {
r = engine.getCurrentRender();
if (!r) throw 0;
} catch {
r = marko.$global().clover as engine.Render;
}
if (!r) throw new Error("No Clover Render Active");
const subRender = engine.initRender(r.async !== -1, r.addon);
const resolved = engine.resolveNode(subRender, [
engine.kElement,
tag,
inputOrArgs,
]);
if (subRender.async > 0) {
const marker = marko.$global().cloverAsyncMarker as Async;
marker.isAsync = true;
// Wait for async work to finish
const { resolve, reject, promise } = Promise.withResolvers<string>();
subRender.asyncDone = () => {
const rejections = subRender.rejections;
if (!rejections) return resolve(engine.renderNode(resolved));
(r.rejections ??= []).push(...rejections);
return reject(new Error("Render had errors"));
};
marko.fork(
scopeId,
accessor,
promise,
(string: string) => marko.write(string),
0,
);
} else {
marko.write(engine.renderNode(resolved));
}
return;
}
}
return marko.dynamicTag(
scopeId,
accessor,
tag,
inputOrArgs,
content,
inputIsArgs,
serializeReason,
);
};
export function fork(
scopeId: number,
accessor: Accessor,
promise: Promise<unknown>,
callback: (data: unknown) => void,
serializeMarker?: 0 | 1,
) {
const marker = marko.$global().cloverAsyncMarker as Async;
marker.isAsync = true;
marko.fork(scopeId, accessor, promise, callback, serializeMarker);
}
export function escapeXML(input: unknown) {
// The rationale of this check is that the default toString method
// creating `[object Object]` is universally useless to any end user.
if (
input == null ||
(typeof input === "object" && input &&
// only block this if it's the default `toString`
input.toString === Object.prototype.toString)
) {
throw new Error(
`Unexpected value in template placeholder: '` +
engine.inspect(input) + "'. " +
`To emit a literal '${input}', use \${String(value)}`,
);
}
return marko.escapeXML(input);
}
interface Async {
isAsync: boolean;
}
import * as engine from "./ssr.ts";
import type { ServerRenderer } from "marko/html/template";
import { type Accessor } from "marko/common/types";
import * as marko from "#marko/html";

View file

@ -0,0 +1,41 @@
import { test } from "node:test";
import * as engine from "./ssr.ts";
test("sanity", (t) => t.assert.equal(engine.ssrSync("gm <3").text, "gm &lt;3"));
test("simple tree", (t) =>
t.assert.equal(
engine.ssrSync(
<main class={["a", "b"]}>
<h1 style="background-color:red">hello world</h1>
<p>haha</p>
{1}|
{0}|
{true}|
{false}|
{null}|
{undefined}|
</main>,
).text,
'<main class="a b"><h1 style=background-color:red>hello world</h1><p>haha</p>1|0|||||</main>',
));
test("unescaped/escaped html", (t) =>
t.assert.equal(
engine.ssrSync(<div>{engine.html("<fuck>")}{"\"&'`<>"}</div>).text,
"<div><fuck>&quot;&amp;&#x27;&#x60;&lt;&gt;</div>",
));
test("clsx built-in", (t) =>
t.assert.equal(
engine.ssrSync(
<>
<a class="a" />
<b class={null} />
<c class={undefined} />
<d class={["a", "b", null]} />
<e class={{ a: true, b: false }} />
<e
class={[null, "x", { z: true }, [{ m: true }, null, { v: false }]]}
/>
</>,
).text,
'<a class=a></a><b></b><c></c><d class="a b"></d><e class=a></e><e class="x z m"></e>',
));

View file

@ -1,300 +1,304 @@
// Clover's Rendering Engine is the backbone of her website generator. It
// converts objects and components (functions returning 'Node') into HTML. The
// engine is simple and self-contained; integrations for JSX and Marko
// (which can interop with each other) are provided next to this file.
//
// Add-ons to the rendering engine can provide opaque data, and retrieve it
// within component calls with 'getAddonData'. For example, 'sitegen' uses this
// to track needed client scripts without introducing patches to the engine.
type AddonData = Record<string | symbol, unknown>;
export function ssrSync(node: Node): Result;
export function ssrSync<A extends AddonData>(
node: Node,
addon: AddonData,
): Result<A>;
export function ssrSync(node: Node, addon: AddonData = {}) {
const r = initRender(false, addon);
const resolved = resolveNode(r, node);
return { text: renderNode(resolved), addon };
}
export function ssrAsync(node: Node): Promise<Result>;
export function ssrAsync<A extends AddonData>(
node: Node,
addon: AddonData,
): Promise<Result<A>>;
export function ssrAsync(node: Node, addon: AddonData = {}) {
const r = initRender(true, addon);
const resolved = resolveNode(r, node);
if (r.async === 0) {
return Promise.resolve({ text: renderNode(resolved), addon });
}
const { resolve, reject, promise } = Promise.withResolvers<Result>();
r.asyncDone = () => {
const rejections = r.rejections;
if (!rejections) return resolve({ text: renderNode(resolved), addon });
if (rejections.length === 1) return reject(rejections[0]);
return reject(new AggregateError(rejections));
};
return promise;
}
/** Inline HTML into a render without escaping it */
export function html(rawText: string) {
return [kDirectHtml, rawText];
}
interface Result<A extends AddonData = AddonData> {
text: string;
addon: A;
}
export interface Render {
/**
* Set to '-1' if rendering synchronously
* Number of async promises the render is waiting on.
*/
async: number | -1;
asyncDone: null | (() => void);
/** When components reject, those are logged here */
rejections: unknown[] | null;
/** Add-ons to the rendering engine store state here */
addon: AddonData;
}
export const kElement = Symbol("Element");
export const kDirectHtml = Symbol("DirectHtml");
/** Node represents a webpage that can be 'rendered' into HTML. */
export type Node =
| number
| string // Escape HTML
| Node[] // Concat
| Element // Render
| DirectHtml // Insert
| Promise<Node> // Await
// Ignore
| undefined
| null
| boolean;
export type Element = [
tag: typeof kElement,
type: string | Component,
props: Record<string, unknown>,
];
export type DirectHtml = [tag: typeof kDirectHtml, html: string];
/**
* Components must return a value; 'undefined' is prohibited here
* to avoid functions that are missing a return statement.
*/
export type Component = (
props: Record<string, unknown>,
) => Exclude<Node, undefined>;
/**
* Resolution narrows the type 'Node' into 'ResolvedNode'. Async trees are
* marked in the 'Render'. This operation performs everything besides the final
* string concatenation. This function is agnostic across async/sync modes.
*/
export function resolveNode(r: Render, node: unknown): ResolvedNode {
if (!node && node !== 0) return ""; // falsy, non numeric
if (typeof node !== "object") {
if (node === true) return ""; // booleans are ignored
if (typeof node === "string") return escapeHTML(node);
if (typeof node === "number") return String(node); // no escaping ever
throw new Error(`Cannot render ${inspect(node)} to HTML`);
}
if (node instanceof Promise) {
if (r.async === -1) {
throw new Error(`Asynchronous rendering is not supported here.`);
}
const placeholder: InsertionPoint = [null];
r.async += 1;
node
.then((result) => void (placeholder[0] = resolveNode(r, result)))
// Intentionally catching errors in `resolveNode`
.catch((e) => (r.rejections ??= []).push(e))
.finally(() => {
if (--r.async == 0) {
if (r.asyncDone == null) throw new Error("r.asyncDone == null");
r.asyncDone();
r.asyncDone = null;
}
});
// This lie is checked with an assertion in `renderNode`
return placeholder as [ResolvedNode];
}
if (!Array.isArray(node)) {
throw new Error(`Invalid node type: ${inspect(node)}`);
}
const type = node[0];
if (type === kElement) {
const { 1: tag, 2: props } = node;
if (typeof tag === "function") {
currentRender = r;
const result = tag(props);
currentRender = null;
return resolveNode(r, result);
}
if (typeof tag !== "string") throw new Error("Unexpected " + typeof type);
const children = props?.children;
if (children) return [kElement, tag, props, resolveNode(r, children)];
return node;
}
if (type === kDirectHtml) return node[1];
return node.map((elem) => resolveNode(r, elem));
}
export type ResolvedNode =
| ResolvedNode[] // Concat
| ResolvedElement // Render
| string; // Direct HTML
export type ResolvedElement = [
tag: typeof kElement,
type: string,
props: Record<string, unknown>,
children: ResolvedNode,
];
/**
* Async rendering is done by creating an array of one item,
* which is already a valid 'Node', but the element is written
* once the data is available. The 'Render' contains a count
* of how many async jobs are left.
*/
export type InsertionPoint = [null | ResolvedNode];
/**
* Convert 'ResolvedNode' into HTML text. This operation happens after all
* async work is settled. The HTML is emitted as concisely as possible.
*/
export function renderNode(node: ResolvedNode): string {
if (typeof node === "string") return node;
ASSERT(node, "Unresolved Render Node");
const type = node[0];
if (type === kElement) {
return renderElement(node as ResolvedElement);
}
node = node as ResolvedNode[]; // TS cannot infer.
let out = type ? renderNode(type) : "";
let len = node.length;
for (let i = 1; i < len; i++) {
const elem = node[i];
if (elem) out += renderNode(elem);
}
return out;
}
function renderElement(element: ResolvedElement) {
const { 1: tag, 2: props, 3: children } = element;
let out = "<" + tag;
let needSpace = true;
for (const prop in props) {
const value = props[prop];
if (!value || typeof value === "function") continue;
let attr;
switch (prop) {
default:
attr = `${prop}=${quoteIfNeeded(escapeHTML(String(value)))}`;
break;
case "className":
// Legacy React Compat
case "class":
attr = `class=${quoteIfNeeded(escapeHTML(clsx(value)))}`;
break;
case "htmlFor":
throw new Error("Do not use the `htmlFor` attribute. Use `for`");
// Do not process these
case "children":
case "ref":
case "dangerouslySetInnerHTML":
case "key":
continue;
}
if (needSpace) out += " ", needSpace = !attr.endsWith('"');
out += attr;
}
out += ">";
if (children) out += renderNode(children);
if (
tag !== "br" && tag !== "img" && tag !== "input" && tag !== "meta" &&
tag !== "link" && tag !== "hr"
) {
out += `</${tag}>`;
}
return out;
}
export function renderStyleAttribute(style: Record<string, string>) {
let out = ``;
for (const styleName in style) {
if (out) out += ";";
out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${
escapeHTML(String(style[styleName]))
}`;
}
return "style=" + quoteIfNeeded(out);
}
export function quoteIfNeeded(text) {
if (text.includes(" ")) return '"' + text + '"';
return text;
}
// -- utility functions --
export function initRender(allowAsync: boolean, addon: AddonData): Render {
return {
async: allowAsync ? 0 : -1,
rejections: null,
asyncDone: null,
addon,
};
}
let currentRender: Render | null = null;
export function getCurrentRender() {
if (!currentRender) throw new Error("No Render Active");
return currentRender;
}
export function setCurrentRender(r?: Render | null) {
currentRender = r ?? null;
}
export function getUserData<T>(namespace: PropertyKey, def: () => T): T {
return (getCurrentRender().addon[namespace] ??= def()) as T;
}
export function inspect(object: unknown) {
try {
return require("node:util").inspect(object);
} catch {
return typeof object;
}
}
export type ClsxInput = string | Record<string, boolean | null> | ClsxInput[];
export function clsx(mix: ClsxInput) {
var k, y, str;
if (typeof mix === "string") {
return mix;
} else if (typeof mix === "object") {
str = "";
if (Array.isArray(mix)) {
for (k = 0; k < mix.length; k++) {
if (mix[k] && (y = clsx(mix[k]))) {
str && (str += " ");
str += y;
}
}
} else {
for (k in mix) {
if (mix[k]) {
str && (str += " ");
str += k;
}
}
}
}
return str;
}
export const escapeHTML = (unsafeText: string) =>
String(unsafeText)
.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;")
.replace(/"/g, "&quot;").replace(/'/g, "&#x27;").replace(/`/g, "&#x60;");
// Clover's Rendering Engine is the backbone of her website generator. It
// converts objects and components (functions returning 'Node') into HTML. The
// engine is simple and self-contained; integrations for JSX and Marko
// (which can interop with each other) are provided next to this file.
//
// Add-ons to the rendering engine can provide opaque data, and retrieve it
// within component calls with 'getAddonData'. For example, 'sitegen' uses this
// to track needed client scripts without introducing patches to the engine.
export type Addons = Record<string | symbol, unknown>;
export function ssrSync<A extends Addons>(node: Node, addon: A = {} as A) {
const r = initRender(false, addon);
const resolved = resolveNode(r, node);
return { text: renderNode(resolved), addon };
}
export function ssrAsync<A extends Addons>(node: Node, addon: A = {} as A) {
const r = initRender(true, addon);
const resolved = resolveNode(r, node);
if (r.async === 0) {
return Promise.resolve({ text: renderNode(resolved), addon });
}
const { resolve, reject, promise } = Promise.withResolvers<Result>();
r.asyncDone = () => {
const rejections = r.rejections;
if (!rejections) return resolve({ text: renderNode(resolved), addon });
if (rejections.length === 1) return reject(rejections[0]);
return reject(new AggregateError(rejections));
};
return promise;
}
/** Inline HTML into a render without escaping it */
export function html(rawText: ResolvedNode): DirectHtml {
return [kDirectHtml, rawText];
}
interface Result<A extends Addons = Addons> {
text: string;
addon: A;
}
export interface Render {
/**
* Set to '-1' if rendering synchronously
* Number of async promises the render is waiting on.
*/
async: number | -1;
asyncDone: null | (() => void);
/** When components reject, those are logged here */
rejections: unknown[] | null;
/** Add-ons to the rendering engine store state here */
addon: Addons;
}
export const kElement = Symbol("Element");
export const kDirectHtml = Symbol("DirectHtml");
/** Node represents a webpage that can be 'rendered' into HTML. */
export type Node =
| number
| string // Escape HTML
| Node[] // Concat
| Element // Render
| DirectHtml // Insert
| Promise<Node> // Await
// Ignore
| undefined
| null
| boolean;
export type Element = [
tag: typeof kElement,
type: string | Component,
props: Record<string, unknown>,
_?: "",
source?: SrcLoc,
];
export type DirectHtml = [tag: typeof kDirectHtml, html: ResolvedNode];
/**
* Components must return a value; 'undefined' is prohibited here
* to avoid functions that are missing a return statement.
*/
export type Component = (
props: Record<any, any>,
) => Exclude<Node, undefined>;
/** Emitted by JSX runtime */
export interface SrcLoc {
fileName: string;
lineNumber: number;
columnNumber: number;
}
/**
* Resolution narrows the type 'Node' into 'ResolvedNode'. Async trees are
* marked in the 'Render'. This operation performs everything besides the final
* string concatenation. This function is agnostic across async/sync modes.
*/
export function resolveNode(r: Render, node: unknown): ResolvedNode {
if (!node && node !== 0) return ""; // falsy, non numeric
if (typeof node !== "object") {
if (node === true) return ""; // booleans are ignored
if (typeof node === "string") return escapeHtml(node);
if (typeof node === "number") return String(node); // no escaping ever
throw new Error(`Cannot render ${inspect(node)} to HTML`);
}
if (node instanceof Promise) {
if (r.async === -1) {
throw new Error(`Asynchronous rendering is not supported here.`);
}
const placeholder: InsertionPoint = [null];
r.async += 1;
node
.then((result) => void (placeholder[0] = resolveNode(r, result)))
// Intentionally catching errors in `resolveNode`
.catch((e) => (r.rejections ??= []).push(e))
.finally(() => {
if (--r.async == 0) {
if (r.asyncDone == null) throw new Error("r.asyncDone == null");
r.asyncDone();
r.asyncDone = null;
}
});
// This lie is checked with an assertion in `renderNode`
return placeholder as [ResolvedNode];
}
if (!Array.isArray(node)) {
throw new Error(`Invalid node type: ${inspect(node)}`);
}
const type = node[0];
if (type === kElement) {
const { 1: tag, 2: props } = node;
if (typeof tag === "function") {
currentRender = r;
try {
return resolveNode(r, tag(props));
} catch (e) {
const { 4: src } = node;
if (e && typeof e === "object") (e as { src?: string }).src = src;
throw e;
} finally {
currentRender = null;
}
}
if (typeof tag !== "string") throw new Error("Unexpected " + inspect(type));
const children = props?.children;
if (children) return [kElement, tag, props, resolveNode(r, children)];
return node;
}
if (type === kDirectHtml) return node[1];
return node.map((elem) => resolveNode(r, elem));
}
export type ResolvedNode =
| ResolvedNode[] // Concat
| ResolvedElement // Render
| string; // Direct HTML
export type ResolvedElement = [
tag: typeof kElement,
type: string,
props: Record<string, unknown>,
children: ResolvedNode,
];
/**
* Async rendering is done by creating an array of one item,
* which is already a valid 'Node', but the element is written
* once the data is available. The 'Render' contains a count
* of how many async jobs are left.
*/
export type InsertionPoint = [null | ResolvedNode];
/**
* Convert 'ResolvedNode' into HTML text. This operation happens after all
* async work is settled. The HTML is emitted as concisely as possible.
*/
export function renderNode(node: ResolvedNode): string {
if (typeof node === "string") return node;
ASSERT(node, "Unresolved Render Node");
const type = node[0];
if (type === kElement) {
return renderElement(node as ResolvedElement);
}
node = node as ResolvedNode[]; // TS cannot infer.
let out = type ? renderNode(type) : "";
let len = node.length;
for (let i = 1; i < len; i++) {
const elem = node[i];
if (elem) out += renderNode(elem);
}
return out;
}
function renderElement(element: ResolvedElement) {
const { 1: tag, 2: props, 3: children } = element;
let out = "<" + tag;
let needSpace = true;
for (const prop in props) {
const value = props[prop];
if (!value || typeof value === "function") continue;
let attr;
switch (prop) {
default:
attr = `${prop}=${quoteIfNeeded(escapeHtml(String(value)))}`;
break;
case "className":
// Legacy React Compat
case "class":
attr = `class=${quoteIfNeeded(escapeHtml(clsx(value as ClsxInput)))}`;
break;
case "htmlFor":
throw new Error("Do not use the `htmlFor` attribute. Use `for`");
// Do not process these
case "children":
case "ref":
case "dangerouslySetInnerHTML":
case "key":
continue;
}
if (needSpace) out += " ", needSpace = !attr.endsWith('"');
out += attr;
}
out += ">";
if (children) out += renderNode(children);
if (
tag !== "br" && tag !== "img" && tag !== "input" && tag !== "meta" &&
tag !== "link" && tag !== "hr"
) {
out += `</${tag}>`;
}
return out;
}
export function renderStyleAttribute(style: Record<string, string>) {
let out = ``;
for (const styleName in style) {
if (out) out += ";";
out += `${styleName.replace(/[A-Z]/g, "-$&").toLowerCase()}:${
escapeHtml(String(style[styleName]))
}`;
}
return "style=" + quoteIfNeeded(out);
}
export function quoteIfNeeded(text: string) {
if (text.includes(" ")) return '"' + text + '"';
return text;
}
// -- utility functions --
export function initRender(allowAsync: boolean, addon: Addons): Render {
return {
async: allowAsync ? 0 : -1,
rejections: null,
asyncDone: null,
addon,
};
}
let currentRender: Render | null = null;
export function getCurrentRender() {
if (!currentRender) throw new Error("No Render Active");
return currentRender;
}
export function setCurrentRender(r?: Render | null) {
currentRender = r ?? null;
}
export function getUserData<T>(namespace: PropertyKey, def: () => T): T {
return (getCurrentRender().addon[namespace] ??= def()) as T;
}
export function inspect(object: unknown) {
try {
return require("node:util").inspect(object);
} catch {
return typeof object;
}
}
export type ClsxInput = string | Record<string, boolean | null> | ClsxInput[];
export function clsx(mix: ClsxInput) {
var k, y, str = "";
if (typeof mix === "string") {
return mix;
} else if (typeof mix === "object") {
if (Array.isArray(mix)) {
for (k = 0; k < mix.length; k++) {
if (mix[k] && (y = clsx(mix[k]))) {
str && (str += " ");
str += y;
}
}
} else {
for (k in mix) {
if (mix[k]) {
str && (str += " ");
str += k;
}
}
}
}
return str;
}
export const escapeHtml = (unsafeText: string) =>
String(unsafeText)
.replace(/&/g, "&amp;").replace(/</g, "&lt;").replace(/>/g, "&gt;")
.replace(/"/g, "&quot;").replace(/'/g, "&#x27;").replace(/`/g, "&#x60;");

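Illustrative sketch, not part of the diff, of the add-on mechanism described in the header comment: a component stashes data on the active render via getUserData, and the caller reads it back from the returned addon. The symbol, component, and script name are hypothetical; the JSX runtime above would normally build the element tuple.

import * as engine from "./ssr.ts";

const kScripts = Symbol("Scripts");

function NeedsScript() {
  // Stored on the current Render; no engine patches required.
  const scripts = engine.getUserData<Set<string>>(kScripts, () => new Set());
  scripts.add("canvas.client.ts");
  return engine.html("<canvas></canvas>");
}

const { text, addon } = engine.ssrSync(
  [engine.kElement, NeedsScript, {}] as engine.Element,
  { [kScripts]: new Set<string>() },
);
// text === "<canvas></canvas>"; addon[kScripts] now contains "canvas.client.ts"
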
View file

@ -0,0 +1,40 @@
import { test } from "node:test";
import { renderStreaming, Suspense } from "./suspense.ts";
test("sanity", async (t) => {
let resolve: () => void = null!;
// @ts-expect-error
async function AsyncComponent() {
await new Promise<void>((done) => resolve = done);
return <button>wow!</button>;
}
const example = (
<main>
<h1>app shell</h1>
<Suspense fallback="loading...">
<AsyncComponent />
</Suspense>
<footer>(c) 2025</footer>
</main>
);
const iterator = renderStreaming(example);
const assertContinue = (actual: unknown, value: unknown) =>
t.assert.deepEqual(actual, { done: false, value });
assertContinue(
await iterator.next(),
"<template shadowrootmode=open><main><h1>app shell</h1><slot name=suspended_1>loading...</slot><footer>(c) 2025</footer></main></template>",
);
t.assert.ok(resolve !== null), resolve();
assertContinue(
await iterator.next(),
"<button slot=suspended_1>wow!</button>",
);
t.assert.deepEqual(
await iterator.next(),
{ done: true, value: {} },
);
});

View file

@ -0,0 +1,102 @@
// This file implements out-of-order HTML streaming, mimicking the React
// Suspense API. To use, place Suspense around an expensive async component
// and render the page with 'renderStreaming'.
//
// Implementation of this article:
// https://lamplightdev.com/blog/2024/01/10/streaming-html-out-of-order-without-javascript/
//
// I would link to an article from Next.js or React, but their examples
// are too verbose and not informative about what they actually do.
const kState = Symbol("SuspenseState");
interface SuspenseProps {
children: ssr.Node;
fallback?: ssr.Node;
}
interface State {
nested: boolean;
nextId: number;
completed: number;
pushChunk(name: string, node: ssr.ResolvedNode): void;
}
export function Suspense({ children, fallback }: SuspenseProps): ssr.Node {
const state = ssr.getUserData<State>(kState, () => {
throw new Error("Can only use <Suspense> with 'renderStreaming'");
});
if (state.nested) throw new Error("<Suspense> cannot be nested");
const parent = ssr.getCurrentRender()!;
const r = ssr.initRender(true, { [kState]: { nested: true } });
const resolved = ssr.resolveNode(r, children);
if (r.async == 0) return ssr.html(resolved);
const name = "suspended_" + (++state.nextId);
state.nested = true;
const ip: [ssr.ResolvedNode] = [
[
ssr.kElement,
"slot",
{ name },
fallback ? ssr.resolveNode(parent, fallback) : "",
],
];
state.nested = false;
r.asyncDone = () => {
const rejections = r.rejections;
if (rejections && rejections.length > 0) throw new Error("TODO");
state.pushChunk?.(name, ip[0] = resolved);
};
return ssr.html(ip);
}
// TODO: add a User-Agent parameter, which is used to determine if a
// fallback path must be used.
// - Browsers from before ~2024 need to use a JS implementation.
// - IE should probably bail out entirely.
export async function* renderStreaming<
T extends ssr.Addons = Record<never, unknown>,
>(
node: ssr.Node,
addon: T = {} as T,
) {
const {
text: begin,
addon: { [kState]: state, ...addonOutput },
} = await ssr.ssrAsync(node, {
...addon,
[kState]: {
nested: false,
nextId: 0,
completed: 0,
pushChunk: () => {},
} satisfies State as State,
});
if (state.nextId === 0) {
yield begin;
return addonOutput as unknown as T;
}
let resolve: (() => void) | null = null;
let chunks: string[] = [];
state.pushChunk = (slot, node) => {
while (node.length === 1 && Array.isArray(node)) node = node[0];
if (node[0] === ssr.kElement) {
(node as ssr.ResolvedElement)[2].slot = slot;
} else {
node = [ssr.kElement, "clover-suspense", {
style: "display:contents",
slot,
}, node];
}
chunks.push(ssr.renderNode(node));
resolve?.();
};
yield `<template shadowrootmode=open>${begin}</template>`;
do {
await new Promise<void>((done) => resolve = done);
yield* chunks;
chunks = [];
} while (state.nextId < state.completed);
return addonOutput as unknown as T;
}
import * as ssr from "./ssr.ts";

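Illustrative sketch, not part of the diff: streaming a page like the one in suspense.test.tsx over plain node:http. Assumes the same TSX setup as that test (jsxImportSource "#ssr"); the component, delay, and port are placeholders.

import * as http from "node:http";
import { renderStreaming, Suspense } from "./suspense.ts";

// @ts-expect-error -- async components settle during rendering (see ssr.test.tsx)
async function Slow() {
  await new Promise((done) => setTimeout(done, 100));
  return <p>loaded</p>;
}

http.createServer(async (_req, res) => {
  res.writeHead(200, { "content-type": "text/html; charset=utf-8" });
  const page = (
    <main>
      <h1>app shell</h1>
      <Suspense fallback="loading...">
        <Slow />
      </Suspense>
    </main>
  );
  // First chunk: the shell with the fallback slotted in. Later chunks: each
  // suspended subtree, streamed out of order as soon as it settles.
  for await (const chunk of renderStreaming(page)) res.write(chunk);
  res.end();
}).listen(3000);
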
View file

@ -0,0 +1,79 @@
export function virtualFiles(
map: Record<string, string | esbuild.OnLoadResult>,
) {
return {
name: "clover vfs",
setup(b) {
b.onResolve(
{
filter: new RegExp(
`^(?:${
Object.keys(map).map((file) => string.escapeRegExp(file)).join(
"|",
)
})\$`,
),
},
({ path }) => ({ path, namespace: "vfs" }),
);
b.onLoad(
{ filter: /./, namespace: "vfs" },
({ path }) => {
const entry = map[path];
return ({
resolveDir: ".",
loader: "ts",
...typeof entry === "string" ? { contents: entry } : entry,
});
},
);
},
} satisfies esbuild.Plugin;
}
export function banFiles(
files: string[],
) {
return {
name: "clover vfs",
setup(b) {
b.onResolve(
{
filter: new RegExp(
`^(?:${
files.map((file) => string.escapeRegExp(file)).join("|")
})\$`,
),
},
({ path, importer }) => {
throw new Error(
`Loading ${path} (from ${importer}) is banned!`,
);
},
);
},
} satisfies esbuild.Plugin;
}
export function projectRelativeResolution(root = process.cwd() + "/src") {
return {
name: "project relative resolution ('@/' prefix)",
setup(b) {
b.onResolve({ filter: /^@\// }, ({ path: id }) => {
return {
path: path.resolve(root, id.slice(2)),
};
});
b.onResolve({ filter: /^#/ }, ({ path: id, importer }) => {
return {
path: hot.resolveFrom(importer, id),
};
});
},
} satisfies esbuild.Plugin;
}
import * as esbuild from "esbuild";
import * as string from "#sitegen/string";
import * as path from "node:path";
import * as hot from "./hot.ts";

View file

@ -1,61 +0,0 @@
// File System APIs. Some custom APIs, but mostly a re-export of a mix of built-in
// Node.js sync+promise fs methods, for convenience.
export {
existsSync,
readdir,
readdirSync,
readFile,
readFileSync,
rm,
rmSync,
stat,
statSync,
writeFile,
writeFileSync,
};
export function mkdir(dir: string) {
return nodeMkdir(dir, { recursive: true });
}
export function mkdirSync(dir: string) {
return nodeMkdirSync(dir, { recursive: true });
}
export async function writeMkdir(file: string, contents: Buffer | string) {
await mkdir(path.dirname(file));
return writeFile(file, contents);
}
export function writeMkdirSync(file: string, contents: Buffer | string) {
mkdirSync(path.dirname(file));
return writeFileSync(file, contents);
}
export function readDirRecOptionalSync(dir: string) {
try {
return readdirSync(dir, { withFileTypes: true });
} catch (err: any) {
if (err.code === "ENOENT") return [];
throw err;
}
}
import * as path from "node:path";
import {
existsSync,
mkdirSync as nodeMkdirSync,
readdirSync,
readFileSync,
rmSync,
statSync,
writeFileSync,
} from "node:fs";
import {
mkdir as nodeMkdir,
readdir,
readFile,
rm,
stat,
writeFile,
} from "node:fs/promises";

456
framework/generate.ts Normal file
View file

@ -0,0 +1,456 @@
// This file contains the main site generator build process.
// By using `Incremental`'s ability to automatically purge stale
// assets, the `sitegen` function performs partial rebuilds.
export function main() {
return withSpinner<Record<string, unknown>, any>({
text: "Recovering State",
successText,
failureText: () => "sitegen FAIL",
}, async (spinner) => {
// const incr = Incremental.fromDisk();
// await incr.statAllFiles();
const incr = new Incremental();
const result = await sitegen(spinner, incr);
incr.toDisk(); // Allows picking up this state again
return result;
}) as ReturnType<typeof sitegen>;
}
export function successText({
elapsed,
inserted,
referenced,
unreferenced,
}: Awaited<ReturnType<typeof sitegen>>) {
const s = (array: unknown[]) => array.length === 1 ? "" : "s";
const kind = inserted.length === referenced.length ? "build" : "update";
const status = inserted.length > 0
? `${kind} ${inserted.length} key${s(inserted)}`
: unreferenced.length > 0
? `pruned ${unreferenced.length} key${s(unreferenced)}`
: `checked ${referenced.length} key${s(referenced)}`;
return `sitegen! ${status} in ${elapsed.toFixed(1)}s`;
}
export async function sitegen(
status: Spinner,
incr: Incremental,
) {
const startTime = performance.now();
let root = path.resolve(import.meta.dirname, "../src");
const join = (...sub: string[]) => path.join(root, ...sub);
// Sitegen reviews every defined section for resources to process
const sections: sg.Section[] =
require(path.join(root, "site.ts")).siteSections;
// Static files are compressed and served as-is.
// - "{section}/static/*.png"
let staticFiles: FileItem[] = [];
// Pages are rendered then served as static files.
// - "{section}/pages/*.marko"
let pages: FileItem[] = [];
// Views are dynamically rendered pages called via backend code.
// - "{section}/views/*.tsx"
let views: FileItem[] = [];
// Public scripts are bundled for the client as static assets under "/js/[...]"
// This is used for the file viewer's canvases.
// Note that '.client.ts' can be placed anywhere in the file structure.
// - "{section}/scripts/*.client.ts"
let scripts: FileItem[] = [];
// -- Scan for files --
status.text = "Scanning Project";
for (const section of sections) {
const { root: sectionRoot } = section;
const sectionPath = (...sub: string[]) => path.join(sectionRoot, ...sub);
const rootPrefix = root === sectionRoot
? ""
: path.relative(root, sectionRoot) + "/";
const kinds = [
{
dir: sectionPath("pages"),
list: pages,
prefix: "/",
include: [".tsx", ".mdx", ".marko"],
exclude: [".client.ts", ".client.tsx"],
},
{
dir: sectionPath("static"),
list: staticFiles,
prefix: "/",
ext: true,
},
{
dir: sectionPath("scripts"),
list: scripts,
prefix: rootPrefix,
include: [".client.ts", ".client.tsx"],
},
{
dir: sectionPath("views"),
list: views,
prefix: rootPrefix,
include: [".tsx", ".mdx", ".marko"],
exclude: [".client.ts", ".client.tsx"],
},
];
for (
const { dir, list, prefix, include = [""], exclude = [], ext = false }
of kinds
) {
const items = fs.readDirRecOptionalSync(dir);
for (const subPath of items) {
const file = path.join(dir, subPath);
const stat = fs.statSync(file);
if (stat.isDirectory()) continue;
if (!include.some((e) => subPath.endsWith(e))) continue;
if (exclude.some((e) => subPath.endsWith(e))) continue;
const trim = ext
? subPath
: subPath.slice(0, -path.extname(subPath).length).replaceAll(
".",
"/",
);
let id = prefix + trim.replaceAll("\\", "/");
if (prefix === "/" && id.endsWith("/index")) {
id = id.slice(0, -"/index".length) || "/";
}
list.push({ id, file: file });
}
}
}
const globalCssPath = join("global.css");
// TODO: make sure that `static` and `pages` do not overlap
// -- inline style sheets, used and shared by pages and views --
status.text = "Building";
const cssOnce = new OnceMap();
const cssQueue = new Queue({
name: "Bundle",
async fn([, key, files, theme]: [string, string, string[], css.Theme]) {
const { text, sources } = await css.bundleCssFiles(files, theme);
incr.put({
kind: "style",
key,
sources,
value: text,
});
},
passive: true,
getItemText: ([id]) => id,
maxJobs: 2,
});
function ensureCssGetsBuilt(
cssImports: string[],
theme: css.Theme,
referrer: string,
) {
const key = css.styleKey(cssImports, theme);
cssOnce.get(
key,
async () => {
incr.getArtifact("style", key) ??
await cssQueue.add([referrer, key, cssImports, theme]);
},
);
}
// -- server side render pages --
async function loadPageModule({ file }: FileItem) {
require(file);
}
async function renderPage(item: FileItem) {
// -- load and validate module --
let {
default: Page,
meta: metadata,
theme: pageTheme,
layout,
} = require(item.file);
if (!Page) {
throw new Error("Page is missing a 'default' export.");
}
if (!metadata) {
throw new Error("Page is missing 'meta' export with a title.");
}
// -- css --
if (layout?.theme) pageTheme = layout.theme;
const theme: css.Theme = {
...css.defaultTheme,
...pageTheme,
};
const cssImports = Array.from(
new Set([globalCssPath, ...hot.getCssImports(item.file)]),
(file) => path.relative(hot.projectSrc, file),
);
ensureCssGetsBuilt(cssImports, theme, item.id);
// -- metadata --
const renderedMetaPromise = Promise.resolve(
typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
).then((m) => meta.renderMeta(m));
// -- html --
let page = [engine.kElement, Page, {}];
if (layout?.default) {
page = [engine.kElement, layout.default, { children: page }];
}
const bodyPromise = engine.ssrAsync(page, {
sitegen: sg.initRender(),
});
const [{ text, addon }, renderedMeta] = await Promise.all([
bodyPromise,
renderedMetaPromise,
]);
if (!renderedMeta.includes("<title>")) {
throw new Error(
"Page is missing 'meta.title'. " +
"All pages need a title tag.",
);
}
incr.put({
kind: "pageMetadata",
key: item.id,
// Incremental integrates with `hot.ts` + `require`
// to trace all the needed source files here.
sources: [item.file],
value: {
html: text,
meta: renderedMeta,
cssImports,
theme: theme ?? null,
clientRefs: Array.from(addon.sitegen.scripts),
},
});
}
async function prepareView(item: FileItem) {
const module = require(item.file);
if (!module.meta) {
throw new Error(`${item.file} is missing 'export const meta'`);
}
if (!module.default) {
throw new Error(`${item.file} is missing a default export.`);
}
const pageTheme = module.layout?.theme ?? module.theme;
const theme: css.Theme = {
...css.defaultTheme,
...pageTheme,
};
const cssImports = Array.from(
new Set([globalCssPath, ...hot.getCssImports(item.file)]),
(file) => path.relative(hot.projectSrc, file),
);
ensureCssGetsBuilt(cssImports, theme, item.id);
incr.put({
kind: "viewMetadata",
key: item.id,
sources: [item.file],
value: {
file: path.relative(hot.projectRoot, item.file),
cssImports,
theme,
clientRefs: hot.getClientScriptRefs(item.file),
hasLayout: !!module.layout?.default,
},
});
}
// For pages that are already built, 'ensureCssGetsBuilt' must still be called
// so their CSS is (1) re-built if needed and (2) not pruned from the build.
const neededPages = pages.filter((page) => {
const existing = incr.getArtifact("pageMetadata", page.id);
if (existing) {
const { cssImports, theme } = existing;
ensureCssGetsBuilt(cssImports, theme, page.id);
}
return !existing;
});
const neededViews = views.filter((view) => {
const existing = incr.getArtifact("viewMetadata", view.id);
if (existing) {
const { cssImports, theme } = existing;
ensureCssGetsBuilt(cssImports, theme, view.id);
}
return !existing;
});
// Load the marko cache before render modules are loaded
incr.loadMarkoCache();
// This is done in two passes so that a page that throws during evaluation
// will report "Load Render Module" instead of "Render Static Page".
const spinnerFormat = status.format;
status.format = () => "";
const moduleLoadQueue = new Queue({
name: "Load Render Module",
fn: loadPageModule,
getItemText,
maxJobs: 1,
});
moduleLoadQueue.addMany(neededPages);
moduleLoadQueue.addMany(neededViews);
await moduleLoadQueue.done({ method: "stop" });
const pageQueue = new Queue({
name: "Render Static Page",
fn: renderPage,
getItemText,
maxJobs: 2,
});
pageQueue.addMany(neededPages);
const viewQueue = new Queue({
name: "Build Dynamic View",
fn: prepareView,
getItemText,
maxJobs: 2,
});
viewQueue.addMany(neededViews);
const pageAndViews = [
pageQueue.done({ method: "stop" }),
viewQueue.done({ method: "stop" }),
];
await Promise.allSettled(pageAndViews);
await Promise.all(pageAndViews);
status.format = spinnerFormat;
// -- bundle server javascript (backend and views) --
status.text = "Bundle JavaScript";
incr.snapshotMarkoCache();
const serverJavaScriptPromise = bundle.bundleServerJavaScript(incr, "node");
// -- bundle client javascript --
const referencedScripts = Array.from(
new Set(
[
...pages.map((item) =>
UNWRAP(
incr.getArtifact("pageMetadata", item.id),
`Missing pageMetadata ${item.id}`,
)
),
...views.map((item) =>
UNWRAP(
incr.getArtifact("viewMetadata", item.id),
`Missing viewMetadata ${item.id}`,
)
),
].flatMap((item) => item.clientRefs),
),
(script) => path.resolve(hot.projectSrc, script),
).filter((file) => !incr.hasArtifact("script", hot.getScriptId(file)));
const extraPublicScripts = scripts.map((entry) => entry.file);
const clientJavaScriptPromise = bundle.bundleClientJavaScript(
referencedScripts,
extraPublicScripts,
incr,
);
await Promise.all([
serverJavaScriptPromise,
clientJavaScriptPromise,
cssQueue.done({ method: "stop" }),
]);
await bundle.finalizeServerJavaScript(incr, "node");
// -- copy/compress static files --
async function doStaticFile(item: FileItem) {
const body = await fs.readFile(item.file);
await incr.putAsset({
sources: [item.file],
key: item.id,
body,
});
}
const staticQueue = new Queue({
name: "Load Static",
fn: doStaticFile,
getItemText,
maxJobs: 16,
});
status.format = () => "";
staticQueue.addMany(
staticFiles.filter((file) => !incr.hasArtifact("asset", file.id)),
);
await staticQueue.done({ method: "stop" });
status.format = spinnerFormat;
// -- concatenate static rendered pages --
status.text = `Concat Pages`;
await Promise.all(pages.map(async (page) => {
if (incr.hasArtifact("asset", page.id)) return;
const {
html,
meta,
cssImports,
theme,
clientRefs,
} = UNWRAP(incr.out.pageMetadata.get(page.id));
const scriptIds = clientRefs.map(hot.getScriptId);
const styleKey = css.styleKey(cssImports, theme);
const style = UNWRAP(
incr.out.style.get(styleKey),
`Missing style ${styleKey}`,
);
const doc = wrapDocument({
body: html,
head: meta,
inlineCss: style,
scripts: scriptIds.map(
(ref) => UNWRAP(incr.out.script.get(ref), `Missing script ${ref}`),
).map((x) => `{${x}}`).join("\n"),
});
await incr.putAsset({
sources: [
page.file,
...incr.sourcesFor("style", styleKey),
...scriptIds.flatMap((ref) => incr.sourcesFor("script", ref)),
],
key: page.id,
body: doc,
headers: {
"Content-Type": "text/html",
},
});
}));
status.format = () => "";
status.text = ``;
// This will wait for all compression jobs to finish, which up
// to this point have been left as dangling promises.
await incr.wait();
const { inserted, referenced, unreferenced } = incr.shake();
// Flush the site to disk.
status.format = spinnerFormat;
status.text = `Incremental Flush`;
await incr.flush("node"); // Write outputs
return {
incr,
inserted,
referenced,
unreferenced,
elapsed: (performance.now() - startTime) / 1000,
};
}
function getItemText({ file }: FileItem) {
return path.relative(hot.projectSrc, file).replaceAll("\\", "/");
}
import { OnceMap, Queue } from "#sitegen/async";
import { Incremental } from "./incremental.ts";
import * as bundle from "./bundle.ts";
import * as css from "./css.ts";
import * as engine from "./engine/ssr.ts";
import * as hot from "./hot.ts";
import * as fs from "#sitegen/fs";
import * as sg from "#sitegen";
import type { FileItem } from "#sitegen";
import * as path from "node:path";
import * as meta from "#sitegen/meta";
import { Spinner, withSpinner } from "@paperclover/console/Spinner";
import { wrapDocument } from "./lib/view.ts";

View file

@ -34,18 +34,11 @@ export interface FileStat {
cssImportsRecursive: string[] | null;
lastModified: number;
imports: string[];
/* Used by 'incremental.ts' */
srcIds: string[];
}
let fsGraph = new Map<string, FileStat>();
export function setFsGraph(g: Map<string, FileStat>) {
if (fsGraph.size > 0) {
throw new Error("Cannot restore fsGraph when it has been written into");
}
fsGraph = g;
}
export function getFsGraph() {
return fsGraph;
const fileStats = new Map<string, FileStat>();
export function getFileStat(filepath: string) {
return fileStats.get(path.resolve(filepath));
}
function shouldTrackPath(filename: string) {
@ -60,14 +53,6 @@ Module.prototype._compile = function (
filename: string,
format: "module" | "commonjs",
) {
fs.writeMkdirSync(
".clover/debug-transpilation/" +
path.relative(projectRoot, filename).replaceAll("\\", "/").replaceAll(
"../",
"_/",
).replaceAll("/", "."),
content,
);
const result = ModulePrototypeUnderscoreCompile.call(
this,
content,
@ -78,25 +63,26 @@ Module.prototype._compile = function (
if (shouldTrackPath(filename)) {
const cssImportsMaybe: string[] = [];
const imports: string[] = [];
for (const { filename: file } of this.children) {
const relative = path.relative(projectRoot, file);
if (file.endsWith(".css")) cssImportsMaybe.push(relative);
for (const { filename: file, cloverClientRefs } of this.children) {
if (file.endsWith(".css")) cssImportsMaybe.push(file);
else {
const child = fsGraph.get(relative);
const child = fileStats.get(file);
if (!child) continue;
const { cssImportsRecursive } = child;
if (cssImportsRecursive) cssImportsMaybe.push(...cssImportsRecursive);
imports.push(relative);
imports.push(file);
if (cloverClientRefs && cloverClientRefs.length > 0) {
(this.cloverClientRefs ??= [])
.push(...cloverClientRefs);
}
}
}
const relative = path.relative(projectRoot, filename);
fsGraph.set(relative, {
fileStats.set(filename, {
cssImportsRecursive: cssImportsMaybe.length > 0
? Array.from(new Set(cssImportsMaybe))
: null,
imports,
lastModified: stat.mtimeMs,
srcIds: [],
});
}
return result;
@ -110,7 +96,9 @@ Module._resolveFilename = (...args) => {
try {
return require.resolve(replacedPath, { paths: [projectSrc] });
} catch (err: any) {
if (err.code === "MODULE_NOT_FOUND" && err.requireStack.length <= 1) {
if (
err.code === "MODULE_NOT_FOUND" && (err?.requireStack?.length ?? 0) <= 1
) {
err.message.replace(replacedPath, args[0]);
}
}
@ -119,26 +107,39 @@ Module._resolveFilename = (...args) => {
};
function loadEsbuild(module: NodeJS.Module, filepath: string) {
let src = fs.readFileSync(filepath, "utf8");
return loadEsbuildCode(module, filepath, src);
return loadEsbuildCode(module, filepath, fs.readFileSync(filepath, "utf8"));
}
function loadEsbuildCode(module: NodeJS.Module, filepath: string, src: string) {
interface LoadOptions {
scannedClientRefs?: string[];
}
function loadEsbuildCode(
module: NodeJS.Module,
filepath: string,
src: string,
opt: LoadOptions = {},
) {
if (filepath === import.meta.filename) {
module.exports = self;
return;
}
let loader: any = "tsx";
if (filepath.endsWith(".ts")) loader = "ts";
else if (filepath.endsWith(".jsx")) loader = "jsx";
else if (filepath.endsWith(".js")) loader = "js";
if (opt.scannedClientRefs) {
module.cloverClientRefs = opt.scannedClientRefs;
} else {
let { code, refs } = resolveClientRefs(src, filepath);
module.cloverClientRefs = refs;
src = code;
}
if (src.includes("import.meta")) {
src = `
import.meta.url = ${JSON.stringify(pathToFileURL(filepath).toString())};
import.meta.dirname = ${JSON.stringify(path.dirname(filepath))};
import.meta.filename = ${JSON.stringify(filepath)};
` + src;
`.trim().replace(/\n/g, "") + src;
}
src = esbuild.transformSync(src, {
loader,
@ -146,25 +147,61 @@ function loadEsbuildCode(module: NodeJS.Module, filepath: string, src: string) {
target: "esnext",
jsx: "automatic",
jsxImportSource: "#ssr",
jsxDev: true,
sourcefile: filepath,
}).code;
return module._compile(src, filepath, "commonjs");
}
function resolveClientRef(sourcePath: string, ref: string) {
const filePath = resolveFrom(sourcePath, ref);
if (
!filePath.endsWith(".client.ts") &&
!filePath.endsWith(".client.tsx")
) {
throw new Error("addScript must be a .client.ts or .client.tsx");
}
return path.relative(projectSrc, filePath);
}
// TODO: extract the marko compilation tools out, lazy load them
export interface MarkoCacheEntry {
src: string;
scannedClientRefs: string[];
}
export const markoCache = new Map<string, MarkoCacheEntry>();
function loadMarko(module: NodeJS.Module, filepath: string) {
let src = fs.readFileSync(filepath, "utf8");
// A non-standard thing here: Clover Sitegen implements its own
// client-side scripting, so it rewrites bare 'client import'
// statements into its own mechanism.
if (src.match(/^\s*client\s+import\s+["']/m)) {
src = src.replace(
/^\s*client\s+import\s+("[^"]+|'[^']+)[^\n]+/m,
"<CloverScriptInclude src=$1 />",
) + '\nimport { Script as CloverScriptInclude } from "#sitegen";';
let cache = markoCache.get(filepath);
if (!cache) {
let src = fs.readFileSync(filepath, "utf8");
// A non-standard thing here: Clover Sitegen implements its own
// client-side scripting, so it rewrites bare 'client import'
// statements into its own mechanism.
const scannedClientRefs = new Set<string>();
if (src.match(/^\s*client\s+import\s+["']/m)) {
src = src.replace(
/^\s*client\s+import\s+("[^"]+"|'[^']+')[^\n]+/m,
(_, src) => {
const ref = JSON.parse(`"${src.slice(1, -1)}"`);
const resolved = resolveClientRef(filepath, ref);
scannedClientRefs.add(resolved);
return `<CloverScriptInclude=${
JSON.stringify(getScriptId(resolved))
} />`;
},
) + '\nimport { addScript as CloverScriptInclude } from "#sitegen";\n';
}
src = marko.compileSync(src, filepath).code;
src = src.replace("marko/debug/html", "#ssr/marko");
cache = { src, scannedClientRefs: Array.from(scannedClientRefs) };
markoCache.set(filepath, cache);
}
src = marko.compileSync(filepath, {}).code;
src = src.replace("marko/debug/html", "#ssr/marko");
return loadEsbuildCode(module, filepath, src);
const { src, scannedClientRefs } = cache;
return loadEsbuildCode(module, filepath, src, {
scannedClientRefs,
});
}
function loadMdx(module: NodeJS.Module, filepath: string) {
@ -174,19 +211,26 @@ function loadMdx(module: NodeJS.Module, filepath: string) {
return loadEsbuildCode(module, filepath, src);
}
function loadCss(module: NodeJS.Module, filepath: string) {
function loadCss(module: NodeJS.Module, _filepath: string) {
module.exports = {};
}
export function reloadRecursive(filepath: string) {
filepath = path.resolve(filepath);
const existing = cache[filepath];
if (existing) deleteRecursive(filepath, existing);
fsGraph.clear();
if (existing) deleteRecursiveInner(filepath, existing);
fileStats.clear();
return require(filepath);
}
function deleteRecursive(id: string, module: any) {
export function unload(filepath: string) {
filepath = path.resolve(filepath);
const existing = cache[filepath];
if (existing) delete cache[filepath];
fileStats.delete(filepath);
}
function deleteRecursiveInner(id: string, module: any) {
if (id.includes(path.sep + "node_modules" + path.sep)) {
return;
}
@ -194,15 +238,31 @@ function deleteRecursive(id: string, module: any) {
for (const child of module.children) {
if (child.filename.includes("/engine/")) return;
const existing = cache[child.filename];
if (existing === child) deleteRecursive(child.filename, existing);
if (existing === child) deleteRecursiveInner(child.filename, existing);
}
}
export function getCssImports(filepath: string) {
filepath = path.resolve(filepath);
if (!require.cache[filepath]) throw new Error(filepath + " was never loaded");
return fsGraph.get(path.relative(projectRoot, filepath))
?.cssImportsRecursive ?? [];
return fileStats.get(filepath)?.cssImportsRecursive ?? [];
}
export function getClientScriptRefs(filepath: string) {
filepath = path.resolve(filepath);
const module = require.cache[filepath];
if (!module) throw new Error(filepath + " was never loaded");
return module.cloverClientRefs ?? [];
}
export function getSourceCode(filepath: string) {
filepath = path.resolve(filepath);
const module = require.cache[filepath];
if (!module) throw new Error(filepath + " was never loaded");
if (!module.cloverSourceCode) {
throw new Error(filepath + " did not record source code");
}
return module.cloverSourceCode;
}
export function resolveFrom(src: string, dest: string) {
@ -216,9 +276,57 @@ export function resolveFrom(src: string, dest: string) {
}
}
const importRegExp =
/import\s+(\*\sas\s([a-zA-Z0-9$_]+)|{[^}]+})\s+from\s+(?:"#sitegen"|'#sitegen')/s;
const getSitegenAddScriptRegExp = /addScript(?:\s+as\s+([a-zA-Z0-9$_]+))?/;
interface ResolvedClientRefs {
code: string;
refs: string[];
}
export function resolveClientRefs(
code: string,
filepath: string,
): ResolvedClientRefs {
// This match finds a call to 'import ... from "#sitegen"'
const importMatch = code.match(importRegExp);
if (!importMatch) return { code, refs: [] };
const items = importMatch[1];
let identifier = "";
if (items.startsWith("{")) {
const clauseMatch = items.match(getSitegenAddScriptRegExp);
if (!clauseMatch) return { code, refs: [] }; // did not import
identifier = clauseMatch[1] || "addScript";
} else if (items.startsWith("*")) {
identifier = importMatch[2] + "\\s*\\.\\s*addScript";
} else {
throw new Error("Impossible");
}
identifier = identifier.replaceAll("$", "\\$"); // only needed escape
const findCallsRegExp = new RegExp(
`\\b(${identifier})\\s*\\(("[^"]+"|'[^']+')\\)`,
"gs",
);
const scannedClientRefs = new Set<string>();
code = code.replace(findCallsRegExp, (_, call, arg) => {
const ref = JSON.parse(`"${arg.slice(1, -1)}"`);
const resolved = resolveClientRef(filepath, ref);
scannedClientRefs.add(resolved);
return `${call}(${JSON.stringify(getScriptId(resolved))})`;
});
return { code, refs: Array.from(scannedClientRefs) };
}
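// Rough before/after sketch (hypothetical page living in 'src/pages/'):
//
//   import { addScript } from "#sitegen";
//   addScript("./counter.client.tsx");
//
// is rewritten to
//
//   addScript("pages/counter.client.tsx");
//
// and "pages/counter.client.tsx" is returned in 'refs' for the bundler.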
export function getScriptId(file: string) {
return (path.isAbsolute(file) ? path.relative(projectSrc, file) : file)
.replaceAll("\\", "/");
}
declare global {
namespace NodeJS {
interface Module {
cloverClientRefs?: string[];
cloverSourceCode?: string;
_compile(
this: NodeJS.Module,
content: string,
@ -228,8 +336,14 @@ declare global {
}
}
}
declare module "node:module" {
export function _resolveFilename(
id: string,
parent: NodeJS.Module,
): unknown;
}
import * as fs from "./fs.ts";
import * as fs from "./lib/fs.ts";
import * as path from "node:path";
import { pathToFileURL } from "node:url";
import * as esbuild from "esbuild";

View file

@ -1,55 +1,108 @@
// `Incremental` contains multiple maps for the different parts of a site
// build, and tracks reused items across builds. It also handles emitting and
// updating the built site. This structure is self contained and serializable.
//
// Tracking is simple: Files map to one or more 'source IDs', which map to one
// or more 'artifact'. This two layer approach allows many files (say a page +
// all its imports) to map to the build of a page, which produces an HTML file
// plus a list of scripts.
// Incremental contains multiple maps for the different kinds
// of Artifact, which contain a list of source files which
// were used to produce it. When files change, Incremental sees
// that the `mtime` is newer, and purges the referenced artifacts.
import { Buffer } from "node:buffer";
type SourceId = string; // relative to project root, e.g. 'src/global.css'
type ArtifactId = string; // `${ArtifactType}\0${string}`
type Sha1Id = string; // Sha1 hex string
// -- artifact types --
interface ArtifactMap {
/* An asset (serve with "#sitegen/asset") */
asset: Asset;
/* The bundled text of a '.client.ts' script */
// TODO: track imports this has into `asset`
script: string;
/* The bundled style tag contents. Keyed by 'css.styleKey' */
style: string;
/* Metadata about a static page */
pageMetadata: PageMetadata;
/* Metadata about a dynamic view */
viewMetadata: ViewMetadata;
/* Cached '.marko' server compilation */
serverMarko: hot.MarkoCacheEntry;
/* Backend source code, pre-replacement. Keyed by platform type. */
backendBundle: BackendBundle;
/* One file in the backend receives post-processing. */
backendReplace: Buffer;
}
type AllArtifactMaps = {
[K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
};
type ArtifactType = keyof ArtifactMap;
interface Asset {
type ArtifactKind = keyof ArtifactMap;
/* Automatic path tracing makes 'sources: [file]' refer to that file
 * plus everything it imports. The kinds listed here opt out and track
 * only the exact files given.
 */
const exactDependencyKinds = ["serverMarko"];
export interface Asset {
buffer: Buffer;
headers: Record<string, string | undefined>;
hash: string;
}
export interface PutBase {
srcTag?: string; // deprecated
srcId: string;
key: string;
/**
* This interface intentionally omits the *contents*
* of its scripts and styles for fine-grained rebuilds.
*/
export interface PageMetadata {
html: string;
meta: string;
cssImports: string[];
theme: css.Theme;
clientRefs: string[];
}
/**
* Like a page, this intentionally omits resources,
* but additionally omits the bundled server code.
*/
export interface ViewMetadata {
file: string;
// staticMeta: string | null; TODO
cssImports: string[];
theme: css.Theme;
clientRefs: string[];
hasLayout: boolean;
}
export interface BackendBundle {
magicWord: string;
fileWithMagicWord: string | null;
files: Record<string, Buffer>;
}
export interface Put<T extends ArtifactType> extends PutBase {
type: T;
// -- incremental support types --
export interface PutBase {
sources: SourceId[];
key: string;
}
export interface Put<T extends ArtifactKind> extends PutBase {
kind: T;
value: ArtifactMap[T];
}
export interface Output {
type: ArtifactType;
key: string;
export interface Invalidations {
lastModified: number;
outputs: Set<ArtifactId>;
files: Set<SourceId>;
}
const gzip = util.promisify(zlib.gzip);
const zstd = util.promisify(zlib.zstdCompress);
export class Incremental {
/** The generated artifacts */
out: AllArtifactMaps = {
out: {
[K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
} = {
asset: new Map(),
script: new Map(),
style: new Map(),
pageMetadata: new Map(),
viewMetadata: new Map(),
serverMarko: new Map(),
backendBundle: new Map(),
backendReplace: new Map(),
};
/** Tracking filesystem entries to `srcId` */
invals = new Map<SourceId, Invalidations>();
/** Tracking output keys to files */
sources = new Map<ArtifactId, SourceId[]>();
/** Compressed resources */
compress = new Map<string, Compressed>();
compress = new Map<Sha1Id, Compressed>();
compressQueue = new Queue<CompressJob, void>({
name: "Compress",
maxJobs: 5,
@ -57,59 +110,183 @@ export class Incremental {
passive: true,
getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`,
});
/** Tracking filesystem entries to `srcId` */
files = new Map<string, hot.FileStat>();
srcIds = new Map<string, Output[]>();
static fromSerialized() {
/** Reset at the end of each update */
round = {
inserted: new Set<ArtifactId>(),
referenced: new Set<ArtifactId>(),
};
getArtifact<T extends ArtifactKind>(kind: T, key: string) {
this.round.referenced.add(`${kind}\0${key}`);
return this.out[kind].get(key);
}
serialize() {
const writer = new BufferWriter();
const asset = Array.from(
this.out.asset,
([key, { buffer, hash, headers }]) => {
const raw = writer.write(buffer, hash);
const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
const gzip = gzipBuf ? writer.write(gzipBuf, hash) : null;
const zstd = zstdBuf ? writer.write(zstdBuf, hash) : null;
return [key, {
raw,
gzip,
zstd,
hash,
headers,
}];
},
hasArtifact(kind: ArtifactKind, key: string) {
return this.getArtifact(kind, key) != null;
}
sourcesFor(kind: ArtifactKind, key: string) {
return UNWRAP(
this.sources.get(kind + "\0" + key),
`No artifact '${kind}' '${key}'`,
);
const script = Array.from(this.out.script);
const meta = Buffer.from(
JSON.stringify({
asset,
script,
}),
"utf-8",
);
const lengthBuffer = Buffer.alloc(4);
lengthBuffer.writeUInt32LE(meta.byteLength, 0);
return Buffer.concat([meta, lengthBuffer, ...writer.buffers]);
}
serializeToDisk(file = ".clover/incr.state") {
const buffer = this.serialize();
fs.writeFileSync(file, buffer);
shake() {
const toPublic = (str: string) => {
const [kind, key] = str.split("\0");
return { kind: kind as ArtifactKind, key };
};
const inserted = Array.from(this.round.inserted, toPublic);
const referenced = Array.from(this.round.referenced, toPublic);
const unreferenced: { kind: ArtifactKind; key: string }[] = [];
for (const kind in this.out) {
const map = this.out[kind as keyof typeof this.out];
if (!map) continue;
for (const key of map.keys()) {
if (!this.round.referenced.has(`${kind}\0${key}`)) {
unreferenced.push({ kind: kind as ArtifactKind, key });
// this.out[kind as ArtifactKind].delete(key);
}
}
}
this.round.inserted.clear();
this.round.referenced.clear();
return { inserted, referenced, unreferenced };
}
put<T extends ArtifactType>({
srcId,
type,
/*
* Put built artifacts into the incremental cache. The entry points
* used to build this must be provided. 'Incremental' will trace JS
* imports and file modification times tracked by 'hot.ts'.
*/
put<T extends ArtifactKind>({
sources,
kind,
key,
value,
}: Put<T>) {
this.out[type].set(key, value);
// These three invariants affect incremental accuracy.
if (this.round.inserted.has(`${kind}\0${key}`)) {
console.error(
`Artifact ${kind}:${key} was inserted multiple times in the same round!`,
);
} else if (!this.round.referenced.has(`${kind}\0${key}`)) {
console.error(
`Artifact ${kind}:${key} was inserted without checking if (!hasArtifact())`,
);
} else if (this.out[kind].has(key)) {
console.error(
`Artifact ${kind}:${key} is not stale, but overwritten.`,
);
}
this.out[kind].set(key, value);
this.round.inserted.add(`${kind}\0${key}`);
// Update sources information
ASSERT(sources.length > 0, "Missing sources for " + kind + " " + key);
sources = sources.map((src) => path.normalize(src));
const fullKey = `${kind}\0${key}`;
const prevSources = this.sources.get(fullKey);
const newSources = new Set(
sources.map((file) =>
path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
),
);
this.sources.set(fullKey, [...newSources]);
for (const source of prevSources ?? []) {
if (sources.includes(source)) {
newSources.delete(source);
continue;
}
const invals = UNWRAP(this.invals.get(source));
ASSERT(invals.outputs.has(fullKey));
invals.outputs.delete(fullKey);
}
// Use reflection from the plugin system to get imports.
for (const source of newSources) {
const invals = this.#getOrInitInvals(source);
invals.outputs.add(fullKey);
this.#followImports(source);
}
}
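// Canonical call pattern that satisfies the invariants above (sketch,
// hypothetical build step):
//
//   if (!incr.hasArtifact("style", key)) {
//     const value = await buildStyle();   // assumed build step
//     incr.put({ kind: "style", key, sources: ["src/global.css"], value });
//   }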
// TODO: this doesn't remove stuff when it disappears
#getOrInitInvals(source: string) {
let invals = this.invals.get(source);
if (!invals) {
const lastModified = hot.getFileStat(source)?.lastModified ??
fs.statSync(path.resolve(hot.projectRoot, source)).mtimeMs;
this.invals.set(
source,
invals = {
lastModified,
files: new Set(),
outputs: new Set(),
},
);
}
return invals;
}
#followImports(file: string) {
const stat = hot.getFileStat(file);
if (!stat) return;
for (const i of stat.imports) {
const invals = this.#getOrInitInvals(i);
invals.files.add(file);
this.#followImports(i);
}
}
async statAllFiles() {
for (const file of this.invals.keys()) {
try {
const mtime = fs.statSync(file).mtimeMs;
this.updateStat(file, mtime);
} catch (err) {
}
}
}
updateStat(file: string, newLastModified: number | null) {
file = path.relative(hot.projectRoot, file);
const stat = this.invals.get(file);
ASSERT(stat, "Updated stat on untracked file " + file);
const hasUpdate = !newLastModified || stat.lastModified < newLastModified;
if (hasUpdate) {
// Invalidate
console.info(file + " " + (newLastModified ? "updated" : "deleted"));
hot.unload(file);
const invalidQueue = [file];
let currentInvalid;
while (currentInvalid = invalidQueue.pop()) {
const invalidations = this.invals.get(currentInvalid);
ASSERT(
invalidations,
"No reason to track file '" + currentInvalid +
"' if it has no invalidations",
);
const { files, outputs } = invalidations;
for (const out of outputs) {
const [kind, artifactKey] = out.split("\0");
this.out[kind as ArtifactKind].delete(artifactKey);
}
invalidQueue.push(...files);
}
}
if (newLastModified) {
stat.lastModified = newLastModified;
} else {
this.invals.delete(file);
}
return hasUpdate;
}
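// Sketch of how a watcher / dev server is expected to drive this for files
// already tracked in 'invals' (the watcher API here is assumed, not part of
// this file):
//
//   watcher.on("change", (file, mtimeMs) => {
//     const stale = incr.updateStat(file, mtimeMs); // purges stale artifacts
//     if (stale) scheduleRebuild();                 // hypothetical rebuild hook
//   });
//   watcher.on("delete", (file) => incr.updateStat(file, null));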
async putAsset(info: PutAsset) {
@ -126,30 +303,25 @@ export class Incremental {
},
hash,
};
const a = this.put({ ...info, kind: "asset", value });
if (!this.compress.has(hash)) {
const label = info.key;
this.compress.set(hash, {
zstd: undefined,
gzip: undefined,
});
await Promise.all([
this.compressQueue.add({ label, buffer, algo: "zstd", hash }),
this.compressQueue.add({ label, buffer, algo: "gzip", hash }),
]);
this.compressQueue.add({ label, buffer, algo: "zstd", hash });
this.compressQueue.add({ label, buffer, algo: "gzip", hash });
}
return this.put({ ...info, type: "asset", value });
return a;
}
async compressImpl({ algo, buffer, hash }: CompressJob) {
let out;
switch (algo) {
case "zstd":
out = await zstd(buffer);
break;
case "gzip":
out = await gzip(buffer, { level: 9 });
break;
}
if (algo === "zstd") out = await zstd(buffer);
else if (algo === "gzip") out = await gzip(buffer, { level: 9 });
else algo satisfies never;
let entry = this.compress.get(hash);
if (!entry) {
this.compress.set(
@ -163,32 +335,232 @@ export class Incremental {
entry![algo] = out;
}
invalidate(srcId: string) {
}
async wait() {
await this.compressQueue.done({ method: "stop" });
}
async flush() {
serialize() {
const writer = new BufferWriter();
const asset = Object.fromEntries(
Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
// -- artifact --
const asset = Array.from(
this.out.asset,
([key, { buffer, hash, headers }]) => {
const raw = writer.write(buffer, hash);
const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
const gzip = gzipBuf ? writer.write(gzipBuf, hash) : null;
const zstd = zstdBuf ? writer.write(zstdBuf, hash) : null;
const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
return [key, {
raw,
gzip,
zstd,
headers,
hash,
headers: headers as Record<string, string>,
}] satisfies SerializedMeta["asset"][0];
},
);
const script = Array.from(this.out.script);
const style = Array.from(this.out.style);
const pageMetadata = Array.from(this.out.pageMetadata);
const viewMetadata = Array.from(this.out.viewMetadata);
const serverMarko = Array.from(this.out.serverMarko);
const backendBundle = Array.from(this.out.backendBundle, ([k, v]) => {
return [k, {
magicWord: v.magicWord,
fileWithMagicWord: v.fileWithMagicWord,
files: Object.entries(v.files).map(
([file, contents]) => [
file,
writer.write(contents, "backendBundle" + k + ":" + file),
],
),
}] satisfies SerializedMeta["backendBundle"][0];
});
const backendReplace = Array.from(
this.out.backendReplace,
([k, v]) =>
[
k,
writer.write(v, "backendReplace" + k),
] satisfies SerializedMeta["backendReplace"][0],
);
// -- incremental metadata --
const invals = Array.from(this.invals, ([key, value]) => {
const { lastModified, files, outputs } = value;
return [key, {
m: lastModified,
f: [...files],
o: [...outputs],
}] satisfies SerializedMeta["invals"][0];
});
const sources = Array.from(this.sources, ([key, value]) => {
return [key, ...value] as [string, ...string[]];
});
const json = {
asset,
script,
invals,
sources,
style,
pageMetadata,
viewMetadata,
serverMarko,
backendBundle,
backendReplace,
} satisfies SerializedMeta;
const meta = Buffer.from(JSON.stringify(json), "utf-8");
const lengthBuffer = Buffer.alloc(4);
lengthBuffer.writeUInt32LE(meta.byteLength, 0);
return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
}
static fromSerialized(buffer: Buffer): Incremental {
const metaLength = buffer.readUint32LE(0);
const meta: SerializedMeta = JSON.parse(
buffer.subarray(4, 4 + metaLength).toString("utf8"),
);
const view = ([start, end]: View) =>
buffer.subarray(4 + metaLength + start, 4 + metaLength + end);
const incr = new Incremental();
incr.out = {
asset: new Map(meta.asset.map(([key, value]) => {
const { hash, raw, gzip, zstd, headers } = value;
if ((gzip || zstd) && !incr.compress.has(hash)) {
incr.compress.set(hash, {
gzip: gzip ? view(gzip) : undefined,
zstd: zstd ? view(zstd) : undefined,
});
}
return [key, {
buffer: view(raw),
headers: headers,
hash: hash,
}];
})),
script: new Map(meta.script),
style: new Map(meta.style),
pageMetadata: new Map(meta.pageMetadata),
viewMetadata: new Map(meta.viewMetadata),
serverMarko: new Map(meta.serverMarko),
backendBundle: new Map(meta.backendBundle.map(([key, value]) => {
return [key, {
magicWord: value.magicWord,
fileWithMagicWord: value.fileWithMagicWord,
files: Object.fromEntries(
value.files.map(([file, contents]) => [file, view(contents)]),
),
}];
})),
backendReplace: new Map(
meta.backendReplace.map(([key, contents]) => [key, view(contents)]),
),
};
incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => {
return [key, {
lastModified: m,
files: new Set(f),
outputs: new Set(o),
}];
}));
incr.sources = new Map(meta.sources.map(([key, ...value]) => [key, value]));
return incr;
}
/*
* Move the cached (server) marko transpilations from this incremental
* into the running process.
*/
loadMarkoCache() {
hot.markoCache.clear();
for (const [key, value] of this.out.serverMarko) {
hot.markoCache.set(path.resolve(hot.projectRoot, key), value);
}
}
/*
* Snapshot the cached (server) marko transpilations from the running
* process into this incremental.
*/
snapshotMarkoCache() {
for (const [file, value] of hot.markoCache) {
const key = path.relative(hot.projectRoot, file).replaceAll("\\", "/");
// Only insert when the key doesn't already exist; calling 'put' again
// would trigger the duplicate-insert warning above.
if (!this.hasArtifact("serverMarko", key)) {
this.put({
kind: "serverMarko",
sources: [file],
key,
value,
});
}
}
}
toDisk(file = ".clover/incr.state") {
const buffer = this.serialize();
fs.writeFileSync(file, buffer);
}
static fromDisk(file = ".clover/incr.state"): Incremental {
try {
const buffer = fs.readFileSync(file);
return Incremental.fromSerialized(buffer);
} catch (err: any) {
if (err.code === "ENOENT") return new Incremental();
throw err;
}
}
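// Lifecycle sketch for a warm build (default paths from above; 'runBuild' is
// a hypothetical entry point such as generate.ts):
//
//   const incr = Incremental.fromDisk(); // empty Incremental when no state file
//   await incr.statAllFiles();           // drop artifacts whose sources changed
//   await runBuild(incr);
//   incr.toDisk();                       // persist state for the next run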
async wait() {
await this.compressQueue.done({ method: "success" });
}
async flush(
platform: bundle.ServerPlatform,
dir = path.resolve(".clover/out"),
) {
ASSERT(!this.compressQueue.active);
const join = (...args: string[]) => path.join(dir, ...args);
const writer = new BufferWriter();
// TODO: ensure every compressible asset actually got compressed
const asset = Object.fromEntries(
Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
const raw = writer.write(buffer, hash);
const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
const gzip = writer.write(UNWRAP(gzipBuf), hash + ".gz");
const zstd = writer.write(UNWRAP(zstdBuf), hash + ".zstd");
return [key, { raw, gzip, zstd, headers }];
}),
);
await Promise.all([
fs.writeFile(".clover/static.json", JSON.stringify(asset)),
fs.writeFile(".clover/static.blob", writer.get()),
]);
const backendBundle = UNWRAP(this.out.backendBundle.get(platform));
// Arrange output files
const outFiles: Array<[file: string, contents: string | Buffer]> = [
// Asset manifest
["static.json", JSON.stringify(asset)],
["static.blob", writer.get()],
// Backend
...Object.entries(backendBundle.files).map(([subPath, contents]) =>
[
subPath,
subPath === backendBundle.fileWithMagicWord
? UNWRAP(this.out.backendReplace.get(platform))
: contents,
] as [string, Buffer]
),
];
// TODO: check duplicates
// Perform all i/o
await Promise.all(
outFiles.map(([subPath, contents]) =>
fs.writeMkdir(join(subPath), contents, { flush: true })
),
);
}
}
@ -230,10 +602,10 @@ class BufferWriter {
export type View = [start: number, end: number];
// Alongside this type is a byte buffer, containing all the assets.
export interface BuiltAssetMap {
[route: string]: BuiltAsset;
}
export interface BuiltAsset {
raw: View;
gzip: View;
@ -249,17 +621,37 @@ export interface SerializedMeta {
hash: string;
headers: Record<string, string>;
}]>;
script: [key: string, value: string][];
script: Array<[key: string, value: string]>;
style: Array<[key: string, value: string]>;
pageMetadata: Array<[key: string, PageMetadata]>;
viewMetadata: Array<[key: string, ViewMetadata]>;
serverMarko: Array<[key: string, hot.MarkoCacheEntry]>;
backendBundle: Array<[platform: string, {
magicWord: string;
fileWithMagicWord: string | null;
files: Array<[string, View]>;
}]>;
backendReplace: Array<[key: string, View]>;
invals: Array<[key: string, {
/** Modified */
m: number;
f: SourceId[];
o: ArtifactId[];
}]>;
sources: Array<[string, ...string[]]>;
}
function never(): never {
throw new Error("Impossible");
}
const gzip = util.promisify(zlib.gzip);
const zstd = util.promisify(zlib.zstdCompress);
import * as path from "node:path";
import * as fs from "./fs.ts";
import * as fs from "#sitegen/fs";
import * as zlib from "node:zlib";
import * as util from "node:util";
import { Queue } from "./queue.ts";
import { Queue } from "#sitegen/async";
import * as hot from "./hot.ts";
import * as mime from "./mime.ts";
import * as mime from "#sitegen/mime";
import * as path from "node:path";
import { Buffer } from "node:buffer";
import * as css from "./css.ts";
import type * as bundle from "./bundle.ts";

View file

@ -8,8 +8,8 @@ export type StaticPageId = string;
export async function reload() {
const [map, buf] = await Promise.all([
fs.readFile(".clover/static.json", "utf8"),
fs.readFile(".clover/static.blob"),
fs.readFile(path.join(import.meta.dirname, "static.json"), "utf8"),
fs.readFile(path.join(import.meta.dirname, "static.blob")),
]);
assets = {
map: JSON.parse(map),
@ -18,15 +18,18 @@ export async function reload() {
}
export async function reloadSync() {
const map = fs.readFileSync(".clover/static.json", "utf8");
const buf = fs.readFileSync(".clover/static.blob");
const map = fs.readFileSync(
path.join(import.meta.dirname, "static.json"),
"utf8",
);
const buf = fs.readFileSync(path.join(import.meta.dirname, "static.blob"));
assets = {
map: JSON.parse(map),
buf,
};
}
export async function assetMiddleware(c: Context, next: Next) {
export async function middleware(c: Context, next: Next) {
if (!assets) await reload();
const asset = assets!.map[c.req.path];
if (asset) {
@ -35,6 +38,19 @@ export async function assetMiddleware(c: Context, next: Next) {
return next();
}
export async function notFound(c: Context) {
if (!assets) await reload();
let pathname = c.req.path;
do {
const asset = assets!.map[pathname + "/404"];
if (asset) return assetInner(c, asset, 404);
pathname = pathname.slice(0, pathname.lastIndexOf("/"));
} while (pathname);
const asset = assets!.map["/404"];
if (asset) return assetInner(c, asset, 404);
return c.text("the 'Not Found' page was not found", 404);
}
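// Lookup order sketch: a request to "/blog/post/missing" tries
// "/blog/post/missing/404", "/blog/post/404", "/blog/404", and finally "/404"
// before falling back to the plain-text response.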
export async function serveAsset(
c: Context,
id: StaticPageId,
@ -62,14 +78,13 @@ function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
if (ifnonematch) {
const etag = asset.headers.ETag;
if (etagMatches(etag, ifnonematch)) {
c.res = new Response(null, {
return c.res = new Response(null, {
status: 304,
statusText: "Not Modified",
headers: {
ETag: etag,
},
});
return;
}
}
const acceptEncoding = c.req.header("Accept-Encoding") ?? "";
@ -90,10 +105,16 @@ function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) {
} else {
body = subarrayAsset(asset.raw);
}
c.res = new Response(body, { headers, status });
return c.res = new Response(body, { headers, status });
}
import * as fs from "./fs.ts";
process.on("message", (msg: any) => {
if (msg?.type === "clover.assets.reload") reload();
});
import * as fs from "#sitegen/fs";
import type { Context, Next } from "hono";
import type { StatusCode } from "hono/utils/http-status";
import type { BuiltAsset, BuiltAssetMap, View } from "./incremental.ts";
import type { BuiltAsset, BuiltAssetMap, View } from "../incremental.ts";
import { Buffer } from "node:buffer";
import * as path from "node:path";

View file

@ -1,7 +1,4 @@
import { Progress } from "@paperclover/console/Progress";
import { Spinner } from "@paperclover/console/Spinner";
import * as path from "node:path";
import process from "node:process";
const five_minutes = 5 * 60 * 1000;
interface QueueOptions<T, R> {
name: string;
@ -37,6 +34,12 @@ export class Queue<T, R> {
this.#passive = options.passive ?? false;
}
cancel() {
const bar = this.#cachedProgress;
bar?.stop();
this.#queue = [];
}
get bar() {
const cached = this.#cachedProgress;
if (!cached) {
@ -68,10 +71,10 @@ export class Queue<T, R> {
return cached;
}
add(args: T) {
addReturn(args: T) {
this.#total += 1;
this.updateTotal();
if (this.#active.length > this.#maxJobs) {
if (this.#active.length >= this.#maxJobs) {
const { promise, resolve, reject } = Promise.withResolvers<R>();
this.#queue.push([args, resolve, reject]);
return promise;
@ -79,6 +82,10 @@ export class Queue<T, R> {
return this.#run(args);
}
add(args: T) {
return this.addReturn(args).then(() => {}, () => {});
}
addMany(items: T[]) {
this.#total += items.length;
this.updateTotal();
@ -95,10 +102,12 @@ export class Queue<T, R> {
const itemText = this.#getItemText(args);
const spinner = new Spinner(itemText);
spinner.stop();
(spinner as any).redraw = () => (bar as any).redraw();
const active = this.#active;
try {
active.unshift(spinner);
bar.props = { active };
// console.log(this.#name + ": " + itemText);
const result = await this.#fn(args, spinner);
this.#done++;
return result;
@ -107,6 +116,7 @@ export class Queue<T, R> {
(err as any).job = itemText;
}
this.#errors.push(err);
console.error(util.inspect(err, false, Infinity, true));
throw err;
} finally {
active.splice(active.indexOf(spinner), 1);
@ -142,7 +152,7 @@ export class Queue<T, R> {
}
}
async done(o: { method: "success" | "stop" }) {
async done(o?: { method: "success" | "stop" }) {
if (this.#active.length === 0) {
this.#end(o);
return;
@ -156,8 +166,8 @@ export class Queue<T, R> {
#end(
{ method = this.#passive ? "stop" : "success" }: {
method: "success" | "stop";
},
method?: "success" | "stop";
} = {},
) {
const bar = this.#cachedProgress;
if (this.#errors.length > 0) {
@ -170,6 +180,16 @@ export class Queue<T, R> {
if (bar) bar[method]();
}
get active(): boolean {
return this.#active.length !== 0;
}
[Symbol.dispose]() {
if (this.active) {
this.cancel();
}
}
}
const cwd = process.cwd();
@ -200,7 +220,80 @@ export class OnceMap<T> {
const result = compute();
this.ongoing.set(key, result);
result.finally(() => this.ongoing.delete(key));
return result;
}
}
interface ARCEValue<T> {
value: T;
[Symbol.dispose]: () => void;
}
export function RefCountedExpirable<T>(
init: () => Promise<T>,
deinit: (value: T) => void,
expire: number = five_minutes,
): () => Promise<ARCEValue<T>> {
let refs = 0;
let item: ARCEValue<T> | null = null;
let loading: Promise<ARCEValue<T>> | null = null;
let timer: ReturnType<typeof setTimeout> | null = null;
function deref() {
ASSERT(item !== null);
if (--refs !== 0) return;
ASSERT(timer === null);
timer = setTimeout(() => {
ASSERT(refs === 0);
ASSERT(loading === null);
ASSERT(item !== null);
deinit(item.value);
item = null;
timer = null;
}, expire);
}
return async function () {
if (timer !== null) {
clearTimeout(timer);
timer = null;
}
if (item !== null) {
refs++;
return item;
}
if (loading !== null) {
refs++;
return loading;
}
const p = Promise.withResolvers<ARCEValue<T>>();
loading = p.promise;
try {
const value = await init();
item = { value, [Symbol.dispose]: deref };
refs++;
p.resolve(item);
return item;
} catch (e) {
p.reject(e);
throw e;
} finally {
loading = null;
}
};
}
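// Usage sketch (hypothetical async resource); overlapping callers share one
// instance, and 'deinit' runs 'expire' ms after the last reference is dropped:
//
//   const acquireDb = RefCountedExpirable(
//     () => openDatabase(),         // assumed async init
//     (db) => db.close(),           // assumed teardown
//   );
//   async function handler() {
//     using db = await acquireDb(); // released via Symbol.dispose on scope exit
//     return db.value.query("select 1");
//   }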
export function once<T>(fn: () => Promise<T>): () => Promise<T> {
let result: T | Promise<T> | null = null;
return async () => {
if (result !== null) return result;
result = await fn();
return result;
};
}
import { Progress } from "@paperclover/console/Progress";
import { Spinner } from "@paperclover/console/Spinner";
import * as path from "node:path";
import process from "node:process";
import * as util from "node:util";

111
framework/lib/fs.ts Normal file
View file

@ -0,0 +1,111 @@
// File System APIs. A few custom helpers, but mostly a re-export of a mix of
// built-in Node.js sync and promise fs methods, for convenience.
export {
createReadStream,
createWriteStream,
existsSync,
open,
readdir,
readdirSync,
readFile,
readFileSync,
rm,
rmSync,
stat,
statSync,
writeFile,
writeFileSync,
type FileHandle,
};
export function mkdir(dir: string) {
return nodeMkdir(dir, { recursive: true });
}
export function mkdirSync(dir: string) {
return nodeMkdirSync(dir, { recursive: true });
}
export type WriteFileAsyncOptions = Parameters<typeof writeFile>[2];
export async function writeMkdir(
file: string,
contents: Buffer | string,
options?: WriteFileAsyncOptions,
) {
await mkdir(path.dirname(file));
return writeFile(file, contents, options);
}
export function writeMkdirSync(file: string, contents: Buffer | string) {
mkdirSync(path.dirname(file));
return writeFileSync(file, contents);
}
export function readDirRecOptionalSync(dir: string) {
try {
return readdirSync(dir, { recursive: true, encoding: "utf8" });
} catch (err: any) {
if (err.code === "ENOENT") return [];
throw err;
}
}
export async function readJson<T>(file: string) {
return JSON.parse(await readFile(file, "utf-8")) as T;
}
export function readJsonSync<T>(file: string) {
return JSON.parse(readFileSync(file, "utf-8")) as T;
}
export async function removeEmptyDirectories(dir: string, removeRoot = false) {
try {
const entries = await readdir(dir, { withFileTypes: true });
let len = entries.length;
for (const entry of entries) {
if (entry.isDirectory()) {
const subDirPath = path.join(dir, entry.name);
if (await removeEmptyDirectories(subDirPath, true)) len -= 1;
}
}
if (len === 0) {
if (removeRoot) {
await rmdir(dir);
}
return true;
}
} catch (error: any) {
if (error.code === "ENOENT") {
// Directory doesn't exist, ignore
return;
}
throw error;
}
return false;
}
import * as path from "node:path";
import {
createReadStream,
createWriteStream,
existsSync,
mkdirSync as nodeMkdirSync,
readdirSync,
readFileSync,
rmSync,
statSync,
writeFileSync,
} from "node:fs";
import {
mkdir as nodeMkdir,
open,
readdir,
readFile,
rm,
rmdir,
stat,
writeFile,
type FileHandle,
} from "node:fs/promises";
export { Stats } from "node:fs";

192
framework/lib/markdown.tsx Normal file
View file

@ -0,0 +1,192 @@
/* Implementation of the CommonMark specification for markdown, with support
* for custom syntax extensions via the parser options. Instead of
* returning an AST that has a second conversion pass to JSX, the
* returned value of 'parse' is 'engine.Node' which can be stringified
* via clover's SSR engine. This way, generation optimizations, async
* components, and other features are gained for free here.
*/
function parse(src: string, options: Partial<ParseOpts> = {}) {
}
/* Render markdown content. Wraps 'parse' as a JSX component, since components
 * take a single props argument and must start with a capital letter. */
export function Markdown(
{ src, ...options }: { src: string } & Partial<ParseOpts>,
) {
return parse(src, options);
}
function parseInline(src: string, options: Partial<InlineOpts> = {}) {
const { rules = inlineRules, links = new Map() } = options;
const opts: InlineOpts = { rules, links };
const parts: engine.Node[] = [];
const ruleList = Object.values(rules);
parse: while (true) {
for (const rule of ruleList) {
if (!rule.match) continue;
const match = src.match(rule.match);
if (!match) continue;
const index = UNWRAP(match.index);
const after = src.slice(index + match[0].length);
const parse = rule.parse({ after, match: match[0], opts });
if (!parse) continue;
// parse before
parts.push(src.slice(0, index), parse.result);
src = parse.rest ?? after;
continue parse;
}
break;
}
parts.push(src);
return parts;
}
// -- interfaces --
interface ParseOpts {
gfm: boolean;
blockRules: Record<string, BlockRule>;
inlineRules: Record<string, InlineRule>;
}
interface InlineOpts {
rules: Record<string, InlineRule>;
links: Map<string, LinkRef>;
}
interface InlineRule {
match: RegExp;
parse(opts: {
after: string;
match: string;
opts: InlineOpts;
}): InlineParse | null;
}
interface InlineParse {
result: engine.Node;
rest?: string;
}
interface LinkRef {
href: string;
title: string | null;
}
interface BlockRule {
match: RegExp;
parse(opts: {}): unknown;
}
export const inlineRules: Record<string, InlineRule> = {
code: {
match: /`+/,
// 6.1 - code spans
parse({ after, match }) {
const end = after.indexOf(match);
if (end === -1) return null;
let inner = after.slice(0, end);
const rest = after.slice(end + match.length);
// If the resulting string both begins and ends with a space
// character, but does not consist entirely of space characters,
// a single space character is removed from the front and back.
if (inner.startsWith(" ") && inner.endsWith(" ") && !/^ *$/.test(inner)) inner = inner.slice(1, -1);
return { result: <code>{inner}</code>, rest };
},
},
link: {
match: /(?<!!)\[/,
// 6.3 - links
parse({ after, opts }) {
// Match '[' to let the inner-most link win.
const splitText = splitFirst(after, /[[\]]/);
if (!splitText) return null;
if (splitText.delim !== "]") return null;
const { first: textSrc, rest: afterText } = splitText;
let href: string, title: string | null = null, rest: string;
if (afterText[0] === "(") {
// Inline link
const splitTarget = splitFirst(afterText.slice(1), /\)/);
if (!splitTarget) return null;
({ rest } = splitTarget);
const target = parseLinkTarget(splitTarget.first);
if (!target) return null;
({ href, title } = target);
} else if (afterText[0] === "[") {
const splitTarget = splitFirst(afterText.slice(1), /]/);
if (!splitTarget) return null;
const name = splitTarget.first.trim().length === 0
// Collapsed reference link
? textSrc.trim()
// Full Reference Link
: splitTarget.first.trim();
const target = opts.links.get(name);
if (!target) return null;
({ href, title } = target);
({ rest } = splitTarget);
} else {
// Shortcut reference link
const target = opts.links.get(textSrc);
if (!target) return null;
({ href, title } = target);
rest = afterText;
}
return {
result: <a {...{ href, title }}>{parseInline(textSrc, opts)}</a>,
rest,
};
},
},
image: {
match: /!\[/,
// 6.4 - images
parse({ after, opts }) {
// Match '[' to let the inner-most link win.
const splitText = splitFirst(after, /[[\]]/);
if (!splitText) return null;
if (splitText.delim !== "]") return null;
const { first: textSrc, rest: afterText } = splitText;
},
},
emphasis: {
// detect left-flanking delimiter runs, but this expression does not
// consider preceding escapes. Instead, those are programmatically
// checked inside the parse function.
match: /(?:\*+|(?<!\p{P})_+)(?!\s|\p{P}|$)/u,
// 6.2 - emphasis and strong emphasis
parse({ before, match, after, opts }) {
// find out how long the delim sequence is
// look for 'ends'
},
},
autolink: {},
html: {},
br: {
match: / +\n|\\\n/,
parse() {
return { result: <br /> };
},
},
};
function parseLinkTarget(src: string) {
let href: string, title: string | null = null;
href = src;
return { href, title };
}
/* Find a delimiter while considering backslash escapes. */
function splitFirst(text: string, match: RegExp) {
let first = "", delim: string, escaped: boolean;
do {
const find = text.match(match);
if (!find) return null;
delim = find[0];
const index = UNWRAP(find.index);
let i = index - 1;
escaped = false;
while (i >= 0 && text[i] === "\\") escaped = !escaped, i -= 1;
first += text.slice(0, index - +escaped);
text = text.slice(index + find[0].length);
} while (escaped);
return { first, delim, rest: text };
}
console.log(engine.ssrSync(parseInline("meow `bwaa` `` ` `` `` `z``")));
import * as engine from "#ssr";
import type { ParseOptions } from "node:querystring";

24
framework/lib/meta.ts Normal file
View file

@ -0,0 +1,24 @@
export interface Meta {
title: string;
description?: string | undefined;
openGraph?: OpenGraph;
alternates?: Alternates;
}
export interface OpenGraph {
title?: string;
description?: string | undefined;
type: string;
url: string;
}
export interface Alternates {
canonical: string;
types: { [mime: string]: AlternateType };
}
export interface AlternateType {
url: string;
title: string;
}
export function renderMeta({ title }: Meta): string {
return `<title>${esc(title)}</title>`;
}
import { escapeHtml as esc } from "../engine/ssr.ts";

32
framework/lib/mime.ts Normal file
View file

@ -0,0 +1,32 @@
declare const MIME_INLINE_DATA: never;
const entries = typeof MIME_INLINE_DATA !== "undefined"
? MIME_INLINE_DATA
: fs.readFileSync(path.join(import.meta.dirname, "mime.txt"), "utf8")
.split("\n")
.map((line) => line.trim())
.filter((line) => line && !line.startsWith("#"))
.map((line) => line.split(/\s+/, 2) as [string, string]);
export const rawEntriesText = entries;
const extensions = new Map(entries.filter((x) => x[0].startsWith(".")));
const fullNames = new Map(entries.filter((x) => !x[0].startsWith(".")));
/**
* Accepts:
* - Full file path or basename
* - Extension (with or without dot)
*/
export function contentTypeFor(file: string) {
const slash = file.indexOf("/");
if (slash !== -1) file = file.slice(slash + 1);
const dot = file.indexOf(".");
if (dot === -1) file = "." + file;
else if (dot > 0) {
let entry = fullNames.get(file);
if (entry) return entry;
file = file.slice(dot);
}
return extensions.get(file) ?? "application/octet-stream";
}
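// Example lookups against the table above (sketch):
//   contentTypeFor("photos/cat.jpeg") // "image/jpeg"              (extension)
//   contentTypeFor("rss.xml")         // "application/rss+xml"     (full-name entry)
//   contentTypeFor("tar")             // "application/x-tar"       (bare extension)
//   contentTypeFor("README")          // "application/octet-stream" (no match)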
import * as fs from "#sitegen/fs";
import * as path from "node:path";

99
framework/lib/mime.txt Normal file
View file

@ -0,0 +1,99 @@
# media types
# https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/MIME_types
.aac audio/x-aac
.aif audio/x-aiff
.aifc audio/x-aiff
.aiff audio/x-aiff
.asm text/x-asm
.avi video/x-msvideo
.bat application/x-msdownload
.c text/x-c
.chat text/x-clover-chatlog
.class application/java-vm
.cmd application/x-msdownload
.com application/x-msdownload
.conf text/plain
.cpp text/x-c
.css text/css
.csv text/csv
.cxx text/x-c
.def text/plain
.diff text/plain
.dll application/x-msdownload
.dmg application/octet-stream
.doc application/msword
.docx application/vnd.openxmlformats-officedocument.wordprocessingml.document
.epub application/epub+zip
.exe application/x-msdownload
.flv video/x-flv
.fbx application/fbx
.gz application/x-gzip
.h text/x-c
.h264 video/h264
.hh text/x-c
.htm text/html;charset=utf-8
.html text/html;charset=utf-8
.ico image/x-icon
.ics text/calendar
.in text/plain
.jar application/java-archive
.java text/x-java-source
.jpeg image/jpeg
.jpg image/jpeg
.jpgv video/jpeg
.jxl image/jxl
.js application/javascript
.json application/json
.latex application/x-latex
.list text/plain
.log text/plain
.m4a audio/mp4
.man text/troff
.mid audio/midi
.midi audio/midi
.mov video/quicktime
.mp3 audio/mpeg
.mp4 video/mp4
.msh model/mesh
.msi application/x-msdownload
.obj application/octet-stream
.ogg audio/ogg
.otf application/x-font-otf
.pdf application/pdf
.png image/png
.ppt application/vnd.ms-powerpoint
.pptx application/vnd.openxmlformats-officedocument.presentationml.presentation
.psd image/vnd.adobe.photoshop
.py text/x-python
.rar application/x-rar-compressed
.rss application/rss+xml
.rtf application/rtf
.rtx text/richtext
.s text/x-asm
.pem application/x-pem-file
.ser application/java-serialized-object
.sh application/x-sh
.sig application/pgp-signature
.silo model/mesh
.svg image/svg+xml
.t text/troff
.tar application/x-tar
.text text/plain
.tgz application/x-gzip
.tif image/tiff
.tiff image/tiff
.torrent application/x-bittorrent
.ttc application/x-font-ttf
.ttf application/x-font-ttf
.txt text/plain
.urls text/uri-list
.v text/x-v
.wav audio/x-wav
.wmv video/x-ms-wmv
.xls application/vnd.ms-excel
.xlsx application/vnd.openxmlformats-officedocument.spreadsheetml.sheet
.xml application/xml
.xps application/vnd.ms-xpsdocument
# special cased based on file name
rss.xml application/rss+xml

44
framework/lib/sitegen.ts Normal file
View file

@ -0,0 +1,44 @@
// Import this file with 'import * as sg from "#sitegen";'
export type ScriptId = string;
/**
* A filesystem object associated with some ID,
* such as a page's route mapped to its source file.
*/
export interface FileItem {
id: string;
file: string;
}
export interface SitegenRender {
scripts: Set<string>;
}
export function initRender(): SitegenRender {
return {
scripts: new Set(),
};
}
export function getRender() {
return ssr.getUserData<SitegenRender>("sitegen", () => {
throw new Error(
"This function can only be used in a page (static or view)",
);
});
}
export function inRender() {
return "sitegen" in ssr.getCurrentRender();
}
/** Add a client-side script to the page. */
export function addScript(id: ScriptId | { value: ScriptId }) {
getRender().scripts.add(typeof id === "string" ? id : id.value);
}
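// Sketch of a page registering a client-side script (hypothetical files
// "src/pages/counter.tsx" and "src/pages/counter.client.tsx"); 'hot.ts'
// rewrites the string literal into a script id at load time:
//
//   import { addScript } from "#sitegen";
//   export const meta = { title: "Counter" };
//   export default function CounterPage() {
//     addScript("./counter.client.tsx");
//     return <div id="counter" />;
//   }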
export interface Section {
root: string;
}
import * as ssr from "../engine/ssr.ts";

View file

@ -10,7 +10,7 @@ export function getDb(file: string) {
if (db) return db;
const fileWithExt = file.includes(".") ? file : file + ".sqlite";
db = new WrappedDatabase(
new DatabaseSync(path.join(".clover/", fileWithExt)),
new DatabaseSync(path.join(process.env.CLOVER_DB ?? ".clover", fileWithExt)),
);
map.set(file, db);
return db;
@ -41,61 +41,95 @@ export class WrappedDatabase {
(key, version) values (?, ?);
`),
));
const { changes, lastInsertRowid } = s.run(name, 1);
console.log(changes, lastInsertRowid);
if (changes === 1) {
this.node.exec(schema);
}
const { changes } = s.run(name, 1);
if (changes === 1) this.node.exec(schema);
}
prepare<Args extends unknown[] = [], Result = unknown>(
query: string,
): Stmt<Args, Result> {
return new Stmt(this.node.prepare(query));
query = query.trim();
const lines = query.split("\n");
const trim = Math.min(
...lines.map((line) =>
line.trim().length === 0 ? Infinity : line.match(/^\s*/)![0].length
),
);
query = lines.map((x) => x.slice(trim)).join("\n");
let prepared;
try {
prepared = this.node.prepare(query);
} catch (err) {
if (err) (err as { query: string }).query = query;
throw err;
}
return new Stmt(prepared);
}
}
export class Stmt<Args extends unknown[] = unknown[], Row = unknown> {
#node: StatementSync;
#class: any | null = null;
query: string;
constructor(node: StatementSync) {
this.#node = node;
this.query = node.sourceSQL;
}
/** Get one row */
get(...args: Args): Row | null {
const item = this.#node.get(...args as any) as Row;
if (!item) return null;
const C = this.#class;
if (C) Object.setPrototypeOf(item, C.prototype);
return item;
return this.#wrap(args, () => {
const item = this.#node.get(...args as any) as Row;
if (!item) return null;
const C = this.#class;
if (C) Object.setPrototypeOf(item, C.prototype);
return item;
});
}
getNonNull(...args: Args) {
const item = this.get(...args);
if (!item) throw new Error("Query returned no result");
if (!item) {
throw this.#wrap(args, () => new Error("Query returned no result"));
}
return item;
}
iter(...args: Args): Iterator<Row> {
return this.array(...args)[Symbol.iterator]();
iter(...args: Args): IterableIterator<Row> {
return this.#wrap(args, () => this.array(...args)[Symbol.iterator]());
}
/** Get all rows */
array(...args: Args): Row[] {
const array = this.#node.all(...args as any) as Row[];
const C = this.#class;
if (C) array.forEach((item) => Object.setPrototypeOf(item, C.prototype));
return array;
return this.#wrap(args, () => {
const array = this.#node.all(...args as any) as Row[];
const C = this.#class;
if (C) array.forEach((item) => Object.setPrototypeOf(item, C.prototype));
return array;
});
}
/** Return the number of changes / row ID */
run(...args: Args) {
return this.#node.run(...args as any);
return this.#wrap(args, () => this.#node.run(...args as any));
}
as<R>(Class: { new (): R }): Stmt<Args, R> {
this.#class = Class;
return this as any;
}
#wrap<T>(args: unknown[], fn: () => T) {
try {
return fn();
} catch (err: any) {
if (err && typeof err === "object") {
err.query = this.query;
args = args.flat(Infinity);
err.queryArgs = args.length === 1 ? args[0] : args;
}
throw err;
}
}
}
import { DatabaseSync, StatementSync } from "node:sqlite";
import * as fs from "./fs.ts";
import * as path from "node:path";
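A usage sketch for the wrapper above; the database name, table, and row class are hypothetical, and the import path is an assumption since the diff header omits the file name. On a failing statement the thrown error now carries .query and .queryArgs.
// Hypothetical cache database and row shape; only getDb/prepare/as/get come from this file.
import { getDb } from "./sqlite.ts"; // path is an assumption
class Asset {
  key!: string;
  hash!: string;
}
const db = getDb("cache"); // opens $CLOVER_DB/cache.sqlite, falling back to .clover/cache.sqlite
const getAsset = db
  .prepare<[string], Asset>("select key, hash from assets where key = ?;")
  .as(Asset);
const row = getAsset.get("styles.css"); // null when missing, an Asset instance otherwise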

3
framework/lib/string.ts Normal file
View file

@ -0,0 +1,3 @@
export function escapeRegExp(source: string) {
return source.replace(/[\$\\]/g, "\\$&");
}
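escapeRegExp above only escapes '$' and backslash, which is enough for replacement strings; if the goal were to embed arbitrary text in a pattern source, a fuller escape (an alternative sketch, not this repo's version) would cover every metacharacter.
// Alternative sketch covering all RegExp metacharacters.
export function escapeRegExpFull(source: string) {
  return source.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}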

100
framework/lib/view.ts Normal file
View file

@ -0,0 +1,100 @@
// This import is generated by code 'bundle.ts'
export interface View {
component: engine.Component;
meta:
| meta.Meta
| ((props: { context?: hono.Context }) => Promise<meta.Meta> | meta.Meta);
layout?: engine.Component;
inlineCss: string;
scripts: Record<string, string>;
}
let views: Record<string, View> = null!;
let scripts: Record<string, string> = null!;
// An older version of the Clover Engine supported streaming suspense
// boundaries, but those were never used. Pages will wait until they
// are fully rendered before sending.
export async function renderView(
context: hono.Context,
id: string,
props: Record<string, unknown>,
) {
return context.html(await renderViewToString(id, { context, ...props }));
}
export async function renderViewToString(
id: string,
props: Record<string, unknown>,
) {
views ?? ({ views, scripts } = require("$views"));
// The view contains pre-bundled CSS and scripts, but keeps the scripts
// separate for run-time dynamic scripts. For example, the file viewer
// includes the canvas for the current page, but only the current page.
const {
component,
inlineCss,
layout,
meta: metadata,
}: View = UNWRAP(views[id], `Missing view ${id}`);
// -- metadata --
const renderedMetaPromise = Promise.resolve(
typeof metadata === "function" ? metadata(props) : metadata,
).then((m) => meta.renderMeta(m));
// -- html --
let page: engine.Element = [engine.kElement, component, props];
if (layout) page = [engine.kElement, layout, { children: page }];
const { text: body, addon: { sitegen } } = await engine.ssrAsync(page, {
sitegen: sg.initRender(),
});
// -- join document and send --
return wrapDocument({
body,
head: await renderedMetaPromise,
inlineCss,
scripts: joinScripts(
Array.from(
sitegen.scripts,
(id) => UNWRAP(scripts[id], `Missing script ${id}`),
),
),
});
}
export function provideViewData(v: typeof views, s: typeof scripts) {
views = v;
scripts = s;
}
export function joinScripts(scriptSources: string[]) {
const { length } = scriptSources;
if (length === 0) return "";
if (length === 1) return scriptSources[0];
return scriptSources.map((source) => `{${source}}`).join(";");
}
export function wrapDocument({
body,
head,
inlineCss,
scripts,
}: {
head: string;
body: string;
inlineCss: string;
scripts: string;
}) {
return `<!doctype html><html lang=en><head>${head}${
inlineCss ? `<style>${inlineCss}</style>` : ""
}</head><body>${body}${
scripts ? `<script>${scripts}</script>` : ""
}</body></html>`;
}
import * as meta from "./meta.ts";
import type * as hono from "#hono";
import * as engine from "../engine/ssr.ts";
import * as sg from "./sitegen.ts";
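joinScripts and wrapDocument above are pure string builders; an illustrative example of their output, with made-up inputs, is:
// Illustrative only; the inputs are invented, the output shape follows the code above.
const js = joinScripts(['console.log("a")', 'console.log("b")']);
// js === '{console.log("a")};{console.log("b")}'
const html = wrapDocument({
  head: "<title>hi</title>",
  body: "<main>hi</main>",
  inlineCss: "main{color:red}",
  scripts: js,
});
// html === '<!doctype html><html lang=en><head><title>hi</title><style>main{color:red}</style>' +
//   '</head><body><main>hi</main><script>{console.log("a")};{console.log("b")}</script></body></html>'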

View file

@ -1,13 +0,0 @@
import { resolveMetadata } from "./merge";
import { renderMetadata } from "./render";
import { Metadata } from "./types";
export * from "./types";
export * from "./merge";
export * from "./render";
export function resolveAndRenderMetadata(
...metadata: [Metadata, ...Metadata[]]
) {
return renderMetadata(resolveMetadata(...metadata));
}

View file

@ -1,154 +0,0 @@
import { createDefaultMetadata } from "./nextjs/default-metadata";
import { resolveAsArrayOrUndefined } from "./nextjs/generate/utils";
import {
resolveAlternates,
resolveAppleWebApp,
resolveAppLinks,
resolveRobots,
resolveThemeColor,
resolveVerification,
resolveViewport,
} from "./nextjs/resolvers/resolve-basics";
import { resolveIcons } from "./nextjs/resolvers/resolve-icons";
import {
resolveOpenGraph,
resolveTwitter,
} from "./nextjs/resolvers/resolve-opengraph";
import { resolveTitle } from "./nextjs/resolvers/resolve-title";
import type {
Metadata,
ResolvedMetadata,
} from "./nextjs/types/metadata-interface";
type MetadataAccumulationOptions = {
pathname: string;
};
// Merge the source metadata into the resolved target metadata.
function merge(
target: ResolvedMetadata,
source: Metadata | null,
titleTemplates: {
title?: string | null;
twitter?: string | null;
openGraph?: string | null;
} = {},
) {
const metadataBase = source?.metadataBase || target.metadataBase;
for (const key_ in source) {
const key = key_ as keyof Metadata;
switch (key) {
case "title": {
target.title = resolveTitle(source.title, titleTemplates.title);
break;
}
case "alternates": {
target.alternates = resolveAlternates(source.alternates, metadataBase, {
pathname: (source as any)._pathname ?? "/",
});
break;
}
case "openGraph": {
target.openGraph = resolveOpenGraph(source.openGraph, metadataBase);
if (target.openGraph) {
target.openGraph.title = resolveTitle(
target.openGraph.title,
titleTemplates.openGraph,
);
}
break;
}
case "twitter": {
target.twitter = resolveTwitter(source.twitter, metadataBase);
if (target.twitter) {
target.twitter.title = resolveTitle(
target.twitter.title,
titleTemplates.twitter,
);
}
break;
}
case "verification":
target.verification = resolveVerification(source.verification);
break;
case "viewport": {
target.viewport = resolveViewport(source.viewport);
break;
}
case "icons": {
target.icons = resolveIcons(source.icons);
break;
}
case "appleWebApp":
target.appleWebApp = resolveAppleWebApp(source.appleWebApp);
break;
case "appLinks":
target.appLinks = resolveAppLinks(source.appLinks);
break;
case "robots": {
target.robots = resolveRobots(source.robots);
break;
}
case "themeColor": {
target.themeColor = resolveThemeColor(source.themeColor);
break;
}
case "archives":
case "assets":
case "bookmarks":
case "keywords":
case "authors": {
// FIXME: type inferring
// @ts-ignore
target[key] = resolveAsArrayOrUndefined(source[key]) || null;
break;
}
// directly assign fields that fallback to null
case "applicationName":
case "description":
case "generator":
case "creator":
case "publisher":
case "category":
case "classification":
case "referrer":
case "colorScheme":
case "itunes":
case "formatDetection":
case "manifest":
// @ts-ignore TODO: support inferring
target[key] = source[key] || null;
break;
case "other":
target.other = Object.assign({}, target.other, source.other);
break;
case "metadataBase":
target.metadataBase = metadataBase;
break;
default:
break;
}
}
return target;
}
export interface MetadataWithPathname extends Metadata {
/** Set by framework author to the pathname of the page defining this metadata. */
_pathname?: string;
}
export function resolveMetadata(
...metadata: [MetadataWithPathname, ...MetadataWithPathname[]]
) {
const base = createDefaultMetadata();
for (const item of metadata) {
merge(base, item, {
title: base.title?.template,
twitter: base.twitter?.title?.template,
openGraph: base.openGraph?.title?.template,
});
}
return base;
}

View file

@ -1,15 +0,0 @@
import type { Viewport } from "./types/extra-types";
import type { Icons } from "./types/metadata-types";
export const ViewPortKeys: { [k in keyof Viewport]: string } = {
width: "width",
height: "height",
initialScale: "initial-scale",
minimumScale: "minimum-scale",
maximumScale: "maximum-scale",
viewportFit: "viewport-fit",
userScalable: "user-scalable",
interactiveWidget: "interactive-widget",
} as const;
export const IconKeys: (keyof Icons)[] = ["icon", "shortcut", "apple", "other"];

View file

@ -1,50 +0,0 @@
import type { ResolvedMetadata } from "./types/metadata-interface";
import process from "node:process";
export function createDefaultMetadata(): ResolvedMetadata {
const defaultMetadataBase =
process.env.NODE_ENV === "production" && process.env.VERCEL_URL
? new URL(`https://${process.env.VERCEL_URL}`)
: null;
return {
viewport: "width=device-width, initial-scale=1",
metadataBase: defaultMetadataBase,
// Other values are all null
title: null,
description: null,
applicationName: null,
authors: null,
generator: null,
keywords: null,
referrer: null,
themeColor: null,
colorScheme: null,
creator: null,
publisher: null,
robots: null,
manifest: null,
alternates: {
canonical: null,
languages: null,
media: null,
types: null,
},
icons: null,
openGraph: null,
twitter: null,
verification: {},
appleWebApp: null,
formatDetection: null,
itunes: null,
abstract: null,
appLinks: null,
archives: null,
assets: null,
bookmarks: null,
category: null,
classification: null,
other: {},
};
}

View file

@ -1,72 +0,0 @@
import type { ResolvedMetadata } from "../types/metadata-interface";
import React from "react";
import { AlternateLinkDescriptor } from "../types/alternative-urls-types";
function AlternateLink({
descriptor,
...props
}: {
descriptor: AlternateLinkDescriptor;
} & React.LinkHTMLAttributes<HTMLLinkElement>) {
if (!descriptor.url) return null;
return (
<link
{...props}
{...(descriptor.title && { title: descriptor.title })}
href={descriptor.url.toString()}
/>
);
}
export function AlternatesMetadata({
alternates,
}: {
alternates: ResolvedMetadata["alternates"];
}) {
if (!alternates) return null;
const { canonical, languages, media, types } = alternates;
return (
<>
{canonical
? <AlternateLink rel="canonical" descriptor={canonical} />
: null}
{languages
? Object.entries(languages).map(([locale, descriptors]) => {
return descriptors?.map((descriptor, index) => (
<AlternateLink
rel="alternate"
key={index}
hrefLang={locale}
descriptor={descriptor}
/>
));
})
: null}
{media
? Object.entries(media).map(([mediaName, descriptors]) =>
descriptors?.map((descriptor, index) => (
<AlternateLink
rel="alternate"
key={index}
media={mediaName}
descriptor={descriptor}
/>
))
)
: null}
{types
? Object.entries(types).map(([type, descriptors]) =>
descriptors?.map((descriptor, index) => (
<AlternateLink
rel="alternate"
key={index}
type={type}
descriptor={descriptor}
/>
))
)
: null}
</>
);
}

View file

@ -1,171 +0,0 @@
import type { ResolvedMetadata } from "../types/metadata-interface";
import React from "react";
import { Meta, MultiMeta } from "./meta";
export function BasicMetadata({ metadata }: { metadata: ResolvedMetadata }) {
return (
<>
<meta charSet="utf-8" />
{metadata.title !== null && metadata.title.absolute
? <title>{metadata.title.absolute}</title>
: null}
<Meta name="description" content={metadata.description} />
<Meta name="application-name" content={metadata.applicationName} />
{metadata.authors
? metadata.authors.map((author, index) => (
<React.Fragment key={index}>
{author.url && <link rel="author" href={author.url.toString()} />}
<Meta name="author" content={author.name} />
</React.Fragment>
))
: null}
{metadata.manifest
? <link rel="manifest" href={metadata.manifest.toString()} />
: null}
<Meta name="generator" content={metadata.generator} />
<Meta name="keywords" content={metadata.keywords?.join(",")} />
<Meta name="referrer" content={metadata.referrer} />
{metadata.themeColor
? metadata.themeColor.map((themeColor, index) => (
<Meta
key={index}
name="theme-color"
content={themeColor.color}
media={themeColor.media}
/>
))
: null}
<Meta name="color-scheme" content={metadata.colorScheme} />
<Meta name="viewport" content={metadata.viewport} />
<Meta name="creator" content={metadata.creator} />
<Meta name="publisher" content={metadata.publisher} />
<Meta name="robots" content={metadata.robots?.basic} />
<Meta name="googlebot" content={metadata.robots?.googleBot} />
<Meta name="abstract" content={metadata.abstract} />
{metadata.archives
? metadata.archives.map((archive) => (
<link rel="archives" href={archive} key={archive} />
))
: null}
{metadata.assets
? metadata.assets.map((asset) => (
<link rel="assets" href={asset} key={asset} />
))
: null}
{metadata.bookmarks
? metadata.bookmarks.map((bookmark) => (
<link rel="bookmarks" href={bookmark} key={bookmark} />
))
: null}
<Meta name="category" content={metadata.category} />
<Meta name="classification" content={metadata.classification} />
{metadata.other
? Object.entries(metadata.other).map(([name, content]) => (
<Meta
key={name}
name={name}
content={Array.isArray(content) ? content.join(",") : content}
/>
))
: null}
</>
);
}
export function ItunesMeta({ itunes }: { itunes: ResolvedMetadata["itunes"] }) {
if (!itunes) return null;
const { appId, appArgument } = itunes;
let content = `app-id=${appId}`;
if (appArgument) {
content += `, app-argument=${appArgument}`;
}
return <meta name="apple-itunes-app" content={content} />;
}
const formatDetectionKeys = [
"telephone",
"date",
"address",
"email",
"url",
] as const;
export function FormatDetectionMeta({
formatDetection,
}: {
formatDetection: ResolvedMetadata["formatDetection"];
}) {
if (!formatDetection) return null;
let content = "";
for (const key of formatDetectionKeys) {
if (key in formatDetection) {
if (content) content += ", ";
content += `${key}=no`;
}
}
return <meta name="format-detection" content={content} />;
}
export function AppleWebAppMeta({
appleWebApp,
}: {
appleWebApp: ResolvedMetadata["appleWebApp"];
}) {
if (!appleWebApp) return null;
const { capable, title, startupImage, statusBarStyle } = appleWebApp;
return (
<>
{capable
? <meta name="apple-mobile-web-app-capable" content="yes" />
: null}
<Meta name="apple-mobile-web-app-title" content={title} />
{startupImage
? startupImage.map((image, index) => (
<link
key={index}
href={image.url}
media={image.media}
rel="apple-touch-startup-image"
/>
))
: null}
{statusBarStyle
? (
<meta
name="apple-mobile-web-app-status-bar-style"
content={statusBarStyle}
/>
)
: null}
</>
);
}
export function VerificationMeta({
verification,
}: {
verification: ResolvedMetadata["verification"];
}) {
if (!verification) return null;
return (
<>
<MultiMeta
namePrefix="google-site-verification"
contents={verification.google}
/>
<MultiMeta namePrefix="y_key" contents={verification.yahoo} />
<MultiMeta
namePrefix="yandex-verification"
contents={verification.yandex}
/>
<MultiMeta namePrefix="me" contents={verification.me} />
{verification.other
? Object.entries(verification.other).map(([key, value], index) => (
<MultiMeta key={key + index} namePrefix={key} contents={value} />
))
: null}
</>
);
}

View file

@ -1,62 +0,0 @@
import type { ResolvedMetadata } from "../types/metadata-interface";
import type { Icon, IconDescriptor } from "../types/metadata-types";
import React from "react";
function IconDescriptorLink({ icon }: { icon: IconDescriptor }) {
const { url, rel = "icon", ...props } = icon;
return <link rel={rel} href={url.toString()} {...props} />;
}
function IconLink({ rel, icon }: { rel?: string; icon: Icon }) {
if (typeof icon === "object" && !(icon instanceof URL)) {
if (rel) icon.rel = rel;
return <IconDescriptorLink icon={icon} />;
} else {
const href = icon.toString();
return <link rel={rel} href={href} />;
}
}
export function IconsMetadata({ icons }: { icons: ResolvedMetadata["icons"] }) {
if (!icons) return null;
const shortcutList = icons.shortcut;
const iconList = icons.icon;
const appleList = icons.apple;
const otherList = icons.other;
return (
<>
{shortcutList
? shortcutList.map((icon, index) => (
<IconLink
key={`shortcut-${index}`}
rel="shortcut icon"
icon={icon}
/>
))
: null}
{iconList
? iconList.map((icon, index) => (
<IconLink key={`shortcut-${index}`} rel="icon" icon={icon} />
))
: null}
{appleList
? appleList.map((icon, index) => (
<IconLink
key={`apple-${index}`}
rel="apple-touch-icon"
icon={icon}
/>
))
: null}
{otherList
? otherList.map((icon, index) => (
<IconDescriptorLink key={`other-${index}`} icon={icon} />
))
: null}
</>
);
}

View file

@ -1,124 +0,0 @@
import React from "react";
export function Meta({
name,
property,
content,
media,
}: {
name?: string;
property?: string;
media?: string;
content: string | number | URL | null | undefined;
}): React.ReactElement | null {
if (typeof content !== "undefined" && content !== null && content !== "") {
return (
<meta
{...(name ? { name } : { property })}
{...(media ? { media } : undefined)}
content={typeof content === "string" ? content : content.toString()}
/>
);
}
return null;
}
type ExtendMetaContent = Record<
string,
undefined | string | URL | number | boolean | null | undefined
>;
type MultiMetaContent =
| (ExtendMetaContent | string | URL | number)[]
| null
| undefined;
function camelToSnake(camelCaseStr: string) {
return camelCaseStr.replace(/([A-Z])/g, function (match) {
return "_" + match.toLowerCase();
});
}
function getMetaKey(prefix: string, key: string) {
// Use `twitter:image` and `og:image` instead of `twitter:image:url` and `og:image:url`
// to be more compatible as it's a more common format
if ((prefix === "og:image" || prefix === "twitter:image") && key === "url") {
return prefix;
}
if (prefix.startsWith("og:") || prefix.startsWith("twitter:")) {
key = camelToSnake(key);
}
return prefix + ":" + key;
}
function ExtendMeta({
content,
namePrefix,
propertyPrefix,
}: {
content?: ExtendMetaContent;
namePrefix?: string;
propertyPrefix?: string;
}) {
const keyPrefix = namePrefix || propertyPrefix;
if (!content) return null;
return (
<React.Fragment>
{Object.entries(content).map(([k, v], index) => {
return typeof v === "undefined" ? null : (
<Meta
key={keyPrefix + ":" + k + "_" + index}
{...(propertyPrefix && { property: getMetaKey(propertyPrefix, k) })}
{...(namePrefix && { name: getMetaKey(namePrefix, k) })}
content={typeof v === "string" ? v : v?.toString()}
/>
);
})}
</React.Fragment>
);
}
export function MultiMeta({
propertyPrefix,
namePrefix,
contents,
}: {
propertyPrefix?: string;
namePrefix?: string;
contents?: MultiMetaContent | null;
}) {
if (typeof contents === "undefined" || contents === null) {
return null;
}
const keyPrefix = propertyPrefix || namePrefix;
return (
<>
{contents.map((content, index) => {
if (
typeof content === "string" ||
typeof content === "number" ||
content instanceof URL
) {
return (
<Meta
key={keyPrefix + "_" + index}
{...(propertyPrefix
? { property: propertyPrefix }
: { name: namePrefix })}
content={content}
/>
);
} else {
return (
<ExtendMeta
key={keyPrefix + "_" + index}
namePrefix={namePrefix}
propertyPrefix={propertyPrefix}
content={content}
/>
);
}
})}
</>
);
}

View file

@ -1,316 +0,0 @@
import type { ResolvedMetadata } from "../types/metadata-interface";
import type { TwitterAppDescriptor } from "../types/twitter-types";
import React from "react";
import { Meta, MultiMeta } from "./meta";
export function OpenGraphMetadata({
openGraph,
}: {
openGraph: ResolvedMetadata["openGraph"];
}) {
if (!openGraph) {
return null;
}
let typedOpenGraph;
if ("type" in openGraph) {
switch (openGraph.type) {
case "website":
typedOpenGraph = <Meta property="og:type" content="website" />;
break;
case "article":
typedOpenGraph = (
<>
<Meta property="og:type" content="article" />
<Meta
property="article:published_time"
content={openGraph.publishedTime?.toString()}
/>
<Meta
property="article:modified_time"
content={openGraph.modifiedTime?.toString()}
/>
<Meta
property="article:expiration_time"
content={openGraph.expirationTime?.toString()}
/>
<MultiMeta
propertyPrefix="article:author"
contents={openGraph.authors}
/>
<Meta property="article:section" content={openGraph.section} />
<MultiMeta propertyPrefix="article:tag" contents={openGraph.tags} />
</>
);
break;
case "book":
typedOpenGraph = (
<>
<Meta property="og:type" content="book" />
<Meta property="book:isbn" content={openGraph.isbn} />
<Meta
property="book:release_date"
content={openGraph.releaseDate}
/>
<MultiMeta
propertyPrefix="book:author"
contents={openGraph.authors}
/>
<MultiMeta propertyPrefix="book:tag" contents={openGraph.tags} />
</>
);
break;
case "profile":
typedOpenGraph = (
<>
<Meta property="og:type" content="profile" />
<Meta property="profile:first_name" content={openGraph.firstName} />
<Meta property="profile:last_name" content={openGraph.lastName} />
<Meta property="profile:username" content={openGraph.username} />
<Meta property="profile:gender" content={openGraph.gender} />
</>
);
break;
case "music.song":
typedOpenGraph = (
<>
<Meta property="og:type" content="music.song" />
<Meta
property="music:duration"
content={openGraph.duration?.toString()}
/>
<MultiMeta
propertyPrefix="music:album"
contents={openGraph.albums}
/>
<MultiMeta
propertyPrefix="music:musician"
contents={openGraph.musicians}
/>
</>
);
break;
case "music.album":
typedOpenGraph = (
<>
<Meta property="og:type" content="music.album" />
<MultiMeta propertyPrefix="music:song" contents={openGraph.songs} />
<MultiMeta
propertyPrefix="music:musician"
contents={openGraph.musicians}
/>
<Meta
property="music:release_date"
content={openGraph.releaseDate}
/>
</>
);
break;
case "music.playlist":
typedOpenGraph = (
<>
<Meta property="og:type" content="music.playlist" />
<MultiMeta propertyPrefix="music:song" contents={openGraph.songs} />
<MultiMeta
propertyPrefix="music:creator"
contents={openGraph.creators}
/>
</>
);
break;
case "music.radio_station":
typedOpenGraph = (
<>
<Meta property="og:type" content="music.radio_station" />
<MultiMeta
propertyPrefix="music:creator"
contents={openGraph.creators}
/>
</>
);
break;
case "video.movie":
typedOpenGraph = (
<>
<Meta property="og:type" content="video.movie" />
<MultiMeta
propertyPrefix="video:actor"
contents={openGraph.actors}
/>
<MultiMeta
propertyPrefix="video:director"
contents={openGraph.directors}
/>
<MultiMeta
propertyPrefix="video:writer"
contents={openGraph.writers}
/>
<Meta property="video:duration" content={openGraph.duration} />
<Meta
property="video:release_date"
content={openGraph.releaseDate}
/>
<MultiMeta propertyPrefix="video:tag" contents={openGraph.tags} />
</>
);
break;
case "video.episode":
typedOpenGraph = (
<>
<Meta property="og:type" content="video.episode" />
<MultiMeta
propertyPrefix="video:actor"
contents={openGraph.actors}
/>
<MultiMeta
propertyPrefix="video:director"
contents={openGraph.directors}
/>
<MultiMeta
propertyPrefix="video:writer"
contents={openGraph.writers}
/>
<Meta property="video:duration" content={openGraph.duration} />
<Meta
property="video:release_date"
content={openGraph.releaseDate}
/>
<MultiMeta propertyPrefix="video:tag" contents={openGraph.tags} />
<Meta property="video:series" content={openGraph.series} />
</>
);
break;
case "video.tv_show":
typedOpenGraph = <Meta property="og:type" content="video.tv_show" />;
break;
case "video.other":
typedOpenGraph = <Meta property="og:type" content="video.other" />;
break;
default:
throw new Error("Invalid OpenGraph type: " + (openGraph as any).type);
}
}
return (
<>
<Meta property="og:determiner" content={openGraph.determiner} />
<Meta property="og:title" content={openGraph.title?.absolute} />
<Meta property="og:description" content={openGraph.description} />
<Meta property="og:url" content={openGraph.url?.toString()} />
<Meta property="og:site_name" content={openGraph.siteName} />
<Meta property="og:locale" content={openGraph.locale} />
<Meta property="og:country_name" content={openGraph.countryName} />
<Meta property="og:ttl" content={openGraph.ttl?.toString()} />
<MultiMeta propertyPrefix="og:image" contents={openGraph.images} />
<MultiMeta propertyPrefix="og:video" contents={openGraph.videos} />
<MultiMeta propertyPrefix="og:audio" contents={openGraph.audio} />
<MultiMeta propertyPrefix="og:email" contents={openGraph.emails} />
<MultiMeta
propertyPrefix="og:phone_number"
contents={openGraph.phoneNumbers}
/>
<MultiMeta
propertyPrefix="og:fax_number"
contents={openGraph.faxNumbers}
/>
<MultiMeta
propertyPrefix="og:locale:alternate"
contents={openGraph.alternateLocale}
/>
{typedOpenGraph}
</>
);
}
function TwitterAppItem({
app,
type,
}: {
app: TwitterAppDescriptor;
type: "iphone" | "ipad" | "googleplay";
}) {
return (
<>
<Meta name={`twitter:app:name:${type}`} content={app.name} />
<Meta name={`twitter:app:id:${type}`} content={app.id[type]} />
<Meta
name={`twitter:app:url:${type}`}
content={app.url?.[type]?.toString()}
/>
</>
);
}
export function TwitterMetadata({
twitter,
}: {
twitter: ResolvedMetadata["twitter"];
}) {
if (!twitter) return null;
const { card } = twitter;
return (
<>
<Meta name="twitter:card" content={card} />
<Meta name="twitter:site" content={twitter.site} />
<Meta name="twitter:site:id" content={twitter.siteId} />
<Meta name="twitter:creator" content={twitter.creator} />
<Meta name="twitter:creator:id" content={twitter.creatorId} />
<Meta name="twitter:title" content={twitter.title?.absolute} />
<Meta name="twitter:description" content={twitter.description} />
<MultiMeta namePrefix="twitter:image" contents={twitter.images} />
{card === "player"
? twitter.players.map((player, index) => (
<React.Fragment key={index}>
<Meta
name="twitter:player"
content={player.playerUrl.toString()}
/>
<Meta
name="twitter:player:stream"
content={player.streamUrl.toString()}
/>
<Meta name="twitter:player:width" content={player.width} />
<Meta name="twitter:player:height" content={player.height} />
</React.Fragment>
))
: null}
{card === "app"
? (
<>
<TwitterAppItem app={twitter.app} type="iphone" />
<TwitterAppItem app={twitter.app} type="ipad" />
<TwitterAppItem app={twitter.app} type="googleplay" />
</>
)
: null}
</>
);
}
export function AppLinksMeta({
appLinks,
}: {
appLinks: ResolvedMetadata["appLinks"];
}) {
if (!appLinks) return null;
return (
<>
<MultiMeta propertyPrefix="al:ios" contents={appLinks.ios} />
<MultiMeta propertyPrefix="al:iphone" contents={appLinks.iphone} />
<MultiMeta propertyPrefix="al:ipad" contents={appLinks.ipad} />
<MultiMeta propertyPrefix="al:android" contents={appLinks.android} />
<MultiMeta
propertyPrefix="al:windows_phone"
contents={appLinks.windows_phone}
/>
<MultiMeta propertyPrefix="al:windows" contents={appLinks.windows} />
<MultiMeta
propertyPrefix="al:windows_universal"
contents={appLinks.windows_universal}
/>
<MultiMeta propertyPrefix="al:web" contents={appLinks.web} />
</>
);
}

View file

@ -1,20 +0,0 @@
function resolveArray<T>(value: T): T[] {
if (Array.isArray(value)) {
return value;
}
return [value];
}
function resolveAsArrayOrUndefined<T extends unknown | readonly unknown[]>(
value: T | T[] | undefined | null,
): undefined | T[] {
if (typeof value === "undefined" || value === null) {
return undefined;
}
if (Array.isArray(value)) {
return value;
}
return [value];
}
export { resolveArray, resolveAsArrayOrUndefined };

View file

@ -1,67 +0,0 @@
import { isMetadataRoute, isMetadataRouteFile } from "./is-metadata-route";
import path from "../../shared/lib/isomorphic/path";
import { djb2Hash } from "../../shared/lib/hash";
/*
* If there's special convention like (...) or @ in the page path,
* Give it a unique hash suffix to avoid conflicts
*
* e.g.
* /app/open-graph.tsx -> /open-graph/route
* /app/(post)/open-graph.tsx -> /open-graph/route-[0-9a-z]{6}
*/
export function getMetadataRouteSuffix(page: string) {
let suffix = "";
if ((page.includes("(") && page.includes(")")) || page.includes("@")) {
suffix = djb2Hash(page).toString(36).slice(0, 6);
}
return suffix;
}
/**
* Map metadata page key to the corresponding route
*
* static file page key: /app/robots.txt -> /robots.txt -> /robots.txt/route
* dynamic route page key: /app/robots.tsx -> /robots -> /robots.txt/route
*
* @param page
* @returns
*/
export function normalizeMetadataRoute(page: string) {
let route = page;
if (isMetadataRoute(page)) {
// Remove the file extension, e.g. /route-path/robots.txt -> /route-path
const pathnamePrefix = page.slice(0, -(path.basename(page).length + 1));
const suffix = getMetadataRouteSuffix(pathnamePrefix);
if (route === "/sitemap") {
route += ".xml";
}
if (route === "/robots") {
route += ".txt";
}
if (route === "/manifest") {
route += ".webmanifest";
}
// Support both /<metadata-route.ext> and custom routes /<metadata-route>/route.ts.
// If it's a metadata file route, we need to append /[id]/route to the page.
if (!route.endsWith("/route")) {
const isStaticMetadataFile = isMetadataRouteFile(route, [], true);
const { dir, name: baseName, ext } = path.parse(route);
const isSingleRoute = page.startsWith("/sitemap") ||
page.startsWith("/robots") ||
page.startsWith("/manifest") ||
isStaticMetadataFile;
route = path.join(
dir,
`${baseName}${suffix ? `-${suffix}` : ""}${ext}`,
isSingleRoute ? "" : "[[...__metadata_id__]]",
"route",
);
}
}
return route;
}

View file

@ -1,136 +0,0 @@
export const STATIC_METADATA_IMAGES = {
icon: {
filename: "icon",
extensions: ["ico", "jpg", "jpeg", "png", "svg"],
},
apple: {
filename: "apple-icon",
extensions: ["jpg", "jpeg", "png"],
},
favicon: {
filename: "favicon",
extensions: ["ico"],
},
openGraph: {
filename: "opengraph-image",
extensions: ["jpg", "jpeg", "png", "gif"],
},
twitter: {
filename: "twitter-image",
extensions: ["jpg", "jpeg", "png", "gif"],
},
} as const;
// Match routes that are metadata routes, e.g. /sitemap.xml, /favicon.<ext>, /<icon>.<ext>, etc.
// TODO-METADATA: support more metadata routes with more extensions
const defaultExtensions = ["js", "jsx", "ts", "tsx"];
const getExtensionRegexString = (extensions: readonly string[]) =>
`(?:${extensions.join("|")})`;
// When you only pass the file extension as `[]`, it will only match the static convention files
// e.g. /robots.txt, /sitemap.xml, /favicon.ico, /manifest.json
// When you pass the file extension as `['js', 'jsx', 'ts', 'tsx']`, it will also match the dynamic convention files
// e.g. /robots.js, /sitemap.tsx, /favicon.jsx, /manifest.ts
// When `withExtension` is false, it will match the static convention files without the extension, by default it's true
// e.g. /robots, /sitemap, /favicon, /manifest, use to match dynamic API routes like app/robots.ts
export function isMetadataRouteFile(
appDirRelativePath: string,
pageExtensions: string[],
withExtension: boolean,
) {
const metadataRouteFilesRegex = [
new RegExp(
`^[\\\\/]robots${
withExtension
? `\\.${getExtensionRegexString(pageExtensions.concat("txt"))}`
: ""
}`,
),
new RegExp(
`^[\\\\/]sitemap${
withExtension
? `\\.${getExtensionRegexString(pageExtensions.concat("xml"))}`
: ""
}`,
),
new RegExp(
`^[\\\\/]manifest${
withExtension
? `\\.${
getExtensionRegexString(
pageExtensions.concat("webmanifest", "json"),
)
}`
: ""
}`,
),
new RegExp(`^[\\\\/]favicon\\.ico$`),
// TODO-METADATA: add dynamic routes for metadata images
new RegExp(
`[\\\\/]${STATIC_METADATA_IMAGES.icon.filename}${
withExtension
? `\\.${
getExtensionRegexString(
pageExtensions.concat(STATIC_METADATA_IMAGES.icon.extensions),
)
}`
: ""
}`,
),
new RegExp(
`[\\\\/]${STATIC_METADATA_IMAGES.apple.filename}${
withExtension
? `\\.${
getExtensionRegexString(
pageExtensions.concat(STATIC_METADATA_IMAGES.apple.extensions),
)
}`
: ""
}`,
),
new RegExp(
`[\\\\/]${STATIC_METADATA_IMAGES.openGraph.filename}${
withExtension
? `\\.${
getExtensionRegexString(
pageExtensions.concat(
STATIC_METADATA_IMAGES.openGraph.extensions,
),
)
}`
: ""
}`,
),
new RegExp(
`[\\\\/]${STATIC_METADATA_IMAGES.twitter.filename}${
withExtension
? `\\.${
getExtensionRegexString(
pageExtensions.concat(STATIC_METADATA_IMAGES.twitter.extensions),
)
}`
: ""
}`,
),
];
return metadataRouteFilesRegex.some((r) => r.test(appDirRelativePath));
}
/*
* Remove the 'app' prefix or '/route' suffix, only check the route name since they're only allowed in root app directory
* e.g.
* /app/robots -> /robots
* app/robots -> /robots
* /robots -> /robots
*/
export function isMetadataRoute(route: string): boolean {
let page = route.replace(/^\/?app\//, "").replace(/\/route$/, "");
if (page[0] !== "/") page = "/" + page;
return (
!page.endsWith("/page") &&
isMetadataRouteFile(page, defaultExtensions, false)
);
}

View file

@ -1,58 +0,0 @@
import React from "react";
import {
AppleWebAppMeta,
BasicMetadata,
FormatDetectionMeta,
ItunesMeta,
VerificationMeta,
} from "./generate/basic";
import { AlternatesMetadata } from "./generate/alternate";
import {
AppLinksMeta,
OpenGraphMetadata,
TwitterMetadata,
} from "./generate/opengraph";
import { IconsMetadata } from "./generate/icons";
import { accumulateMetadata, resolveMetadata } from "./resolve-metadata";
import { LoaderTree } from "../../server/lib/app-dir-module";
import { GetDynamicParamFromSegment } from "../../server/app-render/app-render";
// Generate the actual React elements from the resolved metadata.
export async function MetadataTree({
tree,
pathname,
searchParams,
getDynamicParamFromSegment,
}: {
tree: LoaderTree;
pathname: string;
searchParams: { [key: string]: any };
getDynamicParamFromSegment: GetDynamicParamFromSegment;
}) {
const options = {
pathname,
};
const resolvedMetadata = await resolveMetadata({
tree,
parentParams: {},
metadataItems: [],
searchParams,
getDynamicParamFromSegment,
});
const metadata = await accumulateMetadata(resolvedMetadata, options);
return (
<>
<BasicMetadata metadata={metadata} />
<AlternatesMetadata alternates={metadata.alternates} />
<ItunesMeta itunes={metadata.itunes} />
<FormatDetectionMeta formatDetection={metadata.formatDetection} />
<VerificationMeta verification={metadata.verification} />
<AppleWebAppMeta appleWebApp={metadata.appleWebApp} />
<OpenGraphMetadata openGraph={metadata.openGraph} />
<TwitterMetadata twitter={metadata.twitter} />
<AppLinksMeta appLinks={metadata.appLinks} />
<IconsMetadata icons={metadata.icons} />
</>
);
}

View file

@ -1,453 +0,0 @@
import type {
Metadata,
ResolvedMetadata,
ResolvingMetadata,
} from "./types/metadata-interface";
import type { MetadataImageModule } from "../../build/webpack/loaders/metadata/types";
import type { GetDynamicParamFromSegment } from "../../server/app-render/app-render";
import { createDefaultMetadata } from "./default-metadata";
import {
resolveOpenGraph,
resolveTwitter,
} from "./resolvers/resolve-opengraph";
import { resolveTitle } from "./resolvers/resolve-title";
import { resolveAsArrayOrUndefined } from "./generate/utils";
import { isClientReference } from "../client-reference";
import {
getLayoutOrPageModule,
LoaderTree,
} from "../../server/lib/app-dir-module";
import { ComponentsType } from "../../build/webpack/loaders/next-app-loader";
import { interopDefault } from "../interop-default";
import {
resolveAlternates,
resolveAppleWebApp,
resolveAppLinks,
resolveRobots,
resolveThemeColor,
resolveVerification,
resolveViewport,
} from "./resolvers/resolve-basics";
import { resolveIcons } from "./resolvers/resolve-icons";
import { getTracer } from "../../server/lib/trace/tracer";
import { ResolveMetadataSpan } from "../../server/lib/trace/constants";
import { Twitter } from "./types/twitter-types";
import { OpenGraph } from "./types/opengraph-types";
import { PAGE_SEGMENT_KEY } from "../../shared/lib/constants";
import process from "node:process";
type StaticMetadata = Awaited<ReturnType<typeof resolveStaticMetadata>>;
type MetadataResolver = (
_parent: ResolvingMetadata,
) => Metadata | Promise<Metadata>;
export type MetadataItems = [
Metadata | MetadataResolver | null,
StaticMetadata,
][];
function mergeStaticMetadata(
metadata: ResolvedMetadata,
staticFilesMetadata: StaticMetadata,
) {
if (!staticFilesMetadata) return;
const { icon, apple, openGraph, twitter, manifest } = staticFilesMetadata;
if (icon || apple) {
metadata.icons = {
icon: icon || [],
apple: apple || [],
};
}
if (twitter) {
const resolvedTwitter = resolveTwitter(
{ ...metadata.twitter, images: twitter } as Twitter,
metadata.metadataBase,
);
metadata.twitter = resolvedTwitter;
}
if (openGraph) {
const resolvedOpenGraph = resolveOpenGraph(
{ ...metadata.openGraph, images: openGraph } as OpenGraph,
metadata.metadataBase,
);
metadata.openGraph = resolvedOpenGraph;
}
if (manifest) {
metadata.manifest = manifest;
}
return metadata;
}
// Merge the source metadata into the resolved target metadata.
function merge({
target,
source,
staticFilesMetadata,
titleTemplates,
options,
}: {
target: ResolvedMetadata;
source: Metadata | null;
staticFilesMetadata: StaticMetadata;
titleTemplates: {
title: string | null;
twitter: string | null;
openGraph: string | null;
};
options: MetadataAccumulationOptions;
}) {
// If there's override metadata, prefer it otherwise fallback to the default metadata.
const metadataBase = typeof source?.metadataBase !== "undefined"
? source.metadataBase
: target.metadataBase;
for (const key_ in source) {
const key = key_ as keyof Metadata;
switch (key) {
case "title": {
target.title = resolveTitle(source.title, titleTemplates.title);
break;
}
case "alternates": {
target.alternates = resolveAlternates(source.alternates, metadataBase, {
pathname: options.pathname,
});
break;
}
case "openGraph": {
target.openGraph = resolveOpenGraph(source.openGraph, metadataBase);
if (target.openGraph) {
target.openGraph.title = resolveTitle(
target.openGraph.title,
titleTemplates.openGraph,
);
}
break;
}
case "twitter": {
target.twitter = resolveTwitter(source.twitter, metadataBase);
if (target.twitter) {
target.twitter.title = resolveTitle(
target.twitter.title,
titleTemplates.twitter,
);
}
break;
}
case "verification":
target.verification = resolveVerification(source.verification);
break;
case "viewport": {
target.viewport = resolveViewport(source.viewport);
break;
}
case "icons": {
target.icons = resolveIcons(source.icons);
break;
}
case "appleWebApp":
target.appleWebApp = resolveAppleWebApp(source.appleWebApp);
break;
case "appLinks":
target.appLinks = resolveAppLinks(source.appLinks);
break;
case "robots": {
target.robots = resolveRobots(source.robots);
break;
}
case "themeColor": {
target.themeColor = resolveThemeColor(source.themeColor);
break;
}
case "archives":
case "assets":
case "bookmarks":
case "keywords":
case "authors": {
// FIXME: type inferring
// @ts-ignore
target[key] = resolveAsArrayOrUndefined(source[key]) || null;
break;
}
// directly assign fields that fallback to null
case "applicationName":
case "description":
case "generator":
case "creator":
case "publisher":
case "category":
case "classification":
case "referrer":
case "colorScheme":
case "itunes":
case "formatDetection":
case "manifest":
// @ts-ignore TODO: support inferring
target[key] = source[key] || null;
break;
case "other":
target.other = Object.assign({}, target.other, source.other);
break;
case "metadataBase":
target.metadataBase = metadataBase;
break;
default:
break;
}
}
mergeStaticMetadata(target, staticFilesMetadata);
}
async function getDefinedMetadata(
mod: any,
props: any,
route: string,
): Promise<Metadata | MetadataResolver | null> {
// Layer is a client component, we just skip it. It can't have metadata exported.
// Return early to avoid accessing properties error for client references.
if (isClientReference(mod)) {
return null;
}
return (
(mod.generateMetadata
? (parent: ResolvingMetadata) =>
getTracer().trace(
ResolveMetadataSpan.generateMetadata,
{
spanName: `generateMetadata ${route}`,
attributes: {
"next.page": route,
},
},
() => mod.generateMetadata(props, parent),
)
: mod.metadata) || null
);
}
async function collectStaticImagesFiles(
metadata: ComponentsType["metadata"],
props: any,
type: keyof NonNullable<ComponentsType["metadata"]>,
) {
if (!metadata?.[type]) return undefined;
const iconPromises = metadata[type as "icon" | "apple"].map(
async (imageModule: (p: any) => Promise<MetadataImageModule[]>) =>
interopDefault(await imageModule(props)),
);
return iconPromises?.length > 0
? (await Promise.all(iconPromises))?.flat()
: undefined;
}
async function resolveStaticMetadata(components: ComponentsType, props: any) {
const { metadata } = components;
if (!metadata) return null;
const [icon, apple, openGraph, twitter] = await Promise.all([
collectStaticImagesFiles(metadata, props, "icon"),
collectStaticImagesFiles(metadata, props, "apple"),
collectStaticImagesFiles(metadata, props, "openGraph"),
collectStaticImagesFiles(metadata, props, "twitter"),
]);
const staticMetadata = {
icon,
apple,
openGraph,
twitter,
manifest: metadata.manifest,
};
return staticMetadata;
}
// [layout.metadata, static files metadata] -> ... -> [page.metadata, static files metadata]
export async function collectMetadata({
tree,
metadataItems: array,
props,
route,
}: {
tree: LoaderTree;
metadataItems: MetadataItems;
props: any;
route: string;
}) {
const [mod, modType] = await getLayoutOrPageModule(tree);
if (modType) {
route += `/${modType}`;
}
const staticFilesMetadata = await resolveStaticMetadata(tree[2], props);
const metadataExport = mod
? await getDefinedMetadata(mod, props, route)
: null;
array.push([metadataExport, staticFilesMetadata]);
}
export async function resolveMetadata({
tree,
parentParams,
metadataItems,
treePrefix = [],
getDynamicParamFromSegment,
searchParams,
}: {
tree: LoaderTree;
parentParams: { [key: string]: any };
metadataItems: MetadataItems;
/** The provided tree can be a nested subtree; this argument gives the path of that subtree */
treePrefix?: string[];
getDynamicParamFromSegment: GetDynamicParamFromSegment;
searchParams: { [key: string]: any };
}): Promise<MetadataItems> {
const [segment, parallelRoutes, { page }] = tree;
const currentTreePrefix = [...treePrefix, segment];
const isPage = typeof page !== "undefined";
// Handle dynamic segment params.
const segmentParam = getDynamicParamFromSegment(segment);
/**
* Create object holding the parent params and current params
*/
const currentParams =
// Handle null case where dynamic param is optional
segmentParam && segmentParam.value !== null
? {
...parentParams,
[segmentParam.param]: segmentParam.value,
}
// Pass through parent params to children
: parentParams;
const layerProps = {
params: currentParams,
...(isPage && { searchParams }),
};
await collectMetadata({
tree,
metadataItems,
props: layerProps,
route: currentTreePrefix
// __PAGE__ shouldn't be shown in a route
.filter((s) => s !== PAGE_SEGMENT_KEY)
.join("/"),
});
for (const key in parallelRoutes) {
const childTree = parallelRoutes[key];
await resolveMetadata({
tree: childTree,
metadataItems,
parentParams: currentParams,
treePrefix: currentTreePrefix,
searchParams,
getDynamicParamFromSegment,
});
}
return metadataItems;
}
type MetadataAccumulationOptions = {
pathname: string;
};
export async function accumulateMetadata(
metadataItems: MetadataItems,
options: MetadataAccumulationOptions,
): Promise<ResolvedMetadata> {
const resolvedMetadata = createDefaultMetadata();
const resolvers: ((value: ResolvedMetadata) => void)[] = [];
const generateMetadataResults: (Metadata | Promise<Metadata>)[] = [];
let titleTemplates: {
title: string | null;
twitter: string | null;
openGraph: string | null;
} = {
title: null,
twitter: null,
openGraph: null,
};
// Loop over all metadata items again, merging synchronously any static object exports,
// awaiting any static promise exports, and resolving parent metadata and awaiting any generated metadata
let resolvingIndex = 0;
for (let i = 0; i < metadataItems.length; i++) {
const [metadataExport, staticFilesMetadata] = metadataItems[i];
let metadata: Metadata | null = null;
if (typeof metadataExport === "function") {
if (!resolvers.length) {
for (let j = i; j < metadataItems.length; j++) {
const [preloadMetadataExport] = metadataItems[j];
// call each `generateMetadata` function concurrently and stash its resolver
if (typeof preloadMetadataExport === "function") {
generateMetadataResults.push(
preloadMetadataExport(
new Promise((resolve) => {
resolvers.push(resolve);
}),
),
);
}
}
}
const resolveParent = resolvers[resolvingIndex];
const generatedMetadata = generateMetadataResults[resolvingIndex++];
// In dev we clone and freeze to prevent relying on mutating resolvedMetadata directly.
// In prod we just pass resolvedMetadata through without any copying.
const currentResolvedMetadata: ResolvedMetadata =
process.env.NODE_ENV === "development"
? Object.freeze(
require(
"next/dist/compiled/@edge-runtime/primitives/structured-clone",
).structuredClone(
resolvedMetadata,
),
)
: resolvedMetadata;
// This resolve should unblock the generateMetadata function if it awaited the parent
// argument. If it didn't await the parent argument it might already have a value since it was
// called concurrently. Regardless we await the return value before continuing on to the next layer
resolveParent(currentResolvedMetadata);
metadata = generatedMetadata instanceof Promise
? await generatedMetadata
: generatedMetadata;
} else if (metadataExport !== null && typeof metadataExport === "object") {
// This metadataExport is the object form
metadata = metadataExport;
}
merge({
options,
target: resolvedMetadata,
source: metadata,
staticFilesMetadata,
titleTemplates,
});
// If the layout is the same layer with page, skip the leaf layout and leaf page
// The leaf layout and page are the last two items
if (i < metadataItems.length - 2) {
titleTemplates = {
title: resolvedMetadata.title?.template || null,
openGraph: resolvedMetadata.openGraph?.title?.template || null,
twitter: resolvedMetadata.twitter?.title?.template || null,
};
}
}
return resolvedMetadata;
}

View file

@ -1,259 +0,0 @@
import type {
AlternateLinkDescriptor,
ResolvedAlternateURLs,
} from "../types/alternative-urls-types";
import type { Metadata, ResolvedMetadata } from "../types/metadata-interface";
import type { ResolvedVerification } from "../types/metadata-types";
import type {
FieldResolver,
FieldResolverWithMetadataBase,
} from "../types/resolvers";
import type { Viewport } from "../types/extra-types";
import path from "path";
import { resolveAsArrayOrUndefined } from "../generate/utils";
import { resolveUrl } from "./resolve-url";
import { ViewPortKeys } from "../constants";
// Resolve with `metadataBase` if it's present, otherwise resolve with `pathname`.
// Resolve with `pathname` if `url` is a relative path.
function resolveAlternateUrl(
url: string | URL,
metadataBase: URL | null,
pathname: string,
) {
if (typeof url === "string" && url.startsWith("./")) {
url = path.resolve(pathname, url);
} else if (url instanceof URL) {
url = new URL(pathname, url);
}
const result = metadataBase ? resolveUrl(url, metadataBase) : url;
return result.toString();
}
export const resolveThemeColor: FieldResolver<"themeColor"> = (themeColor) => {
if (!themeColor) return null;
const themeColorDescriptors: ResolvedMetadata["themeColor"] = [];
resolveAsArrayOrUndefined(themeColor)?.forEach((descriptor) => {
if (typeof descriptor === "string") {
themeColorDescriptors.push({ color: descriptor });
} else if (typeof descriptor === "object") {
themeColorDescriptors.push({
color: descriptor.color,
media: descriptor.media,
});
}
});
return themeColorDescriptors;
};
export const resolveViewport: FieldResolver<"viewport"> = (viewport) => {
let resolved: ResolvedMetadata["viewport"] = null;
if (typeof viewport === "string") {
resolved = viewport;
} else if (viewport) {
resolved = "";
for (const viewportKey_ in ViewPortKeys) {
const viewportKey = viewportKey_ as keyof Viewport;
if (viewportKey in viewport) {
let value = viewport[viewportKey];
if (typeof value === "boolean") value = value ? "yes" : "no";
if (resolved) resolved += ", ";
resolved += `${ViewPortKeys[viewportKey]}=${value}`;
}
}
}
return resolved;
};
function resolveUrlValuesOfObject(
obj:
| Record<string, string | URL | AlternateLinkDescriptor[] | null>
| null
| undefined,
metadataBase: ResolvedMetadata["metadataBase"],
pathname: string,
): null | Record<string, AlternateLinkDescriptor[]> {
if (!obj) return null;
const result: Record<string, AlternateLinkDescriptor[]> = {};
for (const [key, value] of Object.entries(obj)) {
if (typeof value === "string" || value instanceof URL) {
result[key] = [
{
url: resolveAlternateUrl(value, metadataBase, pathname), // metadataBase ? resolveUrl(value, metadataBase)! : value,
},
];
} else {
result[key] = [];
value?.forEach((item, index) => {
const url = resolveAlternateUrl(item.url, metadataBase, pathname);
result[key][index] = {
url,
title: item.title,
};
});
}
}
return result;
}
function resolveCanonicalUrl(
urlOrDescriptor: string | URL | null | AlternateLinkDescriptor | undefined,
metadataBase: URL | null,
pathname: string,
): null | AlternateLinkDescriptor {
if (!urlOrDescriptor) return null;
const url =
typeof urlOrDescriptor === "string" || urlOrDescriptor instanceof URL
? urlOrDescriptor
: urlOrDescriptor.url;
// Return a string url because structuredClone can't handle a URL instance
return {
url: resolveAlternateUrl(url, metadataBase, pathname),
};
}
export const resolveAlternates: FieldResolverWithMetadataBase<
"alternates",
{ pathname: string }
> = (alternates, metadataBase, { pathname }) => {
if (!alternates) return null;
const canonical = resolveCanonicalUrl(
alternates.canonical,
metadataBase,
pathname,
);
const languages = resolveUrlValuesOfObject(
alternates.languages,
metadataBase,
pathname,
);
const media = resolveUrlValuesOfObject(
alternates.media,
metadataBase,
pathname,
);
const types = resolveUrlValuesOfObject(
alternates.types,
metadataBase,
pathname,
);
const result: ResolvedAlternateURLs = {
canonical,
languages,
media,
types,
};
return result;
};
const robotsKeys = [
"noarchive",
"nosnippet",
"noimageindex",
"nocache",
"notranslate",
"indexifembedded",
"nositelinkssearchbox",
"unavailable_after",
"max-video-preview",
"max-image-preview",
"max-snippet",
] as const;
const resolveRobotsValue: (robots: Metadata["robots"]) => string | null = (
robots,
) => {
if (!robots) return null;
if (typeof robots === "string") return robots;
const values: string[] = [];
if (robots.index) values.push("index");
else if (typeof robots.index === "boolean") values.push("noindex");
if (robots.follow) values.push("follow");
else if (typeof robots.follow === "boolean") values.push("nofollow");
for (const key of robotsKeys) {
const value = robots[key];
if (typeof value !== "undefined" && value !== false) {
values.push(typeof value === "boolean" ? key : `${key}:${value}`);
}
}
return values.join(", ");
};
export const resolveRobots: FieldResolver<"robots"> = (robots) => {
if (!robots) return null;
return {
basic: resolveRobotsValue(robots),
googleBot: typeof robots !== "string"
? resolveRobotsValue(robots.googleBot)
: null,
};
};
const VerificationKeys = ["google", "yahoo", "yandex", "me", "other"] as const;
export const resolveVerification: FieldResolver<"verification"> = (
verification,
) => {
if (!verification) return null;
const res: ResolvedVerification = {};
for (const key of VerificationKeys) {
const value = verification[key];
if (value) {
if (key === "other") {
res.other = {};
for (const otherKey in verification.other) {
const otherValue = resolveAsArrayOrUndefined(
verification.other[otherKey],
);
if (otherValue) res.other[otherKey] = otherValue;
}
} else res[key] = resolveAsArrayOrUndefined(value) as (string | number)[];
}
}
return res;
};
export const resolveAppleWebApp: FieldResolver<"appleWebApp"> = (appWebApp) => {
if (!appWebApp) return null;
if (appWebApp === true) {
return {
capable: true,
};
}
const startupImages = appWebApp.startupImage
? resolveAsArrayOrUndefined(appWebApp.startupImage)?.map((item) =>
typeof item === "string" ? { url: item } : item
)
: null;
return {
capable: "capable" in appWebApp ? !!appWebApp.capable : true,
title: appWebApp.title || null,
startupImage: startupImages,
statusBarStyle: appWebApp.statusBarStyle || "default",
};
};
export const resolveAppLinks: FieldResolver<"appLinks"> = (appLinks) => {
if (!appLinks) return null;
for (const key in appLinks) {
// @ts-ignore // TODO: type infer
appLinks[key] = resolveAsArrayOrUndefined(appLinks[key]);
}
return appLinks as ResolvedMetadata["appLinks"];
};

View file

@ -1,34 +0,0 @@
import type { ResolvedMetadata } from "../types/metadata-interface";
import type { Icon, IconDescriptor } from "../types/metadata-types";
import type { FieldResolver } from "../types/resolvers";
import { resolveAsArrayOrUndefined } from "../generate/utils";
import { isStringOrURL } from "./resolve-url";
import { IconKeys } from "../constants";
export function resolveIcon(icon: Icon): IconDescriptor {
if (isStringOrURL(icon)) return { url: icon };
else if (Array.isArray(icon)) return icon;
return icon;
}
export const resolveIcons: FieldResolver<"icons"> = (icons) => {
if (!icons) {
return null;
}
const resolved: ResolvedMetadata["icons"] = {
icon: [],
apple: [],
};
if (Array.isArray(icons)) {
resolved.icon = icons.map(resolveIcon).filter(Boolean);
} else if (isStringOrURL(icons)) {
resolved.icon = [resolveIcon(icons)];
} else {
for (const key of IconKeys) {
const values = resolveAsArrayOrUndefined(icons[key]);
if (values) resolved[key] = values.map(resolveIcon);
}
}
return resolved;
};

View file

@ -1,147 +0,0 @@
import type { Metadata, ResolvedMetadata } from "../types/metadata-interface";
import type {
OpenGraph,
OpenGraphType,
ResolvedOpenGraph,
} from "../types/opengraph-types";
import type { FieldResolverWithMetadataBase } from "../types/resolvers";
import type { ResolvedTwitterMetadata, Twitter } from "../types/twitter-types";
import { resolveAsArrayOrUndefined } from "../generate/utils";
import { isStringOrURL, resolveUrl } from "./resolve-url";
const OgTypeFields = {
article: ["authors", "tags"],
song: ["albums", "musicians"],
playlist: ["albums", "musicians"],
radio: ["creators"],
video: ["actors", "directors", "writers", "tags"],
basic: [
"emails",
"phoneNumbers",
"faxNumbers",
"alternateLocale",
"audio",
"videos",
],
} as const;
function resolveImages(
images: Twitter["images"],
metadataBase: ResolvedMetadata["metadataBase"],
): NonNullable<ResolvedMetadata["twitter"]>["images"];
function resolveImages(
images: OpenGraph["images"],
metadataBase: ResolvedMetadata["metadataBase"],
): NonNullable<ResolvedMetadata["openGraph"]>["images"];
function resolveImages(
images: OpenGraph["images"] | Twitter["images"],
metadataBase: ResolvedMetadata["metadataBase"],
):
| NonNullable<ResolvedMetadata["twitter"]>["images"]
| NonNullable<ResolvedMetadata["openGraph"]>["images"] {
const resolvedImages = resolveAsArrayOrUndefined(images);
resolvedImages?.forEach((item, index, array) => {
if (isStringOrURL(item)) {
array[index] = {
url: resolveUrl(item, metadataBase)!,
};
} else {
// Update image descriptor url
item.url = resolveUrl(item.url, metadataBase)!;
}
});
return resolvedImages;
}
function getFieldsByOgType(ogType: OpenGraphType | undefined) {
switch (ogType) {
case "article":
case "book":
return OgTypeFields.article;
case "music.song":
case "music.album":
return OgTypeFields.song;
case "music.playlist":
return OgTypeFields.playlist;
case "music.radio_station":
return OgTypeFields.radio;
case "video.movie":
case "video.episode":
return OgTypeFields.video;
default:
return OgTypeFields.basic;
}
}
export const resolveOpenGraph: FieldResolverWithMetadataBase<"openGraph"> = (
openGraph: Metadata["openGraph"],
metadataBase: ResolvedMetadata["metadataBase"],
) => {
if (!openGraph) return null;
const url = resolveUrl(openGraph.url, metadataBase);
const resolved = { ...openGraph } as ResolvedOpenGraph;
function assignProps(og: OpenGraph) {
const ogType = og && "type" in og ? og.type : undefined;
const keys = getFieldsByOgType(ogType);
for (const k of keys) {
const key = k as keyof ResolvedOpenGraph;
if (key in og && key !== "url") {
const value = og[key];
if (value) {
const arrayValue = resolveAsArrayOrUndefined(value); /// TODO: improve typing inferring
(resolved as any)[key] = arrayValue;
}
}
}
resolved.images = resolveImages(og.images, metadataBase);
}
assignProps(openGraph);
resolved.url = url;
return resolved;
};
const TwitterBasicInfoKeys = [
"site",
"siteId",
"creator",
"creatorId",
"description",
] as const;
export const resolveTwitter: FieldResolverWithMetadataBase<"twitter"> = (
twitter,
metadataBase,
) => {
if (!twitter) return null;
const resolved = {
...twitter,
card: "card" in twitter ? twitter.card : "summary",
} as ResolvedTwitterMetadata;
for (const infoKey of TwitterBasicInfoKeys) {
resolved[infoKey] = twitter[infoKey] || null;
}
resolved.images = resolveImages(twitter.images, metadataBase);
if ("card" in resolved) {
switch (resolved.card) {
case "player": {
resolved.players = resolveAsArrayOrUndefined(resolved.players) || [];
break;
}
case "app": {
resolved.app = resolved.app || {};
break;
}
default:
break;
}
}
return resolved;
};

View file

@ -1,39 +0,0 @@
import type { Metadata } from "../types/metadata-interface";
import type { AbsoluteTemplateString } from "../types/metadata-types";
function resolveTitleTemplate(
template: string | null | undefined,
title: string,
) {
return template ? template.replace(/%s/g, title) : title;
}
export function resolveTitle(
title: Metadata["title"],
stashedTemplate: string | null | undefined,
): AbsoluteTemplateString {
let resolved;
const template = typeof title !== "string" && title && "template" in title
? title.template
: null;
if (typeof title === "string") {
resolved = resolveTitleTemplate(stashedTemplate, title);
} else if (title) {
if ("default" in title) {
resolved = resolveTitleTemplate(stashedTemplate, title.default);
}
if ("absolute" in title && title.absolute) {
resolved = title.absolute;
}
}
if (title && typeof title !== "string") {
return {
template,
absolute: resolved || "",
};
} else {
return { absolute: resolved || title || "", template };
}
}

View file

@ -1,38 +0,0 @@
import path from "path";
import process from "node:process";
function isStringOrURL(icon: any): icon is string | URL {
return typeof icon === "string" || icon instanceof URL;
}
function resolveUrl(url: null | undefined, metadataBase: URL | null): null;
function resolveUrl(url: string | URL, metadataBase: URL | null): URL;
function resolveUrl(
url: string | URL | null | undefined,
metadataBase: URL | null,
): URL | null;
function resolveUrl(
url: string | URL | null | undefined,
metadataBase: URL | null,
): URL | null {
if (url instanceof URL) return url;
if (!url) return null;
try {
// If we can construct a URL instance from url, ignore metadataBase
const parsedUrl = new URL(url);
return parsedUrl;
} catch (_) {}
if (!metadataBase) {
metadataBase = new URL(`http://localhost:${process.env.PORT || 3000}`);
}
// Handle relative or absolute paths
const basePath = metadataBase.pathname || "";
const joinedPath = path.join(basePath, url);
return new URL(joinedPath, metadataBase);
}
export { isStringOrURL, resolveUrl };
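// Illustrative behavior (assuming PORT is unset):
//   resolveUrl("https://example.com/a", null)           -> URL "https://example.com/a"
//   resolveUrl("/blog", new URL("https://example.com")) -> URL "https://example.com/blog"
//   resolveUrl("/blog", null)                           -> URL "http://localhost:3000/blog"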

View file

@ -1,450 +0,0 @@
// Reference: https://hreflang.org/what-is-a-valid-hreflang
type LangCode =
| "aa"
| "ab"
| "ae"
| "af"
| "ak"
| "am"
| "an"
| "ar"
| "as"
| "av"
| "ay"
| "az"
| "ba"
| "be"
| "bg"
| "bh"
| "bi"
| "bm"
| "bn"
| "bo"
| "br"
| "bs"
| "ca"
| "ce"
| "ch"
| "co"
| "cr"
| "cs"
| "cu"
| "cv"
| "cy"
| "da"
| "de"
| "dv"
| "dz"
| "ee"
| "el"
| "en"
| "eo"
| "es"
| "et"
| "eu"
| "fa"
| "ff"
| "fi"
| "fj"
| "fo"
| "fr"
| "fy"
| "ga"
| "gd"
| "gl"
| "gn"
| "gu"
| "gv"
| "ha"
| "he"
| "hi"
| "ho"
| "hr"
| "ht"
| "hu"
| "hy"
| "hz"
| "ia"
| "id"
| "ie"
| "ig"
| "ii"
| "ik"
| "io"
| "is"
| "it"
| "iu"
| "ja"
| "jv"
| "ka"
| "kg"
| "ki"
| "kj"
| "kk"
| "kl"
| "km"
| "kn"
| "ko"
| "kr"
| "ks"
| "ku"
| "kv"
| "kw"
| "ky"
| "la"
| "lb"
| "lg"
| "li"
| "ln"
| "lo"
| "lt"
| "lu"
| "lv"
| "mg"
| "mh"
| "mi"
| "mk"
| "ml"
| "mn"
| "mr"
| "ms"
| "mt"
| "my"
| "na"
| "nb"
| "nd"
| "ne"
| "ng"
| "nl"
| "nn"
| "no"
| "nr"
| "nv"
| "ny"
| "oc"
| "oj"
| "om"
| "or"
| "os"
| "pa"
| "pi"
| "pl"
| "ps"
| "pt"
| "qu"
| "rm"
| "rn"
| "ro"
| "ru"
| "rw"
| "sa"
| "sc"
| "sd"
| "se"
| "sg"
| "si"
| "sk"
| "sl"
| "sm"
| "sn"
| "so"
| "sq"
| "sr"
| "ss"
| "st"
| "su"
| "sv"
| "sw"
| "ta"
| "te"
| "tg"
| "th"
| "ti"
| "tk"
| "tl"
| "tn"
| "to"
| "tr"
| "ts"
| "tt"
| "tw"
| "ty"
| "ug"
| "uk"
| "ur"
| "uz"
| "ve"
| "vi"
| "vo"
| "wa"
| "wo"
| "xh"
| "yi"
| "yo"
| "za"
| "zh"
| "zu"
| "af-ZA"
| "am-ET"
| "ar-AE"
| "ar-BH"
| "ar-DZ"
| "ar-EG"
| "ar-IQ"
| "ar-JO"
| "ar-KW"
| "ar-LB"
| "ar-LY"
| "ar-MA"
| "arn-CL"
| "ar-OM"
| "ar-QA"
| "ar-SA"
| "ar-SD"
| "ar-SY"
| "ar-TN"
| "ar-YE"
| "as-IN"
| "az-az"
| "az-Cyrl-AZ"
| "az-Latn-AZ"
| "ba-RU"
| "be-BY"
| "bg-BG"
| "bn-BD"
| "bn-IN"
| "bo-CN"
| "br-FR"
| "bs-Cyrl-BA"
| "bs-Latn-BA"
| "ca-ES"
| "co-FR"
| "cs-CZ"
| "cy-GB"
| "da-DK"
| "de-AT"
| "de-CH"
| "de-DE"
| "de-LI"
| "de-LU"
| "dsb-DE"
| "dv-MV"
| "el-CY"
| "el-GR"
| "en-029"
| "en-AU"
| "en-BZ"
| "en-CA"
| "en-cb"
| "en-GB"
| "en-IE"
| "en-IN"
| "en-JM"
| "en-MT"
| "en-MY"
| "en-NZ"
| "en-PH"
| "en-SG"
| "en-TT"
| "en-US"
| "en-ZA"
| "en-ZW"
| "es-AR"
| "es-BO"
| "es-CL"
| "es-CO"
| "es-CR"
| "es-DO"
| "es-EC"
| "es-ES"
| "es-GT"
| "es-HN"
| "es-MX"
| "es-NI"
| "es-PA"
| "es-PE"
| "es-PR"
| "es-PY"
| "es-SV"
| "es-US"
| "es-UY"
| "es-VE"
| "et-EE"
| "eu-ES"
| "fa-IR"
| "fi-FI"
| "fil-PH"
| "fo-FO"
| "fr-BE"
| "fr-CA"
| "fr-CH"
| "fr-FR"
| "fr-LU"
| "fr-MC"
| "fy-NL"
| "ga-IE"
| "gd-GB"
| "gd-ie"
| "gl-ES"
| "gsw-FR"
| "gu-IN"
| "ha-Latn-NG"
| "he-IL"
| "hi-IN"
| "hr-BA"
| "hr-HR"
| "hsb-DE"
| "hu-HU"
| "hy-AM"
| "id-ID"
| "ig-NG"
| "ii-CN"
| "in-ID"
| "is-IS"
| "it-CH"
| "it-IT"
| "iu-Cans-CA"
| "iu-Latn-CA"
| "iw-IL"
| "ja-JP"
| "ka-GE"
| "kk-KZ"
| "kl-GL"
| "km-KH"
| "kn-IN"
| "kok-IN"
| "ko-KR"
| "ky-KG"
| "lb-LU"
| "lo-LA"
| "lt-LT"
| "lv-LV"
| "mi-NZ"
| "mk-MK"
| "ml-IN"
| "mn-MN"
| "mn-Mong-CN"
| "moh-CA"
| "mr-IN"
| "ms-BN"
| "ms-MY"
| "mt-MT"
| "nb-NO"
| "ne-NP"
| "nl-BE"
| "nl-NL"
| "nn-NO"
| "no-no"
| "nso-ZA"
| "oc-FR"
| "or-IN"
| "pa-IN"
| "pl-PL"
| "prs-AF"
| "ps-AF"
| "pt-BR"
| "pt-PT"
| "qut-GT"
| "quz-BO"
| "quz-EC"
| "quz-PE"
| "rm-CH"
| "ro-mo"
| "ro-RO"
| "ru-mo"
| "ru-RU"
| "rw-RW"
| "sah-RU"
| "sa-IN"
| "se-FI"
| "se-NO"
| "se-SE"
| "si-LK"
| "sk-SK"
| "sl-SI"
| "sma-NO"
| "sma-SE"
| "smj-NO"
| "smj-SE"
| "smn-FI"
| "sms-FI"
| "sq-AL"
| "sr-BA"
| "sr-CS"
| "sr-Cyrl-BA"
| "sr-Cyrl-CS"
| "sr-Cyrl-ME"
| "sr-Cyrl-RS"
| "sr-Latn-BA"
| "sr-Latn-CS"
| "sr-Latn-ME"
| "sr-Latn-RS"
| "sr-ME"
| "sr-RS"
| "sr-sp"
| "sv-FI"
| "sv-SE"
| "sw-KE"
| "syr-SY"
| "ta-IN"
| "te-IN"
| "tg-Cyrl-TJ"
| "th-TH"
| "tk-TM"
| "tlh-QS"
| "tn-ZA"
| "tr-TR"
| "tt-RU"
| "tzm-Latn-DZ"
| "ug-CN"
| "uk-UA"
| "ur-PK"
| "uz-Cyrl-UZ"
| "uz-Latn-UZ"
| "uz-uz"
| "vi-VN"
| "wo-SN"
| "xh-ZA"
| "yo-NG"
| "zh-CN"
| "zh-HK"
| "zh-MO"
| "zh-SG"
| "zh-TW"
| "zu-ZA";
type UnmatchedLang = "x-default";
type HrefLang = LangCode | UnmatchedLang;
type Languages<T> = {
[s in HrefLang]?: T;
};
export type AlternateLinkDescriptor = {
title?: string;
url: string | URL;
};
export type AlternateURLs = {
canonical?: null | string | URL | AlternateLinkDescriptor;
languages?: Languages<null | string | URL | AlternateLinkDescriptor[]>;
media?: {
[media: string]: null | string | URL | AlternateLinkDescriptor[];
};
types?: {
[types: string]: null | string | URL | AlternateLinkDescriptor[];
};
};
export type ResolvedAlternateURLs = {
canonical: null | AlternateLinkDescriptor;
languages: null | Languages<AlternateLinkDescriptor[]>;
media: null | {
[media: string]: null | AlternateLinkDescriptor[];
};
types: null | {
[types: string]: null | AlternateLinkDescriptor[];
};
};

View file

@ -1,104 +0,0 @@
// When rendering applink meta tags add a namespace tag before each array instance
// if more than one member exists.
// ref: https://developers.facebook.com/docs/applinks/metadata-reference
export type AppLinks = {
ios?: AppLinksApple | Array<AppLinksApple>;
iphone?: AppLinksApple | Array<AppLinksApple>;
ipad?: AppLinksApple | Array<AppLinksApple>;
android?: AppLinksAndroid | Array<AppLinksAndroid>;
windows_phone?: AppLinksWindows | Array<AppLinksWindows>;
windows?: AppLinksWindows | Array<AppLinksWindows>;
windows_universal?: AppLinksWindows | Array<AppLinksWindows>;
web?: AppLinksWeb | Array<AppLinksWeb>;
};
export type ResolvedAppLinks = {
ios?: Array<AppLinksApple>;
iphone?: Array<AppLinksApple>;
ipad?: Array<AppLinksApple>;
android?: Array<AppLinksAndroid>;
windows_phone?: Array<AppLinksWindows>;
windows?: Array<AppLinksWindows>;
windows_universal?: Array<AppLinksWindows>;
web?: Array<AppLinksWeb>;
};
export type AppLinksApple = {
url: string | URL;
app_store_id?: string | number;
app_name?: string;
};
export type AppLinksAndroid = {
package: string;
url?: string | URL;
class?: string;
app_name?: string;
};
export type AppLinksWindows = {
url: string | URL;
app_id?: string;
app_name?: string;
};
export type AppLinksWeb = {
url: string | URL;
should_fallback?: boolean;
};
// Apple iTunes App
// https://developer.apple.com/documentation/webkit/promoting_apps_with_smart_app_banners
export type ItunesApp = {
appId: string;
appArgument?: string;
};
// Viewport meta structure
// https://developer.mozilla.org/en-US/docs/Web/HTML/Viewport_meta_tag
// intentionally leaving out user-scalable, use a string if you want that behavior
export type Viewport = {
width?: string | number;
height?: string | number;
initialScale?: number;
minimumScale?: number;
maximumScale?: number;
userScalable?: boolean;
viewportFit?: "auto" | "cover" | "contain";
interactiveWidget?: "resizes-visual" | "resizes-content" | "overlays-content";
};
// Apple Web App
// https://developer.apple.com/library/archive/documentation/AppleApplications/Reference/SafariHTMLRef/Articles/MetaTags.html
// https://developer.apple.com/library/archive/documentation/AppleApplications/Reference/SafariWebContent/ConfiguringWebApplications/ConfiguringWebApplications.html
export type AppleWebApp = {
// default true
capable?: boolean;
title?: string;
startupImage?: AppleImage | Array<AppleImage>;
// default "default"
statusBarStyle?: "default" | "black" | "black-translucent";
};
export type AppleImage = string | AppleImageDescriptor;
export type AppleImageDescriptor = {
url: string;
media?: string;
};
export type ResolvedAppleWebApp = {
capable: boolean;
title?: string | null;
startupImage?: AppleImageDescriptor[] | null;
statusBarStyle?: "default" | "black" | "black-translucent";
};
// Format Detection
// This is a poorly specified metadata export type that is supposed to
// control whether the device attempts to convert text that matches
// certain formats into links for action. The most supported example
// is how mobile devices detect phone numbers and make them into links
// that can initiate a phone call
// https://www.goodemailcode.com/email-code/template.html
export type FormatDetection = {
telephone?: boolean;
date?: boolean;
address?: boolean;
email?: boolean;
url?: boolean;
};

View file

@ -1,86 +0,0 @@
export type Manifest = {
background_color?: string;
categories?: string[];
description?: string;
display?: "fullscreen" | "standalone" | "minimal-ui" | "browser";
display_override?: string[];
icons?: {
src: string;
type?: string;
sizes?: string;
purpose?: "any" | "maskable" | "monochrome" | "badge";
}[];
id?: string;
launch_handler?: {
platform?: "windows" | "macos" | "linux";
url?: string;
};
name?: string;
orientation?:
| "any"
| "natural"
| "landscape"
| "portrait"
| "portrait-primary"
| "portrait-secondary"
| "landscape-primary"
| "landscape-secondary";
prefer_related_applications?: boolean;
protocol_handlers?: {
protocol: string;
url: string;
title?: string;
}[];
related_applications?: {
platform: string;
url: string;
id?: string;
}[];
scope?: string;
screenshots?: {
src: string;
type?: string;
sizes?: string;
}[];
serviceworker?: {
src?: string;
scope?: string;
type?: string;
update_via_cache?: "import" | "none" | "all";
};
share_target?: {
action?: string;
method?: "get" | "post";
enctype?:
| "application/x-www-form-urlencoded"
| "multipart/form-data"
| "text/plain";
params?: {
name: string;
value: string;
required?: boolean;
}[];
url?: string;
title?: string;
text?: string;
files?: {
accept?: string[];
name?: string;
}[];
};
short_name?: string;
shortcuts?: {
name: string;
short_name?: string;
description?: string;
url: string;
icons?: {
src: string;
type?: string;
sizes?: string;
purpose?: "any" | "maskable" | "monochrome" | "badge";
}[];
}[];
start_url?: string;
theme_color?: string;
};

View file

@ -1,566 +0,0 @@
import type {
AlternateURLs,
ResolvedAlternateURLs,
} from "./alternative-urls-types";
import type {
AppleWebApp,
AppLinks,
FormatDetection,
ItunesApp,
ResolvedAppleWebApp,
ResolvedAppLinks,
Viewport,
} from "./extra-types";
import type {
AbsoluteTemplateString,
Author,
ColorSchemeEnum,
DeprecatedMetadataFields,
Icon,
Icons,
IconURL,
ReferrerEnum,
ResolvedIcons,
ResolvedRobots,
ResolvedVerification,
Robots,
TemplateString,
ThemeColorDescriptor,
Verification,
} from "./metadata-types";
import type { Manifest as ManifestFile } from "./manifest-types";
import type { OpenGraph, ResolvedOpenGraph } from "./opengraph-types";
import type { ResolvedTwitterMetadata, Twitter } from "./twitter-types";
/**
* Metadata interface to describe all the metadata fields that can be set in a document.
* @interface
*/
interface Metadata extends DeprecatedMetadataFields {
/**
* The base path and origin for absolute urls for various metadata links such as OpenGraph images.
*/
metadataBase?: null | URL;
/**
* The document title.
* @example
* ```tsx
* "My Blog"
* <title>My Blog</title>
*
* { default: "Dashboard", template: "%s | My Website" }
* <title>Dashboard | My Website</title>
*
* { absolute: "My Blog", template: "%s | My Website" }
* <title>My Blog</title>
* ```
*/
title?: null | string | TemplateString;
/**
* The document description, and optionally the OpenGraph and twitter descriptions.
* @example
* ```tsx
* "My Blog Description"
* <meta name="description" content="My Blog Description" />
* ```
*/
description?: null | string;
// Standard metadata names
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/meta/name
/**
* The application name.
* @example
* ```tsx
* "My Blog"
* <meta name="application-name" content="My Blog" />
* ```
*/
applicationName?: null | string;
/**
* The authors of the document.
* @example
* ```tsx
* [{ name: "Next.js Team", url: "https://nextjs.org" }]
*
* <meta name="author" content="Next.js Team" />
* <link rel="author" href="https://nextjs.org" />
* ```
*/
authors?: null | Author | Array<Author>;
/**
* The generator used for the document.
* @example
* ```tsx
* "Next.js"
*
* <meta name="generator" content="Next.js" />
* ```
*/
generator?: null | string;
/**
* The keywords for the document. If an array is provided, it will be flattened into a single tag with comma separation.
* @example
* ```tsx
* "nextjs, react, blog"
* <meta name="keywords" content="nextjs, react, blog" />
*
* ["react", "server components"]
* <meta name="keywords" content="react, server components" />
* ```
*/
keywords?: null | string | Array<string>;
/**
* The referrer setting for the document.
* @example
* ```tsx
* "origin"
* <meta name="referrer" content="origin" />
* ```
*/
referrer?: null | ReferrerEnum;
/**
* The theme color for the document.
* @example
* ```tsx
* "#000000"
* <meta name="theme-color" content="#000000" />
*
* { media: "(prefers-color-scheme: dark)", color: "#000000" }
* <meta name="theme-color" media="(prefers-color-scheme: dark)" content="#000000" />
*
* [
* { media: "(prefers-color-scheme: dark)", color: "#000000" },
* { media: "(prefers-color-scheme: light)", color: "#ffffff" }
* ]
* <meta name="theme-color" media="(prefers-color-scheme: dark)" content="#000000" />
* <meta name="theme-color" media="(prefers-color-scheme: light)" content="#ffffff" />
* ```
*/
themeColor?: null | string | ThemeColorDescriptor | ThemeColorDescriptor[];
/**
* The color scheme for the document.
* @example
* ```tsx
* "dark"
* <meta name="color-scheme" content="dark" />
* ```
*/
colorScheme?: null | ColorSchemeEnum;
/**
* The viewport setting for the document.
* @example
* ```tsx
* "width=device-width, initial-scale=1"
* <meta name="viewport" content="width=device-width, initial-scale=1" />
*
* { width: "device-width", initialScale: 1 }
* <meta name="viewport" content="width=device-width, initial-scale=1" />
* ```
*/
viewport?: null | string | Viewport;
/**
* The creator of the document.
* @example
* ```tsx
* "Next.js Team"
* <meta name="creator" content="Next.js Team" />
* ```
*/
creator?: null | string;
/**
* The publisher of the document.
* @example
*
* ```tsx
* "Vercel"
* <meta name="publisher" content="Vercel" />
* ```
*/
publisher?: null | string;
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/meta/name#other_metadata_names
/**
* The robots setting for the document.
*
* @see https://developer.mozilla.org/en-US/docs/Glossary/Robots.txt
* @example
* ```tsx
* "index, follow"
* <meta name="robots" content="index, follow" />
*
* { index: false, follow: false }
* <meta name="robots" content="noindex, nofollow" />
* ```
*/
robots?: null | string | Robots;
/**
* The canonical and alternate URLs for the document.
* @example
* ```tsx
* { canonical: "https://example.com" }
* <link rel="canonical" href="https://example.com" />
*
* { canonical: "https://example.com", hreflang: { "en-US": "https://example.com/en-US" } }
* <link rel="canonical" href="https://example.com" />
* <link rel="alternate" href="https://example.com/en-US" hreflang="en-US" />
* ```
*
* Multiple titles example for alternate URLs except `canonical`:
* ```tsx
* {
* canonical: "https://example.com",
* types: {
* 'application/rss+xml': [
* { url: 'blog.rss', title: 'rss' },
* { url: 'blog/js.rss', title: 'js title' },
* ],
* },
* }
* <link rel="canonical" href="https://example.com" />
* <link rel="alternate" href="https://example.com/blog.rss" type="application/rss+xml" title="rss" />
* <link rel="alternate" href="https://example.com/blog/js.rss" type="application/rss+xml" title="js title" />
* ```
*/
alternates?: null | AlternateURLs;
/**
* The icons for the document. Defaults to rel="icon".
*
* @see https://developer.mozilla.org/en-US/docs/Web/HTML/Attributes/rel#attr-icon
* @example
* ```tsx
* "https://example.com/icon.png"
* <link rel="icon" href="https://example.com/icon.png" />
*
* { icon: "https://example.com/icon.png", apple: "https://example.com/apple-icon.png" }
* <link rel="icon" href="https://example.com/icon.png" />
* <link rel="apple-touch-icon" href="https://example.com/apple-icon.png" />
*
* [{ rel: "icon", url: "https://example.com/icon.png" }, { rel: "apple-touch-icon", url: "https://example.com/apple-icon.png" }]
* <link rel="icon" href="https://example.com/icon.png" />
* <link rel="apple-touch-icon" href="https://example.com/apple-icon.png" />
* ```
*/
icons?: null | IconURL | Array<Icon> | Icons;
/**
* A web application manifest, as defined in the Web Application Manifest specification.
*
* @see https://developer.mozilla.org/en-US/docs/Web/Manifest
* @example
* ```tsx
* "https://example.com/manifest.json"
* <link rel="manifest" href="https://example.com/manifest.json" />
* ```
*/
manifest?: null | string | URL;
/**
* The Open Graph metadata for the document.
*
* @see https://ogp.me
* @example
* ```tsx
* {
* type: "website",
* url: "https://example.com",
* title: "My Website",
* description: "My Website Description",
* siteName: "My Website",
* images: [{
* url: "https://example.com/og.png",
* }],
* }
*
* <meta property="og:type" content="website" />
* <meta property="og:url" content="https://example.com" />
* <meta property="og:site_name" content="My Website" />
* <meta property="og:title" content="My Website" />
* <meta property="og:description" content="My Website Description" />
* <meta property="og:image" content="https://example.com/og.png" />
* ```
*/
openGraph?: null | OpenGraph;
/**
* The Twitter metadata for the document.
* @example
* ```tsx
* { card: "summary_large_image", site: "@site", creator: "@creator", "images": "https://example.com/og.png" }
*
* <meta name="twitter:card" content="summary_large_image" />
* <meta name="twitter:site" content="@site" />
* <meta name="twitter:creator" content="@creator" />
* <meta name="twitter:title" content="My Website" />
* <meta name="twitter:description" content="My Website Description" />
* <meta name="twitter:image" content="https://example.com/og.png" />
* ```
*/
twitter?: null | Twitter;
/**
* The common verification tokens for the document.
* @example
* ```tsx
* { verification: { google: "1234567890", yandex: "1234567890", "me": "1234567890" } }
* <meta name="google-site-verification" content="1234567890" />
* <meta name="yandex-verification" content="1234567890" />
* <meta name="me" content="@me" />
* ```
*/
verification?: Verification;
/**
* The Apple web app metadata for the document.
*
* @see https://developer.apple.com/library/archive/documentation/AppleApplications/Reference/SafariHTMLRef/Articles/MetaTags.html
* @example
* ```tsx
* { capable: true, title: "My Website", statusBarStyle: "black-translucent" }
* <meta name="apple-mobile-web-app-capable" content="yes" />
* <meta name="apple-mobile-web-app-title" content="My Website" />
* <meta name="apple-mobile-web-app-status-bar-style" content="black-translucent" />
* ```
*/
appleWebApp?: null | boolean | AppleWebApp;
/**
   * Indicates if devices should try to interpret various formats and make actionable links out of them. For example, it controls
   * whether telephone numbers on mobile are turned into links that can be clicked to dial.
* @example
* ```tsx
* { telephone: false }
* <meta name="format-detection" content="telephone=no" />
* ```
*/
formatDetection?: null | FormatDetection;
/**
* The metadata for the iTunes App.
* It adds the `name="apple-itunes-app"` meta tag.
*
* @example
* ```tsx
   * { appId: "123456789", appArgument: "123456789" }
   * <meta name="apple-itunes-app" content="app-id=123456789, app-argument=123456789" />
* ```
*/
itunes?: null | ItunesApp;
/**
* A brief description of what this web-page is about. Not recommended, superseded by description.
* It adds the `name="abstract"` meta tag.
*
* @see https://www.metatags.org/all-meta-tags-overview/meta-name-abstract/
* @example
* ```tsx
* "My Website Description"
* <meta name="abstract" content="My Website Description" />
* ```
*/
abstract?: null | string;
/**
* The Facebook AppLinks metadata for the document.
* @example
* ```tsx
* { ios: { appStoreId: "123456789", url: "https://example.com" }, android: { packageName: "com.example", url: "https://example.com" } }
*
* <meta property="al:ios:app_store_id" content="123456789" />
* <meta property="al:ios:url" content="https://example.com" />
* <meta property="al:android:package" content="com.example" />
* <meta property="al:android:url" content="https://example.com" />
* ```
*/
appLinks?: null | AppLinks;
/**
* The archives link rel property.
* @example
* ```tsx
* { archives: "https://example.com/archives" }
* <link rel="archives" href="https://example.com/archives" />
* ```
*/
archives?: null | string | Array<string>;
/**
* The assets link rel property.
* @example
* ```tsx
* "https://example.com/assets"
* <link rel="assets" href="https://example.com/assets" />
* ```
*/
assets?: null | string | Array<string>;
/**
* The bookmarks link rel property.
* @example
* ```tsx
* "https://example.com/bookmarks"
* <link rel="bookmarks" href="https://example.com/bookmarks" />
* ```
*/
  bookmarks?: null | string | Array<string>; // This is technically against HTML spec but is used in the wild
// meta name properties
/**
* The category meta name property.
* @example
* ```tsx
* "My Category"
* <meta name="category" content="My Category" />
* ```
*/
category?: null | string;
/**
* The classification meta name property.
* @example
* ```tsx
* "My Classification"
* <meta name="classification" content="My Classification" />
* ```
*/
classification?: null | string;
/**
* Arbitrary name/value pairs for the document.
*/
other?: {
[name: string]: string | number | Array<string | number>;
} & DeprecatedMetadataFields;
}
interface ResolvedMetadata extends DeprecatedMetadataFields {
// origin and base path for absolute urls for various metadata links such as
// opengraph-image
metadataBase: null | URL;
// The Document title and template if defined
title: null | AbsoluteTemplateString;
// The Document description, and optionally the opengraph and twitter descriptions
description: null | string;
// Standard metadata names
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/meta/name
applicationName: null | string;
authors: null | Array<Author>;
generator: null | string;
// if you provide an array it will be flattened into a single tag with comma separation
keywords: null | Array<string>;
referrer: null | ReferrerEnum;
themeColor: null | ThemeColorDescriptor[];
colorScheme: null | ColorSchemeEnum;
viewport: null | string;
creator: null | string;
publisher: null | string;
// https://developer.mozilla.org/en-US/docs/Web/HTML/Element/meta/name#other_metadata_names
robots: null | ResolvedRobots;
// The canonical and alternate URLs for this location
alternates: null | ResolvedAlternateURLs;
// Defaults to rel="icon" but the Icons type can be used
// to get more specific about rel types
icons: null | ResolvedIcons;
openGraph: null | ResolvedOpenGraph;
manifest: null | string | URL;
twitter: null | ResolvedTwitterMetadata;
// common verification tokens
verification: null | ResolvedVerification;
// Apple web app metadata
// https://developer.apple.com/library/archive/documentation/AppleApplications/Reference/SafariHTMLRef/Articles/MetaTags.html
appleWebApp: null | ResolvedAppleWebApp;
// Should devices try to interpret various formats and make actionable links
// out of them? The canonical example is telephone numbers on mobile that can
// be clicked to dial
formatDetection: null | FormatDetection;
// meta name="apple-itunes-app"
itunes: null | ItunesApp;
// meta name="abstract"
// A brief description of what this web-page is about.
  // Not recommended, superseded by description.
// https://www.metatags.org/all-meta-tags-overview/meta-name-abstract/
abstract: null | string;
// Facebook AppLinks
appLinks: null | ResolvedAppLinks;
// link rel properties
archives: null | Array<string>;
assets: null | Array<string>;
  bookmarks: null | Array<string>; // This is technically against HTML spec but is used in the wild
// meta name properties
category: null | string;
classification: null | string;
// Arbitrary name/value pairs
other:
| null
| ({
[name: string]: string | number | Array<string | number>;
} & DeprecatedMetadataFields);
}
type RobotsFile = {
// Apply rules for all
rules:
| {
userAgent?: string | string[];
allow?: string | string[];
disallow?: string | string[];
crawlDelay?: number;
}
// Apply rules for specific user agents
| Array<{
userAgent: string | string[];
allow?: string | string[];
disallow?: string | string[];
crawlDelay?: number;
}>;
sitemap?: string | string[];
host?: string;
};
type SitemapFile = Array<{
url: string;
lastModified?: string | Date;
}>;
type ResolvingMetadata = Promise<ResolvedMetadata>;
declare namespace MetadataRoute {
export type Robots = RobotsFile;
export type Sitemap = SitemapFile;
export type Manifest = ManifestFile;
}
export { Metadata, MetadataRoute, ResolvedMetadata, ResolvingMetadata };

View file

@ -1,155 +0,0 @@
/**
* Metadata types
*/
export interface DeprecatedMetadataFields {
/**
* Deprecated options that have a preferred method
   * @deprecated Use appleWebApp to configure apple-mobile-web-app-capable instead.
* @see https://www.appsloveworld.com/coding/iphone/11/difference-between-apple-mobile-web-app-capable-and-apple-touch-fullscreen-ipho
*/
"apple-touch-fullscreen"?: never;
/**
* Obsolete since iOS 7.
* @see https://web.dev/apple-touch-icon/
   * @deprecated use icons.apple instead
*/
"apple-touch-icon-precomposed"?: never;
}
export type TemplateString =
| DefaultTemplateString
| AbsoluteTemplateString
| AbsoluteString;
export type DefaultTemplateString = {
default: string;
template: string;
};
export type AbsoluteTemplateString = {
absolute: string;
template: string | null;
};
export type AbsoluteString = {
absolute: string;
};
export type Author = {
// renders as <link rel="author"...
url?: string | URL;
// renders as <meta name="author"...
name?: string;
};
// does not include "unsafe-URL". to use this, users should
// use '"unsafe-URL" as ReferrerEnum'
export type ReferrerEnum =
| "no-referrer"
| "origin"
| "no-referrer-when-downgrade"
| "origin-when-cross-origin"
| "same-origin"
| "strict-origin"
| "strict-origin-when-cross-origin";
export type ColorSchemeEnum =
| "normal"
| "light"
| "dark"
| "light dark"
| "dark light"
| "only light";
type RobotsInfo = {
// all and none will be inferred from index/follow boolean options
index?: boolean;
follow?: boolean;
/** @deprecated set index to false instead */
noindex?: never;
/** @deprecated set follow to false instead */
nofollow?: never;
noarchive?: boolean;
nosnippet?: boolean;
noimageindex?: boolean;
nocache?: boolean;
notranslate?: boolean;
indexifembedded?: boolean;
nositelinkssearchbox?: boolean;
unavailable_after?: string;
"max-video-preview"?: number | string;
"max-image-preview"?: "none" | "standard" | "large";
"max-snippet"?: number;
};
export type Robots = RobotsInfo & {
// if you want to specify an alternate robots just for google
googleBot?: string | RobotsInfo;
};
export type ResolvedRobots = {
basic: string | null;
googleBot: string | null;
};
export type IconURL = string | URL;
export type Icon = IconURL | IconDescriptor;
export type IconDescriptor = {
url: string | URL;
type?: string;
sizes?: string;
/** defaults to rel="icon" unless superseded by Icons map */
rel?: string;
media?: string;
/**
* @see https://developer.mozilla.org/en-US/docs/Web/API/HTMLImageElement/fetchPriority
*/
fetchPriority?: "high" | "low" | "auto";
};
export type Icons = {
/** rel="icon" */
icon?: Icon | Icon[];
/** rel="shortcut icon" */
shortcut?: Icon | Icon[];
/**
* @see https://developer.apple.com/library/archive/documentation/AppleApplications/Reference/SafariWebContent/ConfiguringWebApplications/ConfiguringWebApplications.html
* rel="apple-touch-icon"
*/
apple?: Icon | Icon[];
/** rel inferred from descriptor, defaults to "icon" */
other?: IconDescriptor | IconDescriptor[];
};
export type Verification = {
google?: null | string | number | (string | number)[];
yahoo?: null | string | number | (string | number)[];
yandex?: null | string | number | (string | number)[];
me?: null | string | number | (string | number)[];
  // for any additional ad-hoc verification
other?: {
[name: string]: string | number | (string | number)[];
};
};
export type ResolvedVerification = {
google?: null | (string | number)[];
yahoo?: null | (string | number)[];
yandex?: null | (string | number)[];
me?: null | (string | number)[];
other?: {
[name: string]: (string | number)[];
};
};
export type ResolvedIcons = {
icon: IconDescriptor[];
apple: IconDescriptor[];
shortcut?: IconDescriptor[];
other?: IconDescriptor[];
};
export type ThemeColorDescriptor = {
color: string;
media?: string;
};

View file

@ -1,267 +0,0 @@
import type { AbsoluteTemplateString, TemplateString } from "./metadata-types";
export type OpenGraphType =
| "article"
| "book"
| "music.song"
| "music.album"
| "music.playlist"
| "music.radio_station"
| "profile"
| "website"
| "video.tv_show"
| "video.other"
| "video.movie"
| "video.episode";
export type OpenGraph =
| OpenGraphWebsite
| OpenGraphArticle
| OpenGraphBook
| OpenGraphProfile
| OpenGraphMusicSong
| OpenGraphMusicAlbum
| OpenGraphMusicPlaylist
| OpenGraphRadioStation
| OpenGraphVideoMovie
| OpenGraphVideoEpisode
| OpenGraphVideoTVShow
| OpenGraphVideoOther
| OpenGraphMetadata;
// update this type to reflect actual locales
type Locale = string;
type OpenGraphMetadata = {
determiner?: "a" | "an" | "the" | "auto" | "";
title?: string | TemplateString;
description?: string;
emails?: string | Array<string>;
phoneNumbers?: string | Array<string>;
faxNumbers?: string | Array<string>;
siteName?: string;
locale?: Locale;
alternateLocale?: Locale | Array<Locale>;
images?: OGImage | Array<OGImage>;
audio?: OGAudio | Array<OGAudio>;
videos?: OGVideo | Array<OGVideo>;
url?: string | URL;
countryName?: string;
ttl?: number;
};
type OpenGraphWebsite = OpenGraphMetadata & {
type: "website";
};
type OpenGraphArticle = OpenGraphMetadata & {
type: "article";
publishedTime?: string; // datetime
modifiedTime?: string; // datetime
expirationTime?: string; // datetime
authors?: null | string | URL | Array<string | URL>;
section?: null | string;
tags?: null | string | Array<string>;
};
type OpenGraphBook = OpenGraphMetadata & {
type: "book";
isbn?: null | string;
releaseDate?: null | string; // datetime
authors?: null | string | URL | Array<string | URL>;
tags?: null | string | Array<string>;
};
type OpenGraphProfile = OpenGraphMetadata & {
type: "profile";
firstName?: null | string;
lastName?: null | string;
username?: null | string;
gender?: null | string;
};
type OpenGraphMusicSong = OpenGraphMetadata & {
type: "music.song";
duration?: null | number;
albums?: null | string | URL | OGAlbum | Array<string | URL | OGAlbum>;
musicians?: null | string | URL | Array<string | URL>;
};
type OpenGraphMusicAlbum = OpenGraphMetadata & {
type: "music.album";
songs?: null | string | URL | OGSong | Array<string | URL | OGSong>;
musicians?: null | string | URL | Array<string | URL>;
releaseDate?: null | string; // datetime
};
type OpenGraphMusicPlaylist = OpenGraphMetadata & {
type: "music.playlist";
songs?: null | string | URL | OGSong | Array<string | URL | OGSong>;
creators?: null | string | URL | Array<string | URL>;
};
type OpenGraphRadioStation = OpenGraphMetadata & {
type: "music.radio_station";
creators?: null | string | URL | Array<string | URL>;
};
type OpenGraphVideoMovie = OpenGraphMetadata & {
type: "video.movie";
actors?: null | string | URL | OGActor | Array<string | URL | OGActor>;
directors?: null | string | URL | Array<string | URL>;
writers?: null | string | URL | Array<string | URL>;
duration?: null | number;
releaseDate?: null | string; // datetime
tags?: null | string | Array<string>;
};
type OpenGraphVideoEpisode = OpenGraphMetadata & {
type: "video.episode";
actors?: null | string | URL | OGActor | Array<string | URL | OGActor>;
directors?: null | string | URL | Array<string | URL>;
writers?: null | string | URL | Array<string | URL>;
duration?: null | number;
releaseDate?: null | string; // datetime
tags?: null | string | Array<string>;
series?: null | string | URL;
};
type OpenGraphVideoTVShow = OpenGraphMetadata & {
type: "video.tv_show";
};
type OpenGraphVideoOther = OpenGraphMetadata & {
type: "video.other";
};
type OGImage = string | OGImageDescriptor | URL;
type OGImageDescriptor = {
url: string | URL;
secureUrl?: string | URL;
alt?: string;
type?: string;
width?: string | number;
height?: string | number;
};
type OGAudio = string | OGAudioDescriptor | URL;
type OGAudioDescriptor = {
url: string | URL;
secureUrl?: string | URL;
type?: string;
};
type OGVideo = string | OGVideoDescriptor | URL;
type OGVideoDescriptor = {
url: string | URL;
secureUrl?: string | URL;
type?: string;
width?: string | number;
height?: string | number;
};
export type ResolvedOpenGraph =
| ResolvedOpenGraphWebsite
| ResolvedOpenGraphArticle
| ResolvedOpenGraphBook
| ResolvedOpenGraphProfile
| ResolvedOpenGraphMusicSong
| ResolvedOpenGraphMusicAlbum
| ResolvedOpenGraphMusicPlaylist
| ResolvedOpenGraphRadioStation
| ResolvedOpenGraphVideoMovie
| ResolvedOpenGraphVideoEpisode
| ResolvedOpenGraphVideoTVShow
| ResolvedOpenGraphVideoOther
| ResolvedOpenGraphMetadata;
type ResolvedOpenGraphMetadata = {
determiner?: "a" | "an" | "the" | "auto" | "";
title?: AbsoluteTemplateString;
description?: string;
emails?: Array<string>;
phoneNumbers?: Array<string>;
faxNumbers?: Array<string>;
siteName?: string;
locale?: Locale;
alternateLocale?: Array<Locale>;
images?: Array<OGImage>;
audio?: Array<OGAudio>;
videos?: Array<OGVideo>;
url: null | URL | string;
countryName?: string;
ttl?: number;
};
type ResolvedOpenGraphWebsite = ResolvedOpenGraphMetadata & {
type: "website";
};
type ResolvedOpenGraphArticle = ResolvedOpenGraphMetadata & {
type: "article";
publishedTime?: string; // datetime
modifiedTime?: string; // datetime
expirationTime?: string; // datetime
authors?: Array<string>;
section?: string;
tags?: Array<string>;
};
type ResolvedOpenGraphBook = ResolvedOpenGraphMetadata & {
type: "book";
isbn?: string;
releaseDate?: string; // datetime
authors?: Array<string>;
tags?: Array<string>;
};
type ResolvedOpenGraphProfile = ResolvedOpenGraphMetadata & {
type: "profile";
firstName?: string;
lastName?: string;
username?: string;
gender?: string;
};
type ResolvedOpenGraphMusicSong = ResolvedOpenGraphMetadata & {
type: "music.song";
duration?: number;
albums?: Array<OGAlbum>;
musicians?: Array<string | URL>;
};
type ResolvedOpenGraphMusicAlbum = ResolvedOpenGraphMetadata & {
type: "music.album";
songs?: Array<string | URL | OGSong>;
musicians?: Array<string | URL>;
releaseDate?: string; // datetime
};
type ResolvedOpenGraphMusicPlaylist = ResolvedOpenGraphMetadata & {
type: "music.playlist";
songs?: Array<string | URL | OGSong>;
creators?: Array<string | URL>;
};
type ResolvedOpenGraphRadioStation = ResolvedOpenGraphMetadata & {
type: "music.radio_station";
creators?: Array<string | URL>;
};
type ResolvedOpenGraphVideoMovie = ResolvedOpenGraphMetadata & {
type: "video.movie";
actors?: Array<string | URL | OGActor>;
directors?: Array<string | URL>;
writers?: Array<string | URL>;
duration?: number;
releaseDate?: string; // datetime
tags?: Array<string>;
};
type ResolvedOpenGraphVideoEpisode = ResolvedOpenGraphMetadata & {
type: "video.episode";
actors?: Array<string | URL | OGActor>;
directors?: Array<string | URL>;
writers?: Array<string | URL>;
duration?: number;
releaseDate?: string; // datetime
tags?: Array<string>;
series?: string | URL;
};
type ResolvedOpenGraphVideoTVShow = ResolvedOpenGraphMetadata & {
type: "video.tv_show";
};
type ResolvedOpenGraphVideoOther = ResolvedOpenGraphMetadata & {
type: "video.other";
};
type OGSong = {
url: string | URL;
disc?: number;
track?: number;
};
type OGAlbum = {
url: string | URL;
disc?: number;
track?: number;
};
type OGActor = {
url: string | URL;
role?: string;
};

View file

@ -1,17 +0,0 @@
import { Metadata, ResolvedMetadata } from "./metadata-interface";
export type FieldResolver<Key extends keyof Metadata> = (
T: Metadata[Key],
) => ResolvedMetadata[Key];
export type FieldResolverWithMetadataBase<
Key extends keyof Metadata,
Options = undefined,
> = Options extends undefined ? (
T: Metadata[Key],
metadataBase: ResolvedMetadata["metadataBase"],
) => ResolvedMetadata[Key]
: (
T: Metadata[Key],
metadataBase: ResolvedMetadata["metadataBase"],
options: Options,
) => ResolvedMetadata[Key];

View file

@ -1,94 +0,0 @@
// Reference: https://developer.twitter.com/en/docs/twitter-for-websites/cards/overview/markup
import type { AbsoluteTemplateString, TemplateString } from "./metadata-types";
export type Twitter =
| TwitterSummary
| TwitterSummaryLargeImage
| TwitterPlayer
| TwitterApp
| TwitterMetadata;
type TwitterMetadata = {
// defaults to card="summary"
site?: string; // username for account associated to the site itself
siteId?: string; // id for account associated to the site itself
creator?: string; // username for the account associated to the creator of the content on the site
creatorId?: string; // id for the account associated to the creator of the content on the site
description?: string;
title?: string | TemplateString;
images?: TwitterImage | Array<TwitterImage>;
};
type TwitterSummary = TwitterMetadata & {
card: "summary";
};
type TwitterSummaryLargeImage = TwitterMetadata & {
card: "summary_large_image";
};
type TwitterPlayer = TwitterMetadata & {
card: "player";
players: TwitterPlayerDescriptor | Array<TwitterPlayerDescriptor>;
};
type TwitterApp = TwitterMetadata & {
card: "app";
app: TwitterAppDescriptor;
};
export type TwitterAppDescriptor = {
id: {
iphone?: string | number;
ipad?: string | number;
googleplay?: string;
};
url?: {
iphone?: string | URL;
ipad?: string | URL;
googleplay?: string | URL;
};
name?: string;
};
type TwitterImage = string | TwitterImageDescriptor | URL;
type TwitterImageDescriptor = {
url: string | URL;
alt?: string;
secureUrl?: string | URL;
type?: string;
width?: string | number;
height?: string | number;
};
type TwitterPlayerDescriptor = {
playerUrl: string | URL;
streamUrl: string | URL;
width: number;
height: number;
};
type ResolvedTwitterImage = {
url: string | URL;
alt?: string;
secureUrl?: string | URL;
type?: string;
width?: string | number;
height?: string | number;
};
type ResolvedTwitterSummary = {
site: string | null;
siteId: string | null;
creator: string | null;
creatorId: string | null;
description: string | null;
title: AbsoluteTemplateString;
images?: Array<ResolvedTwitterImage>;
};
type ResolvedTwitterPlayer = ResolvedTwitterSummary & {
players: Array<TwitterPlayerDescriptor>;
};
type ResolvedTwitterApp = ResolvedTwitterSummary & {
app: TwitterAppDescriptor;
};
export type ResolvedTwitterMetadata =
| ({ card: "summary" } & ResolvedTwitterSummary)
| ({ card: "summary_large_image" } & ResolvedTwitterSummary)
| ({ card: "player" } & ResolvedTwitterPlayer)
| ({ card: "app" } & ResolvedTwitterApp);

View file

@ -1,5 +0,0 @@
the metadata renderer was written in 2022, and is 3700 lines of code. it
represents the bulk of the code in the framework, which i think is wrong.
a bounty goes to rewriting this codebase into one or two files. merging logic is
surely not needed, and resolution can happen in the same step as rendering.
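
a rough sketch of that direction (illustrative only, with a simplified field set — not the framework's current API): defaults and fallbacks are applied inline while each tag string is built, so no merge pass or intermediate "resolved" tree is needed.

```ts
// resolve defaults and render tags in one pass
const esc = (s: string) =>
  s.replaceAll("&", "&amp;").replaceAll('"', "&quot;")
    .replaceAll("<", "&lt;").replaceAll(">", "&gt;");

function renderMeta(meta: { title?: string; description?: string; ogTitle?: string }) {
  let html = "";
  if (meta.title) html += `<title>${esc(meta.title)}</title>`;
  if (meta.description) html += `<meta name="description" content="${esc(meta.description)}">`;
  const ogTitle = meta.ogTitle ?? meta.title; // fallback happens inline, not in a merge step
  if (ogTitle) html += `<meta property="og:title" content="${esc(ogTitle)}">`;
  return html;
}
```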

View file

@ -1,565 +0,0 @@
import type { Icon, ResolvedMetadata } from "./types";
import { escapeHTML as esc } from "./utils";
function Meta(name: string, content: any) {
return `<meta name="${esc(name)}" content="${esc(content)}">`;
}
function MetaProp(name: string, content: any) {
return `<meta property="${esc(name)}" content="${esc(content)}">`;
}
function MetaMedia(name: string, content: any, media: string) {
return `<meta name="${esc(name)}" content="${esc(content)}" media="${
esc(media)
}">`;
}
function Link(rel: string, href: any) {
return `<link rel="${esc(rel)}" href="${esc(href)}" />`;
}
function LinkMedia(rel: string, href: any, media: string) {
return `<link rel="${esc(rel)}" href="${esc(href)}" media="${esc(media)}">`;
}
const resolveUrl = (
url: string | URL,
) => (typeof url === "string" ? url : url.toString());
function IconLink(rel: string, icon: Icon) {
if (typeof icon === "object" && !(icon instanceof URL)) {
const { url, rel: _, ...props } = icon;
return `<link rel="${esc(rel)}" href="${esc(resolveUrl(url))}"${
Object.keys(props)
.map((key) => ` ${key}="${esc(props[key])}"`)
.join("")
}>`;
} else {
const href = resolveUrl(icon);
return Link(rel, href);
}
}
function ExtendMeta(prefix: string, content: any) {
if (
typeof content === "string" || typeof content === "number" ||
content instanceof URL
) {
return MetaProp(prefix, content);
} else {
let str = "";
for (const [prop, value] of Object.entries(content)) {
if (value) {
str += MetaProp(
prefix === "og:image" && prop === "url"
? "og:image"
: prefix + ":" + prop,
value,
);
}
}
return str;
}
}
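// e.g. ExtendMeta("og:image", { url: "/og.png", width: 1200 }) renders
//   <meta property="og:image" content="/og.png"><meta property="og:image:width" content="1200">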
const formatDetectionKeys = [
"telephone",
"date",
"address",
"email",
"url",
] as const;
export function renderMetadata(meta: ResolvedMetadata): string {
var str = "";
// <BasicMetadata/>
if (meta.title?.absolute) str += `<title>${esc(meta.title.absolute)}</title>`;
if (meta.description) str += Meta("description", meta.description);
if (meta.applicationName) {
str += Meta("application-name", meta.applicationName);
}
if (meta.authors) {
for (var author of meta.authors) {
if (author.url) str += Link("author", author.url);
if (author.name) str += Meta("author", author.name);
}
}
if (meta.manifest) str += Link("manifest", meta.manifest);
if (meta.generator) str += Meta("generator", meta.generator);
if (meta.referrer) str += Meta("referrer", meta.referrer);
if (meta.themeColor) {
for (var themeColor of meta.themeColor) {
str += !themeColor.media
? Meta("theme-color", themeColor.color)
: MetaMedia("theme-color", themeColor.color, themeColor.media);
}
}
if (meta.colorScheme) str += Meta("color-scheme", meta.colorScheme);
if (meta.viewport) str += Meta("viewport", meta.viewport);
if (meta.creator) str += Meta("creator", meta.creator);
if (meta.publisher) str += Meta("publisher", meta.publisher);
if (meta.robots?.basic) str += Meta("robots", meta.robots.basic);
if (meta.robots?.googleBot) str += Meta("googlebot", meta.robots.googleBot);
if (meta.abstract) str += Meta("abstract", meta.abstract);
if (meta.archives) {
for (var archive of meta.archives) {
str += Link("archives", archive);
}
}
if (meta.assets) {
for (var asset of meta.assets) {
str += Link("assets", asset);
}
}
if (meta.bookmarks) {
for (var bookmark of meta.bookmarks) {
str += Link("bookmarks", bookmark);
}
}
if (meta.category) str += Meta("category", meta.category);
if (meta.classification) str += Meta("classification", meta.classification);
if (meta.other) {
for (var [name, content] of Object.entries(meta.other)) {
if (content) {
str += Meta(name, Array.isArray(content) ? content.join(",") : content);
}
}
}
// <AlternatesMetadata />
var alternates = meta.alternates;
if (alternates) {
if (alternates.canonical) {
str += Link("canonical", alternates.canonical.url);
}
if (alternates.languages) {
for (var [locale, urls] of Object.entries(alternates.languages)) {
for (var { url, title } of urls) {
str += `<link rel="alternate" hreflang="${esc(locale)}" href="${
esc(url.toString())
}"${title ? ` title="${esc(title)}"` : ""}>`;
}
}
}
if (alternates.media) {
for (var [media, urls2] of Object.entries(alternates.media)) {
if (urls2) {
for (var { url, title } of urls2) {
str += `<link rel="alternate" media="${esc(media)}" href="${
esc(url.toString())
}"${title ? ` title="${esc(title)}"` : ""}>`;
}
}
}
}
if (alternates.types) {
for (var [type, urls2] of Object.entries(alternates.types)) {
if (urls2) {
for (var { url, title } of urls2) {
str += `<link rel="alternate" type="${esc(type)}" href="${
esc(url.toString())
}"${title ? ` title="${esc(title)}"` : ""}>`;
}
}
}
}
}
// <ItunesMeta />
if (meta.itunes) {
str += Meta(
"apple-itunes-app",
`app-id=${meta.itunes.appId}${
meta.itunes.appArgument
? `, app-argument=${meta.itunes.appArgument}`
: ""
}`,
);
}
// <FormatDetectionMeta />
if (meta.formatDetection) {
var contentStr = "";
for (var key of formatDetectionKeys) {
if (key in meta.formatDetection) {
if (contentStr) contentStr += ", ";
contentStr += `${key}=no`;
}
}
str += Meta("format-detection", contentStr);
}
// <VerificationMeta />
if (meta.verification) {
if (meta.verification.google) {
for (var verificationKey of meta.verification.google) {
str += Meta("google-site-verification", verificationKey);
}
}
if (meta.verification.yahoo) {
for (var verificationKey of meta.verification.yahoo) {
str += Meta("y_key", verificationKey);
}
}
if (meta.verification.yandex) {
for (var verificationKey of meta.verification.yandex) {
str += Meta("yandex-verification", verificationKey);
}
}
if (meta.verification.me) {
for (var verificationKey of meta.verification.me) {
str += Meta("me", verificationKey);
}
}
if (meta.verification.other) {
for (
var [verificationKey2, values] of Object.entries(
meta.verification.other,
)
) {
for (var value of values) {
str += Meta(verificationKey2, value);
}
}
}
}
// <AppleWebAppMeta />
if (meta.appleWebApp) {
const { capable, title, startupImage, statusBarStyle } = meta.appleWebApp;
if (capable) {
str += '<meta name="apple-mobile-web-app-capable" content="yes" />';
}
if (title) str += Meta("apple-mobile-web-app-title", title);
if (startupImage) {
for (const img of startupImage) {
str += !img.media
? Link("apple-touch-startup-image", img.url)
: LinkMedia("apple-touch-startup-image", img.url, img.media);
}
}
if (statusBarStyle) {
str += Meta("apple-mobile-web-app-status-bar-style", statusBarStyle);
}
}
// <OpenGraphMetadata />
if (meta.openGraph) {
const og = meta.openGraph;
if (og.determiner) str += MetaProp("og:determiner", og.determiner);
if (og.title?.absolute) str += MetaProp("og:title", og.title.absolute);
if (og.description) str += MetaProp("og:description", og.description);
if (og.url) str += MetaProp("og:url", og.url.toString());
if (og.siteName) str += MetaProp("og:site_name", og.siteName);
if (og.locale) str += MetaProp("og:locale", og.locale);
if (og.countryName) str += MetaProp("og:country_name", og.countryName);
if (og.ttl) str += MetaProp("og:ttl", og.ttl);
if (og.images) {
for (const item of og.images) {
str += ExtendMeta("og:image", item);
}
}
if (og.videos) {
for (const item of og.videos) {
str += ExtendMeta("og:video", item);
}
}
if (og.audio) {
for (const item of og.audio) {
str += ExtendMeta("og:audio", item);
}
}
if (og.emails) {
for (const item of og.emails) {
str += ExtendMeta("og:email", item);
}
}
if (og.phoneNumbers) {
for (const item of og.phoneNumbers) {
str += MetaProp("og:phone_number", item);
}
}
if (og.faxNumbers) {
for (const item of og.faxNumbers) {
str += MetaProp("og:fax_number", item);
}
}
if (og.alternateLocale) {
for (const item of og.alternateLocale) {
str += MetaProp("og:locale:alternate", item);
}
}
if ("type" in og) {
str += MetaProp("og:type", og.type);
switch (og.type) {
case "website":
break;
case "article":
if (og.publishedTime) {
str += MetaProp("article:published_time", og.publishedTime);
}
if (og.modifiedTime) {
str += MetaProp("article:modified_time", og.modifiedTime);
}
if (og.expirationTime) {
str += MetaProp("article:expiration_time", og.expirationTime);
}
if (og.authors) {
for (const item of og.authors) {
str += MetaProp("article:author", item);
}
}
if (og.section) str += MetaProp("article:section", og.section);
if (og.tags) {
for (const item of og.tags) {
str += MetaProp("article:tag", item);
}
}
break;
case "book":
if (og.isbn) str += MetaProp("book:isbn", og.isbn);
if (og.releaseDate) {
str += MetaProp("book:release_date", og.releaseDate);
}
if (og.authors) {
for (const item of og.authors) {
str += MetaProp("article:author", item);
}
}
if (og.tags) {
for (const item of og.tags) {
str += MetaProp("article:tag", item);
}
}
break;
case "profile":
if (og.firstName) str += MetaProp("profile:first_name", og.firstName);
if (og.lastName) str += MetaProp("profile:last_name", og.lastName);
          if (og.username) str += MetaProp("profile:username", og.username);
          if (og.gender) str += MetaProp("profile:gender", og.gender);
break;
case "music.song":
if (og.duration) str += MetaProp("music:duration", og.duration);
if (og.albums) {
for (const item of og.albums) {
str += ExtendMeta("music:albums", item);
}
}
if (og.musicians) {
for (const item of og.musicians) {
str += MetaProp("music:musician", item);
}
}
break;
case "music.album":
if (og.songs) {
for (const item of og.songs) {
str += ExtendMeta("music:song", item);
}
}
if (og.musicians) {
for (const item of og.musicians) {
str += MetaProp("music:musician", item);
}
}
if (og.releaseDate) {
str += MetaProp("music:release_date", og.releaseDate);
}
break;
case "music.playlist":
if (og.songs) {
for (const item of og.songs) {
str += ExtendMeta("music:song", item);
}
}
if (og.creators) {
for (const item of og.creators) {
str += MetaProp("music:creator", item);
}
}
break;
case "music.radio_station":
if (og.creators) {
for (const item of og.creators) {
str += MetaProp("music:creator", item);
}
}
break;
case "video.movie":
if (og.actors) {
for (const item of og.actors) {
str += ExtendMeta("video:actor", item);
}
}
if (og.directors) {
for (const item of og.directors) {
str += MetaProp("video:director", item);
}
}
if (og.writers) {
for (const item of og.writers) {
str += MetaProp("video:writer", item);
}
}
if (og.duration) str += MetaProp("video:duration", og.duration);
if (og.releaseDate) {
str += MetaProp("video:release_date", og.releaseDate);
}
if (og.tags) {
for (const item of og.tags) {
str += MetaProp("video:tag", item);
}
}
break;
case "video.episode":
if (og.actors) {
for (const item of og.actors) {
str += ExtendMeta("video:actor", item);
}
}
if (og.directors) {
for (const item of og.directors) {
str += MetaProp("video:director", item);
}
}
if (og.writers) {
for (const item of og.writers) {
str += MetaProp("video:writer", item);
}
}
if (og.duration) str += MetaProp("video:duration", og.duration);
if (og.releaseDate) {
str += MetaProp("video:release_date", og.releaseDate);
}
if (og.tags) {
for (const item of og.tags) {
str += MetaProp("video:tag", item);
}
}
if (og.series) str += MetaProp("video:series", og.series);
break;
case "video.other":
case "video.tv_show":
default:
throw new Error("Invalid OpenGraph type: " + og.type);
}
}
}
// <TwitterMetadata />
if (meta.twitter) {
const twitter = meta.twitter;
if (twitter.card) str += Meta("twitter:card", twitter.card);
if (twitter.site) str += Meta("twitter:site", twitter.site);
if (twitter.siteId) str += Meta("twitter:site:id", twitter.siteId);
if (twitter.creator) str += Meta("twitter:creator", twitter.creator);
if (twitter.creatorId) str += Meta("twitter:creator:id", twitter.creatorId);
if (twitter.title?.absolute) {
str += Meta("twitter:title", twitter.title.absolute);
}
if (twitter.description) {
str += Meta("twitter:description", twitter.description);
}
if (twitter.images) {
for (const img of twitter.images) {
str += Meta("twitter:image", img.url);
if (img.alt) str += Meta("twitter:image:alt", img.alt);
}
}
if (twitter.card === "player") {
for (const player of twitter.players) {
if (player.playerUrl) str += Meta("twitter:player", player.playerUrl);
if (player.streamUrl) {
str += Meta("twitter:player:stream", player.streamUrl);
}
if (player.width) str += Meta("twitter:player:width", player.width);
if (player.height) str += Meta("twitter:player:height", player.height);
}
}
if (twitter.card === "app") {
      for (const type of ["iphone", "ipad", "googleplay"] as const) {
if (twitter.app.id[type]) {
str += Meta(`twitter:app:name:${type}`, twitter.app.name);
str += Meta(`twitter:app:id:${type}`, twitter.app.id[type]);
}
if (twitter.app.url?.[type]) {
str += Meta(`twitter:app:url:${type}`, twitter.app.url[type]);
}
}
}
}
// <AppLinksMeta />
if (meta.appLinks) {
if (meta.appLinks.ios) {
for (var item of meta.appLinks.ios) {
str += ExtendMeta("al:ios", item);
}
}
if (meta.appLinks.iphone) {
for (var item of meta.appLinks.iphone) {
str += ExtendMeta("al:iphone", item);
}
}
if (meta.appLinks.ipad) {
for (var item of meta.appLinks.ipad) {
str += ExtendMeta("al:ipad", item);
}
}
if (meta.appLinks.android) {
for (var item2 of meta.appLinks.android) {
str += ExtendMeta("al:android", item2);
}
}
if (meta.appLinks.windows_phone) {
for (var item3 of meta.appLinks.windows_phone) {
str += ExtendMeta("al:windows_phone", item3);
}
}
if (meta.appLinks.windows) {
for (var item3 of meta.appLinks.windows) {
str += ExtendMeta("al:windows", item3);
}
}
if (meta.appLinks.windows_universal) {
for (var item4 of meta.appLinks.windows_universal) {
str += ExtendMeta("al:windows_universal", item4);
}
}
if (meta.appLinks.web) {
for (const item of meta.appLinks.web) {
str += ExtendMeta("al:web", item);
}
}
}
// <IconsMetadata />
if (meta.icons) {
if (meta.icons.shortcut) {
for (var icon of meta.icons.shortcut) {
str += IconLink("shortcut icon", icon);
}
}
if (meta.icons.icon) {
for (var icon of meta.icons.icon) {
str += IconLink("icon", icon);
}
}
if (meta.icons.apple) {
for (var icon of meta.icons.apple) {
str += IconLink("apple-touch-icon", icon);
}
}
if (meta.icons.other) {
for (var icon of meta.icons.other) {
str += IconLink(icon.rel ?? "icon", icon);
}
}
}
return str;
}

View file

@ -1,57 +0,0 @@
export type {
AlternateURLs,
ResolvedAlternateURLs,
} from "./nextjs/types/alternative-urls-types";
export type {
AppleImage,
AppleImageDescriptor,
AppleWebApp,
AppLinks,
AppLinksAndroid,
AppLinksApple,
AppLinksWeb,
AppLinksWindows,
FormatDetection,
ItunesApp,
ResolvedAppleWebApp,
ResolvedAppLinks,
Viewport,
} from "./nextjs/types/extra-types";
export type {
Metadata,
ResolvedMetadata,
ResolvingMetadata,
} from "./nextjs/types/metadata-interface";
export type {
AbsoluteString,
AbsoluteTemplateString,
Author,
ColorSchemeEnum,
DefaultTemplateString,
Icon,
IconDescriptor,
Icons,
IconURL,
ReferrerEnum,
ResolvedIcons,
ResolvedRobots,
ResolvedVerification,
Robots,
TemplateString,
ThemeColorDescriptor,
Verification,
} from "./nextjs/types/metadata-types";
export type {
OpenGraph,
OpenGraphType,
ResolvedOpenGraph,
} from "./nextjs/types/opengraph-types";
export type {
FieldResolver,
FieldResolverWithMetadataBase,
} from "./nextjs/types/resolvers";
export type {
ResolvedTwitterMetadata,
Twitter,
TwitterAppDescriptor,
} from "./nextjs/types/twitter-types";

View file

@ -1,13 +0,0 @@
// Extracted from @paperdave/utils/string
declare var Bun: any;
export const escapeHTML: (string: string) => string = typeof Bun !== "undefined"
? Bun.escapeHTML
: (string: string) => {
return string
.replaceAll('"', "&quot;")
.replaceAll("&", "&amp;")
.replaceAll("'", "&#x27;")
.replaceAll("<", "&lt;")
.replaceAll(">", "&gt;");
};

View file

@ -1,30 +0,0 @@
const db = new Map(
fs.readFileSync(path.join(import.meta.dirname, "mime.txt"), "utf8")
.split("\n").filter(Boolean).map((line) =>
line.split(/\s+/) as [string, string]
),
);
/**
* Accepts:
* - Full file path
* - Extension (with or without dot)
*/
export function contentTypeFor(file: string) {
if (file.includes("/") || file.includes("\\")) {
// Some file names are special cased.
switch (path.basename(file)) {
case "rss.xml":
return "application/rss+xml";
}
file = path.extname(file);
}
const dot = file.indexOf(".");
if (dot === -1) file = "." + file;
else if (dot > 0) file = file.slice(dot);
return db.get(file) ?? "application/octet-stream";
}
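// Example usage (the ".png" mapping is assumed to exist in mime.txt):
//   contentTypeFor("/blog/rss.xml") // "application/rss+xml" (special-cased basename)
//   contentTypeFor("photo.png")     // "image/png"
//   contentTypeFor("png")           // "image/png"
//   contentTypeFor(".weird")        // "application/octet-stream" when unmapped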
import * as fs from "./fs.ts";
import * as path from "node:path";

View file

@ -1,49 +0,0 @@
// Import this file with 'import * as sg from "#sitegen";'
export type ScriptId = string;
export interface SitegenRender {
scripts: Set<ScriptId>;
}
export function initRender(): SitegenRender {
return {
scripts: new Set(),
};
}
export function getRender() {
return ssr.getUserData<SitegenRender>("sitegen", () => {
throw new Error(
"This function can only be used in a page (static or view)",
);
});
}
/** Add a client-side script to the page. */
export function addScript(id: ScriptId) {
const srcFile: string = util.getCallSites()
.find((site) => !site.scriptName.startsWith(import.meta.dirname))!
.scriptName;
const filePath = hot.resolveFrom(srcFile, id);
if (
!filePath.endsWith(".client.ts") &&
!filePath.endsWith(".client.tsx")
) {
throw new Error("addScript must be a .client.ts or .client.tsx");
}
getRender().scripts.add(filePath);
}
export function Script({ src }: { src: ScriptId }) {
if (!src) throw new Error("Missing 'src' attribute");
addScript(src);
return null;
}
export interface Section {
root: string;
}
import * as ssr from "./engine/ssr.ts";
import * as util from "node:util";
import * as hot from "./hot.ts";

View file

@ -1,310 +0,0 @@
// Sitegen! Clover's static site generator, built with love.
function main() {
return withSpinner({
text: "Recovering State",
successText: ({ elapsed }) =>
"sitegen! update in " + elapsed.toFixed(1) + "s",
failureText: () => "sitegen FAIL",
}, sitegen);
}
/**
* A filesystem object associated with some ID,
* such as a page's route to it's source file.
*/
interface FileItem {
id: string;
file: string;
}
async function sitegen(status: Spinner) {
const startTime = performance.now();
let root = path.resolve(import.meta.dirname, "../src");
const join = (...sub: string[]) => path.join(root, ...sub);
const incr = new Incremental();
// Sitegen reviews every defined section for resources to process
const sections: Section[] =
require(path.join(root, "sections.ts")).siteSections;
// Static files are compressed and served as-is.
// - "{section}/static/*.png"
let staticFiles: FileItem[] = [];
// Pages are rendered then served as static files.
// - "{section}/pages/*.marko"
let pages: FileItem[] = [];
// Views are dynamically rendered pages called via backend code.
// - "{section}/views/*.tsx"
let views: FileItem[] = [];
// Public scripts are bundled for the client as static assets under "/js/[...]"
// This is used for the file viewer's canvases.
// Note that '.client.ts' can be placed anywhere in the file structure.
// - "{section}/scripts/*.client.ts"
let scripts: FileItem[] = [];
// 'backend.ts'
const backendFiles = [];
// -- Scan for files --
status.text = "Scanning Project";
for (const section of sections) {
const { root: sectionRoot } = section;
const sectionPath = (...sub) => path.join(sectionRoot, ...sub);
const rootPrefix = root === sectionRoot
? ""
: path.relative(root, sectionRoot) + "/";
const kinds = [
{
dir: sectionPath("pages"),
list: pages,
prefix: "/",
exclude: [".css", ".client.ts", ".client.tsx"],
},
{ dir: sectionPath("static"), list: staticFiles, prefix: "/", ext: true },
{ dir: sectionPath("scripts"), list: scripts, prefix: rootPrefix },
{
dir: sectionPath("views"),
list: views,
prefix: rootPrefix,
exclude: [".css", ".client.ts", ".client.tsx"],
},
];
for (const { dir, list, prefix, exclude = [], ext = false } of kinds) {
const items = fs.readDirRecOptionalSync(dir);
item: for (const item of items) {
if (item.isDirectory()) continue;
for (const e of exclude) {
if (item.name.endsWith(e)) continue item;
}
const file = path.relative(dir, item.parentPath + "/" + item.name);
const trim = ext
? file
: file.slice(0, -path.extname(file).length).replaceAll(".", "/");
let id = prefix + trim.replaceAll("\\", "/");
if (prefix === "/" && id.endsWith("/index")) {
id = id.slice(0, -"/index".length) || "/";
}
list.push({ id, file: path.join(item.parentPath, item.name) });
}
}
let backendFile = [
sectionPath("backend.ts"),
sectionPath("backend.tsx"),
].find((file) => fs.existsSync(file));
if (backendFile) backendFiles.push(backendFile);
}
scripts = scripts.filter(({ file }) => !file.match(/\.client\.[tj]sx?/));
const globalCssPath = join("global.css");
// TODO: invalidate incremental resources
// -- server side render --
status.text = "Building";
const cssOnce = new OnceMap();
const cssQueue = new Queue<[string, string[], css.Theme], string>({
name: "Bundle",
fn: ([, files, theme]) => css.bundleCssFiles(files, theme),
passive: true,
getItemText: ([id]) => id,
maxJobs: 2,
});
interface RenderResult {
body: string;
head: string;
inlineCss: string;
scriptFiles: string[];
item: FileItem;
}
const renderResults: RenderResult[] = [];
async function loadPageModule({ file }: FileItem) {
require(file);
}
async function renderPage(item: FileItem) {
// -- load and validate module --
let { default: Page, meta: metadata, theme: pageTheme, layout } = require(
item.file,
);
if (!Page) throw new Error("Page is missing a 'default' export.");
if (!metadata) {
throw new Error("Page is missing 'meta' export with a title.");
}
if (layout?.theme) pageTheme = layout.theme;
const theme = {
bg: "#fff",
fg: "#050505",
primary: "#2e7dab",
...pageTheme,
};
// -- metadata --
const renderedMetaPromise = Promise.resolve(
typeof metadata === "function" ? metadata({ ssr: true }) : metadata,
).then((m) => meta.resolveAndRenderMetadata(m));
// -- css --
const cssImports = [globalCssPath, ...hot.getCssImports(item.file)];
const cssPromise = cssOnce.get(
cssImports.join(":") + JSON.stringify(theme),
() => cssQueue.add([item.id, cssImports, theme]),
);
// -- html --
const sitegenApi = sg.initRender();
const bodyPromise = await ssr.ssrAsync(<Page />, {
sitegen: sitegenApi,
});
const [body, inlineCss, renderedMeta] = await Promise.all([
bodyPromise,
cssPromise,
renderedMetaPromise,
]);
if (!renderedMeta.includes("<title>")) {
throw new Error(
"Page is missing 'meta.title'. " +
"All pages need a title tag.",
);
}
// The script content is not ready, allow another page to Render. The page
// contents will be rebuilt at the end. This is more convenient anyways
// because it means client scripts don't re-render the page.
renderResults.push({
body,
head: renderedMeta,
inlineCss,
scriptFiles: Array.from(sitegenApi.scripts),
item: item,
});
}
// This is done in two passes so that a page that throws during evaluation
// will report "Load Render Module" instead of "Render Static Page".
const spinnerFormat = status.format;
status.format = () => "";
const moduleLoadQueue = new Queue({
name: "Load Render Module",
fn: loadPageModule,
getItemText,
maxJobs: 1,
});
moduleLoadQueue.addMany(pages);
await moduleLoadQueue.done({ method: "stop" });
const pageQueue = new Queue({
name: "Render Static Page",
fn: renderPage,
getItemText,
maxJobs: 2,
});
pageQueue.addMany(pages);
await pageQueue.done({ method: "stop" });
status.format = spinnerFormat;
// -- bundle scripts --
const referencedScripts = Array.from(
new Set(renderResults.flatMap((r) => r.scriptFiles)),
);
const extraPublicScripts = scripts.map((entry) => entry.file);
const uniqueCount = new Set([
...referencedScripts,
...extraPublicScripts,
]).size;
status.text = `Bundle ${uniqueCount} Scripts`;
await bundle.bundleClientJavaScript(
referencedScripts,
extraPublicScripts,
incr,
);
// -- copy/compress static files --
async function doStaticFile(item: FileItem) {
const body = await fs.readFile(item.file);
incr.putAsset({
srcId: "static:" + item.file,
key: item.id,
body,
});
}
const staticQueue = new Queue({
name: "Load Static",
fn: doStaticFile,
getItemText,
maxJobs: 16,
});
status.format = () => "";
staticQueue.addMany(staticFiles);
await staticQueue.done({ method: "stop" });
status.format = spinnerFormat;
// -- concatenate static rendered pages --
status.text = `Concat ${renderResults.length} Pages`;
await Promise.all(
renderResults.map(
async ({ item: page, body, head, inlineCss, scriptFiles }) => {
const doc = wrapDocument({
body,
head,
inlineCss,
scripts: scriptFiles.map(
(id) =>
UNWRAP(
incr.out.script.get(
path.basename(id).replace(/\.client\.[jt]sx?$/, ""),
),
),
).map((x) => `{${x}}`).join("\n"),
});
incr.putAsset({
srcId: "page:" + page.file,
key: page.id,
body: doc,
headers: {
"Content-Type": "text/html",
},
});
},
),
);
status.format = () => "";
status.text = ``;
// This will wait for all compression jobs to finish, which up
// to this point have been left as dangling promises.
await incr.wait();
// Flush the site to disk.
status.format = spinnerFormat;
status.text = `Incremental Flush`;
incr.flush();
incr.serializeToDisk(); // Allows picking up this state again
return { elapsed: (performance.now() - startTime) / 1000 };
}
function getItemText({ file }: FileItem) {
return path.relative(hot.projectSrc, file).replaceAll("\\", "/");
}
function wrapDocument({
body,
head,
inlineCss,
scripts,
}: {
head: string;
body: string;
inlineCss: string;
scripts: string;
}) {
return `<!doctype html><head>${head}${
inlineCss ? `<style>${inlineCss}</style>` : ""
}</head><body>${body}${scripts ? `<script>${scripts}</script>` : ""}</body>`;
}
import type { Section } from "./sitegen-lib.ts";
import { OnceMap, Queue } from "./queue.ts";
import { Incremental } from "./incremental.ts";
import * as bundle from "./bundle.ts";
import * as css from "./css.ts";
import * as fs from "./fs.ts";
import { Spinner, withSpinner } from "@paperclover/console/Spinner";
import * as meta from "./meta";
import * as ssr from "./engine/ssr.ts";
import * as sg from "./sitegen-lib.ts";
import * as hot from "./hot.ts";
import * as path from "node:path";

View file

@ -0,0 +1,4 @@
<div meow=null />
<div>
wait(${null})
</div>

View file

@ -0,0 +1,6 @@
import Component from './Component.marko';
<h1>web page</h1>
<if=!false>
<Component=null/>
</>

198
framework/watch.ts Normal file
View file

@ -0,0 +1,198 @@
// File watcher and live reloading site generator
const debounceMilliseconds = 25;
export async function main() {
let subprocess: child_process.ChildProcess | null = null;
// Catch up state by running a main build.
const { incr } = await generate.main();
// ...and watch the files that cause invals.
const watch = new Watch(rebuild);
watch.add(...incr.invals.keys());
statusLine();
// ... and then serve it!
serve();
function serve() {
if (subprocess) {
subprocess.removeListener("close", onSubprocessClose);
subprocess.kill();
}
subprocess = child_process.fork(".clover/out/server.js", [
"--development",
], {
stdio: "inherit",
});
subprocess.on("close", onSubprocessClose);
}
function onSubprocessClose(code: number | null, signal: string | null) {
subprocess = null;
const status = code != null ? `code ${code}` : `signal ${signal}`;
console.error(`Backend process exited with ${status}`);
}
process.on("beforeExit", () => {
subprocess?.removeListener("close", onSubprocessClose);
});
function rebuild(files: string[]) {
files = files.map((file) => path.relative(hot.projectRoot, file));
const changed: string[] = [];
for (const file of files) {
let mtimeMs: number | null = null;
try {
mtimeMs = fs.statSync(file).mtimeMs;
} catch (err: any) {
if (err?.code !== "ENOENT") throw err;
}
if (incr.updateStat(file, mtimeMs)) changed.push(file);
}
if (changed.length === 0) {
console.warn("Files were modified but the 'modify' time did not change.");
return;
}
withSpinner<any, Awaited<ReturnType<typeof generate.sitegen>>>({
text: "Rebuilding",
successText: generate.successText,
failureText: () => "sitegen FAIL",
}, async (spinner) => {
console.info("---");
console.info(
"Updated" +
(changed.length === 1
? " " + changed[0]
: changed.map((file) => "\n- " + file).join("")),
);
const result = await generate.sitegen(spinner, incr);
incr.toDisk(); // Allows picking up this state again
for (const file of watch.files) {
const relative = path.relative(hot.projectRoot, file);
if (!incr.invals.has(relative)) watch.remove(file);
}
return result;
}).then((result) => {
// Restart the server if it was changed or not running.
if (
!subprocess ||
result.inserted.some(({ kind }) => kind === "backendReplace")
) {
serve();
} else if (
subprocess &&
result.inserted.some(({ kind }) => kind === "asset")
) {
subprocess.send({ type: "clover.assets.reload" });
}
return result;
}).catch((err) => {
console.error(util.inspect(err));
}).finally(statusLine);
}
function statusLine() {
console.info(
`Watching ${incr.invals.size} files \x1b[36m[last change: ${
new Date().toLocaleTimeString()
}]\x1b[39m`,
);
}
}
class Watch {
files = new Set<string>();
stale = new Set<string>();
onChange: (files: string[]) => void;
watchers: fs.FSWatcher[] = [];
/** Has a trailing slash */
roots: string[] = [];
debounce: ReturnType<typeof setTimeout> | null = null;
constructor(onChange: Watch["onChange"]) {
this.onChange = onChange;
}
add(...files: string[]) {
const { roots, watchers } = this;
let newRoots: string[] = [];
for (let file of files) {
file = path.resolve(file);
if (this.files.has(file)) continue;
this.files.add(file);
// Find an existing watcher
if (roots.some((root) => file.startsWith(root))) continue;
if (newRoots.some((root) => file.startsWith(root))) continue;
newRoots.push(path.dirname(file) + path.sep);
}
if (newRoots.length === 0) return;
// Filter out directories that are already specified
newRoots = newRoots
.sort((a, b) => a.length - b.length)
.filter((dir, i, a) => {
for (let j = 0; j < i; j++) if (dir.startsWith(a[j])) return false;
return true;
});
// Append Watches
let i = roots.length;
for (const root of newRoots) {
this.watchers.push(fs.watch(
root,
{ recursive: true, encoding: "utf-8" },
this.#handleEvent.bind(this, root),
));
this.roots.push(root);
}
// If any new roots shadow over an old one, delete it!
while (i > 0) {
i -= 1;
const root = roots[i];
if (newRoots.some((newRoot) => root.startsWith(newRoot))) {
watchers.splice(i, 1)[0].close();
roots.splice(i, 1);
}
}
}
remove(...files: string[]) {
for (const file of files) this.files.delete(path.resolve(file));
// Find watches that are covering no files
const { roots, watchers } = this;
const existingFiles = Array.from(this.files);
let i = roots.length;
while (i > 0) {
i -= 1;
const root = roots[i];
if (!existingFiles.some((file) => file.startsWith(root))) {
watchers.splice(i, 1)[0].close();
roots.splice(i, 1);
}
}
}
stop() {
for (const w of this.watchers) w.close();
}
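// fs.watch fires rapid bursts of events; collect the changed paths and flush
// them to onChange only after things settle for `debounceMilliseconds`.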
#handleEvent(root: string, event: fs.WatchEventType, subPath: string | null) {
if (!subPath) return;
const file = path.join(root, subPath);
if (!this.files.has(file)) return;
this.stale.add(file);
const { debounce } = this;
if (debounce !== null) clearTimeout(debounce);
this.debounce = setTimeout(() => {
this.debounce = null;
this.onChange(Array.from(this.stale));
this.stale.clear();
}, debounceMilliseconds);
}
}
import * as fs from "node:fs";
import { withSpinner } from "@paperclover/console/Spinner";
import * as generate from "./generate.ts";
import * as path from "node:path";
import * as util from "node:util";
import * as hot from "./hot.ts";
import * as child_process from "node:child_process";

2187
package-lock.json generated

File diff suppressed because it is too large Load diff

View file

@ -1,34 +1,45 @@
{
"private": true,
"type": "module",
"dependencies": {
"@hono/node-server": "^1.14.3",
"@mdx-js/mdx": "^3.1.0",
"@paperclover/console": "git+https://git.paperclover.net/clo/console.git",
"blurhash": "^2.0.5",
"codemirror": "^6.0.1",
"devalue": "^5.1.1",
"esbuild": "^0.25.5",
"hls.js": "^1.6.5",
"hono": "^4.7.11",
"marko": "^6.0.20"
"marko": "^6.0.20",
"puppeteer": "^24.10.1",
"sharp": "^0.34.2",
"unique-names-generator": "^4.7.1",
"vscode-oniguruma": "^2.0.1",
"vscode-textmate": "^9.2.0"
},
"devDependencies": {
"@types/node": "^22.15.29",
"@types/node": "^24.0.10",
"typescript": "^5.8.3"
},
"imports": {
"#sitegen": "./framework/sitegen-lib.ts",
"#sqlite": "./framework/sqlite.ts",
"#backend": "./src/backend.ts",
"#debug": "./framework/debug.safe.ts",
"#sitegen": "./framework/lib/sitegen.ts",
"#sitegen/*": "./framework/lib/*.ts",
"#ssr": "./framework/engine/ssr.ts",
"#ssr/jsx-dev-runtime": "./framework/engine/jsx-runtime.ts",
"#ssr/jsx-runtime": "./framework/engine/jsx-runtime.ts",
"#ssr/marko": "./framework/engine/marko-runtime.ts",
"#marko/html": {
"development": "marko/debug/html",
"production": "marko/html"
"types": "marko/html",
"production": "marko/production",
"node": "marko/debug/html"
},
"#hono/platform": {
"#hono": "hono",
"#hono/conninfo": {
"bun": "hono/bun",
"deno": "hono/deno",
"node": "@hono/node-server",
"node": "@hono/node-server/conninfo",
"worker": "hono/cloudflare-workers"
}
}

114
readme.md Normal file
View file

@ -0,0 +1,114 @@
# clover sitegen framework
this repository contains clover's "sitegen" framework, which is a set of tools
that assist building websites. these tools power <https://paperclover.net>.
- **HTML "Server Side Rendering" engine written from scratch.** (~500 lines)
- A more practical JSX runtime (`class` instead of `className`, built-in
`clsx`, `html()` helper over `dangerouslySetInnerHTML` prop, etc).
- Integration with [Marko] for concisely written components.
- TODO: MDX-like compiler for content-heavy pages like blogs.
- Different languages can be used at the same time. Supports `async function`
components, `<Suspense />`, and custom extensions.
- **Incremental static site generator and build system.**
- Build entire production site at start, incremental updates when pages
change; Build system state survives coding sessions.
- The only difference between development and production mode is hidden source-maps
and stripped `console.debug` calls. The site you see locally is the same
site you see deployed.
- (TODO) Tests, Lints, and Type-checking are run alongside, and only re-run
checks when the files change. For example, changing a component re-tests
only pages that use that component and re-lints only the changed file.
- **Integrated libraries for building complex, content heavy web sites.**
- Static asset serving with ETag and build-time compression.
- Dynamically rendered pages with a static client (`import "#sitegen/view"`; see the sketch after this list).
- Databases with a typed SQLite wrapper. (`import "#sitegen/sqlite"`)
- TODO: Meta and Open Graph generation. (`export const meta`)
- TODO: Font subsetting tools to reduce bytes downloaded by fonts.
- **Built on the battle-tested Node.js runtime.**
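As a taste of the view API, here is a minimal sketch of a backend route serving a dynamically rendered page. It is modeled on how the file viewer's backend wires things up later in this diff; the section, route, and input names here are hypothetical.
```ts
// src/my-section/backend.ts (hypothetical section)
export const app = new Hono();

// Renders "src/my-section/views/hello.tsx" on demand and returns the HTML
// response, including any client scripts the view registered.
app.get("/hello", async (c) => {
  return renderView(c, "my-section/hello", { name: "clover" });
});

// imports at the end, as is the convention in this codebase
import { Hono } from "#hono";
import { renderView } from "#sitegen/view";
```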
None of these tools are complex or revolutionary. Rather, this project is the
sum of many years of experience managing content-heavy websites, and an
example of how other frameworks over-complicate things.
[Marko]: https://next.markojs.com
Included is `src`, which contains `paperclover.net`. Website highlights:
- [Question/Answer board, custom markdown parser and components][q+a].
- [File viewer with fast ui/ux + optimized media streaming][file].
- [Personal, friends-only blog with password protection][friends].
[q+a]: https://paperclover.net/q+a
[file]: https://paperclover.net/file
[friends]: https://paperclover.net/friends
## Development
minimum system requirements:
- a cpu with at least 1 core.
- random access memory.
- windows 7 or later, macos, or other operating system.
my development machine, for example, is a Dell Inspiron 7348 with a Core i7.
npm install
# production generation
node run generate
node .clover/out/server
# "development" watch mode
node run watch
<!-- `repl.js` will open a read-eval-print-loop where plugin state is cached (on my -->
<!-- 2014 dev laptop, startup time is 600-1000ms). every file in `framework` and -->
<!-- `src` besides `hot.ts` can be edited and quickly re-run. for example, to run -->
<!-- `framework/generate.ts`, you can type "generate" into the shell. since -->
<!-- top-level await is not supported (plugins are built on `require` as Node has -->
<!-- poor module support), CLIs can include a `main` function, which is executed -->
<!-- when the REPL runs it. -->
for unix systems, the provided `flake.nix` can be used with `nix develop` to
open a shell with all needed system dependencies.
## Deployment
There are two primary server components to be deployed: the web server and the
source of truth server. The latter is a singleton that runs on Clover's NAS,
which holds the full contents of the file storage. The web server pulls data
from the source of truth and renders web pages, and can be duplicated to
multiple cloud hosts without issue.
Deployment of the source of truth can be done with Docker Compose:
services:
backend:
container_name: backend
build:
# this uses loopback to hit the self-hosted git server
context: http://127.0.0.1:3000/clo/sitegen.git
dockerfile: src/source-of-truth.dockerfile
environment:
# configuration
- PORT=43200
- CLOVER_DB=/data
- CLOVER_FILE_RAW=/published
- CLOVER_FILE_DERIVED=/data/derived
- CLOVER_SOT_KEY=... # guards private/unreleased content
ports:
- '43200:43200'
restart: unless-stopped
volumes:
- /mnt/storage1/clover/Documents/Config/paperclover:/data
- /mnt/storage1/clover/Published:/published
Due to caching, one may need to manually purge images via
`docker image rm ix-clover-backend -f` when an update is desired.
TODO: deployment instructions for a web node
## Contributions
No contributions to `src` are accepted, only to `framework`.

12
repl.js
View file

@ -11,24 +11,24 @@ process.stderr.write("Loading...");
const { hot } = await import("./run.js"); // get plugins ready
const { errorAllWidgets } = hot.load("@paperclover/console/Widget");
process.stderr.write("\r" + " ".repeat("Loading...".length) + "\r");
hot.load("node:repl").start({
const repl = hot.load("node:repl").start({
prompt: "% ",
eval(code, _global, _id, done) {
evaluate(code)
.catch((err) => {
// TODO: improve @paperclover/console's ability to print AggregateError
// and errors with extra random properties
console.error(inspect(err));
console.error(util.inspect(err, false, 10, true));
})
.then((result) => done(null, result));
},
ignoreUndefined: true,
//completer,
});
repl.setupHistory(".clover/repl-history.txt", () => {});
setTimeout(() => {
hot.reloadRecursive("./framework/engine/ssr.ts");
hot.reloadRecursive("./framework/bundle.ts");
hot.reloadRecursive("./framework/generate.ts");
}, 100);
async function evaluate(code) {
@ -41,11 +41,11 @@ async function evaluate(code) {
if (code[0] === "=") {
try {
const result = await eval(code[1]);
console.log(inspect(result));
console.info(inspect(result));
} catch (err) {
if (err instanceof SyntaxError) {
const result = await eval("(async() => { return " + code + " })()");
console.log(inspect(result));
console.info(inspect(result));
} else {
throw err;
}

90
run.js
View file

@ -1,55 +1,87 @@
// This file allows using Node.js in combination with
// all available plugins. Usage: "node run <script>"
import * as path from "node:path";
import * as util from "node:util";
import * as zlib from "node:zlib";
import * as url from "node:url";
import process from "node:process";
// Disable experimental warnings (Type Stripping, etc)
{
const { emit: originalEmit } = process;
const warnings = ["ExperimentalWarning"];
process.emit = function (event, error) {
return event === "warning" && warnings.includes(error.name)
? false
: originalEmit.apply(process, arguments);
};
if (!zlib.zstdCompress) {
const brand = process.versions.bun
? `bun ${process.versions.bun}`
: process.versions.deno
? `deno ${process.versions.deno}`
: null;
globalThis.console.error(
`sitegen depends on node.js v24. your runtime is missing features.\n` +
`this is node.js version ${process.version}${
brand ? ` (${brand})` : ""
}\n\n` +
`get node.js --> https://nodejs.org/en/download/current`,
);
process.exit(1);
}
// Disable experimental warnings (Type Stripping, etc)
const { emit: originalEmit } = process;
const warnings = ["ExperimentalWarning"];
process.emit = function (event, error) {
return event === "warning" && warnings.includes(error.name)
? false
: originalEmit.apply(process, arguments);
};
// Init hooks
const hot = await import("./framework/hot.ts");
await import("#debug");
const console = hot.load("@paperclover/console");
globalThis.console.log = console.info;
globalThis.console["log"] = console.info;
globalThis.console.info = console.info;
globalThis.console.warn = console.warn;
globalThis.console.error = console.error;
globalThis.console.debug = console.scoped("dbg");
globalThis.UNWRAP = (t, ...args) => {
if (t == null) {
throw new Error(
args.length > 0 ? util.format(...args) : "UNWRAP(" + t + ")",
);
}
return t;
};
globalThis.ASSERT = (t, ...args) => {
if (!t) {
throw new Error(
args.length > 0 ? util.format(...args) : "Assertion Failed",
);
}
};
// Load with hooks
if (process.argv[1].startsWith(import.meta.filename.slice(0, -".js".length))) {
if (process.argv.length == 2) {
console.error("usage: node run <script> [...args]");
process.exit(1);
}
const file = path.resolve(process.argv[2]);
let found;
for (const dir of ["./", "./src/", "./framework/"]) {
try {
found = hot.resolveFrom(import.meta.filename, dir + process.argv[2]);
break;
} catch (e) {
continue;
}
}
if (!found) {
console.error("Cannot find script: " + process.argv[2]);
process.exit(1);
}
process.argv = [process.argv[0], ...process.argv.slice(2)];
hot.load(file).main?.();
try {
const mod = await hot.load(found);
if (mod.main) mod.main();
else if (mod.default?.fetch) {
const protocol = "http";
const { serve } = hot.load("@hono/node-server");
serve({
fetch: mod.default.fetch,
}, ({ address, port }) => {
if (address === "::") address = "::1";
console.info(url.format({
protocol,
hostname: address,
port,
}));
});
}
} catch (e) {
console.error(util.inspect(e));
}
}
export { hot };

75
src/admin.ts Normal file
View file

@ -0,0 +1,75 @@
const cookieAge = 60 * 60 * 24 * 365; // 1 year
let lastKnownToken: string | null = null;
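// The token is generated by `main()` into .clover/admin-token.txt; cache the
// last accepted value so the file is only re-read when an unknown token is seen.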
function compareToken(token: string) {
if (token === lastKnownToken) return true;
lastKnownToken = fs.readFileSync(".clover/admin-token.txt", "utf8").trim();
return token === lastKnownToken;
}
export async function middleware(c: Context, next: Next) {
if (c.req.path.startsWith("/admin")) {
return adminInner(c, next);
}
return next();
}
export function adminInner(c: Context, next: Next) {
const token = c.req.header("Cookie")?.match(/admin-token=([^;]+)/)?.[1];
if (c.req.path === "/admin/login") {
const key = c.req.query("key");
if (key) {
if (compareToken(key)) {
return c.body(null, 303, {
"Location": "/admin",
"Set-Cookie":
`admin-token=${key}; Path=/; HttpOnly; SameSite=Strict; Max-Age=${cookieAge}`,
});
}
return serveAsset(c, "/admin/login/fail", 403);
}
if (token && compareToken(token)) {
return c.redirect("/admin", 303);
}
if (c.req.method === "POST") {
return serveAsset(c, "/admin/login/fail", 403);
} else {
return serveAsset(c, "/admin/login", 200);
}
}
if (c.req.path === "/admin/logout") {
return c.body(null, 303, {
"Location": "/admin/login",
"Set-Cookie":
`admin-token=; Path=/; HttpOnly; SameSite=Strict; Max-Age=0`,
});
}
if (token && compareToken(token)) {
return next();
}
return c.redirect("/admin/login", 303);
}
export function hasAdminToken(c: Context) {
const token = c.req.header("Cookie")?.match(/admin-token=([^;]+)/)?.[1];
return token && compareToken(token);
}
export async function main() {
const key = crypto.randomUUID();
await fs.writeMkdir(".clover/admin-token.txt", key);
const start = ({
win32: "start",
darwin: "open",
} as Record<string, string>)[process.platform] ?? "xdg-open";
child_process.exec(`${start} http://[::1]:3000/admin/login?key=${key}`);
}
import * as fs from "#sitegen/fs";
import type { Context, Next } from "hono";
import { serveAsset } from "#sitegen/assets";
import * as child_process from "node:child_process";

41
src/backend.ts Normal file
View file

@ -0,0 +1,41 @@
// This is the main file for the backend
const app = new Hono();
const logHttp = scoped("http", { color: "magenta" });
// Middleware
app.use(trimTrailingSlash());
app.use(removeDuplicateSlashes);
app.use(logger((msg) => msg.startsWith("-->") && logHttp(msg.slice(4))));
app.use(admin.middleware);
// Backends
app.route("", require("./q+a/backend.ts").app);
app.route("", require("./file-viewer/backend.tsx").app);
// Asset middleware has least precedence
app.use(assets.middleware);
// Handlers
app.notFound(assets.notFound);
export default app;
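// Collapse duplicate slashes ("/a//b" -> "/a/b") with a permanent redirect,
// preserving the query string.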
async function removeDuplicateSlashes(c: Context, next: Next) {
const path = c.req.path;
if (/\/\/+/.test(path)) {
const normalizedPath = path.replace(/\/\/+/g, "/");
const query = c.req.query();
const queryString = Object.keys(query).length > 0
? "?" + new URLSearchParams(query).toString()
: "";
return c.redirect(normalizedPath + queryString, 301);
}
await next();
}
import { type Context, Hono, type Next } from "#hono";
import { logger } from "hono/logger";
import { trimTrailingSlash } from "hono/trailing-slash";
import * as assets from "#sitegen/assets";
import * as admin from "./admin.ts";
import { scoped } from "@paperclover/console";

View file

@ -0,0 +1,10 @@
export const blog: BlogMeta = {
title: "Marko is the coziest HTML templating language",
desc: "...todo...",
date: "2025-07-08",
draft: true,
};
export const meta = formatBlogMeta(blog);
export * as layout from "@/blog/layout.tsx";

View file

@ -0,0 +1,290 @@
export const blog: BlogMeta = {
title: "Marko is the coziest HTML templating language",
desc: "...todo...",
date: "2025-06-13",
draft: true,
};
export const meta = formatBlogMeta(blog);
export * as layout from "@/blog/layout.tsx";
I've recently been playing around with [Marko][1], and after adding limited support
for it in my website generator, [sitegen][2], I instantly fell in love with how
minimalistic it is in comparison to JSX, Astro components, and Svelte.
## Introduction
If JSX was taking HTML and shoving its syntax into JavaScript, Marko is shoving
JavaScript into HTML. Attributes are JavaScript expressions.
```marko
<div>
// `input` is like props, but given in the top-level scope
<time datetime=input.date.toISOString()>
// Interpolation with JS template string syntax
${formatTimeNicely(input.date)}
</time>
<div>
<a href=`/users/${input.user.id}`>${input.user.name}</a>
</div>
// Capital letter variables for imported components
<MarkdownContent message=input.message />
// Components also can be auto-imported by lowercase.
// This will look upwards for a `tags/` folder containing
// "custom-footer.marko", similar to how Node.js finds
// package names in all upwards `node_modules` folders.
<custom-footer />
</div>
// ESM `import` / `export` just work as expected.
// I prefer my imports at the end, to highlight the markup.
import MarkdownContent from "./MarkdownContent.marko";
import { formatTimeNicely } from "../date-helpers.ts";
```
Tags with the `value` attribute have a shorthand, which is used by the built-in
`<if>` for conditional rendering.
```marko
// Sugar for <input value="string" />
<input="string" />
// and it composes amazingly to the 'if' built-in
<if=input.user>
<UserProfile=input.user />
</if>
```
Tags can also return values into the scope for use in the template using `/`,
such as `<id>` for unique ID generation. This is available to components that
use `<return=output/>`.
```
<id/uniqueId />
<input id=uniqueId type="checkbox" name="allow_trans_rights" />
<label for=uniqueId>click me!</>
// ^ oh, you can also omit the
// closing tag name if you want.
```
It's important that I started with the two forms of "Tag I/O": `=` for input
and `/` for output. With those building blocks, we introduce local variables
with `const`
```
<const/rendered = markdownToHtml(input.value) />
// This is how you insert raw HTML to the document
<inline-html=rendered />
// It supports all of the cozy destructuring syntax JS has
<const/{ id, name } = user />
```
Unlike JSX, when you pass content within a tag (`input.content` instead of
JSX's `children`), instead of it being a JSX element, it is actually a
function. This means that the `for` tag can render the content multiple times.
```
<ul>
<for from=1 to=10>
// Renders a new random number for each iteration.
<li>${Math.random()}</li>
</>
</ul>
```
Since `content` is a function, it can take arguments. This is done with `|`
```
<h1>my friends</h1>
<ul>
// I tend to omit the closing tag names for the built-in control
// flow tags, but I keep them for HTML tags. It's kinda like how
// in JavaScript you just write `}` to close your `if`s and loops.
//
// Anyways <for> also has 'of'
<for|item| of=user.friends>
<li class="friend">${item.name}</li>
</>
// They support the same syntax JavaScript function params allows,
// so you can have destructuring here too, and multiple params.
<for|{ name }, index| of=user.friends>
// By the way you can also use emmet-style class and ID shorthands.
<li.friend>My #${index + 1} friend is ${name}</li>
</>
</ul>
```
Instead of named slots, Marko has attribute tags. These are more powerful than
slots since they are functions, and can also act as sugar for more complicated
attributes.
```
<Layout title="Welcome">
<@header variant="big">
<h1>the next big thing</h1>
</@header>
<p>body text...</p>
</Layout>
// The `input` variable inside of <Layout /> is:
//
// {
// title: "Welcome",
// header: {
// content: /* function rendering "<h1>the next big thing</h1>" */,
// variant: "big",
// },
// content: /* function rendering "<p>body text</p>" */
// }
```
This layout could be implemented as such:
```marko
<main>
<if=input.header />
<const/{ ...headerProps, content }=input.header />
<header ...headerProps>
// Instead of assigning to a variable with a capital letter,
// template interpolation works on tag names. This can also
// be a string to render the native HTML tag of that kind.
<${content} />
</header>
<hr />
</>
<${input.content} />
</main>
```
The last syntax feature missing is calling a tag with parameters. That is done
just like a regular function call, with '('.
```
<Something(item, index) />
```
In fact, attributes can just be sugar over this syntax; _this technically isn't
true but it's close enough for the example_
```
<SpecialButton type="submit" class="red" />
// is equal to
<SpecialButton({ type: "submit", class: "red" }) />
```
All of the above is about how Marko's syntax works, and how it performs HTML
generation with components. Marko also allows interactive components, but an
explanation of that is beyond the scope of this page, mostly since I have not
used it. A brief example of it, modified from their documentation:
```marko
// Reactive variables with <let/> just work...
<let/basicCounter=0 />
<button onClick() { basicCounter += 1 }>${basicCounter}</button>
// ...but a counter is boring.
<let/todos=[
{ id: 0, text: "Learn Marko" },
{ id: 1, text: "Make a Website" },
]/>
// 'by' is like React JSX's "key" property, but it's optional.
<ul><for|todo, i| of=todos by=(todo => todo.id)>
<li.todo>
// this variable remains stable even if the list
// re-orders, because 'by' was specified.
<let/done=false/>
<label>
<span>${todo.text}</span>
// ':=' creates a two-way reactive binding,
// (it passes a callback for `checkedChanged`)
<input type="checkbox" checked:=done />
</label>
<button
title="delete"
disabled=!done
onClick() {
todos = todos.toSpliced(i, 1);
}
> &times; </button>
</li>
</></ul>
// Form example
<let/nextId=2/>
<form onSubmit(e) {
e.preventDefault();
todos = todos.concat({
id: nextId++,
// HTMLFormElement exposes all its named input
// elements as extra properties on the object.
text: e.target.text.value,
});
// And you can clear it with 'reset()'
e.target.reset();
}>
// We don't 'onChange' like a React loser. The form
// value can be read in the submit event like normal.
<input name="text" placeholder="Another Item">
<button type="submit">Add</button>
</form>
```
## Usage on `paperclover.net`
TODO: document a lot of feedback, how i embedded Marko
My website uses statically generated HTML. That is why I have not needed to use
reactive variables. My generator doesn't even try compiling components
client-side.
Here is the actual component used to render [questions on the clover q+a][/q+a].
```marko
// Renders a `Question` entry including its markdown body.
export interface Input {
question: Question;
admin?: boolean;
}
// 2024-12-31 05:00:00 EST
export const transitionDate = 1735639200000;
<const/{ question, admin } = input />
<const/{ id, date, text } = question/>
<${"e-"}
f=(date > transitionDate ? true : undefined)
id=admin ? `q${id}` : undefined
>
<if=admin>
<a
style="margin-right: 0.5rem"
href=`/admin/q+a/${id}`
>[EDIT]</a>
</>
<a>
<time
datetime=formatQuestionISOTimestamp(date)
>${formatQuestionTimestamp(date)}</time>
</a>
<CloverMarkdown ...{ text } />
</>
// this singleton script will make all the '<time>' tags clickable.
client import "./clickable-links.client.ts";
import type { Question } from "@/q+a/models/Question.ts";
import { formatQuestionTimestamp, formatQuestionISOTimestamp } from "@/q+a/format.ts";
import { CloverMarkdown } from "@/q+a/clover-markdown.tsx";
```
import { type BlogMeta, formatBlogMeta } from '@/blog/helpers.ts';

433
src/file-viewer/backend.tsx Normal file
View file

@ -0,0 +1,433 @@
export const app = new Hono();
interface APIDirectoryList {
path: string;
readme: string | null;
files: APIFile[];
}
interface APIFile {
basename: string;
dir: boolean;
time: number;
size: number;
duration: number | null;
}
function checkCotyledonCookie(c: Context) {
const cookie = c.req.header("Cookie");
if (!cookie) return false;
const cookies = cookie.split("; ").map((x) => x.split("="));
return cookies.some(
(kv) => kv[0].trim() === "cotyledon" && kv[1].trim() === "agree",
);
}
function isCotyledonPath(path: string) {
if (path === "/cotyledon") return true;
const year = path.match(/^\/(\d{4})($|\/)/);
if (!year) return false;
const yearInt = parseInt(year[1]);
if (yearInt < 2025 && yearInt >= 2017) return true;
return false;
}
app.post("/file/cotyledon", async (c) => {
c.res = new Response(null, {
status: 200,
headers: {
"Set-Cookie": "cotyledon=agree; Path=/",
},
});
});
app.get("/file/*", async (c, next) => {
const ua = c.req.header("User-Agent")?.toLowerCase() ?? "";
const lofi = ua.includes("msie") || false;
// Discord ignores 'robots.txt' which violates the license agreement.
if (ua.includes("discordbot")) {
return next();
}
let rawFilePath = c.req.path.slice(5) || "/";
if (rawFilePath.endsWith("$partial")) {
return getPartialPage(c, rawFilePath.slice(0, -"$partial".length));
}
let hasCotyledonCookie = checkCotyledonCookie(c);
if (isCotyledonPath(rawFilePath)) {
if (!hasCotyledonCookie) {
return serveAsset(c, "/file/cotyledon_speedbump", 403);
} else if (rawFilePath === "/cotyledon") {
return serveAsset(c, "/file/cotyledon_enterance", 200);
}
}
while (rawFilePath.length > 1 && rawFilePath.endsWith("/")) {
rawFilePath = rawFilePath.slice(0, -1);
}
const file = MediaFile.getByPath(rawFilePath);
if (!file) {
// perhaps a specific 404 page for media files?
return next();
}
const permissions = FilePermissions.getByPrefix(rawFilePath);
if (permissions !== 0) {
const friendAuthChallenge = requireFriendAuth(c);
if (friendAuthChallenge) return friendAuthChallenge;
}
// File listings
if (file.kind === MediaFileKind.directory) {
if (c.req.header("Accept")?.includes("application/json")) {
const json = {
path: file.path,
files: file.getPublicChildren().map((f) => ({
basename: f.basename,
dir: f.kind === MediaFileKind.directory,
time: f.date.getTime(),
size: f.size,
duration: f.duration ? f.duration : null,
})),
readme: file.contents ? file.contents : null,
} satisfies APIDirectoryList;
return c.json(json);
}
c.res = await renderView(c, `file-viewer/${lofi ? "lofi" : "clofi"}`, {
file,
hasCotyledonCookie,
});
return;
}
// Redirect to directory list for regular files if client accepts HTML
let viewMode = c.req.query("view");
if (c.req.query("dl") !== undefined) {
viewMode = "download";
}
if (
viewMode == undefined &&
c.req.header("Accept")?.includes("text/html") &&
!lofi
) {
prefetchFile(file.path);
c.res = await renderView(c, "file-viewer/clofi", {
file,
hasCotyledonCookie,
});
return;
}
const download = viewMode === "download";
const etag = file.hash;
const filePath = file.path;
const expectedSize = file.size;
let encoding = decideEncoding(c.req.header("Accept-Encoding"));
let sizeHeader = encoding === "raw"
? expectedSize
// Size cannot be known because of compression modes
: undefined;
// Etag
{
const ifNoneMatch = c.req.header("If-None-Match");
if (ifNoneMatch && etagMatches(etag, ifNoneMatch)) {
c.res = new Response(null, {
status: 304,
statusText: "Not Modified",
headers: fileHeaders(file, download, sizeHeader),
});
return;
}
}
// Head
if (c.req.method === "HEAD") {
c.res = new Response(null, {
headers: fileHeaders(file, download, sizeHeader),
});
return;
}
// Prevalidate range requests
let rangeHeader = c.req.header("Range") ?? null;
if (rangeHeader) encoding = "raw";
const ifRangeHeader = c.req.header("If-Range");
if (ifRangeHeader && ifRangeOutdated(file, ifRangeHeader)) {
// > If the condition is not fulfilled, the full resource is
// > sent back with a 200 OK status.
rangeHeader = null;
}
let foundFile;
let second = false;
while (true) {
try {
foundFile = await fetchFile(filePath, encoding);
if (second) {
console.warn(`File ${filePath} has missing compression: ${encoding}`);
}
break;
} catch (error) {
if (encoding !== "raw") {
encoding = "raw";
sizeHeader = file.size;
second = true;
continue;
}
return c.text(
"internal server error: this file is present in the database but could not be fetched",
500,
);
}
}
const [streamOrBuffer, actualEncoding, src] = foundFile;
encoding = actualEncoding;
// Range requests
// https://developer.mozilla.org/en-US/docs/Web/HTTP/Range_requests
// Compression is skipped because it's a confusing, but solvable problem.
// See https://stackoverflow.com/questions/33947562/is-it-possible-to-send-http-response-using-gzip-and-byte-ranges-at-the-same-time
if (rangeHeader) {
const ranges = parseRange(rangeHeader, file.size);
// TODO: multiple ranges
if (ranges && ranges.length === 1) {
return (c.res = handleRanges(ranges, file, streamOrBuffer, download));
}
}
// Respond in a streaming fashion
c.res = new Response(streamOrBuffer, {
headers: {
...fileHeaders(file, download, sizeHeader),
...(encoding !== "raw" && {
"Content-Encoding": encoding,
}),
"X-Cache": src,
},
});
});
app.get("/canvas/:script", async (c, next) => {
const script = c.req.param("script");
if (!hasAsset(`/js/canvas/${script}.js`)) {
return next();
}
return renderView(c, "file-viewer/canvas", {
script,
});
});
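// Choose a Content-Encoding from the client's Accept-Encoding header;
// precompressed zstd/gzip variants are preferred, otherwise the raw file is sent.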
function decideEncoding(encodings: string | undefined): CompressionFormat {
if (encodings?.includes("zstd")) return "zstd";
if (encodings?.includes("gzip")) return "gzip";
return "raw";
}
function fileHeaders(
file: MediaFile,
download: boolean,
size: number | undefined = file.size,
) {
return {
Vary: "Accept-Encoding, Accept",
"Content-Type": contentTypeFor(file.path),
"Content-Length": size.toString(),
ETag: file.hash,
"Last-Modified": file.date.toUTCString(),
...(download && {
"Content-Disposition": `attachment; filename="${file.basename}"`,
}),
};
}
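// "If-Range" carries either a quoted ETag or an HTTP date. When it no longer
// matches the file, range handling is skipped and a full 200 response is sent.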
function ifRangeOutdated(file: MediaFile, ifRangeHeader: string) {
// etag
if (ifRangeHeader[0] === '"') {
return ifRangeHeader.slice(1, -1) !== file.hash;
}
// date
return new Date(ifRangeHeader) < file.date;
}
/** The end is inclusive */
type Ranges = Array<[start: number, end: number]>;
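// Parses a "bytes=..." header into inclusive [start, end] pairs. Supports
// suffix ranges ("-500") and open-ended ranges ("500-"); returns null for
// other units, malformed parts, or overlapping ranges.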
function parseRange(rangeHeader: string, fileSize: number): Ranges | null {
const [unit, ranges] = rangeHeader.split("=");
if (unit !== "bytes") return null;
const result: Array<[start: number, end: number]> = [];
const rangeParts = ranges.split(",");
for (const range of rangeParts) {
const split = range.split("-");
if (split.length !== 2) return null;
const [start, end] = split;
if (start === "" && end === "") return null;
const parsedRange: [number, number] = [
start === "" ? fileSize - +end : +start,
end === "" ? fileSize - 1 : +end,
];
result.push(parsedRange);
}
// Validate that ranges do not intersect
result.sort((a, b) => a[0] - b[0]);
for (let i = 1; i < result.length; i++) {
if (result[i][0] <= result[i - 1][1]) {
return null;
}
}
return result;
}
function handleRanges(
ranges: Ranges,
file: MediaFile,
streamOrBuffer: ReadableStream | Buffer,
download: boolean,
): Response {
// TODO: multiple ranges
const rangeSize = ranges.reduce((a, b) => a + (b[1] - b[0] + 1), 0);
const rangeBody = streamOrBuffer instanceof ReadableStream
? applySingleRangeToStream(streamOrBuffer, ranges)
: applyRangesToBuffer(streamOrBuffer, ranges, rangeSize);
return new Response(rangeBody, {
status: 206,
headers: {
...fileHeaders(file, download, rangeSize),
"Content-Range": `bytes ${ranges[0][0]}-${ranges[0][1]}/${file.size}`,
},
});
}
function applyRangesToBuffer(
buffer: Buffer,
ranges: Ranges,
rangeSize: number,
): Uint8Array {
const result = new Uint8Array(rangeSize);
let offset = 0;
for (const [start, end] of ranges) {
result.set(buffer.subarray(start, end + 1), offset);
offset += end - start + 1;
}
return result;
}
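// Forwards only the bytes inside the first range: chunks before `start` are
// skipped, partial chunks are sliced, and the stream closes once `end` is sent.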
function applySingleRangeToStream(
stream: ReadableStream,
ranges: Ranges,
): ReadableStream {
let reader: ReadableStreamDefaultReader<Uint8Array>;
let position = 0;
const [start, end] = ranges[0];
return new ReadableStream({
async start(controller) {
reader = stream.getReader();
try {
while (position <= end) {
const { done, value } = await reader.read();
if (done) {
controller.close();
return;
}
const buffer = new Uint8Array(value);
const bufferStart = position;
const bufferEnd = position + buffer.length - 1;
position += buffer.length;
if (bufferEnd < start) {
continue;
}
if (bufferStart > end) {
break;
}
const sendStart = Math.max(0, start - bufferStart);
const sendEnd = Math.min(buffer.length - 1, end - bufferStart);
if (sendStart <= sendEnd) {
controller.enqueue(buffer.slice(sendStart, sendEnd + 1));
}
}
controller.close();
} catch (error) {
controller.error(error);
} finally {
reader.releaseLock();
}
},
cancel() {
reader?.releaseLock();
},
});
}
function getPartialPage(c: Context, rawFilePath: string) {
if (isCotyledonPath(rawFilePath)) {
if (!checkCotyledonCookie(c)) {
let root = Speedbump();
// Remove the root element, it's created client side!
root = root[2].children as ssr.Element;
const html = ssr.ssrSync(root).text;
c.header("X-Cotyledon", "true");
return c.html(html);
}
}
const file = MediaFile.getByPath(rawFilePath);
const permissions = FilePermissions.getByPrefix(rawFilePath);
if (permissions !== 0) {
const friendAuthChallenge = requireFriendAuth(c);
if (friendAuthChallenge) return friendAuthChallenge;
}
if (rawFilePath.endsWith("/")) {
rawFilePath = rawFilePath.slice(0, -1);
}
if (!file) {
return c.json({ error: "File not found" }, 404);
}
let root = MediaPanel({
file,
isLast: true,
activeFilename: null,
hasCotyledonCookie: rawFilePath === "" && checkCotyledonCookie(c),
});
// Remove the root element, it's created client side!
root = root[2].children as ssr.Element;
const html = ssr.ssrSync(root).text;
return c.html(html);
}
import { type Context, Hono } from "hono";
import * as ssr from "#ssr";
import { etagMatches, hasAsset, serveAsset } from "#sitegen/assets";
import { renderView } from "#sitegen/view";
import { contentTypeFor } from "#sitegen/mime";
import { requireFriendAuth } from "@/friend-auth.ts";
import { MediaFile, MediaFileKind } from "@/file-viewer/models/MediaFile.ts";
import { FilePermissions } from "@/file-viewer/models/FilePermissions.ts";
import { MediaPanel } from "@/file-viewer/views/clofi.tsx";
import { Speedbump } from "@/file-viewer/cotyledon.tsx";
import {
type CompressionFormat,
fetchFile,
prefetchFile,
} from "@/file-viewer/cache.ts";

View file

@ -0,0 +1,785 @@
// The file scanner incrementally updates an sqlite database with file
// stats. Additionally, it runs "processors" on files, which precompute
// expensive data such as running `ffprobe` on all media to get the
// duration.
//
// Processors are also used to derive compressed and optimized assets,
// which is how automatic JXL / AV1 encoding is done. Derived files are
// uploaded to the clover NAS to be pulled by VPS instances for hosting.
//
// This is the third iteration of the scanner, hence its name "scan3";
// Remember that any software you want to be maintainable and high
// quality cannot be written with AI.
const root = path.resolve("/Volumes/clover/Published");
const workDir = path.resolve(".clover/derived");
export async function main() {
const start = performance.now();
const timerSpinner = new Spinner({
text: () =>
`paper clover's scan3 [${
((performance.now() - start) / 1000).toFixed(1)
}s]`,
fps: 10,
});
using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
// Read a directory or file stat and queue up changed files.
using qList = new async.Queue({
name: "Discover Tree",
async fn(absPath: string) {
const stat = await fs.stat(absPath);
const publicPath = toPublicPath(absPath);
const mediaFile = MediaFile.getByPath(publicPath);
if (stat.isDirectory()) {
const items = await fs.readdir(absPath);
qList.addMany(items.map((subPath) => path.join(absPath, subPath)));
if (mediaFile) {
const deleted = mediaFile.getChildren()
.filter((child) => !items.includes(child.basename))
.flatMap((child) =>
child.kind === MediaFileKind.directory
? child.getRecursiveFileChildren()
: child
);
qMeta.addMany(deleted.map((mediaFile) => ({
absPath: path.join(root, mediaFile.path),
publicPath: mediaFile.path,
stat: null,
mediaFile,
})));
}
return;
}
// All processes must be performed again if there is no file.
if (
!mediaFile ||
stat.size !== mediaFile.size ||
stat.mtime.getTime() !== mediaFile.date.getTime()
) {
qMeta.add({ absPath, publicPath, stat, mediaFile });
return;
}
// If the scanners changed, it may mean more processes should be run.
queueProcessors({ absPath, stat, mediaFile });
},
maxJobs: 24,
});
using qMeta = new async.Queue({
name: "Update Metadata",
async fn({ absPath, publicPath, stat, mediaFile }: UpdateMetadataJob) {
if (!stat) {
// File was deleted.
await runUndoProcessors(UNWRAP(mediaFile));
return;
}
// TODO: run scrubLocationMetadata first
const hash = await new Promise<string>((resolve, reject) => {
const reader = fs.createReadStream(absPath);
reader.on("error", reject);
const hasher = crypto.createHash("sha1").setEncoding("hex");
hasher.on("error", reject);
hasher.on("readable", () => resolve(hasher.read()));
reader.pipe(hasher);
});
let date = stat.mtime;
if (
mediaFile &&
mediaFile.date.getTime() < stat.mtime.getTime() &&
(Date.now() - stat.mtime.getTime()) < monthMilliseconds
) {
date = mediaFile.date;
console.warn(
`M-time on ${publicPath} was likely corrupted. ${
formatDate(mediaFile.date)
} -> ${formatDate(stat.mtime)}`,
);
}
mediaFile = MediaFile.createFile({
path: publicPath,
date,
hash,
size: stat.size,
duration: mediaFile?.duration ?? 0,
dimensions: mediaFile?.dimensions ?? "",
contents: mediaFile?.contents ?? "",
});
await queueProcessors({ absPath, stat, mediaFile });
},
getItemText: (job) =>
job.publicPath.slice(1) + (job.stat ? "" : " (deleted)"),
maxJobs: 10,
});
using qProcess = new async.Queue({
name: "Process Contents",
async fn(
{ absPath, stat, mediaFile, processor, index, after }: ProcessJob,
spin,
) {
await processor.run({ absPath, stat, mediaFile, spin });
mediaFile.setProcessed(mediaFile.processed | (1 << (16 + index)));
for (const dependantJob of after) {
ASSERT(dependantJob.needs > 0, `dependantJob.needs > 0, ${dependantJob.needs}`);
dependantJob.needs -= 1;
if (dependantJob.needs == 0) qProcess.add(dependantJob);
}
},
getItemText: ({ mediaFile, processor }) =>
`${mediaFile.path.slice(1)} - ${processor.name}`,
maxJobs: 4,
});
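// Processor state is stored as a compact string: each entry is a
// single-character processor id followed by two characters encoding its
// 16-bit hash.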
function decodeProcessors(input: string) {
return input
.split(";")
.filter(Boolean)
.map(([a, b, c]) => ({
id: a,
hash: (b.charCodeAt(0) << 8) + c.charCodeAt(0),
}));
}
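// mediaFile.processed is a bitfield: the low 16 bits hold a combined hash of
// the applicable processors, and bit (16 + i) marks processor i as completed.
// When the hash changes, completion bits are migrated to the new processor
// order, and previously completed processors that were removed or whose hash
// changed get their `undo` hook run.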
async function queueProcessors(
{ absPath, stat, mediaFile }: Omit<ProcessFileArgs, "spin">,
) {
const ext = mediaFile.extensionNonEmpty.toLowerCase();
let possible = processors.filter((p) =>
p.include ? p.include.has(ext) : !p.exclude?.has(ext)
);
if (possible.length === 0) return;
const hash = possible.reduce((a, b) => a ^ b.hash, 0) | 1;
ASSERT(hash <= 0xFFFF, `${hash.toString(16)} has no bits above 16 set`);
let processed = mediaFile.processed;
// If the hash has changed, migrate the bitfield over.
// This also runs when the processor hash is in its initial 0 state.
const order = decodeProcessors(mediaFile.processors);
if ((processed & 0xFFFF) !== hash) {
const previous = order.filter((_, i) =>
(processed & (1 << (16 + i))) !== 0
);
processed = hash;
for (const { id, hash } of previous) {
const p = processors.find((p) => p.id === id);
if (!p) continue;
const index = possible.indexOf(p);
if (index !== -1 && p.hash === hash) {
processed |= 1 << (16 + index);
} else {
if (p.undo) await p.undo(mediaFile);
}
}
mediaFile.setProcessors(
processed,
possible.map((p) =>
p.id + String.fromCharCode(p.hash >> 8, p.hash & 0xFF)
).join(";"),
);
} else {
possible = order.map(({ id }) =>
UNWRAP(possible.find((p) => p.id === id))
);
}
// Queue needed processors.
const jobs: ProcessJob[] = [];
for (let i = 0, { length } = possible; i < length; i += 1) {
if ((processed & (1 << (16 + i))) === 0) {
const job: ProcessJob = {
absPath,
stat,
mediaFile,
processor: possible[i],
index: i,
after: [],
needs: possible[i].depends.length,
};
jobs.push(job);
if (job.needs === 0) qProcess.add(job);
}
}
for (const job of jobs) {
for (const dependId of job.processor.depends) {
const dependJob = jobs.find((j) => j.processor.id === dependId);
if (dependJob) {
dependJob.after.push(job);
} else {
ASSERT(job.needs > 0, `job.needs !== 0, ${job.needs}`);
job.needs -= 1;
if (job.needs === 0) qProcess.add(job);
}
}
}
}
async function runUndoProcessors(mediaFile: MediaFile) {
const { processed } = mediaFile;
const previous = decodeProcessors(mediaFile.processors)
.filter((_, i) => (processed & (1 << (16 + i))) !== 0);
for (const { id } of previous) {
const p = processors.find((p) => p.id === id);
if (!p) continue;
if (p.undo) {
await p.undo(mediaFile);
}
}
mediaFile.delete();
}
// Add the root & recursively iterate!
qList.add(root);
await qList.done();
await qMeta.done();
await qProcess.done();
// Update directory metadata
const dirs = MediaFile.getDirectoriesToReindex()
.sort((a, b) => b.path.length - a.path.length);
for (const dir of dirs) {
const children = dir.getChildren();
// readme.txt
const readmeContent = children.find((x) =>
x.basename === "readme.txt"
)?.contents ?? "";
// dirsort
let dirsort: string[] | null = null;
const dirSortRaw =
children.find((x) => x.basename === ".dirsort")?.contents ?? "";
if (dirSortRaw) {
dirsort = dirSortRaw.split("\n")
.map((x) => x.trim())
.filter(Boolean);
}
// Permissions
if (children.some((x) => x.basename === ".friends")) {
FilePermissions.setPermissions(dir.path, 1);
} else {
FilePermissions.setPermissions(dir.path, 0);
}
// Recursive stats.
let totalSize = 0;
let newestDate = new Date(0);
let allHashes = "";
for (const child of children) {
totalSize += child.size;
allHashes += child.hash;
if (child.basename !== "/readme.txt" && child.date > newestDate) {
newestDate = child.date;
}
}
const dirHash = crypto.createHash("sha1")
.update(dir.path + allHashes)
.digest("hex");
MediaFile.markDirectoryProcessed({
id: dir.id,
timestamp: newestDate,
contents: readmeContent,
size: totalSize,
hash: dirHash,
dirsort,
});
}
// Sync to remote
if ((await fs.readdir(workDir)).length > 0) {
await rsync.spawn({
args: [
"--links",
"--recursive",
"--times",
"--partial",
"--progress",
"--remove-source-files",
"--delay-updates",
workDir + "/",
"clo@zenith:/mnt/storage1/clover/Documents/Config/clover_file/derived/",
],
title: "Uploading Derived Assets",
cwd: process.cwd(),
});
await fs.removeEmptyDirectories(workDir);
} else {
console.info("No new derived assets");
}
console.info(
"Updated file viewer index in \x1b[1m" +
((performance.now() - start) / 1000).toFixed(1) + "s\x1b[0m",
);
MediaFile.db.prepare("VACUUM").run();
const { duration, count } = MediaFile.db.prepare<
[],
{ count: number; duration: number }
>(`
select
count(*) as count,
sum(duration) as duration
from media_files
`).getNonNull();
console.info();
console.info(
"Global Stats:\n" +
`- File Count: \x1b[1m${count}\x1b[0m\n` +
`- Canonical Size: \x1b[1m${formatSize(MediaFile.getByPath("/")!.size)}\x1b[0m\n` +
`- Media Duration: \x1b[1m${formatDurationLong(duration)}\x1b[0m\n`,
);
}
interface Process {
name: string;
enable?: boolean;
include?: Set<string>;
exclude?: Set<string>;
depends?: string[];
version?: number;
/* Perform an action. */
run(args: ProcessFileArgs): Promise<void>;
/* Should detect if `run` was never even run before undoing state */
undo?(mediaFile: MediaFile): Promise<void>;
}
const execFileRaw = util.promisify(child_process.execFile);
const execFile: typeof execFileRaw = ((
...args: Parameters<typeof execFileRaw>
) =>
execFileRaw(...args).catch((e: any) => {
if (e?.message?.startsWith?.("Command failed")) {
if (e.code > (2 ** 31)) e.code |= 0;
const code = e.signal ? `signal ${e.signal}` : `code ${e.code}`;
e.message = `${e.cmd.split(" ")[0]} failed with ${code}`;
}
throw e;
})) as any;
const ffprobeBin = testProgram("ffprobe", "--help");
const ffmpegBin = testProgram("ffmpeg", "--help");
const ffmpegOptions = [
"-hide_banner",
"-loglevel",
"warning",
];
const procDuration: Process = {
name: "calculate duration",
enable: ffprobeBin !== null,
include: rules.extsDuration,
async run({ absPath, mediaFile }) {
const { stdout } = await execFile(ffprobeBin!, [
"-v",
"error",
"-show_entries",
"format=duration",
"-of",
"default=noprint_wrappers=1:nokey=1",
absPath,
]);
const duration = parseFloat(stdout.trim());
if (Number.isNaN(duration)) {
throw new Error("Could not extract duration from " + stdout);
}
mediaFile.setDuration(Math.ceil(duration));
},
};
// NOTE: Never re-order the processors; their ids and stored 'processed' bits
// are positional. Add new ones at the end.
const procDimensions: Process = {
name: "calculate dimensions",
enable: ffprobeBin != null,
include: rules.extsDimensions,
async run({ absPath, mediaFile }) {
const ext = path.extname(absPath);
let dimensions;
if (ext === ".svg") {
// Parse out of text data
const content = await fs.readFile(absPath, "utf8");
const widthMatch = content.match(/width="(\d+)"/);
const heightMatch = content.match(/height="(\d+)"/);
if (widthMatch && heightMatch) {
dimensions = `${widthMatch[1]}x${heightMatch[1]}`;
}
} else {
// Use ffprobe to observe streams
const { stdout } = await execFile("ffprobe", [
"-v",
"error",
"-select_streams",
"v:0",
"-show_entries",
"stream=width,height",
"-of",
"csv=s=x:p=0",
absPath,
]);
if (stdout.includes("x")) {
dimensions = stdout.trim();
}
}
mediaFile.setDimensions(dimensions ?? "");
},
};
const procLoadTextContents: Process = {
name: "load text content",
include: rules.extsReadContents,
async run({ absPath, mediaFile, stat }) {
if (stat.size > 1_000_000) return;
const text = await fs.readFile(absPath, "utf-8");
console.log({ text });
mediaFile.setContents(text);
},
};
const procHighlightCode: Process = {
name: "highlight source code",
include: new Set(rules.extsCode.keys()),
async run({ absPath, mediaFile, stat }) {
const language = UNWRAP(
rules.extsCode.get(path.extname(absPath).toLowerCase()),
);
// One issue is that .ts is an overloaded extension, shared between
// 'transport stream' and 'typescript'.
//
// Files are skipped (left unhighlighted) when they are:
// - larger than 1 MB, or
// - not valid UTF-8
if (stat.size > 1_000_000) return;
let code;
const buf = await fs.readFile(absPath);
try {
code = new TextDecoder("utf-8", { fatal: true }).decode(buf);
} catch (error) {
mediaFile.setContents("");
return;
}
const content = await highlight.highlightCode(code, language);
mediaFile.setContents(content);
},
};
const procImageSubsets: Process = {
name: "encode image subsets",
include: rules.extsImage,
depends: ["calculate dimensions"],
version: 2,
async run({ absPath, mediaFile, spin }) {
const { width, height } = UNWRAP(mediaFile.parseDimensions());
const targetSizes = transcodeRules.imageSizes.filter((w) => w < width);
const baseStatus = spin.text;
using stack = new DisposableStack();
for (const size of targetSizes) {
const { w, h } = resizeDimensions(width, height, size);
for (const { ext, args } of transcodeRules.imagePresets) {
spin.text = baseStatus +
` (${w}x${h}, ${ext.slice(1).toUpperCase()})`;
stack.use(
await produceAsset(
`${mediaFile.hash}/${size}${ext}`,
async (out) => {
await fs.mkdir(path.dirname(out));
await fs.rm(out, { force: true });
await execFile(ffmpegBin!, [
...ffmpegOptions,
"-i",
absPath,
"-vf",
`scale=${w}:${h}:force_original_aspect_ratio=increase,crop=${w}:${h}`,
...args,
out,
]);
return [out];
},
),
);
}
}
stack.move();
},
async undo(mediaFile) {
const { width } = UNWRAP(mediaFile.parseDimensions());
const targetSizes = transcodeRules.imageSizes.filter((w) => w < width);
for (const size of targetSizes) {
for (const { ext } of transcodeRules.imagePresets) {
unproduceAsset(`${mediaFile.hash}/${size}${ext}`);
}
}
},
};
const qualityMap: Record<string, string> = {
u: "ultra-high",
h: "high",
m: "medium",
l: "low",
d: "data-saving",
};
const procVideos = transcodeRules.videoFormats.map<Process>((preset) => ({
name: `encode ${preset.codec} ${UNWRAP(qualityMap[preset.id[1]])}`,
include: rules.extsVideo,
enable: ffmpegBin != null,
async run({ absPath, mediaFile, spin }) {
if ((mediaFile.duration ?? 0) < 10) return;
await produceAsset(`${mediaFile.hash}/${preset.id}`, async (base) => {
base = path.dirname(base);
await fs.mkdir(base);
let inputArgs = ["-i", absPath];
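// A sidecar "<basename>.json" next to the source can override the ffmpeg
// inputs. Illustrative shape (field values here are hypothetical):
//   { "encoder": { "videoSrc": "frames/%04d.png", "audioSrc": "audio.wav", "rate": 30 } }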
try {
const config = await fs.readJson<any>(
path.join(
path.dirname(absPath),
path.basename(absPath, path.extname(absPath)) + ".json",
),
);
if (config.encoder && typeof config.encoder.videoSrc === "string") {
const { videoSrc, audioSrc, rate } = config.encoder;
inputArgs = [
...rate ? ["-r", String(rate)] : [],
"-i",
videoSrc,
...audioSrc ? ["-i", audioSrc] : [],
];
}
} catch (err: any) {
if (err?.code !== "ENOENT") throw err;
}
const args = transcodeRules.getVideoArgs(
preset,
base,
inputArgs,
);
try {
const fakeProgress = new Progress({ text: spin.text, spinner: null });
fakeProgress.stop();
spin.format = (now: number) => fakeProgress.format(now);
// @ts-expect-error
fakeProgress.redraw = () => spin.redraw();
await ffmpeg.spawn({
ffmpeg: ffmpegBin!,
title: fakeProgress.text,
progress: fakeProgress,
args,
cwd: base,
});
return await collectFiles();
} catch (err) {
for (const file of await collectFiles()) {
try {
await fs.rm(file);
} catch {}
}
throw err;
}
async function collectFiles(): Promise<string[]> {
return (await fs.readdir(base))
.filter((basename) => basename.startsWith(preset.id))
.map((basename) => path.join(base, basename));
}
});
},
}));
const procCompression = [
{ name: "gzip", fn: () => zlib.createGzip({ level: 9 }) },
{ name: "zstd", fn: () => zlib.createZstdCompress() },
].map(({ name, fn }) => ({
name: `compress ${name}`,
exclude: rules.extsPreCompressed,
async run({ absPath, mediaFile }) {
if ((mediaFile.size ?? 0) < 10) return;
await produceAsset(`${mediaFile.hash}/${name}`, async (base) => {
fs.mkdirSync(path.dirname(base));
await stream.promises.pipeline(
fs.createReadStream(absPath),
fn(),
fs.createWriteStream(base),
);
return [base];
});
},
} satisfies Process as Process));
const processors = [
procDimensions,
procDuration,
procLoadTextContents,
procHighlightCode,
procImageSubsets,
...procVideos,
...procCompression,
]
.map((process, id, all) => {
const strIndex = (id: number) =>
String.fromCharCode("a".charCodeAt(0) + id);
return {
...process as Process,
id: strIndex(id),
// Create a unique key: fold the SHA-1 of the run() source (plus version) into 16 bits.
hash: new Uint16Array(
crypto.createHash("sha1")
.update(
process.run.toString() +
(process.version ? String(process.version) : ""),
)
.digest().buffer,
).reduce((a, b) => a ^ b),
depends: (process.depends ?? []).map((depend) => {
const index = all.findIndex((p) => p.name === depend);
if (index === -1) throw new Error(`Cannot find depend '${depend}'`);
if (index === id) throw new Error(`Cannot depend on self: '${depend}'`);
return strIndex(index);
}),
};
});
function resizeDimensions(w: number, h: number, desiredWidth: number) {
ASSERT(desiredWidth < w, `${desiredWidth} < ${w}`);
return { w: desiredWidth, h: Math.floor((h / w) * desiredWidth) };
}
async function produceAsset(
key: string,
builder: (prefix: string) => Promise<string[]>,
) {
const asset = AssetRef.putOrIncrement(key);
try {
if (asset.refs === 1) {
const paths = await builder(path.join(workDir, key));
asset.addFiles(
paths.map((file) =>
path.relative(workDir, file)
.replaceAll("\\", "/")
),
);
}
return {
[Symbol.dispose]: () => asset.unref(),
};
} catch (err: any) {
if (err && typeof err === "object") err.assetKey = key;
asset.unref();
throw err;
}
}
async function unproduceAsset(key: string) {
const ref = AssetRef.get(key);
if (ref) {
ref.unref();
console.log(`unref ${key}`);
// TODO: remove associated files from target
}
}
interface UpdateMetadataJob {
absPath: string;
publicPath: string;
stat: fs.Stats | null;
mediaFile: MediaFile | null;
}
interface ProcessFileArgs {
absPath: string;
stat: fs.Stats;
mediaFile: MediaFile;
spin: Spinner;
}
interface ProcessJob {
absPath: string;
stat: fs.Stats;
mediaFile: MediaFile;
processor: typeof processors[0];
index: number;
after: ProcessJob[];
needs: number;
}
export function skipBasename(basename: string): boolean {
// dot files must be incrementally tracked
if (basename === ".dirsort") return true;
if (basename === ".friends") return true;
return (
basename.startsWith(".") ||
basename.startsWith("._") ||
basename.startsWith(".tmp") ||
basename === ".DS_Store" ||
basename.toLowerCase() === "thumbs.db" ||
basename.toLowerCase() === "desktop.ini"
);
}
export function toPublicPath(absPath: string) {
ASSERT(path.isAbsolute(absPath), "non-absolute " + absPath);
if (absPath === root) return "/";
return "/" + path.relative(root, absPath).replaceAll("\\", "/");
}
export function testProgram(name: string, helpArgument: string) {
try {
const result = child_process.spawnSync(name, [helpArgument]);
if (result.error) throw result.error;
return name;
} catch (err) {
console.warn(`Missing or corrupt executable '${name}'`);
}
return null;
}
const monthMilliseconds = 30 * 24 * 60 * 60 * 1000;
import { Progress } from "@paperclover/console/Progress";
import { Spinner } from "@paperclover/console/Spinner";
import * as async from "#sitegen/async";
import * as fs from "#sitegen/fs";
import * as path from "node:path";
import * as zlib from "node:zlib";
import * as child_process from "node:child_process";
import * as util from "node:util";
import * as crypto from "node:crypto";
import * as stream from "node:stream";
import { MediaFile, MediaFileKind } from "@/file-viewer/models/MediaFile.ts";
import { AssetRef } from "@/file-viewer/models/AssetRef.ts";
import { FilePermissions } from "@/file-viewer/models/FilePermissions.ts";
import {
formatDate,
formatDurationLong,
formatSize,
} from "@/file-viewer/format.ts";
import * as rules from "@/file-viewer/rules.ts";
import * as highlight from "@/file-viewer/highlight.ts";
import * as ffmpeg from "@/file-viewer/ffmpeg.ts";
import * as rsync from "@/file-viewer/rsync.ts";
import * as transcodeRules from "@/file-viewer/transcode-rules.ts";

418
src/file-viewer/cache.ts Normal file
View file

@ -0,0 +1,418 @@
import { Agent, get } from "node:https";
import * as fs from "node:fs";
import * as path from "node:path";
import { Buffer } from "node:buffer";
import type { ClientRequest } from "node:http";
import LRUCache from "lru-cache";
import { open } from "node:fs/promises";
import { createHash } from "node:crypto";
import { scoped } from "@paperclover/console";
import { escapeUri } from "./format.ts";
declare const Deno: any;
const sourceOfTruth = "https://nas.paperclover.net:43250";
const caCert = fs.readFileSync("src/file-viewer/cert.pem");
const diskCacheRoot = path.join(import.meta.dirname, "../.clover/filecache/");
const diskCacheMaxSize = 14 * 1024 * 1024 * 1024; // 14GB
const ramCacheMaxSize = 1 * 1024 * 1024 * 1024; // 1GB
const loadInProgress = new Map<
string,
Promise<{ stream: ReadableStream }> | { stream: ReadableStream }
>();
// Disk cache serializes the access times
const diskCacheState: Record<string, [size: number, lastAccess: number]> =
loadDiskCacheState();
const diskCache = new LRUCache<string, number>({
maxSize: diskCacheMaxSize,
ttl: 0,
sizeCalculation: (value) => value,
dispose: (_, key) => {
delete diskCacheState[key];
},
onInsert: (size, key) => {
diskCacheState[key] = [size, Date.now()];
},
});
const ramCache = new LRUCache<string, Buffer>({
maxSize: ramCacheMaxSize,
ttl: 0,
sizeCalculation: (value) => value.byteLength,
});
let diskCacheFlush: NodeJS.Timeout | undefined;
{
// Initialize the disk cache by validating all files exist, and then
// inserting them in most-recently-used-first order. State is repaired pessimistically.
const toDelete = new Set(Object.keys(diskCacheState));
fs.mkdirSync(diskCacheRoot, { recursive: true });
for (
const file of fs.readdirSync(diskCacheRoot, {
recursive: true,
encoding: "utf-8",
})
) {
const key = file.split("/").pop()!;
if (key.length !== 40) continue;
const entry = diskCacheState[key];
if (!entry) {
fs.rmSync(path.join(diskCacheRoot, file), {
recursive: true,
force: true,
});
delete diskCacheState[key];
continue;
}
toDelete.delete(key);
}
for (const key of toDelete) {
delete diskCacheState[key];
}
saveDiskCacheState();
const sorted = Object.keys(diskCacheState).sort((a, b) =>
diskCacheState[b][1] - diskCacheState[a][1]
);
for (const key of sorted) {
diskCache.set(key, diskCacheState[key][0]);
}
}
export type CacheSource = "ram" | "disk" | "miss" | "lan" | "flight";
export type CompressionFormat = "gzip" | "zstd" | "raw";
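// Maps a compression format to the path prefix used when requesting from the
// source-of-truth server, e.g. "zstd" + "/2024/photo.jpg" (hypothetical path)
// becomes "zstd/2024/photo.jpg".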
const compressionFormatMap = {
gzip: "gz",
zstd: "zstd",
raw: "file",
} as const;
const log = scoped("file_cache");
const lanMount = "/Volumes/clover/Published";
const hasLanMount = fs.existsSync(lanMount);
/**
* Fetches a file with the given compression format.
* Uncompressed files are never persisted to disk.
*
* Returns a promise to either:
* - Buffer: the data is from RAM cache
* - ReadableStream: the data is being streamed in from disk/server
*
* Additionally, returns a string indicating the source of the data, for debugging.
*
* Callers must be able to consume both output types.
*/
export async function fetchFile(
pathname: string,
format: CompressionFormat = "raw",
): Promise<
[Buffer | ReadableStream, encoding: CompressionFormat, src: CacheSource]
> {
// 1. Ram cache
const cacheKey = hashKey(`${pathname}:${format}`);
const ramCacheHit = ramCache.get(cacheKey);
if (ramCacheHit) {
log(`ram hit: ${format}${pathname}`);
return [ramCacheHit, format, "ram"];
}
// 2. Tee an existing loading stream.
const inProgress = loadInProgress.get(cacheKey);
if (inProgress) {
const stream = await inProgress;
const [stream1, stream2] = stream.stream.tee();
loadInProgress.set(cacheKey, { stream: stream2 });
log(`in-flight copy: ${format}${pathname}`);
return [stream1, format, "flight"];
}
// 3. Disk cache + Load into ram cache.
if (format !== "raw") {
const diskCacheHit = diskCache.get(cacheKey);
if (diskCacheHit) {
diskCacheState[cacheKey] = [diskCacheHit, Date.now()];
saveDiskCacheStateLater();
log(`disk hit: ${format}/${pathname}`);
return [
startInProgress(
cacheKey,
new ReadableStream({
start: async (controller) => {
const stream = fs.createReadStream(
path.join(diskCacheRoot, cacheKey),
);
const chunks: Buffer[] = [];
stream.on("data", (chunk) => {
controller.enqueue(chunk);
chunks.push(chunk as Buffer);
});
stream.on("end", () => {
controller.close();
ramCache.set(cacheKey, Buffer.concat(chunks));
finishInProgress(cacheKey);
});
stream.on("error", (error) => {
controller.error(error);
});
},
}),
),
format,
"disk",
];
}
}
// 4. Lan Mount (access files that prod may not have)
if (hasLanMount) {
log(`lan hit: ${format}/${pathname}`);
return [
startInProgress(
cacheKey,
new ReadableStream({
start: async (controller) => {
const stream = fs.createReadStream(
path.join(lanMount, pathname),
);
const chunks: Buffer[] = [];
stream.on("data", (chunk) => {
controller.enqueue(chunk);
chunks.push(chunk as Buffer);
});
stream.on("end", () => {
controller.close();
ramCache.set(cacheKey, Buffer.concat(chunks));
finishInProgress(cacheKey);
});
stream.on("error", (error) => {
controller.error(error);
});
},
}),
),
"raw",
"lan",
];
}
// 5. Fetch from server
const url = `${compressionFormatMap[format]}${escapeUri(pathname)}`;
log(`miss: ${format}${pathname}`);
const response = await startInProgress(cacheKey, fetchFileUncached(url));
const [stream1, stream2] = response.tee();
handleDownload(cacheKey, format, stream2);
return [stream1, format, "miss"];
}
export async function prefetchFile(
pathname: string,
format: CompressionFormat = "zstd",
) {
const cacheKey = hashKey(`${pathname}:${format}`);
const ramCacheHit = ramCache.get(cacheKey);
if (ramCacheHit) {
return;
}
if (hasLanMount) return;
const url = `${compressionFormatMap[format]}${escapeUri(pathname)}`;
log(`prefetch: ${format}${pathname}`);
const stream2 = await startInProgress(cacheKey, fetchFileUncached(url));
handleDownload(cacheKey, format, stream2);
}
async function handleDownload(
cacheKey: string,
format: CompressionFormat,
stream2: ReadableStream,
) {
let chunks: Buffer[] = [];
if (format !== "raw") {
const file = await open(path.join(diskCacheRoot, cacheKey), "w");
try {
for await (const chunk of stream2) {
await file.write(chunk);
chunks.push(chunk);
}
} finally {
await file.close();
}
} else {
for await (const chunk of stream2) {
chunks.push(chunk);
}
}
const final = Buffer.concat(chunks);
chunks.length = 0;
ramCache.set(cacheKey, final);
if (format !== "raw") {
diskCache.set(cacheKey, final.byteLength);
}
finishInProgress(cacheKey);
}
function hashKey(key: string): string {
return createHash("sha1").update(key).digest("hex");
}
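// Registers a stream (or a promise of one) as "in flight" for `cacheKey`, so
// concurrent requests for the same key can tee off of it instead of
// re-fetching. Returns the caller's half of the tee; the other half stays in
// `loadInProgress` until finishInProgress() removes it.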
function startInProgress<T extends Promise<ReadableStream> | ReadableStream>(
cacheKey: string,
promise: T,
): T {
if (promise instanceof Promise) {
let resolve2: (stream: { stream: ReadableStream }) => void;
let reject2: (error: Error) => void;
const stream2Promise = new Promise<{ stream: ReadableStream }>(
(resolve, reject) => {
resolve2 = resolve;
reject2 = reject;
},
);
const stream1Promise = new Promise<ReadableStream>((resolve, reject) => {
promise.then((stream) => {
const [stream1, stream2] = stream.tee();
const stream2Obj = { stream: stream2 };
resolve2(stream2Obj);
loadInProgress.set(cacheKey, stream2Obj);
resolve(stream1);
}, reject);
});
loadInProgress.set(cacheKey, stream2Promise);
return stream1Promise as T;
} else {
const [stream1, stream2] = promise.tee();
loadInProgress.set(cacheKey, { stream: stream2 });
return stream1 as T;
}
}
function loadDiskCacheState(): Record<
string,
[size: number, lastAccess: number]
> {
try {
const state = JSON.parse(
fs.readFileSync(path.join(diskCacheRoot, "state.json"), "utf-8"),
);
return state;
} catch (error) {
return {};
}
}
function saveDiskCacheStateLater() {
if (diskCacheFlush) {
return;
}
diskCacheFlush = setTimeout(() => {
saveDiskCacheState();
}, 60_000) as NodeJS.Timeout;
if (diskCacheFlush.unref) {
diskCacheFlush.unref();
}
}
process.on("exit", () => {
saveDiskCacheState();
});
function saveDiskCacheState() {
fs.writeFileSync(
path.join(diskCacheRoot, "state.json"),
JSON.stringify(diskCacheState),
);
}
function finishInProgress(cacheKey: string) {
loadInProgress.delete(cacheKey);
}
// Self signed certificate must be trusted to be able to request the above URL.
//
// Unfortunately, Bun and Deno are not fully node.js compatible here, so those
// two runtimes need fallback implementations. The fallbacks call fetch() with
// the `agent` value as the RequestInit. Since `fetch` decompresses the body
// for you, that behavior must be disabled.
const agent: any = typeof Bun !== "undefined"
? {
// Bun has two non-standard fetch extensions
decompress: false,
tls: {
ca: caCert,
},
}
// TODO: https://github.com/denoland/deno/issues/12291
// : typeof Deno !== "undefined"
// ? {
// // Deno configures through the non-standard `client` extension
// client: Deno.createHttpClient({
// caCerts: [caCert.toString()],
// }),
// }
// Node.js supports node:http
: new Agent({
ca: caCert,
});
function fetchFileNode(pathname: string): Promise<ReadableStream> {
return new Promise((resolve, reject) => {
const request: ClientRequest = get(`${sourceOfTruth}/${pathname}`, {
agent,
});
request.on("response", (response) => {
if (response.statusCode !== 200) {
reject(new Error(`Failed to fetch ${pathname}`));
return;
}
const stream = new ReadableStream({
start(controller) {
response.on("data", (chunk) => {
controller.enqueue(chunk);
});
response.on("end", () => {
controller.close();
});
response.on("error", (error) => {
controller.error(error);
reject(error);
});
},
});
resolve(stream);
});
request.on("error", (error) => {
reject(error);
});
});
}
async function fetchFileDenoBun(pathname: string): Promise<ReadableStream> {
const req = await fetch(`${sourceOfTruth}/${pathname}`, agent);
if (!req.ok) {
throw new Error(`Failed to fetch ${pathname}`);
}
return req.body!;
}
const fetchFileUncached =
typeof Bun !== "undefined" || typeof Deno !== "undefined"
? fetchFileDenoBun
: fetchFileNode;
export async function toBuffer(
stream: ReadableStream | Buffer,
): Promise<Buffer> {
if (!(stream instanceof ReadableStream)) {
return stream;
}
const chunks: Buffer[] = [];
for await (const chunk of stream) {
chunks.push(chunk);
}
return Buffer.concat(chunks);
}

23
src/file-viewer/cert.pem Normal file
View file

@ -0,0 +1,23 @@
-----BEGIN CERTIFICATE-----
MIIDxTCCAq2gAwIBAgIUBaaOXVkkE+6yarNyvzofETb+WLEwDQYJKoZIhvcNAQEL
BQAwdzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVN0YXRlMQ0wCwYDVQQHDARDaXR5
MRUwEwYDVQQKDAxNZWRpYSBTZXJ2ZXIxDzANBgNVBAMMBnplbml0aDEhMB8GCSqG
SIb3DQEJARYSbWVAcGFwZXJjbG92ZXIubmV0MB4XDTI1MDQyNzIxNTU0MFoXDTM1
MDQyNTIxNTU0MFowdzELMAkGA1UEBhMCVVMxDjAMBgNVBAgMBVN0YXRlMQ0wCwYD
VQQHDARDaXR5MRUwEwYDVQQKDAxNZWRpYSBTZXJ2ZXIxDzANBgNVBAMMBnplbml0
aDEhMB8GCSqGSIb3DQEJARYSbWVAcGFwZXJjbG92ZXIubmV0MIIBIjANBgkqhkiG
9w0BAQEFAAOCAQ8AMIIBCgKCAQEAv7lLwx8XwsuTeaIxTsHDL+Lx7eblsJ0XylVm
0/iIJS1Mrq6Be9St6vDWK/BWqqAn+MdqzSfLMy8EKazuHKtbTm2vlUIkjw28SoWP
6cRSCLx4hFGbF4tmRO+Bo+/4PpHPnheeolkjJ+CLO87tZ752D9JzjVND+WIj1QO+
bm+JBIi1TFREPh22/fSZBRpaRgqHcUEhICaiXaufvxQ6eihQfGSe00I7zRzGgnMl
51xjzkKkXd+r/FwTykd8ScJN25FMVDLsfJR59//geAZXYS25gQ4YL6R8u7ijidlS
IoDG8N+Fzw7W4yI+y8fIN4W1x/HsjiQ665CuWY3TMYo98OaGwwIDAQABo0kwRzAm
BgNVHREEHzAdggZ6ZW5pdGiCE25hcy5wYXBlcmNsb3Zlci5uZXQwHQYDVR0OBBYE
FDXkgNsMYZv1Pr+95RCCk7eHACGOMA0GCSqGSIb3DQEBCwUAA4IBAQB6942odKyD
TudifxRXbvcVe9LxSd7NimxRZzM5wTgA5KkxQT4CBM2wEPH/7e7Q/8scB9HbH2uP
f2vixoCM+Z3BWiYHFFk+1pf2myUdiFV2BC9g80txEerRkGLc18V6CdYNJ9wNPkiO
LW/RzXfEv+sqhaXh8dA46Ruz6SAbmscTMMYW4e9VYR+1p4Sm5UpTxrHzeg21YJKn
ud8kO1r7RhVgUGzkAzNaIMiBuJqGGdD5yV7Ng5C/DlJ9AAeYu1diM5LkIKjf+/8M
t/3l4eXS3Lda6+21rDvmfoK4Za6CAhcwgXIpqiRixE2MQNsxZ2XiJBVQHPrh8xYk
L5fq8KTGFwtd
-----END CERTIFICATE-----

View file

@ -0,0 +1,398 @@
// WARNING
// -------
// This file contains spoilers for COTYLEDON
// Consider reading through the entire archive before picking apart this
// code, as this contains the beginning AND the ending sections, which
// contain very precise storytelling. You've been warned...
//
// --> https://paperclover.net/file/cotyledon <--
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
//
// SPEEDBUMP
export function Speedbump() {
return (
<div class="panel last">
<div className="header">
an interlude
</div>
<div className="content file-view file-view-text speedbump">
<canvas
style="linear-gradient(45deg, #111318, #181f20)"
data-canvas="cotyledon"
>
</canvas>
<header>
<h1>cotyledon</h1>
</header>
<div id="captcha" style="display: none;">
<p style="max-width:480px">
please prove you're not a robot by selecting all of the images with
four-leaf clovers, until there are only regular clovers.
<noscript>
this will require javascript enabled on your computer to verify
the mouse clicks.
</noscript>
</p>
<div className="enter-container">
<div className="image-grid">
<button>
<img src="/captcha/1.jpeg" alt="a four-leaf clover" />
</button>
<button>
<img src="/captcha/2.jpeg" alt="a four-leaf clover" />
</button>
<button>
<img src="/captcha/3.jpeg" alt="a four-leaf clover" />
</button>
<button>
<img src="/captcha/4.jpeg" alt="a four-leaf clover" />
</button>
<button>
<img src="/captcha/5.jpeg" alt="a four-leaf clover" />
</button>
<button>
<img src="/captcha/6.jpeg" alt="a four-leaf clover" />
</button>
<button>
<img src="/captcha/7.jpeg" alt="a four-leaf clover" />
</button>
<button>
<img src="/captcha/8.jpeg" alt="a four-leaf clover" />
</button>
<button>
<img src="/captcha/9.jpeg" alt="a four-leaf clover" />
</button>
</div>
</div>
<div class="enter-container">
<button id="enter2">all done</button>
</div>
</div>
<div id="first">
<p>
this place is sacred, but dangerous. i have to keep visitors to an
absolute minimum; you'll get dust on all the artifacts.
</p>
<p>
by entering our museum, you agree not to use your camera. flash off
isn't enough; the bits and bytes are allergic even to a camera's
sensor
</p>
<p style="font-size:0.9rem;">
(in english: please do not store downloads after you're done viewing
them)
</p>
<div class="enter-container">
<button id="enter">break my boundaries</button>
</div>
</div>
</div>
</div>
);
}
// OPENING
export function Readme() {
return (
<div class="panel last">
<div className="header">
cotyledon
</div>
<div className="content file-view file-view-text">
<div style="max-width: 71ch;padding:3rem;font-family:rmo,monospace">
<p style="margin-top:0">
welcome to the archive. if this is your first time here, i recommend
starting in '<a href="/file/2017">2017</a>' and going
chronologically from there. however, there is truly no wrong way to
explore.
</p>
<p>
note that there is a blanket trigger warning for everything in this
archive: while there is nothing visually offensive, some portions of
the text and emotions conveyed through this may hit extremely hard.
you are warned.
</p>
<p>
all file dates are real. at least as real as i could figure out.
when i moved data across drives over my years, i accidentally had a
few points where i stamped over all the dates with the day that
moved the files. even fucked it up a final time in february 2025,
while in the process of unfucking things.
</p>
<p>
thankfully, my past self knew i'd want to assemble this kind of
site, and because of that they were crazy about storing the dates of
things inside of html, json/yaml files, and even in fucking
databases. i'm glad it was all stored though, but jeez what a nerd.
</p>
<p>
a few files were touched up for privacy, or otherwise re-encoded.
some of them i added extra metadata.
</p>
<p>
from the bottom of my heart: i hope you enjoy. it has been a
nightmare putting this all together. technically and emotionally
speaking. i'm glad we can put this all behind us, mark it as
completed, and get started with the good shit.
</p>
<p>
love,<br />clo
</p>
<br />
<p>
start here -&gt; <a href="/file/2017">2017</a>
</p>
</div>
</div>
</div>
);
}
// TRUE ENDING. Written in Apple Notes.
export function ForEveryone() {
return (
<>
<div class="for_everyone">
<p>today is my 21st birthday. april 30th, 2025.</p>
<p>it's been nearly six months starting hormones.</p>
<p>sometimes i feel great,</p>
<p>sometimes i get dysphoria.</p>
<p>with the walls around me gone</p>
<p>that shit hits way harder than it did before.</p>
<p>ugh..</p>
<p>i'm glad the pain i felt is now explained,</p>
<p>but now rendered in high definition.</p>
<p>the smallest strands of hair on my face and belly act</p>
<p>as sharpened nails to pierce my soul.</p>
<p></p>
<p>it's all a pathway to better days; the sun had risen.</p>
<p>one little step at a time for both of us.</p>
<p>today i quit my job. free falling, it feels so weird.</p>
<p>like sky diving.</p>
<p>the only thing i feel is cold wind.</p>
<p>the only thing i see is everything,</p>
<p>and it's beautiful.</p>
<p>i have a month of falling before the parachute activates,</p>
<p>gonna spend as much time of it on art as i can.</p>
<p>that was, after all, my life plan:</p>
<p>i wanted to make art, all the time,</p>
<p>for everyone.</p>
<p></p>
<p>then you see what happened</p>
<p>to the world and the internet.</p>
<p>i never really got to live through that golden age,</p>
<p>it probably sucked back then too.</p>
<p>but now the big sites definitely stopped being fun.</p>
<p>they slide their cold hands up my body</p>
<p>and feel me around. it's unwelcoming, and</p>
<p>inconsiderate to how sensitive my skin is.</p>
<p>i'm so fucking glad i broke up with YouTube</p>
<p>and their devilish friends.</p>
<p>my NAS is at 5 / 24 TB</p>
<p>and probably wont fill for the next decade.</p>
<p></p>
<p>it took 2 months for me to notice my body changed.</p>
<p>that day was really nice, but it hurt a lot.</p>
<p>a sharp, satisfying pain in my chest gave me life.</p>
<p>learned new instincts for my arms</p>
<p>so they'd stop poking my new shape.</p>
<p>when i look at my face</p>
<p>it's like a different person.</p>
<p>she was the same as before, but completely new.</p>
<p>something changed</p>
<p>or i'm now used to seeing what makes me smile.</p>
<p>regardless, whatever i see in the mirror, i smile.</p>
<p>and, i don't hear that old name much anymore</p>
<p>aside from nightmares. and you'll never repeat it, ok?</p>
<p>okay.</p>
<p></p>
<p>been playing 'new canaan' by 'bill wurtz' on loop</p>
<p>in the background.</p>
<p>it kinda just feels right.</p>
<p>especially when that verse near the end comes on.</p>
<p></p>
<p>more people have been allowed to visit me.</p>
<p>my apartment used to be just for me,</p>
<p>but the more i felt like a person</p>
<p>the more i felt like having others over.</p>
<p>still have to decorate and clean it a little,</p>
<p>but it isn't a job to do alone.</p>
<p>we dragged a giant rug across the city one day,</p>
<p>and it felt like anything was possible.</p>
<p>sometimes i have ten people visit in a day,</p>
<p>or sometimes i focus my little eyes on just one.</p>
<p>i never really know what i want to do</p>
<p>until the time actually comes.</p>
<p></p>
{/* FILIP */}
<p>i think about the times i was by the water with you.</p>
<p>the sun setting warmly, icy air fell on our shoulders.</p>
{/* NATALIE */}
<p>and how we walked up to the top of that hill,</p>
<p>you picked up and disposed a nail on the ground,</p>
<p>walking the city thru places i've never been.</p>
{/* BEN */}
<p>or hiking through the park talking about compilers,</p>
<p>tiring me out until i'd fall asleep in your arms.</p>
{/* ELENA */}
<p>and the way you held on to my hand as i woke up,</p>
<p>noticing how i was trying to hide nightmare's tears.</p>
<p></p>
{/* HIGH SCHOOL */}
<p>i remember we were yelling lyrics loudly,</p>
<p>out of key yet cheered on because it was fun.</p>
{/* ADVAITH/NATALIE */}
<p>and when we all toured the big corporate office,</p>
{/* AYU/HARRIS */}
<p>then snuck in to some startup's office after hours;</p>
<p>i don't remember what movie we watched.</p>
{/* COLLEGE, DAY 1 IN EV's ROOM */}
<p>i remember laying on the bunk bed,</p>
<p>while the rest played a card game.</p>
{/* MEGHAN/MORE */}
<p>with us all laying on the rug, staring at the TV</p>
<p>
as the ending twist to {/* SEVERANCE */ "that show"} was revealed.
</p>
<p></p>
<p>all the moments i cherish,</p>
<p>i love because it was always me.</p>
<p>i didn't have to pretend,</p>
<p>even if i didn't know who i was at the time.</p>
<p>you all were there. for me.</p>
<p></p>
<p>i don't want to pretend any more</p>
<p>i want to be myself. for everyone.</p>
<p></p>
<p>oh, the song ended. i thought it was on loop?</p>
<p>it's late... can hear the crickets...</p>
<p>and i can almost see the moon... mmmm...</p>
<p>...nah, too much light pollution.</p>
<p></p>
<p>one day. one day.</p>
<p></p>
<p class="normal">
before i go, i want to show the uncensored version of "journal about a
girl", because i can trust you at least. keep in mind, i think you're
one of the first people to ever see this.
</p>
</div>
<div class="for_everyone" style="max-width:80ch;">
<blockquote>
<p>journal - 2024-09-14</p>
<p>
been at HackMIT today on behalf of the company. it's fun. me and
zack were running around looking for people that might be good
hires. he had this magic arbitrary criteria to tell "oh this person
is probably cracked let's talk to them" and we go to the first one.
they were a nerd, perfect. they seemed to be extremely talented with
some extreme software projects.<br />
okay.. oof... its still clouding my mind<br />
i cant shake that feeling away
</p>
<p>hold on...</p>
<p>
at some point they open one of their profiles to navigate to some
code, and it displays for a couple of seconds: "pronouns: she/they".
i don't actually know anything about this person, but it was my
perception that she is trans. their appearance, physique, and age
felt similar to me, which tends to make people think you are male.
</p>
<p>
but... she was having fun being herself. being a legend of identity
and of her skill in computer science. winning the physics major.
making cool shit at the hackathon, and probably in life. my
perception of her was the exact essence of who i myself wanted to
be. i was jealous of her life.
</p>
<p>
i tried hard to avoid a breakdown. success. but i was feeling
distant. the next hour or so was disorienting, trying not to think
about it too hard. i think there was one possibly interesting person
we talked to. i don't remember any of the other conversations. they
were not important. but i couldn't think through them regardless.
</p>
<p>
later, i decided to read some of her code. i either have a huge
dislike towards the Rust programming language and/or it was not high
quality code. welp, so just is a person studying. my perception was
just a perception, inaccurate but impacting. i know i need to become
myself, whoever that is. otherwise, i'm just going to feel this shit
at higher doses. i think about this every day, and the amount of
time i feel being consumed by these problems only grows.
</p>
<p>
getting through it all is a lonely feeling. not because no one is
around, but because i am isolated emotionally. i know other people
hit these feelings, but we all are too afraid to speak up, and it's
all lonely.
</p>
<p>
waiting on a reply from someone from healthcare. it'll be slow, but
it will be okay.
</p>
</blockquote>
</div>
<div class="for_everyone">
<p class="normal">
i've learned that even when i feel alone, it doesn't have to feel
lonely. i know it's hard, dear. i know it's scary. but i promise it's
possible. we're all in this together. struggling together. sacrificing
together. we dedicate our lives to each of you, and our art for everyone.
</p>
<p
class="normal"
style="font-size:2rem;color:#9C91FF;font-family:times,serif;font-style:italic"
>
and then we knew,<br />
just like paper airplanes: that we could fly...
</p>
<br />
<p class="normal">
<a
href="/"
style="text-decoration:underline;text-underline-offset:0.2em;"
>
fin.
</a>
</p>
</div>
</>
);
}
ForEveryone.class = "text";

165
src/file-viewer/ffmpeg.ts Normal file
View file

@ -0,0 +1,165 @@
// Utilities for spawning ffmpeg and consuming its output as a `Progress`
// A headless parser is available with `Parse`
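// Sketch of headless use (no process spawned; `stderrText` is a hypothetical
// captured ffmpeg log):
//   const parser = new Parse();
//   for (const line of stderrText.split("\n")) {
//     const result = parser.onLine(line);
//     if (result.kind === "progress") {
//       console.log(`${result.frame}/${result.totalFrames}`);
//     }
//   }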
export type Line =
| { kind: "ignore" }
| { kind: "log"; level: "info" | "warn" | "error"; message: string }
| {
kind: "progress";
frame: number;
totalFrames: number;
speed: string | null;
fps: number | null;
rest: Record<string, string>;
};
export const defaultExtraOptions = [
"-hide_banner",
"-stats",
];
export interface SpawnOptions {
args: string[];
title: string;
ffmpeg?: string;
progress?: Progress;
cwd: string;
}
export async function spawn(options: SpawnOptions) {
const { ffmpeg = "ffmpeg", args, title, cwd } = options;
const proc = child_process.spawn(ffmpeg, [...defaultExtraOptions, ...args], {
stdio: ["ignore", "inherit", "pipe"],
env: { ...process.env, SVT_LOG: "2" },
cwd,
});
const parser = new Parse();
const bar = options.progress ?? new Progress({ text: title });
let running = true;
const splitter = readline.createInterface({ input: proc.stderr });
splitter.on("line", (line) => {
const result = parser.onLine(line);
if (result.kind === "ignore") {
return;
} else if (result.kind === "log") {
console[result.level](result.message);
} else if (result.kind === "progress") {
if (!running) return;
const { frame, totalFrames, fps, speed } = result;
bar.value = frame;
bar.total = totalFrames;
const extras = [
`${fps} fps`,
speed,
parser.hlsFile,
].filter(Boolean).join(", ");
bar.text = `${title} ${frame}/${totalFrames} ${
extras.length > 0 ? `(${extras})` : ""
}`;
} else result satisfies never;
});
const [code, signal] = await events.once(proc, "close");
running = false;
if (code !== 0) {
const fmt = code ? `code ${code}` : `signal ${signal}`;
const e: any = new Error(`ffmpeg failed with ${fmt}`);
e.args = [ffmpeg, ...args].join(" ");
e.code = code;
e.signal = signal;
bar.error(e.message);
return e;
}
bar.success(title);
}
export class Parse {
parsingStart = true;
inIndentedIgnore: null | "out" | "inp" | "other" = null;
durationTime = 0;
targetFps: number | null = null;
hlsFile: string | null = null;
durationFrames = 0;
onLine(line: string): Line {
line = line.trimEnd();
if (/^frame=/.test(line)) {
if (this.parsingStart) {
this.parsingStart = false;
this.durationFrames = Math.ceil(
(this.targetFps ?? 25) * this.durationTime,
);
}
const parts = Object.fromEntries(
[...line.matchAll(/\b([a-z0-9]+)=\s*([^ ]+)(?= |$)/ig)].map((
[, k, v],
) => [k, v]),
);
const { frame, fps, speed, ...rest } = parts;
return {
kind: "progress",
frame: Number(frame),
totalFrames: this.durationFrames,
fps: Number(fps),
speed,
rest,
};
}
if (this.parsingStart) {
if (this.inIndentedIgnore) {
if (line.startsWith(" ") || line.startsWith("\t")) {
line = line.trimStart();
if (this.inIndentedIgnore === "inp") {
const match = line.match(/^Duration: (\d+):(\d+):(\d+\.\d+)/);
if (match) {
const [h, m, s] = match.slice(1).map((x) => Number(x));
this.durationTime = Math.max(
this.durationTime,
h * 60 * 60 + m * 60 + s,
);
}
if (!this.targetFps) {
const match = line.match(/^Stream.*, (\d+) fps/);
if (match) this.targetFps = Number(match[1]);
}
}
return { kind: "ignore" };
}
this.inIndentedIgnore = null;
}
if (line === "Press [q] to stop, [?] for help") {
return { kind: "ignore" };
}
if (line === "Stream mapping:") {
this.inIndentedIgnore = "other";
return { kind: "ignore" };
}
if (line.startsWith("Output #") || line.startsWith("Input #")) {
this.inIndentedIgnore = line.slice(0, 3).toLowerCase() as "inp" | "out";
return { kind: "ignore" };
}
}
const hlsMatch = line.match(/^\[hls @ .*Opening '(.+)' for writing/);
if (hlsMatch) {
if (!hlsMatch[1].endsWith(".tmp")) {
this.hlsFile = path.basename(hlsMatch[1]);
}
return { kind: "ignore" };
}
let level: Extract<Line, { kind: "log" }>["level"] = "info";
if (line.toLowerCase().includes("err")) level = "error";
else if (line.toLowerCase().includes("warn")) level = "warn";
return { kind: "log", level, message: line };
}
}
import * as child_process from "node:child_process";
import * as readline from "node:readline";
import * as process from "node:process";
import events from "node:events";
import * as path from "node:path";
import { Progress } from "@paperclover/console/Progress";

288
src/file-viewer/format.ts Normal file
View file

@ -0,0 +1,288 @@
const findDomain = "paperclover.net";
export function formatSize(bytes: number) {
if (bytes < 1024) return `${bytes} bytes`;
if (bytes < 1024 * 1024) return `${(bytes / 1024).toFixed(1)} KB`;
if (bytes < 1024 * 1024 * 1024) {
return `${(bytes / 1024 / 1024).toFixed(1)} MB`;
}
return `${(bytes / 1024 / 1024 / 1024).toFixed(1)} GB`;
}
// export function formatDateDefined(date: Date) {
// // YYYY-MM-DD, format in PST timezone
// return date.toLocaleDateString("sv", { timeZone: "America/Los_Angeles" });
// }
//
// export function formatShortDate(date: Date) {
// // YY-MM-DD, format in PST timezone
// return formatDate(date).slice(2);
// }
export function formatDuration(seconds: number) {
const minutes = Math.floor(seconds / 60);
const remainingSeconds = seconds % 60;
return `${minutes}:${remainingSeconds.toString().padStart(2, "0")}`;
}
export function formatDurationLong(seconds: number) {
const hours = Math.floor(seconds / 3600);
const minutes = Math.floor((seconds % 3600) / 60);
const remainingSeconds = seconds % 60;
return `${hours}:${minutes.toString().padStart(2, "0")}:${remainingSeconds.toString().padStart(2, "0")}`;
}
export function escapeUri(uri: string) {
return encodeURIComponent(uri)
.replace(/%2F/gi, "/")
.replace(/%3A/gi, ":")
.replace(/%2B/gi, "+")
.replace(/%40/gi, "@")
.replace(/%2D/gi, "-")
.replace(/%5F/gi, "_")
.replace(/%2E/gi, ".")
.replace(/%2C/gi, ",");
}
// Returns escaped HTML
// Features:
// - autolink detection
// - via \bpaperclover.net/[a-zA-Z0-9_\.+-]+
// - via \b/file/[a-zA-Z0-9_\.+-]+
// - via \bhttps://...
// - via name of a sibling file's basename
// - reformat (c) into ©
//
// This formatter was written with AI. Then manually fixed since AI does not work.
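//
// Rough example (illustrative): highlightLinksInTextView("see /file/2017/a.txt")
// returns 'see <a href="/file/2017/a.txt">/file/2017/a.txt</a>'.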
export function highlightLinksInTextView(
text: string,
siblingFiles: MediaFile[] = [],
) {
const siblingLookup = Object.fromEntries(
siblingFiles
.filter((f) => f.basename !== "readme.txt")
.map((f) => [f.basename, f]),
);
// First escape the HTML to prevent XSS
let processedText = escapeHtml(text);
// Replace (c) with ©
processedText = processedText.replace(/\(c\)/gi, "©");
// Process all URL patterns in a single pass to avoid nested links
// This regex matches:
// 1. https:// or http:// URLs
// 2. domain URLs without protocol (e.g., paperclover.net/path)
// 3. /file/ URLs
// 4. ./ relative paths
// We'll use a function to determine what kind of URL it is and format accordingly
const urlRegex = new RegExp(
"(" +
// Group 1: https:// or http:// URLs
"\\bhttps?:\\/\\/[a-zA-Z0-9_\\.\\-]+\\.[a-zA-Z0-9_\\.\\-]+[a-zA-Z0-9_\\.\\-\\/\\?=&%+#]*" +
"|" +
// Group 2: domain URLs without protocol
findDomain +
"\\/[a-zA-Z0-9_\\.\\+\\-]+" +
"|" +
// Group 3: /file/ URLs
"\\/file\\/[a-zA-Z0-9_\\.\\+\\-\\/]+" +
")\\b" +
"|" +
// Group 4: ./ relative paths (not word-bounded)
"(?<=\\s|^)\\.\\/[\\w\\-\\.]+",
"g",
);
processedText = processedText.replace(urlRegex, (match: string) => {
// Case 1: https:// or http:// URLs
if (match.startsWith("http")) {
if (match.includes(findDomain)) {
return `<a href="${
match
.replace(/https?:\/\/paperclover\.net\/+/, "/")
.replace(/\/\/+/g, "/")
}">${match}</a>`;
}
return `<a href="${
match.replace(/\/\/+/g, "/")
}" target="_blank" rel="noopener noreferrer">${match}</a>`;
}
// Case 2: domain URLs without protocol
if (match.startsWith(findDomain)) {
return `<a href="${
match.replace(findDomain + "/", "/").replace(/\/\/+/g, "/")
}">${match}</a>`;
}
// Case 3: /file/ URLs
if (match.startsWith("/file/")) {
return `<a href="${match}">${match}</a>`;
}
// Case 4: ./ relative paths
if (match.startsWith("./")) {
const filename = match.substring(2);
const siblingFile = siblingFiles.find((f) => f.basename === filename);
if (siblingFile) {
return `<a href="/file/${siblingFile.path}">${match}</a>`;
}
if (siblingFiles.length > 0) {
const currentDir = siblingFiles[0].path
.split("/")
.slice(0, -1)
.join("/");
return `<a href="/file/${currentDir}/${filename}">${match}</a>`;
}
}
return match;
});
// Match sibling file names (only if they're not already part of a link)
if (siblingFiles.length > 0) {
const escapedBasenames = siblingFiles.map((f) =>
f.basename.replace(/[.*+?^${}()|[\]\\]/g, "\\$&")
);
const pattern = new RegExp(`\\b(${escapedBasenames.join("|")})\\b`, "g");
const parts = processedText.split(/(<[^>]*>)/);
for (let i = 0; i < parts.length; i += 2) {
if (i < parts.length) {
parts[i] = parts[i].replace(pattern, (match: string) => {
const file = siblingLookup[match];
if (file) {
return `<a href="/file/${
file.path.replace(/^\//, "").replace(/\/\/+/g, "/")
}">${match}</a>`;
}
return match;
});
}
}
processedText = parts.join("");
}
return processedText;
}
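// Renders "#mode=convo" text as chat-style paragraphs. Illustrative input
// (speakers are defined with "#<letter>=<name>"; unprefixed lines are
// attributed to "me"):
//   #mode=convo
//   #a=friend
//   a did you see this?
//   yeah, saw it yesterday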
export function highlightConvo(text: string) {
text = text.replace(/^#mode=convo\n/, "");
const lines = text.split("\n");
const paras: { speaker: string | null; lines: string[] }[] = [];
let currentPara: string[] = [];
let currentSpeaker: string | null = null;
let firstSpeaker = null;
const speakers: Record<string, string> = {};
const getSpeaker = (s: string) => {
if (s[1] === " " && speakers[s[0]]) {
return s[0];
}
return null;
};
for (const line of lines) {
let trimmed = line.trim();
if (line.startsWith("#")) {
// parse #X=Y
const [_, speaker, color] = trimmed.match(/^#(.)=(.*)$/)!;
speakers[speaker] = color;
continue;
}
if (trimmed === "") {
continue;
}
let speaker = getSpeaker(trimmed);
if (speaker) {
trimmed = trimmed.substring(speaker.length).trimStart();
speaker = speakers[speaker];
} else {
speaker = "me";
}
trimmed = trimmed.replace(
/\[IMG:(\/file\/[^\]]+)\]/g,
'<img src="$1" alt="attachment" class="convo-img" width="300" />',
);
if (trimmed === "---" && speaker === "me") {
trimmed = "<hr/>";
}
if (speaker === currentSpeaker) {
currentPara.push(trimmed);
} else {
if (currentPara.length > 0) {
paras.push({
speaker: currentSpeaker,
lines: currentPara,
});
currentPara = [];
}
currentPara = [trimmed];
currentSpeaker = speaker;
firstSpeaker ??= speaker;
}
}
if (currentPara.length > 0) {
paras.push({
speaker: currentSpeaker,
lines: currentPara,
});
}
return paras
.map(({ speaker, lines }) => {
return `<div class="s-${speaker}">${
lines
.map((line) => `<div class="line">${line}</div>`)
.join("\n")
}</div>`;
})
.join("\n");
}
export function highlightHashComments(text: string) {
const lines = text.split("\n");
return lines
.map((line) => {
if (line.startsWith("#")) {
return `<div style="color: var(--primary);">${line}</div>`;
}
return `<div>${line.trimEnd() || "&nbsp;"}</div>`;
})
.join("\n");
}
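// Sentinel dates: timestamps before `unknownDate` render as "??.??.??";
// timestamps between the two sentinels render as "xx.xx.<yy>", where the day
// offset from the epoch encodes the two-digit year (21 + days).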
const unknownDate = new Date("1970-01-03");
const unknownDateWithKnownYear = new Date("1970-02-20");
export function formatDate(dateTime: Date) {
return dateTime < unknownDateWithKnownYear
? (
dateTime < unknownDate
? (
"??.??.??"
)
: `xx.xx.${21 + Math.floor(dateTime.getTime() / 86400000)}`
)
: (
`${(dateTime.getMonth() + 1).toString().padStart(2, "0")}.${
dateTime
.getDate()
.toString()
.padStart(2, "0")
}.${dateTime.getFullYear().toString().slice(2)}`
);
}
import type { MediaFile } from "@/file-viewer/models/MediaFile.ts";
import { escapeHtml } from "#ssr";

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1,268 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>fileTypes</key>
<array>
<string>patch</string>
<string>diff</string>
<string>rej</string>
</array>
<key>firstLineMatch</key>
<string>(?x)^
(===\ modified\ file
|==== \s* // .+ \s - \s .+ \s+ ====
|Index:\
|---\ [^%\n]
|\*\*\*.*\d{4}\s*$
|\d+(,\d+)* (a|d|c) \d+(,\d+)* $
|diff\ --git\
|commit\ [0-9a-f]{40}$
)</string>
<key>keyEquivalent</key>
<string>^~D</string>
<key>name</key>
<string>Diff</string>
<key>patterns</key>
<array>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.separator.diff</string>
</dict>
</dict>
<key>match</key>
<string>^((\*{15})|(={67})|(-{3}))$\n?</string>
<key>name</key>
<string>meta.separator.diff</string>
</dict>
<dict>
<key>match</key>
<string>^\d+(,\d+)*(a|d|c)\d+(,\d+)*$\n?</string>
<key>name</key>
<string>meta.diff.range.normal</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>meta.toc-list.line-number.diff</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
</dict>
<key>match</key>
<string>^(@@)\s*(.+?)\s*(@@.*)($\n?)?</string>
<key>name</key>
<string>meta.diff.range.unified</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
<key>6</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
<key>7</key>
<dict>
<key>name</key>
<string>punctuation.definition.range.diff</string>
</dict>
</dict>
<key>match</key>
<string>^(((\-{3}) .+ (\-{4}))|((\*{3}) .+ (\*{4})))$\n?</string>
<key>name</key>
<string>meta.diff.range.context</string>
</dict>
<dict>
<key>match</key>
<string>^diff --git a/.*$\n?</string>
<key>name</key>
<string>meta.diff.header.git</string>
</dict>
<dict>
<key>match</key>
<string>^diff (-|\S+\s+\S+).*$\n?</string>
<key>name</key>
<string>meta.diff.header.command</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.definition.from-file.diff</string>
</dict>
<key>6</key>
<dict>
<key>name</key>
<string>punctuation.definition.from-file.diff</string>
</dict>
<key>7</key>
<dict>
<key>name</key>
<string>punctuation.definition.from-file.diff</string>
</dict>
</dict>
<key>match</key>
<string>(^(((-{3}) .+)|((\*{3}) .+))$\n?|^(={4}) .+(?= - ))</string>
<key>name</key>
<string>meta.diff.header.from-file</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.definition.to-file.diff</string>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.to-file.diff</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.definition.to-file.diff</string>
</dict>
</dict>
<key>match</key>
<string>(^(\+{3}) .+$\n?| (-) .* (={4})$\n?)</string>
<key>name</key>
<string>meta.diff.header.to-file</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.inserted.diff</string>
</dict>
<key>6</key>
<dict>
<key>name</key>
<string>punctuation.definition.inserted.diff</string>
</dict>
</dict>
<key>match</key>
<string>^(((&gt;)( .*)?)|((\+).*))$\n?</string>
<key>name</key>
<string>markup.inserted.diff</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.changed.diff</string>
</dict>
</dict>
<key>match</key>
<string>^(!).*$\n?</string>
<key>name</key>
<string>markup.changed.diff</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.deleted.diff</string>
</dict>
<key>6</key>
<dict>
<key>name</key>
<string>punctuation.definition.deleted.diff</string>
</dict>
</dict>
<key>match</key>
<string>^(((&lt;)( .*)?)|((-).*))$\n?</string>
<key>name</key>
<string>markup.deleted.diff</string>
</dict>
<dict>
<key>begin</key>
<string>^(#)</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.diff</string>
</dict>
</dict>
<key>comment</key>
<string>Git produces unified diffs with embedded comments"</string>
<key>end</key>
<string>\n</string>
<key>name</key>
<string>comment.line.number-sign.diff</string>
</dict>
<dict>
<key>match</key>
<string>^index [0-9a-f]{7,40}\.\.[0-9a-f]{7,40}.*$\n?</string>
<key>name</key>
<string>meta.diff.index.git</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.separator.key-value.diff</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>meta.toc-list.file-name.diff</string>
</dict>
</dict>
<key>match</key>
<string>^Index(:) (.+)$\n?</string>
<key>name</key>
<string>meta.diff.index</string>
</dict>
<dict>
<key>match</key>
<string>^Only in .*: .*$\n?</string>
<key>name</key>
<string>meta.diff.only-in</string>
</dict>
</array>
<key>scopeName</key>
<string>source.diff</string>
<key>uuid</key>
<string>7E848FF4-708E-11D9-97B4-0011242E4184</string>
</dict>
</plist>

View file

@ -0,0 +1,169 @@
<?xml version="1.0" encoding="UTF-8" ?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>uuid</key>
<string>E07EC438-7B75-4437-8AA1-DA94C1E6EACC</string>
<key>patterns</key>
<array>
<dict>
<key>name</key>
<string>keyword.command.dosbatch</string>
<key>match</key>
<string>\b(?i)(?:append|assoc|at|attrib|break|cacls|cd|chcp|chdir|chkdsk|chkntfs|cls|cmd|color|comp|compact|convert|copy|date|del|dir|diskcomp|diskcopy|doskey|echo|endlocal|erase|fc|find|findstr|format|ftype|graftabl|help|keyb|label|md|mkdir|mode|more|move|path|pause|popd|print|prompt|pushd|rd|recover|ren|rename|replace|restore|rmdir|set|setlocal|shift|sort|start|subst|time|title|tree|type|ver|verify|vol|xcopy)\b</string>
</dict>
<dict>
<key>name</key>
<string>keyword.control.statement.dosbatch</string>
<key>match</key>
<string>\b(?i)(?:goto|call|exit)\b</string>
</dict>
<dict>
<key>name</key>
<string>keyword.control.conditional.if.dosbatch</string>
<key>match</key>
<string>\b(?i)if\s+((not)\s+)(exist|defined|errorlevel|cmdextversion)\b</string>
</dict>
<dict>
<key>name</key>
<string>keyword.control.conditional.dosbatch</string>
<key>match</key>
<string>\b(?i)(?:if|else)\b</string>
</dict>
<dict>
<key>name</key>
<string>keyword.control.repeat.dosbatch</string>
<key>match</key>
<string>\b(?i)for\b</string>
</dict>
<dict>
<key>name</key>
<string>keyword.operator.dosbatch</string>
<key>match</key>
<string>\b(?:EQU|NEQ|LSS|LEQ|GTR|GEQ)\b</string>
</dict>
<dict>
<key>name</key>
<string>comment.line.rem.dosbatch</string>
<key>match</key>
<string>\b(?i)rem(?:$|\s.*$)</string>
</dict>
<dict>
<key>name</key>
<string>comment.line.colons.dosbatch</string>
<key>match</key>
<string>\s*:\s*:.*$</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.parameter.function.begin.shell</string>
</dict>
</dict>
<key>name</key>
<string>variable.parameter.function.dosbatch</string>
<key>match</key>
<string>(?i)(%)(~(?:f|d|p|n|x|s|a|t|z|\$[^:]*:)*)?\d</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.parameter.loop.begin.shell</string>
</dict>
</dict>
<key>name</key>
<string>variable.parameter.loop.dosbatch</string>
<key>match</key>
<string>(?i)(%%)(~(?:f|d|p|n|x|s|a|t|z|\$[^:]*:)*)?[a-z]</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.parsetime.begin.shell</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>variable.other.parsetime.end.shell</string>
</dict>
</dict>
<key>name</key>
<string>variable.other.parsetime.dosbatch</string>
<key>match</key>
<string>(%)[^%]+(%)</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.delayed.begin.shell</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>variable.other.delayed.end.shell</string>
</dict>
</dict>
<key>name</key>
<string>variable.other.delayed.dosbatch</string>
<key>match</key>
<string>(!)[^!]+(!)</string>
</dict>
<dict>
<key>begin</key>
<string>"</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.shell</string>
</dict>
</dict>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.shell</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.double.dosbatch</string>
<key>end</key>
<string>"|$</string>
</dict>
<dict>
<key>name</key>
<string>keyword.operator.pipe.dosbatch</string>
<key>match</key>
<string>[|]</string>
</dict>
<dict>
<key>name</key>
<string>keyword.operator.redirect.shell</string>
<key>match</key>
<string>&amp;&gt;|\d*&gt;&amp;\d*|\d*(&gt;&gt;|&gt;|&lt;)|\d*&lt;&amp;|\d*&lt;&gt;</string>
</dict>
</array>
<key>name</key>
<string>Batch File</string>
<key>scopeName</key>
<string>source.dosbatch</string>
<key>fileTypes</key>
<array>
<string>bat</string>
</array>
</dict>
</plist>

View file

@ -0,0 +1,386 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>fileTypes</key>
<array>
<string>json</string>
<string>sublime-settings</string>
<string>sublime-menu</string>
<string>sublime-keymap</string>
<string>sublime-mousemap</string>
<string>sublime-theme</string>
<string>sublime-build</string>
<string>sublime-project</string>
<string>sublime-completions</string>
</array>
<key>foldingStartMarker</key>
<string>(?x) # turn on extended mode
^ # a line beginning with
\s* # some optional space
[{\[] # the start of an object or array
(?! # but not followed by
.* # whatever
[}\]] # and the close of an object or array
,? # an optional comma
\s* # some optional space
$ # at the end of the line
)
| # ...or...
[{\[] # the start of an object or array
\s* # some optional space
$ # at the end of the line</string>
<key>foldingStopMarker</key>
<string>(?x) # turn on extended mode
^ # a line beginning with
\s* # some optional space
[}\]] # and the close of an object or array</string>
<key>keyEquivalent</key>
<string>^~J</string>
<key>name</key>
<string>JSON (Javascript Next)</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#value</string>
</dict>
</array>
<key>repository</key>
<dict>
<key>array</key>
<dict>
<key>begin</key>
<string>\[</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.array.begin.json</string>
</dict>
</dict>
<key>end</key>
<string>\]</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.array.end.json</string>
</dict>
</dict>
<key>name</key>
<string>meta.structure.array.json</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#value</string>
</dict>
<dict>
<key>match</key>
<string>,</string>
<key>name</key>
<string>punctuation.separator.array.json</string>
</dict>
<dict>
<key>match</key>
<string>[^\s\]]</string>
<key>name</key>
<string>invalid.illegal.expected-array-separator.json</string>
</dict>
</array>
</dict>
<key>comments</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>/\*\*(?!/)</string>
<key>captures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.json</string>
</dict>
</dict>
<key>end</key>
<string>\*/</string>
<key>name</key>
<string>comment.block.documentation.json</string>
</dict>
<dict>
<key>begin</key>
<string>/\*</string>
<key>captures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.json</string>
</dict>
</dict>
<key>end</key>
<string>\*/</string>
<key>name</key>
<string>comment.block.json</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.json</string>
</dict>
</dict>
<key>match</key>
<string>(//).*$\n?</string>
<key>name</key>
<string>comment.line.double-slash.js</string>
</dict>
</array>
</dict>
<key>constant</key>
<dict>
<key>match</key>
<string>\b(?:true|false|null)\b</string>
<key>name</key>
<string>constant.language.json</string>
</dict>
<key>number</key>
<dict>
<key>match</key>
<string>(?x) # turn on extended mode
-? # an optional minus
(?:
0 # a zero
| # ...or...
[1-9] # a 1-9 character
\d* # followed by zero or more digits
)
(?:
(?:
\. # a period
\d+ # followed by one or more digits
)?
(?:
[eE] # an e character
                [+-]?            # followed by an optional +/-
\d+ # followed by one or more digits
)? # make exponent optional
)? # make decimal portion optional</string>
<key>name</key>
<string>constant.numeric.json</string>
</dict>
<key>object</key>
<dict>
<key>begin</key>
<string>\{</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.dictionary.begin.json</string>
</dict>
</dict>
<key>end</key>
<string>\}</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.dictionary.end.json</string>
</dict>
</dict>
<key>name</key>
<string>meta.structure.dictionary.json</string>
<key>patterns</key>
<array>
<dict>
<key>comment</key>
<string>the JSON object key</string>
<key>include</key>
<string>#objectkey</string>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
<dict>
<key>begin</key>
<string>:</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.separator.dictionary.key-value.json</string>
</dict>
</dict>
<key>end</key>
<string>(,)|(?=\})</string>
<key>endCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.separator.dictionary.pair.json</string>
</dict>
</dict>
<key>name</key>
<string>meta.structure.dictionary.value.json</string>
<key>patterns</key>
<array>
<dict>
<key>comment</key>
<string>the JSON object value</string>
<key>include</key>
<string>#value</string>
</dict>
<dict>
<key>match</key>
<string>[^\s,]</string>
<key>name</key>
<string>invalid.illegal.expected-dictionary-separator.json</string>
</dict>
</array>
</dict>
<dict>
<key>match</key>
<string>[^\s\}]</string>
<key>name</key>
<string>invalid.illegal.expected-dictionary-separator.json</string>
</dict>
</array>
</dict>
<key>string</key>
<dict>
<key>begin</key>
<string>"</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.json</string>
</dict>
</dict>
<key>end</key>
<string>"</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.json</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.double.json</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#stringcontent</string>
</dict>
</array>
</dict>
<key>objectkey</key>
<dict>
<key>begin</key>
<string>"</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.support.type.property-name.begin.json</string>
</dict>
</dict>
<key>end</key>
<string>"</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.support.type.property-name.end.json</string>
</dict>
</dict>
<key>name</key>
<string>string.json support.type.property-name.json</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#stringcontent</string>
</dict>
</array>
</dict>
<key>stringcontent</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>(?x) # turn on extended mode
\\ # a literal backslash
(?: # ...followed by...
["\\/bfnrt] # one of these characters
| # ...or...
u # a u
[0-9a-fA-F]{4}) # and four hex digits</string>
<key>name</key>
<string>constant.character.escape.json</string>
</dict>
<dict>
<key>match</key>
<string>\\.</string>
<key>name</key>
<string>invalid.illegal.unrecognized-string-escape.json</string>
</dict>
</array>
</dict>
<key>value</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#constant</string>
</dict>
<dict>
<key>include</key>
<string>#number</string>
</dict>
<dict>
<key>include</key>
<string>#string</string>
</dict>
<dict>
<key>include</key>
<string>#array</string>
</dict>
<dict>
<key>include</key>
<string>#object</string>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
</array>
</dict>
</dict>
<key>scopeName</key>
<string>source.json</string>
<key>uuid</key>
<string>8f97457b-516e-48ce-83c7-08ae12fb327a</string>
</dict>
</plist>
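
The constant.numeric.json pattern above is written in Oniguruma extended mode; collapsed onto one line it is equivalent to the JavaScript regex below. This is an illustrative sanity check, not part of the committed grammar file:

// Minimal sketch: the JSON number pattern from the grammar above, anchored and
// collapsed to a single line. "01" and "1." are rejected, as the grammar intends.
const jsonNumber = /^-?(?:0|[1-9]\d*)(?:\.\d+)?(?:[eE][+-]?\d+)?$/;
for (const sample of ["42", "-3.14", "6.02e23", "01", "1."]) {
  console.log(sample, jsonNumber.test(sample));
}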

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

File diff suppressed because it is too large

View file

@ -0,0 +1,736 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>fileTypes</key>
<array>
<string>toml</string>
</array>
<key>keyEquivalent</key>
<string>^~T</string>
<key>name</key>
<string>TOML</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
<dict>
<key>include</key>
<string>#groups</string>
</dict>
<dict>
<key>include</key>
<string>#key_pair</string>
</dict>
<dict>
<key>include</key>
<string>#invalid</string>
</dict>
</array>
<key>repository</key>
<dict>
<key>comments</key>
<dict>
<key>begin</key>
<string>(^[ \t]+)?(?=#)</string>
<key>beginCaptures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.whitespace.comment.leading.toml</string>
</dict>
</dict>
<key>end</key>
<string>(?!\G)</string>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>#</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.comment.toml</string>
</dict>
</dict>
<key>end</key>
<string>\n</string>
<key>name</key>
<string>comment.line.number-sign.toml</string>
</dict>
</array>
</dict>
<key>groups</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.section.begin.toml</string>
</dict>
<key>2</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>[^\s.]+</string>
<key>name</key>
<string>entity.name.section.toml</string>
</dict>
</array>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.section.begin.toml</string>
</dict>
</dict>
<key>match</key>
<string>^\s*(\[)([^\[\]]*)(\])</string>
<key>name</key>
<string>meta.group.toml</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.section.begin.toml</string>
</dict>
<key>2</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>[^\s.]+</string>
<key>name</key>
<string>entity.name.section.toml</string>
</dict>
</array>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.section.begin.toml</string>
</dict>
</dict>
<key>match</key>
<string>^\s*(\[\[)([^\[\]]*)(\]\])</string>
<key>name</key>
<string>meta.group.double.toml</string>
</dict>
</array>
</dict>
<key>invalid</key>
<dict>
<key>match</key>
<string>\S+(\s*(?=\S))?</string>
<key>name</key>
<string>invalid.illegal.not-allowed-here.toml</string>
</dict>
<key>key_pair</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>([A-Za-z0-9_-]+)\s*(=)\s*</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.key.toml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.separator.key-value.toml</string>
</dict>
</dict>
<key>end</key>
<string>(?&lt;=\S)(?&lt;!=)|$</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#primatives</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>((")(.*?)("))\s*(=)\s*</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.key.toml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.begin.toml</string>
</dict>
<key>3</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\\([btnfr"\\]|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})</string>
<key>name</key>
<string>constant.character.escape.toml</string>
</dict>
<dict>
<key>match</key>
<string>\\[^btnfr"\\]</string>
<key>name</key>
<string>invalid.illegal.escape.toml</string>
</dict>
<dict>
<key>match</key>
<string>"</string>
<key>name</key>
<string>invalid.illegal.not-allowed-here.toml</string>
</dict>
</array>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.end.toml</string>
</dict>
<key>5</key>
<dict>
<key>name</key>
<string>punctuation.separator.key-value.toml</string>
</dict>
</dict>
<key>end</key>
<string>(?&lt;=\S)(?&lt;!=)|$</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#primatives</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>((')([^']*)('))\s*(=)\s*</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.key.toml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.begin.toml</string>
</dict>
<key>4</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.end.toml</string>
</dict>
<key>5</key>
<dict>
<key>name</key>
<string>punctuation.separator.key-value.toml</string>
</dict>
</dict>
<key>end</key>
<string>(?&lt;=\S)(?&lt;!=)|$</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#primatives</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>(?x)
(
(
(?:
[A-Za-z0-9_-]+ # Bare key
| " (?:[^"\\]|\\.)* " # Double quoted key
                 | ' [^']* '               # Single quoted key
)
(?:
\s* \. \s* # Dot
| (?= \s* =) # or look-ahead for equals
)
){2,} # Ensure at least one dot
)
\s*(=)\s*
</string>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>variable.other.key.toml</string>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\.</string>
<key>name</key>
<string>punctuation.separator.variable.toml</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.begin.toml</string>
</dict>
<key>2</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\\([btnfr"\\]|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})</string>
<key>name</key>
<string>constant.character.escape.toml</string>
</dict>
<dict>
<key>match</key>
<string>\\[^btnfr"\\]</string>
<key>name</key>
<string>invalid.illegal.escape.toml</string>
</dict>
</array>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.end.toml</string>
</dict>
</dict>
<key>match</key>
<string>(")((?:[^"\\]|\\.)*)(")</string>
</dict>
<dict>
<key>captures</key>
<dict>
<key>1</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.begin.toml</string>
</dict>
<key>2</key>
<dict>
<key>name</key>
<string>punctuation.definition.variable.end.toml</string>
</dict>
</dict>
<key>match</key>
<string>(')[^']*(')</string>
</dict>
</array>
</dict>
<key>3</key>
<dict>
<key>name</key>
<string>punctuation.separator.key-value.toml</string>
</dict>
</dict>
<key>comment</key>
<string>Dotted key</string>
<key>end</key>
<string>(?&lt;=\S)(?&lt;!=)|$</string>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#primatives</string>
</dict>
</array>
</dict>
</array>
</dict>
<key>primatives</key>
<dict>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>\G"""</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>"{3,5}</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.triple.double.toml</string>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\\([btnfr"\\]|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})</string>
<key>name</key>
<string>constant.character.escape.toml</string>
</dict>
<dict>
<key>match</key>
<string>\\[^btnfr"\\\n]</string>
<key>name</key>
<string>invalid.illegal.escape.toml</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>\G"</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>"</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.double.toml</string>
<key>patterns</key>
<array>
<dict>
<key>match</key>
<string>\\([btnfr"\\]|u[0-9A-Fa-f]{4}|U[0-9A-Fa-f]{8})</string>
<key>name</key>
<string>constant.character.escape.toml</string>
</dict>
<dict>
<key>match</key>
<string>\\[^btnfr"\\]</string>
<key>name</key>
<string>invalid.illegal.escape.toml</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>\G'''</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>'{3,5}</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.triple.single.toml</string>
</dict>
<dict>
<key>begin</key>
<string>\G'</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>'</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.string.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>string.quoted.single.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G(?x)
[0-9]{4}
-
(0[1-9]|1[012])
-
(?!00|3[2-9])[0-3][0-9]
(
[Tt ]
(?!2[5-9])[0-2][0-9]
:
[0-5][0-9]
:
(?!6[1-9])[0-6][0-9]
(\.[0-9]+)?
(
Z
| [+-](?!2[5-9])[0-2][0-9]:[0-5][0-9]
)?
)?
</string>
<key>name</key>
<string>constant.other.date.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G(?x)
(?!2[5-9])[0-2][0-9]
:
[0-5][0-9]
:
(?!6[1-9])[0-6][0-9]
(\.[0-9]+)?
</string>
<key>name</key>
<string>constant.other.time.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G(true|false)</string>
<key>name</key>
<string>constant.language.boolean.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G0x\h(\h|_\h)*</string>
<key>name</key>
<string>constant.numeric.hex.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G0o[0-7]([0-7]|_[0-7])*</string>
<key>name</key>
<string>constant.numeric.octal.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G0b[01]([01]|_[01])*</string>
<key>name</key>
<string>constant.numeric.binary.toml</string>
</dict>
<dict>
<key>match</key>
<string>\G[+-]?(inf|nan)</string>
<key>name</key>
<string>constant.numeric.toml</string>
</dict>
<dict>
<key>match</key>
<string>(?x)
\G
(
[+-]?
(
0
| ([1-9](([0-9]|_[0-9])+)?)
)
)
(?=[.eE])
(
\.
([0-9](([0-9]|_[0-9])+)?)
)?
(
[eE]
([+-]?[0-9](([0-9]|_[0-9])+)?)
)?
</string>
<key>name</key>
<string>constant.numeric.float.toml</string>
</dict>
<dict>
<key>match</key>
<string>(?x)
\G
(
[+-]?
(
0
| ([1-9](([0-9]|_[0-9])+)?)
)
)
</string>
<key>name</key>
<string>constant.numeric.integer.toml</string>
</dict>
<dict>
<key>begin</key>
<string>\G\[</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.array.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>\]</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.array.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>meta.array.toml</string>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>(?=["'']|[+-]?[0-9]|[+-]?(inf|nan)|true|false|\[|\{)</string>
<key>end</key>
<string>,|(?=])</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.separator.array.toml</string>
</dict>
</dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#primatives</string>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
<dict>
<key>include</key>
<string>#invalid</string>
</dict>
</array>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
<dict>
<key>include</key>
<string>#invalid</string>
</dict>
</array>
</dict>
<dict>
<key>begin</key>
<string>\G\{</string>
<key>beginCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.inline-table.begin.toml</string>
</dict>
</dict>
<key>end</key>
<string>\}</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.definition.inline-table.end.toml</string>
</dict>
</dict>
<key>name</key>
<string>meta.inline-table.toml</string>
<key>patterns</key>
<array>
<dict>
<key>begin</key>
<string>(?=\S)</string>
<key>end</key>
<string>,|(?=})</string>
<key>endCaptures</key>
<dict>
<key>0</key>
<dict>
<key>name</key>
<string>punctuation.separator.inline-table.toml</string>
</dict>
</dict>
<key>patterns</key>
<array>
<dict>
<key>include</key>
<string>#key_pair</string>
</dict>
</array>
</dict>
<dict>
<key>include</key>
<string>#comments</string>
</dict>
</array>
</dict>
</array>
</dict>
</dict>
<key>scopeName</key>
<string>source.toml</string>
<key>uuid</key>
<string>7DEF2EDB-5BB7-4DD2-9E78-3541A26B7923</string>
</dict>
</plist>
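
All three grammars added in this diff (source.dosbatch, source.json, source.toml) are plain TextMate grammars, so any TextMate-compatible tokenizer can load them. A minimal sketch of wiring them up with vscode-textmate and vscode-oniguruma follows; the file paths and the scope-to-file mapping are placeholders for illustration, not the repository's actual layout:

// Minimal sketch: tokenize a line of TOML with the grammar added above.
// Paths are placeholders; adjust to wherever the .tmLanguage files live.
import { promises as fs } from "node:fs";
import * as oniguruma from "vscode-oniguruma";
import * as vsctm from "vscode-textmate";

const wasm = await fs.readFile("node_modules/vscode-oniguruma/release/onig.wasm");
await oniguruma.loadWASM(wasm.buffer);

const registry = new vsctm.Registry({
  onigLib: Promise.resolve({
    createOnigScanner: (patterns) => new oniguruma.OnigScanner(patterns),
    createOnigString: (s) => new oniguruma.OnigString(s),
  }),
  loadGrammar: async (scopeName) => {
    const files: Record<string, string> = {
      "source.dosbatch": "grammars/batch.tmLanguage", // placeholder path
      "source.json": "grammars/json.tmLanguage",      // placeholder path
      "source.toml": "grammars/toml.tmLanguage",      // placeholder path
    };
    const file = files[scopeName];
    if (!file) return null;
    return vsctm.parseRawGrammar(await fs.readFile(file, "utf8"), file);
  },
});

const grammar = await registry.loadGrammar("source.toml");
if (!grammar) throw new Error("grammar not found");

let state = vsctm.INITIAL;
for (const line of ['title = "TOML Example"', "ports = [ 8001, 8002 ]"]) {
  const result = grammar.tokenizeLine(line, state);
  state = result.ruleStack;
  for (const t of result.tokens) {
    console.log(JSON.stringify(line.slice(t.startIndex, t.endIndex)), t.scopes.join(" "));
  }
}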

Some files were not shown because too many files have changed in this diff