commit af60d1172fd981344cfb3c119a68d9b88b0a8dbd Author: chloe caruso Date: Fri Jun 6 23:38:02 2025 -0700 i accidentally deleted the repo, but recovered it. i'll start committing it was weird. i pressed delete on a subfolder, i think one of the pages.off folders that i was using. and then, suddenly, nvim on windows 7 decided to delete every file in the directory. they weren't shred off the space time continuum, but just marked deleted. i had to pay $80 to get access to a software that could see them. bleh! just seeing all my work, a little over a week, was pretty heart shattering. but i remembered that long ago, a close friend said i could call them whenever i was feeling sad. i finally took them up on that offer. the first time i've ever called someone for emotional support. but it's ok. i got it back. and the site framework is better than ever. i'm gonna commit and push more often. the repo is private anyways. diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..c57de36 --- /dev/null +++ b/.gitignore @@ -0,0 +1,3 @@ +.clover +node_modules + diff --git a/deno.jsonc b/deno.jsonc new file mode 100644 index 0000000..34331cd --- /dev/null +++ b/deno.jsonc @@ -0,0 +1,10 @@ +{ + "lint": { + "exclude": ["framework/meta"], // OLD + "rules": { + "exclude": [ + "no-explicit-any" // TODO + ] + } + } +} diff --git a/framework/assets.ts b/framework/assets.ts new file mode 100644 index 0000000..5a925f2 --- /dev/null +++ b/framework/assets.ts @@ -0,0 +1,99 @@ +interface Loaded { + map: BuiltAssetMap; + buf: Buffer; +} +let assets: Loaded | null = null; + +export type StaticPageId = string; + +export async function reload() { + const [map, buf] = await Promise.all([ + fs.readFile(".clover/static.json", "utf8"), + fs.readFile(".clover/static.blob"), + ]); + assets = { + map: JSON.parse(map), + buf, + }; +} + +export async function reloadSync() { + const map = fs.readFileSync(".clover/static.json", "utf8"); + const buf = fs.readFileSync(".clover/static.blob"); + assets = { + map: JSON.parse(map), + buf, + }; +} + +export async function assetMiddleware(c: Context, next: Next) { + if (!assets) await reload(); + const asset = assets!.map[c.req.path]; + if (asset) { + return assetInner(c, asset, 200); + } + return next(); +} + +export async function serveAsset( + c: Context, + id: StaticPageId, + status: StatusCode, +) { + assets ?? await reload(); + return assetInner(c, assets!.map[id], status); +} + +export function hasAsset(id: string) { + if (!assets) reloadSync(); + return assets!.map[id] !== undefined; +} + +export function etagMatches(etag: string, ifNoneMatch: string) { + return ifNoneMatch === etag || ifNoneMatch.split(/,\s*/).indexOf(etag) > -1; +} + +function subarrayAsset([start, end]: View) { + return assets!.buf.subarray(start, end); +} + +function assetInner(c: Context, asset: BuiltAsset, status: StatusCode) { + const ifnonematch = c.req.header("If-None-Match"); + if (ifnonematch) { + const etag = asset.headers.ETag; + if (etagMatches(etag, ifnonematch)) { + c.res = new Response(null, { + status: 304, + statusText: "Not Modified", + headers: { + ETag: etag, + }, + }); + return; + } + } + const acceptEncoding = c.req.header("Accept-Encoding") ?? 
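+    // no Accept-Encoding header: fall through to the uncompressed (raw) body below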
""; + let body; + let headers = asset.headers; + if (acceptEncoding.includes("zstd") && asset.zstd) { + body = subarrayAsset(asset.zstd); + headers = { + ...asset.headers, + "Content-Encoding": "zstd", + }; + } else if (acceptEncoding.includes("gzip") && asset.gzip) { + body = subarrayAsset(asset.gzip); + headers = { + ...asset.headers, + "Content-Encoding": "gzip", + }; + } else { + body = subarrayAsset(asset.raw); + } + c.res = new Response(body, { headers, status }); +} + +import * as fs from "./fs.ts"; +import type { Context, Next } from "hono"; +import type { StatusCode } from "hono/utils/http-status"; +import type { BuiltAsset, BuiltAssetMap, View } from "./incremental.ts"; diff --git a/framework/bundle.ts b/framework/bundle.ts new file mode 100644 index 0000000..eef613f --- /dev/null +++ b/framework/bundle.ts @@ -0,0 +1,85 @@ +// This file implements client-side bundling, mostly wrapping esbuild. +import process from "node:process"; +const plugins: esbuild.Plugin[] = [ + // There are currently no plugins needed by 'paperclover.net' +]; + +export async function bundleClientJavaScript( + referencedScripts: string[], + extraPublicScripts: string[], + incr: Incremental, + dev: boolean = false, +) { + const entryPoints = [ + ...new Set([ + ...referencedScripts, + ...extraPublicScripts, + ]), + ]; + if (entryPoints.length === 0) return; + const invalidFiles = entryPoints + .filter((file) => !file.match(/\.client\.[tj]sx?/)); + if (invalidFiles.length > 0) { + const cwd = process.cwd(); + throw new Error( + "All client-side scripts should be named like '.client.ts'. Exceptions: " + + invalidFiles.map((x) => path.join(cwd, x)).join(","), + ); + } + + const bundle = await esbuild.build({ + bundle: true, + chunkNames: "/js/c.[hash]", + entryNames: "/js/[name]", + assetNames: "/asset/[hash]", + entryPoints, + format: "esm", + minify: !dev, + outdir: "/out!", + plugins, + splitting: true, + write: false, + }); + if (bundle.errors.length || bundle.warnings.length) { + throw new AggregateError( + bundle.errors.concat(bundle.warnings), + "JS bundle failed", + ); + } + incr.invalidate("bundle-script"); + const publicScriptRoutes = extraPublicScripts.map((file) => + path.basename(file).replace(/\.client\.[tj]sx?/, "") + ); + const promises: Promise[] = []; + // TODO: add a shared build hash to entrypoints, derived from all the chunk hashes. + for (const file of bundle.outputFiles) { + let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/"); + const text = file.text; + // Register non-chunks as script entries. + const chunk = route.startsWith("/js/c."); + if (!chunk) { + route = route.replace(".client.js", ".js"); + incr.put({ + srcId: "bundle-script", + type: "script", + key: route.slice("/js/".length, -".js".length), + value: text, + }); + } + if (chunk || publicScriptRoutes.includes(route)) { + promises.push(incr.putAsset({ + srcId: "bundle-script", + key: route, + body: text, + })); + } + } + + if (promises.length > 0) { + await Promise.all(promises); + } +} + +import * as path from "node:path"; +import * as esbuild from "esbuild"; +import { Incremental } from "./incremental.ts"; diff --git a/framework/css.ts b/framework/css.ts new file mode 100644 index 0000000..682f6c7 --- /dev/null +++ b/framework/css.ts @@ -0,0 +1,87 @@ +export interface Theme { + bg: string; + fg: string; + primary?: string; + h1?: string; +} + +export function stringifyTheme(theme: Theme) { + return [ + ":root {", + "--bg: " + theme.bg + ";", + "--fg: " + theme.fg + ";", + theme.primary ? 
"--primary: " + theme.primary + ";" : null, + "}", + theme.h1 ? "h1 { color: " + theme.h1 + "}" : null, + ].filter(Boolean).join("\n"); +} + +export function preprocess(css: string, theme: Theme): string { + const keys = Object.keys(theme); + const regex = new RegExp( + `([{};\\n][^{};\\n]*var\\(--(${keys.join("|")})\\).*?(?=[;{}\\n]))`, + "gs", + ); + const regex2 = new RegExp(`var\\(--(${keys.join("|")})\\)`); + return css.replace( + regex, + (_, line) => + line.replace(regex2, (_: string, varName: string) => theme[varName]) + + ";" + line.slice(1), + ); +} + +export async function bundleCssFiles( + cssImports: string[], + theme: Theme, + dev: boolean = false, +): Promise { + const plugin = { + name: "clover", + setup(b) { + b.onResolve( + { filter: /^\$input\$$/ }, + () => ({ path: ".", namespace: "input" }), + ); + b.onLoad( + { filter: /./, namespace: "input" }, + () => ({ + loader: "css", + contents: + cssImports.map((path) => `@import url(${JSON.stringify(path)});`) + .join("\n") + stringifyTheme(theme), + resolveDir: ".", + }), + ); + b.onLoad( + { filter: /\.css$/ }, + async ({ path: file }) => ({ + loader: "css", + contents: preprocess(await fs.readFile(file, "utf-8"), theme), + }), + ); + }, + } satisfies Plugin; + const build = await esbuild.build({ + bundle: true, + entryPoints: ["$input$"], + write: false, + external: ["*.woff2"], + target: ["ie11"], + plugins: [plugin], + minify: !dev, + }); + const { errors, warnings, outputFiles } = build; + if (errors.length > 0) { + throw new AggregateError(errors, "CSS Build Failed"); + } + if (warnings.length > 0) { + throw new AggregateError(warnings, "CSS Build Failed"); + } + if (outputFiles.length > 1) throw new Error("Too many output files"); + return outputFiles[0].text; +} + +import type { Plugin } from "esbuild"; +import * as esbuild from "esbuild"; +import * as fs from "./fs.ts"; diff --git a/framework/fs.ts b/framework/fs.ts new file mode 100644 index 0000000..251e7cc --- /dev/null +++ b/framework/fs.ts @@ -0,0 +1,10 @@ +// default +function isScope(node, parent) { + if ((0, _index.isBlockStatement)(node) && ((0, _index.isFunction)(parent) || (0, _index.isCatchClause)(parent))) { + return false; + } + if ((0, _index.isPattern)(node) && ((0, _index.isFunction)(parent) || (0, _index.isCatchClause)(parent))) { + return true; + } + return (0, _index.isScopable)(node); +} diff --git a/framework/hot.ts b/framework/hot.ts new file mode 100644 index 0000000..faa2e87 --- /dev/null +++ b/framework/hot.ts @@ -0,0 +1,240 @@ +// This implements the ability to use TS, TSX, and more plugins +// in Node.js. It cannot be built on the ES module loader, +// because there is no exposed way to replace modules when +// needed (see nodejs#49442). +// +// It also allows using a simple compile cache, which is used by +// the site generator to determine when code changes. +export const projectRoot = path.resolve(import.meta.dirname, "../"); +export const projectSrc = path.resolve(projectRoot, "src"); + +// Create a project-relative require. For convenience, it is generic-typed. 
+export const load = createRequire( + pathToFileURL(path.join(projectRoot, "run.js")).toString(), +) as { + (id: string): T; + extensions: NodeJS.Dict<(mod: NodeJS.Module, file: string) => unknown>; + cache: NodeJS.Dict; + resolve: (id: string, o?: { paths: string[] }) => string; +}; +export const { cache } = load; + +// Register extensions by overwriting `require.extensions` +const require = load; +const exts = require.extensions; +exts[".ts"] = loadEsbuild; +exts[".tsx"] = loadEsbuild; +exts[".jsx"] = loadEsbuild; +exts[".marko"] = loadMarko; +exts[".mdx"] = loadMdx; +exts[".css"] = loadCss; + +// Intercept all module load calls to track CSS imports + file times. +export interface FileStat { + cssImportsRecursive: string[] | null; + lastModified: number; + imports: string[]; + /* Used by 'incremental.ts' */ + srcIds: string[]; +} +let fsGraph = new Map(); +export function setFsGraph(g: Map) { + if (fsGraph.size > 0) { + throw new Error("Cannot restore fsGraph when it has been written into"); + } + fsGraph = g; +} +export function getFsGraph() { + return fsGraph; +} + +function shouldTrackPath(filename: string) { + return !filename.includes("node_modules") && + !filename.includes(import.meta.dirname); +} + +const Module = load("node:module"); +const ModulePrototypeUnderscoreCompile = Module.prototype._compile; +Module.prototype._compile = function ( + content: string, + filename: string, + format: "module" | "commonjs", +) { + fs.writeMkdirSync( + ".clover/debug-transpilation/" + + path.relative(projectRoot, filename).replaceAll("\\", "/").replaceAll( + "../", + "_/", + ).replaceAll("/", "."), + content, + ); + const result = ModulePrototypeUnderscoreCompile.call( + this, + content, + filename, + format, + ); + const stat = fs.statSync(filename); + if (shouldTrackPath(filename)) { + const cssImportsMaybe: string[] = []; + const imports: string[] = []; + for (const { filename: file } of this.children) { + const relative = path.relative(projectRoot, file); + if (file.endsWith(".css")) cssImportsMaybe.push(relative); + else { + const child = fsGraph.get(relative); + if (!child) continue; + const { cssImportsRecursive } = child; + if (cssImportsRecursive) cssImportsMaybe.push(...cssImportsRecursive); + imports.push(relative); + } + } + const relative = path.relative(projectRoot, filename); + fsGraph.set(relative, { + cssImportsRecursive: cssImportsMaybe.length > 0 + ? Array.from(new Set(cssImportsMaybe)) + : null, + imports, + lastModified: stat.mtimeMs, + srcIds: [], + }); + } + return result; +}; + +// Implement @/ prefix +const ModuleUnderscoreResolveFilename = Module._resolveFilename; +Module._resolveFilename = (...args) => { + if (args[0].startsWith("@/")) { + const replacedPath = "." 
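+      // turns "@/foo" into "./foo", which is then resolved against projectSrc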
+ args[0].slice(1); + try { + return require.resolve(replacedPath, { paths: [projectSrc] }); + } catch (err: any) { + if (err.code === "MODULE_NOT_FOUND" && err.requireStack.length <= 1) { + err.message.replace(replacedPath, args[0]); + } + } + } + return ModuleUnderscoreResolveFilename(...args); +}; + +function loadEsbuild(module: NodeJS.Module, filepath: string) { + let src = fs.readFileSync(filepath, "utf8"); + return loadEsbuildCode(module, filepath, src); +} + +function loadEsbuildCode(module: NodeJS.Module, filepath: string, src: string) { + if (filepath === import.meta.filename) { + module.exports = self; + return; + } + + let loader: any = "tsx"; + if (filepath.endsWith(".ts")) loader = "ts"; + else if (filepath.endsWith(".jsx")) loader = "jsx"; + else if (filepath.endsWith(".js")) loader = "js"; + if (src.includes("import.meta")) { + src = ` + import.meta.url = ${JSON.stringify(pathToFileURL(filepath).toString())}; + import.meta.dirname = ${JSON.stringify(path.dirname(filepath))}; + import.meta.filename = ${JSON.stringify(filepath)}; + ` + src; + } + src = esbuild.transformSync(src, { + loader, + format: "cjs", + target: "esnext", + jsx: "automatic", + jsxImportSource: "#ssr", + }).code; + return module._compile(src, filepath, "commonjs"); +} + +function loadMarko(module: NodeJS.Module, filepath: string) { + let src = fs.readFileSync(filepath, "utf8"); + // A non-standard thing here is Clover Sitegen implements + // its own client side scripting stuff, so it overrides + // bare client import statements to it's own usage. + if (src.match(/^\s*client\s+import\s+["']/m)) { + src = src.replace( + /^\s*client\s+import\s+("[^"]+|'[^']+)[^\n]+/m, + "", + ) + '\nimport { Script as CloverScriptInclude } from "#sitegen";'; + } + + src = marko.compileSync(filepath, {}).code; + src = src.replace("marko/debug/html", "#ssr/marko"); + return loadEsbuildCode(module, filepath, src); +} + +function loadMdx(module: NodeJS.Module, filepath: string) { + const input = fs.readFileSync(filepath); + const out = mdx.compileSync(input, { jsxImportSource: "#ssr" }).value; + const src = typeof out === "string" ? out : Buffer.from(out).toString("utf8"); + return loadEsbuildCode(module, filepath, src); +} + +function loadCss(module: NodeJS.Module, filepath: string) { + module.exports = {}; +} + +export function reloadRecursive(filepath: string) { + filepath = path.resolve(filepath); + const existing = cache[filepath]; + if (existing) deleteRecursive(filepath, existing); + fsGraph.clear(); + return require(filepath); +} + +function deleteRecursive(id: string, module: any) { + if (id.includes(path.sep + "node_modules" + path.sep)) { + return; + } + delete cache[id]; + for (const child of module.children) { + if (child.filename.includes("/engine/")) return; + const existing = cache[child.filename]; + if (existing === child) deleteRecursive(child.filename, existing); + } +} + +export function getCssImports(filepath: string) { + filepath = path.resolve(filepath); + if (!require.cache[filepath]) throw new Error(filepath + " was never loaded"); + return fsGraph.get(path.relative(projectRoot, filepath)) + ?.cssImportsRecursive ?? 
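+    // missing graph entry, or no CSS anywhere in the import graph: report none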
[]; +} + +export function resolveFrom(src: string, dest: string) { + try { + return createRequire(src).resolve(dest); + } catch (err: any) { + if (err.code === "MODULE_NOT_FOUND" && err.requireStack.length <= 1) { + err.message = err.message.split("\n")[0] + " from '" + src + "'"; + } + throw err; + } +} + +declare global { + namespace NodeJS { + interface Module { + _compile( + this: NodeJS.Module, + content: string, + filepath: string, + format: "module" | "commonjs", + ): unknown; + } + } +} + +import * as fs from "./fs.ts"; +import * as path from "node:path"; +import { pathToFileURL } from "node:url"; +import * as esbuild from "esbuild"; +import * as marko from "@marko/compiler"; +import { createRequire } from "node:module"; +import * as mdx from "@mdx-js/mdx"; +import * as self from "./hot.ts"; +import { Buffer } from "node:buffer"; diff --git a/framework/incremental.ts b/framework/incremental.ts new file mode 100644 index 0000000..12bbc3e --- /dev/null +++ b/framework/incremental.ts @@ -0,0 +1,265 @@ +// `Incremental` contains multiple maps for the different parts of a site +// build, and tracks reused items across builds. It also handles emitting and +// updating the built site. This structure is self contained and serializable. +// +// Tracking is simple: Files map to one or more 'source IDs', which map to one +// or more 'artifact'. This two layer approach allows many files (say a page + +// all its imports) to map to the build of a page, which produces an HTML file +// plus a list of scripts. + +import { Buffer } from "node:buffer"; +interface ArtifactMap { + asset: Asset; + script: string; +} +type AllArtifactMaps = { + [K in keyof ArtifactMap]: Map; +}; +type ArtifactType = keyof ArtifactMap; + +interface Asset { + buffer: Buffer; + headers: Record; + hash: string; +} + +export interface PutBase { + srcTag?: string; // deprecated + srcId: string; + key: string; +} + +export interface Put extends PutBase { + type: T; + value: ArtifactMap[T]; +} + +export interface Output { + type: ArtifactType; + key: string; +} + +const gzip = util.promisify(zlib.gzip); +const zstd = util.promisify(zlib.zstdCompress); + +export class Incremental { + /** The generated artifacts */ + out: AllArtifactMaps = { + asset: new Map(), + script: new Map(), + }; + /** Compressed resources */ + compress = new Map(); + compressQueue = new Queue({ + name: "Compress", + maxJobs: 5, + fn: this.compressImpl.bind(this), + passive: true, + getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`, + }); + /** Tracking filesystem entries to `srcId` */ + files = new Map(); + srcIds = new Map(); + + static fromSerialized() { + } + serialize() { + const writer = new BufferWriter(); + + const asset = Array.from( + this.out.asset, + ([key, { buffer, hash, headers }]) => { + const raw = writer.write(buffer, hash); + const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {}; + const gzip = gzipBuf ? writer.write(gzipBuf, hash) : null; + const zstd = zstdBuf ? 
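+          // a null view means no zstd variant was produced for this asset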
writer.write(zstdBuf, hash) : null; + return [key, { + raw, + gzip, + zstd, + hash, + headers, + }]; + }, + ); + const script = Array.from(this.out.script); + + const meta = Buffer.from( + JSON.stringify({ + asset, + script, + }), + "utf-8", + ); + + const lengthBuffer = Buffer.alloc(4); + lengthBuffer.writeUInt32LE(meta.byteLength, 0); + + return Buffer.concat([meta, lengthBuffer, ...writer.buffers]); + } + + serializeToDisk(file = ".clover/incr.state") { + const buffer = this.serialize(); + fs.writeFileSync(file, buffer); + } + + put({ + srcId, + type, + key, + value, + }: Put) { + this.out[type].set(key, value); + } + + async putAsset(info: PutAsset) { + const { body, headers, key } = info; + const buffer = Buffer.isBuffer(body) ? body : Buffer.from(body); + const hash = Buffer.from(await crypto.subtle.digest("sha-1", buffer)) + .toString("hex"); + const value: Asset = { + buffer, + headers: { + "Content-Type": headers?.["Content-Type"] ?? mime.contentTypeFor(key), + "ETag": JSON.stringify(hash), + ...headers, + }, + hash, + }; + if (!this.compress.has(hash)) { + const label = info.key; + this.compress.set(hash, { + zstd: undefined, + gzip: undefined, + }); + await Promise.all([ + this.compressQueue.add({ label, buffer, algo: "zstd", hash }), + this.compressQueue.add({ label, buffer, algo: "gzip", hash }), + ]); + } + return this.put({ ...info, type: "asset", value }); + } + + async compressImpl({ algo, buffer, hash }: CompressJob) { + let out; + switch (algo) { + case "zstd": + out = await zstd(buffer); + break; + case "gzip": + out = await gzip(buffer, { level: 9 }); + break; + } + let entry = this.compress.get(hash); + if (!entry) { + this.compress.set( + hash, + entry = { + zstd: undefined, + gzip: undefined, + }, + ); + } + entry![algo] = out; + } + + invalidate(srcId: string) { + } + + async wait() { + await this.compressQueue.done({ method: "stop" }); + } + async flush() { + const writer = new BufferWriter(); + const asset = Object.fromEntries( + Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => { + const raw = writer.write(buffer, hash); + const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {}; + const gzip = gzipBuf ? writer.write(gzipBuf, hash) : null; + const zstd = zstdBuf ? writer.write(zstdBuf, hash) : null; + return [key, { + raw, + gzip, + zstd, + headers, + }]; + }), + ); + await Promise.all([ + fs.writeFile(".clover/static.json", JSON.stringify(asset)), + fs.writeFile(".clover/static.blob", writer.get()), + ]); + } +} + +export interface PutAsset extends PutBase { + body: string | Buffer; + headers?: Record; +} + +export interface Compressed { + gzip?: Buffer; + zstd?: Buffer; +} + +export interface CompressJob { + algo: "zstd" | "gzip"; + buffer: Buffer; + label: string; + hash: string; +} + +class BufferWriter { + size = 0; + seen = new Map(); + buffers: Buffer[] = []; + + write(buffer: Buffer, hash: string): View { + let view = this.seen.get(hash); + if (view) return view; + view = [this.size, this.size += buffer.byteLength]; + this.seen.set(hash, view); + this.buffers.push(buffer); + return view; + } + + get() { + return Buffer.concat(this.buffers); + } +} + +export type View = [start: number, end: number]; + +// Alongside this type is a byte buffer, containing all the assets. 
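+// Each View is a [start, end) byte range into '.clover/static.blob';
+// 'framework/assets.ts' slices response bodies out of that blob with subarray().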
+export interface BuiltAssetMap { + [route: string]: BuiltAsset; +} +export interface BuiltAsset { + raw: View; + gzip: View; + zstd: View; + headers: Record; +} + +export interface SerializedMeta { + asset: Array<[route: string, data: { + raw: View; + gzip: View | null; + zstd: View | null; + hash: string; + headers: Record; + }]>; + script: [key: string, value: string][]; +} + +function never(): never { + throw new Error("Impossible"); +} + +import * as path from "node:path"; +import * as fs from "./fs.ts"; +import * as zlib from "node:zlib"; +import * as util from "node:util"; +import { Queue } from "./queue.ts"; +import * as hot from "./hot.ts"; +import * as mime from "./mime.ts"; diff --git a/framework/meta/index.ts b/framework/meta/index.ts new file mode 100644 index 0000000..ffe86b7 --- /dev/null +++ b/framework/meta/index.ts @@ -0,0 +1,13 @@ +import { resolveMetadata } from "./merge"; +import { renderMetadata } from "./render"; +import { Metadata } from "./types"; + +export * from "./types"; +export * from "./merge"; +export * from "./render"; + +export function resolveAndRenderMetadata( + ...metadata: [Metadata, ...Metadata[]] +) { + return renderMetadata(resolveMetadata(...metadata)); +} diff --git a/framework/meta/merge.ts b/framework/meta/merge.ts new file mode 100644 index 0000000..0cbfc75 --- /dev/null +++ b/framework/meta/merge.ts @@ -0,0 +1,154 @@ +import { createDefaultMetadata } from "./nextjs/default-metadata"; +import { resolveAsArrayOrUndefined } from "./nextjs/generate/utils"; +import { + resolveAlternates, + resolveAppleWebApp, + resolveAppLinks, + resolveRobots, + resolveThemeColor, + resolveVerification, + resolveViewport, +} from "./nextjs/resolvers/resolve-basics"; +import { resolveIcons } from "./nextjs/resolvers/resolve-icons"; +import { + resolveOpenGraph, + resolveTwitter, +} from "./nextjs/resolvers/resolve-opengraph"; +import { resolveTitle } from "./nextjs/resolvers/resolve-title"; +import type { + Metadata, + ResolvedMetadata, +} from "./nextjs/types/metadata-interface"; + +type MetadataAccumulationOptions = { + pathname: string; +}; + +// Merge the source metadata into the resolved target metadata. +function merge( + target: ResolvedMetadata, + source: Metadata | null, + titleTemplates: { + title?: string | null; + twitter?: string | null; + openGraph?: string | null; + } = {}, +) { + const metadataBase = source?.metadataBase || target.metadataBase; + for (const key_ in source) { + const key = key_ as keyof Metadata; + + switch (key) { + case "title": { + target.title = resolveTitle(source.title, titleTemplates.title); + break; + } + case "alternates": { + target.alternates = resolveAlternates(source.alternates, metadataBase, { + pathname: (source as any)._pathname ?? 
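+            // pages that never set _pathname fall back to the site root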
"/", + }); + break; + } + case "openGraph": { + target.openGraph = resolveOpenGraph(source.openGraph, metadataBase); + if (target.openGraph) { + target.openGraph.title = resolveTitle( + target.openGraph.title, + titleTemplates.openGraph, + ); + } + break; + } + case "twitter": { + target.twitter = resolveTwitter(source.twitter, metadataBase); + if (target.twitter) { + target.twitter.title = resolveTitle( + target.twitter.title, + titleTemplates.twitter, + ); + } + break; + } + case "verification": + target.verification = resolveVerification(source.verification); + break; + case "viewport": { + target.viewport = resolveViewport(source.viewport); + break; + } + case "icons": { + target.icons = resolveIcons(source.icons); + break; + } + case "appleWebApp": + target.appleWebApp = resolveAppleWebApp(source.appleWebApp); + break; + case "appLinks": + target.appLinks = resolveAppLinks(source.appLinks); + break; + case "robots": { + target.robots = resolveRobots(source.robots); + break; + } + case "themeColor": { + target.themeColor = resolveThemeColor(source.themeColor); + break; + } + case "archives": + case "assets": + case "bookmarks": + case "keywords": + case "authors": { + // FIXME: type inferring + // @ts-ignore + target[key] = resolveAsArrayOrUndefined(source[key]) || null; + break; + } + // directly assign fields that fallback to null + case "applicationName": + case "description": + case "generator": + case "creator": + case "publisher": + case "category": + case "classification": + case "referrer": + case "colorScheme": + case "itunes": + case "formatDetection": + case "manifest": + // @ts-ignore TODO: support inferring + target[key] = source[key] || null; + break; + case "other": + target.other = Object.assign({}, target.other, source.other); + break; + case "metadataBase": + target.metadataBase = metadataBase; + break; + default: + break; + } + } + + return target; +} + +export interface MetadataWithPathname extends Metadata { + /** Set by framework author to the pathname of the page defining this metadata. 
*/ + _pathname?: string; +} + +export function resolveMetadata( + ...metadata: [MetadataWithPathname, ...MetadataWithPathname[]] +) { + const base = createDefaultMetadata(); + for (const item of metadata) { + merge(base, item, { + title: base.title?.template, + twitter: base.twitter?.title?.template, + openGraph: base.openGraph?.title?.template, + }); + } + return base; +} diff --git a/framework/meta/nextjs/constants.ts b/framework/meta/nextjs/constants.ts new file mode 100644 index 0000000..5f9b883 --- /dev/null +++ b/framework/meta/nextjs/constants.ts @@ -0,0 +1,15 @@ +import type { Viewport } from "./types/extra-types"; +import type { Icons } from "./types/metadata-types"; + +export const ViewPortKeys: { [k in keyof Viewport]: string } = { + width: "width", + height: "height", + initialScale: "initial-scale", + minimumScale: "minimum-scale", + maximumScale: "maximum-scale", + viewportFit: "viewport-fit", + userScalable: "user-scalable", + interactiveWidget: "interactive-widget", +} as const; + +export const IconKeys: (keyof Icons)[] = ["icon", "shortcut", "apple", "other"]; diff --git a/framework/meta/nextjs/default-metadata.ts b/framework/meta/nextjs/default-metadata.ts new file mode 100644 index 0000000..9b5a8ae --- /dev/null +++ b/framework/meta/nextjs/default-metadata.ts @@ -0,0 +1,50 @@ +import type { ResolvedMetadata } from "./types/metadata-interface"; +import process from "node:process"; + +export function createDefaultMetadata(): ResolvedMetadata { + const defaultMetadataBase = + process.env.NODE_ENV === "production" && process.env.VERCEL_URL + ? new URL(`https://${process.env.VERCEL_URL}`) + : null; + + return { + viewport: "width=device-width, initial-scale=1", + metadataBase: defaultMetadataBase, + + // Other values are all null + title: null, + description: null, + applicationName: null, + authors: null, + generator: null, + keywords: null, + referrer: null, + themeColor: null, + colorScheme: null, + creator: null, + publisher: null, + robots: null, + manifest: null, + alternates: { + canonical: null, + languages: null, + media: null, + types: null, + }, + icons: null, + openGraph: null, + twitter: null, + verification: {}, + appleWebApp: null, + formatDetection: null, + itunes: null, + abstract: null, + appLinks: null, + archives: null, + assets: null, + bookmarks: null, + category: null, + classification: null, + other: {}, + }; +} diff --git a/framework/meta/nextjs/generate/alternate.tsx b/framework/meta/nextjs/generate/alternate.tsx new file mode 100644 index 0000000..26f694d --- /dev/null +++ b/framework/meta/nextjs/generate/alternate.tsx @@ -0,0 +1,72 @@ +import type { ResolvedMetadata } from "../types/metadata-interface"; + +import React from "react"; +import { AlternateLinkDescriptor } from "../types/alternative-urls-types"; + +function AlternateLink({ + descriptor, + ...props +}: { + descriptor: AlternateLinkDescriptor; +} & React.LinkHTMLAttributes) { + if (!descriptor.url) return null; + return ( + + ); +} + +export function AlternatesMetadata({ + alternates, +}: { + alternates: ResolvedMetadata["alternates"]; +}) { + if (!alternates) return null; + const { canonical, languages, media, types } = alternates; + return ( + <> + {canonical + ? + : null} + {languages + ? Object.entries(languages).map(([locale, descriptors]) => { + return descriptors?.map((descriptor, index) => ( + + )); + }) + : null} + {media + ? Object.entries(media).map(([mediaName, descriptors]) => + descriptors?.map((descriptor, index) => ( + + )) + ) + : null} + {types + ? 
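+        // `types` renders the same way as the languages/media branches above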
Object.entries(types).map(([type, descriptors]) => + descriptors?.map((descriptor, index) => ( + + )) + ) + : null} + + ); +} diff --git a/framework/meta/nextjs/generate/basic.tsx b/framework/meta/nextjs/generate/basic.tsx new file mode 100644 index 0000000..9585d1b --- /dev/null +++ b/framework/meta/nextjs/generate/basic.tsx @@ -0,0 +1,171 @@ +import type { ResolvedMetadata } from "../types/metadata-interface"; + +import React from "react"; +import { Meta, MultiMeta } from "./meta"; + +export function BasicMetadata({ metadata }: { metadata: ResolvedMetadata }) { + return ( + <> + + {metadata.title !== null && metadata.title.absolute + ? {metadata.title.absolute} + : null} + + + {metadata.authors + ? metadata.authors.map((author, index) => ( + + {author.url && } + + + )) + : null} + {metadata.manifest + ? + : null} + + + + {metadata.themeColor + ? metadata.themeColor.map((themeColor, index) => ( + + )) + : null} + + + + + + + + {metadata.archives + ? metadata.archives.map((archive) => ( + + )) + : null} + {metadata.assets + ? metadata.assets.map((asset) => ( + + )) + : null} + {metadata.bookmarks + ? metadata.bookmarks.map((bookmark) => ( + + )) + : null} + + + {metadata.other + ? Object.entries(metadata.other).map(([name, content]) => ( + + )) + : null} + + ); +} + +export function ItunesMeta({ itunes }: { itunes: ResolvedMetadata["itunes"] }) { + if (!itunes) return null; + const { appId, appArgument } = itunes; + let content = `app-id=${appId}`; + if (appArgument) { + content += `, app-argument=${appArgument}`; + } + return ; +} + +const formatDetectionKeys = [ + "telephone", + "date", + "address", + "email", + "url", +] as const; +export function FormatDetectionMeta({ + formatDetection, +}: { + formatDetection: ResolvedMetadata["formatDetection"]; +}) { + if (!formatDetection) return null; + let content = ""; + for (const key of formatDetectionKeys) { + if (key in formatDetection) { + if (content) content += ", "; + content += `${key}=no`; + } + } + return ; +} + +export function AppleWebAppMeta({ + appleWebApp, +}: { + appleWebApp: ResolvedMetadata["appleWebApp"]; +}) { + if (!appleWebApp) return null; + const { capable, title, startupImage, statusBarStyle } = appleWebApp; + + return ( + <> + {capable + ? + : null} + + {startupImage + ? startupImage.map((image, index) => ( + + )) + : null} + {statusBarStyle + ? ( + + ) + : null} + + ); +} + +export function VerificationMeta({ + verification, +}: { + verification: ResolvedMetadata["verification"]; +}) { + if (!verification) return null; + + return ( + <> + + + + + {verification.other + ? 
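+        // any additional providers listed in `other` are emitted alongside the ones above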
Object.entries(verification.other).map(([key, value], index) => ( + + )) + : null} + + ); +} diff --git a/framework/meta/nextjs/generate/icons.tsx b/framework/meta/nextjs/generate/icons.tsx new file mode 100644 index 0000000..af54eb1 --- /dev/null +++ b/framework/meta/nextjs/generate/icons.tsx @@ -0,0 +1,62 @@ +import type { ResolvedMetadata } from "../types/metadata-interface"; +import type { Icon, IconDescriptor } from "../types/metadata-types"; + +import React from "react"; + +function IconDescriptorLink({ icon }: { icon: IconDescriptor }) { + const { url, rel = "icon", ...props } = icon; + + return ; +} + +function IconLink({ rel, icon }: { rel?: string; icon: Icon }) { + if (typeof icon === "object" && !(icon instanceof URL)) { + if (rel) icon.rel = rel; + return ; + } else { + const href = icon.toString(); + return ; + } +} + +export function IconsMetadata({ icons }: { icons: ResolvedMetadata["icons"] }) { + if (!icons) return null; + + const shortcutList = icons.shortcut; + const iconList = icons.icon; + const appleList = icons.apple; + const otherList = icons.other; + + return ( + <> + {shortcutList + ? shortcutList.map((icon, index) => ( + + )) + : null} + {iconList + ? iconList.map((icon, index) => ( +