add "source of truth" server to replace the old cache url

This commit is contained in:
chloe caruso 2025-07-08 23:10:41 -07:00
parent 7ef08faaec
commit caf4da47e0
10 changed files with 140 additions and 1049 deletions

View file

@ -8,20 +8,13 @@
// to track needed client scripts without introducing patches to the engine.
export type Addons = Record<string | symbol, unknown>;
export function ssrSync(node: Node): Result;
export function ssrSync<A extends Addons>(node: Node, addon: A): Result<A>;
export function ssrSync(node: Node, addon: Addons = {}) {
export function ssrSync<A extends Addons>(node: Node, addon: A = {} as A) {
const r = initRender(false, addon);
const resolved = resolveNode(r, node);
return { text: renderNode(resolved), addon };
}
export function ssrAsync(node: Node): Promise<Result>;
export function ssrAsync<A extends Addons>(
node: Node,
addon: A,
): Promise<Result<A>>;
export function ssrAsync(node: Node, addon: Addons = {}) {
export function ssrAsync<A extends Addons>(node: Node, addon: A = {} as A) {
const r = initRender(true, addon);
const resolved = resolveNode(r, node);
if (r.async === 0) {
@ -142,13 +135,13 @@ export function resolveNode(r: Render, node: unknown): ResolvedNode {
return resolveNode(r, tag(props));
} catch (e) {
const { 4: src } = node;
if (e && typeof e === "object") {
}
if (e && typeof e === "object") (e as { src?: string }).src = src;
throw e;
} finally {
currentRender = null;
}
}
if (typeof tag !== "string") throw new Error("Unexpected " + typeof type);
if (typeof tag !== "string") throw new Error("Unexpected " + inspect(type));
const children = props?.children;
if (children) return [kElement, tag, props, resolveNode(r, children)];
return node;

View file

@ -28,6 +28,10 @@ export function getRender() {
});
}
/** True when called from inside a sitegen render pass. */
export function inRender() {
  const current = ssr.getCurrentRender();
  return "sitegen" in current;
}
/** Add a client-side script to the page. */
export function addScript(id: ScriptId | { value: ScriptId }) {
getRender().scripts.add(typeof id === "string" ? id : id.value);

18
run.js
View file

@ -2,6 +2,7 @@
// all available plugins. Usage: "node run <script>"
import * as util from "node:util";
import * as zlib from "node:zlib";
import * as url from "node:url";
import process from "node:process";
if (!zlib.zstdCompress) {
@ -62,7 +63,22 @@ if (process.argv[1].startsWith(import.meta.filename.slice(0, -".js".length))) {
}
process.argv = [process.argv[0], ...process.argv.slice(2)];
try {
  // Removed a stale pre-change line (`await hot.load(found).main?.();`)
  // that the diff rendering left in; it would have loaded and executed
  // the module a second time.
  const mod = await hot.load(found);
  if (mod.main) mod.main();
  else if (mod.default?.fetch) {
    // Modules that export a Hono-style app ({ fetch }) are served over HTTP.
    const protocol = "http";
    // NOTE(review): hot.load is awaited above but not here — confirm it is
    // synchronous for already-cached modules.
    const { serve } = hot.load("@hono/node-server");
    serve({
      fetch: mod.default.fetch,
    }, ({ address, port }) => {
      // Node reports the IPv6 any-address as "::"; print a connectable host.
      if (address === "::") address = "::1";
      console.info(url.format({
        protocol,
        hostname: address,
        port,
      }));
    });
  }
} catch (e) {
  console.error(util.inspect(e));
}

View file

@ -17,16 +17,6 @@ app.use(assets.middleware);
// Handlers
app.notFound(assets.notFound);
if (process.argv.includes("--development")) {
app.onError((err, c) => {
if (err instanceof HTTPException) {
// Get the custom response
return err.getResponse();
}
return c.text(util.inspect(err), 500);
});
}
export default app;

View file

@ -43,13 +43,12 @@ app.post("/file/cotyledon", async (c) => {
app.get("/file/*", async (c, next) => {
const ua = c.req.header("User-Agent")?.toLowerCase() ?? "";
const lofi = ua.includes("msie") || ua.includes("rv:") || false;
const lofi = ua.includes("msie") || false;
// Discord ignores 'robots.txt' which violates the license agreement.
if (ua.includes("discordbot")) {
return next();
}
console.log(ua, lofi);
let rawFilePath = c.req.path.slice(5) || "/";
if (rawFilePath.endsWith("$partial")) {

View file

@ -1,8 +0,0 @@
// Debug script (since deleted): selects every row from media_files and
// logs the hydrated MediaFile instances to stdout.
export function main() {
// `meows` holds all MediaFile rows — clearly a throwaway inspection helper.
const meows = MediaFile.db.prepare(`
select * from media_files;
`).as(MediaFile).array();
console.log(meows);
}
import { MediaFile } from "@/file-viewer/models/MediaFile.ts";

File diff suppressed because it is too large Load diff

View file

@ -71,6 +71,7 @@ export default function MediaList({
hasCotyledonCookie: boolean;
}) {
addScript("./clofi.client.ts");
addScript("@/tags/hls-polyfill.client.ts");
const dirs: MediaFile[] = [];
let dir: MediaFile | null = file;
@ -699,7 +700,6 @@ function CodeView({ file }: { file: MediaFile }) {
}
function VideoView({ file }: { file: MediaFile }) {
addScript("@/tags/hls-polyfill.client.ts");
const dimensions = file.parseDimensions() ?? { width: 1920, height: 1080 };
return (
<>

View file

@ -14,5 +14,10 @@ export const siteSections: Section[] = [
// { root: join("fiction/"), pageBase: "/fiction" },
];
export const backends: string[] = [
join("backend.ts"),
join("source-of-truth.ts"),
];
import * as path from "node:path";
import type { Section } from "#sitegen";

107
src/source-of-truth.ts Normal file
View file

@ -0,0 +1,107 @@
// The "source of truth" server is the canonical storage for
// paper clover's files. This is technically needed because
// the VPS she uses can only store about 20gb of content, whereas
// the contents of /file are about 48gb as of writing; only a
// limited amount of data can be cached.
//
// What's great about this system is it also allows scaling the
// website up into multiple servers, if that is ever desired.
// When that happens, mutations to "q+a" will be moved here,
// and the SQLite synchronization mechanism will apply to both
// of those databases.
//
// An alternative to this would have been to use a SMB client,
// but I've read that the systems used for caching don't work
// like the HTTP Cache-Control header, where you can say a file
// is valid for up to a certain amount of time. If we seriously
// need cache busts (paper clover does not), the proper way
// would be to push a message to all VPS nodes instead of
// checking upstream if a file changed every time.
// Hono app is the module's default export so the runner can serve it.
const app = new Hono();
export default app;
// NOTE(review): hard-coded bearer token — presumably a placeholder; should
// be loaded from configuration/environment before real deployment.
const token = "bwaa";
// Root of the NAS volume holding the canonical file store.
const nasRoot = "/Volumes/clover";
// Original files, addressed by MediaFile.path (see the /file/* handler).
const rawFileRoot = path.join(nasRoot, "Published");
// Derived assets, addressed by MediaFile.hash plus an asset name.
const derivedFileRoot = path.join(
nasRoot,
"Documents/Config/clover_file/derived",
);
type Awaitable<T> = T | Promise<T>;
// Re-use file descriptors if the same file is being read twice.
// Values may be pending promises so concurrent opens coalesce.
const fds = new Map<string, Awaitable<{ fd: number; refs: number }>>();
// Serve "/file/<path>[$/<derived asset>]" from the NAS. At most one "$/"
// separator is allowed; with it, the request targets a derived asset under
// the file's hash directory, otherwise the raw published file.
app.get("/file/*", async (c) => {
  const fullQuery = c.req.path.slice("/file".length);
  const [filePath, derivedAsset, ...invalid] = fullQuery.split("$/");
  if (invalid.length > 0) return c.notFound();
  if (filePath.length <= 1) return c.notFound();
  // Non-zero permission bits require the shared bearer token.
  const permissions = FilePermissions.getByPrefix(filePath);
  if (permissions !== 0) {
    if (c.req.header("Authorization") !== token) {
      // 401 so callers can distinguish auth failure from a missing file
      // (previously this returned the error body with a 200 status).
      return c.json({ error: "invalid authorization header" }, 401);
    }
  }
  const file = MediaFile.getByPath(filePath);
  if (!file || file.kind === MediaFileKind.directory) {
    return c.notFound();
  }
  const fullPath = derivedAsset
    ? path.join(derivedFileRoot, file.hash, derivedAsset)
    : path.join(rawFileRoot, file.path);
  let handle: { fd: number; refs: number } | null = null;
  console.log("start", fullPath);
  try {
    // Coalesce opens: concurrent requests for the same path await the
    // same (possibly still pending) map entry and share one descriptor.
    handle = await fds.get(fullPath) ?? null;
    if (!handle) {
      const promise = openFile(fullPath, "r")
        .then((fd) => ({ fd, refs: 0 }))
        .catch((err) => {
          fds.delete(fullPath);
          throw err;
        });
      // BUGFIX: these entries were keyed by `file.path`, but every lookup
      // and delete uses `fullPath`; cache hits never happened and derived
      // assets collided with their raw file under the same key.
      fds.set(fullPath, promise);
      fds.set(fullPath, handle = await promise);
    }
    handle.refs += 1;
  } catch (err) {
    if ((err as { code?: string })?.code === "ENOENT") {
      return c.notFound();
    }
    throw err;
  }
  const nodeStream = fs.createReadStream(fullPath, {
    fd: handle.fd,
    fs: {
      // Close is reference-counted: release the descriptor only once the
      // last concurrent reader of this path finishes.
      close: util.callbackify(async () => {
        ASSERT(handle);
        if ((handle.refs -= 1) <= 0) {
          fds.delete(fullPath);
          await closeFile(handle.fd);
        }
        handle = null;
      }),
      read: fsCallbacks.read,
    },
  });
  c.header("Content-Type", mime.contentTypeFor(fullPath));
  // The database only tracks sizes for raw files, not derived assets.
  if (!derivedAsset) {
    c.header("Content-Length", file.size.toString());
  }
  return c.body(stream.Readable.toWeb(nodeStream) as ReadableStream);
});
// Promisified open/close over the callback fs API; the callback API is kept
// around because the read stream above reuses fsCallbacks.read directly.
const openFile = util.promisify(fsCallbacks.open);
const closeFile = util.promisify(fsCallbacks.close);
import { Hono } from "#hono";
import { MediaFile, MediaFileKind } from "@/file-viewer/models/MediaFile.ts";
import { FilePermissions } from "@/file-viewer/models/FilePermissions.ts";
import * as path from "node:path";
import * as fs from "#sitegen/fs";
import * as fsCallbacks from "node:fs";
import * as util from "node:util";
import * as stream from "node:stream";
import * as mime from "#sitegen/mime";