stuff for file view

commit 4f89374ee0 (parent 71a072b0be)
19 changed files with 1087 additions and 241 deletions

@@ -250,7 +250,7 @@ export async function finalizeServerJavaScript(
   // Replace the magic word
   let text = files[fileWithMagicWord].toString("utf-8");
   text = text.replace(
-    new RegExp(magicWord + "\\[(-?\\d)\\]", "gs"),
+    new RegExp(magicWord + "\\[(-?\\d+)\\]", "gs"),
     (_, i) => {
      i = Number(i);
      // Inline the styling data
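
Note on the regex fix above: without the `+`, the pattern only matches a
single digit, so references like MAGIC[10] would be silently skipped. A
minimal sketch (the magic word here is hypothetical):

  const magicWord = "MAGIC";
  const narrow = new RegExp(magicWord + "\\[(-?\\d)\\]", "gs");
  const fixed = new RegExp(magicWord + "\\[(-?\\d+)\\]", "gs");
  "MAGIC[12]".replace(narrow, (_, i) => `got ${i}`); // "MAGIC[12]" (no match)
  "MAGIC[12]".replace(fixed, (_, i) => `got ${i}`);  // "got 12"
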
@@ -34,6 +34,12 @@ export class Queue<T, R> {
     this.#passive = options.passive ?? false;
   }
 
+  cancel() {
+    const bar = this.#cachedProgress;
+    bar?.stop();
+    this.#queue = [];
+  }
+
   get bar() {
     const cached = this.#cachedProgress;
     if (!cached) {
@@ -65,7 +71,7 @@ export class Queue<T, R> {
     return cached;
   }
 
-  add(args: T) {
+  addReturn(args: T) {
     this.#total += 1;
     this.updateTotal();
     if (this.#active.length > this.#maxJobs) {
@@ -76,6 +82,10 @@ export class Queue<T, R> {
     return this.#run(args);
   }
 
+  add(args: T) {
+    return this.addReturn(args).then(() => {}, () => {});
+  }
+
   addMany(items: T[]) {
     this.#total += items.length;
     this.updateTotal();
@@ -92,10 +102,12 @@ export class Queue<T, R> {
     const itemText = this.#getItemText(args);
     const spinner = new Spinner(itemText);
     spinner.stop();
+    (spinner as any).redraw = () => (bar as any).redraw();
     const active = this.#active;
     try {
       active.unshift(spinner);
       bar.props = { active };
+      console.log(this.#name + ": " + itemText);
       const result = await this.#fn(args, spinner);
       this.#done++;
       return result;
@@ -139,7 +151,7 @@ export class Queue<T, R> {
     }
   }
 
-  async done(o: { method: "success" | "stop" }) {
+  async done(o?: { method: "success" | "stop" }) {
     if (this.#active.length === 0) {
       this.#end(o);
       return;
@@ -153,8 +165,8 @@ export class Queue<T, R> {
 
   #end(
     { method = this.#passive ? "stop" : "success" }: {
-      method: "success" | "stop";
-    },
+      method?: "success" | "stop";
+    } = {},
   ) {
     const bar = this.#cachedProgress;
     if (this.#errors.length > 0) {
@@ -171,6 +183,12 @@ export class Queue<T, R> {
   get active(): boolean {
     return this.#active.length !== 0;
   }
+
+  [Symbol.dispose]() {
+    if (this.active) {
+      this.cancel();
+    }
+  }
 }
 
 const cwd = process.cwd();
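
Taken together, these Queue changes split the old add() into addReturn()
(which returns the job's promise) and a fire-and-forget add(), and make the
queue disposable so `using` can cancel leftover work. A usage sketch under
assumed option types, not taken from the diff:

  using q = new Queue({
    name: "Example",
    async fn(n: number) { return n * 2; },
    maxJobs: 4,
  });
  q.add(1);                            // rejection swallowed; check done()
  const four = await q.addReturn(2);   // 4; rejection propagates to caller
  await q.done();
  // leaving scope runs [Symbol.dispose], cancelling any still-queued jobs
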

@@ -1,7 +1,10 @@
 // File System APIs. Some custom APIs, but mostly a re-export a mix of built-in
 // Node.js sync+promise fs methods. For convenince.
 export {
+  createReadStream,
+  createWriteStream,
   existsSync,
+  open,
   readdir,
   readdirSync,
   readFile,
@@ -57,6 +60,8 @@ export function readJsonSync<T>(file: string) {
 
 import * as path from "node:path";
 import {
+  createReadStream,
+  createWriteStream,
   existsSync,
   mkdirSync as nodeMkdirSync,
   readdirSync,
@@ -67,9 +72,11 @@ import {
 } from "node:fs";
 import {
   mkdir as nodeMkdir,
+  open,
   readdir,
   readFile,
   rm,
   stat,
   writeFile,
 } from "node:fs/promises";
+export { Stats } from "node:fs";

@@ -48,6 +48,15 @@ export class WrappedDatabase {
   prepare<Args extends unknown[] = [], Result = unknown>(
     query: string,
   ): Stmt<Args, Result> {
+    query = query.trim();
+    const lines = query.split("\n");
+    const trim = Math.min(
+      ...lines.map((line) =>
+        line.trim().length === 0 ? Infinity : line.match(/^\s*/)![0].length
+      ),
+    );
+    query = lines.map((x) => x.slice(trim)).join("\n");
+
     let prepared;
     try {
       prepared = this.node.prepare(query);
@@ -62,42 +71,64 @@ export class WrappedDatabase {
 export class Stmt<Args extends unknown[] = unknown[], Row = unknown> {
   #node: StatementSync;
   #class: any | null = null;
+  query: string;
 
   constructor(node: StatementSync) {
     this.#node = node;
+    this.query = node.sourceSQL;
   }
 
   /** Get one row */
   get(...args: Args): Row | null {
+    return this.#wrap(args, () => {
       const item = this.#node.get(...args as any) as Row;
       if (!item) return null;
       const C = this.#class;
       if (C) Object.setPrototypeOf(item, C.prototype);
       return item;
+    });
   }
   getNonNull(...args: Args) {
     const item = this.get(...args);
-    if (!item) throw new Error("Query returned no result");
+    if (!item) {
+      throw this.#wrap(args, () => new Error("Query returned no result"));
+    }
     return item;
   }
   iter(...args: Args): Iterator<Row> {
-    return this.array(...args)[Symbol.iterator]();
+    return this.#wrap(args, () => this.array(...args)[Symbol.iterator]());
   }
   /** Get all rows */
   array(...args: Args): Row[] {
+    return this.#wrap(args, () => {
       const array = this.#node.all(...args as any) as Row[];
       const C = this.#class;
       if (C) array.forEach((item) => Object.setPrototypeOf(item, C.prototype));
       return array;
+    });
   }
   /** Return the number of changes / row ID */
   run(...args: Args) {
-    return this.#node.run(...args as any);
+    return this.#wrap(args, () => this.#node.run(...args as any));
   }
 
   as<R>(Class: { new (): R }): Stmt<Args, R> {
     this.#class = Class;
     return this as any;
   }
 
+  #wrap<T>(args: unknown[], fn: () => T) {
+    try {
+      return fn();
+    } catch (err: any) {
+      if (err && typeof err === "object") {
+        err.query = this.query;
+        args = args.flat(Infinity);
+        err.queryArgs = args.length === 1 ? args[0] : args;
+      }
+      throw err;
+    }
+  }
 }
 
 import { DatabaseSync, StatementSync } from "node:sqlite";
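
The new #wrap helper decorates any statement error with the SQL text and the
flattened arguments before rethrowing. A hedged illustration of what a caller
now sees:

  try {
    stmt.run("oops");
  } catch (err: any) {
    err.query;     // the statement's source SQL (node.sourceSQL)
    err.queryArgs; // "oops" (a single argument is unwrapped from the array)
    throw err;
  }
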

package-lock.json | 16 (generated)
@@ -17,7 +17,9 @@
     "marko": "^6.0.20",
     "puppeteer": "^24.10.1",
     "sharp": "^0.34.2",
-    "unique-names-generator": "^4.7.1"
+    "unique-names-generator": "^4.7.1",
+    "vscode-oniguruma": "^2.0.1",
+    "vscode-textmate": "^9.2.0"
   },
   "devDependencies": {
     "@types/node": "^22.15.29",
@@ -4680,6 +4682,18 @@
       "url": "https://opencollective.com/unified"
     }
   },
+  "node_modules/vscode-oniguruma": {
+    "version": "2.0.1",
+    "resolved": "https://registry.npmjs.org/vscode-oniguruma/-/vscode-oniguruma-2.0.1.tgz",
+    "integrity": "sha512-poJU8iHIWnC3vgphJnrLZyI3YdqRlR27xzqDmpPXYzA93R4Gk8z7T6oqDzDoHjoikA2aS82crdXFkjELCdJsjQ==",
+    "license": "MIT"
+  },
+  "node_modules/vscode-textmate": {
+    "version": "9.2.0",
+    "resolved": "https://registry.npmjs.org/vscode-textmate/-/vscode-textmate-9.2.0.tgz",
+    "integrity": "sha512-rkvG4SraZQaPSN/5XjwKswdU0OP9MF28QjrYzUBbhb8QyG3ljB1Ky996m++jiI7KdiAP2CkBiQZd9pqEDTClqA==",
+    "license": "MIT"
+  },
   "node_modules/w3c-keyname": {
     "version": "2.2.8",
     "resolved": "https://registry.npmjs.org/w3c-keyname/-/w3c-keyname-2.2.8.tgz",

@@ -13,7 +13,9 @@
     "marko": "^6.0.20",
     "puppeteer": "^24.10.1",
     "sharp": "^0.34.2",
-    "unique-names-generator": "^4.7.1"
+    "unique-names-generator": "^4.7.1",
+    "vscode-oniguruma": "^2.0.1",
+    "vscode-textmate": "^9.2.0"
   },
   "devDependencies": {
     "@types/node": "^22.15.29",

repl.js | 2
@@ -18,7 +18,7 @@ const repl = hot.load("node:repl").start({
       .catch((err) => {
         // TODO: improve @paperclover/console's ability to print AggregateError
         // and errors with extra random properties
-        console.error(util.inspect(err));
+        console.error(util.inspect(err, false, 10, true));
       })
       .then((result) => done(null, result));
   },
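
For reference, the positional arguments map onto util.inspect's options:

  // equivalent option form of the new call
  console.error(util.inspect(err, { showHidden: false, depth: 10, colors: true }));

so deeply nested error properties print in color rather than being cut off
at the default depth of 2.
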

@@ -1,19 +1,22 @@
+// This is the main file for the backend
+const app = new Hono();
 const logHttp = scoped("http", { color: "magenta" });
 
-const app = new Hono();
+// Middleware
 
-app.notFound(assets.notFound);
-
 app.use(trimTrailingSlash());
 app.use(removeDuplicateSlashes);
 app.use(logger((msg) => msg.startsWith("-->") && logHttp(msg.slice(4))));
 app.use(admin.middleware);
 
+// Backends
 app.route("", require("./q+a/backend.ts").app);
 app.route("", require("./file-viewer/backend.tsx").app);
 
+// Asset middleware has least precedence
 app.use(assets.middleware);
 
+// Handlers
+app.notFound(assets.notFound);
 if (process.argv.includes("--development")) {
   app.onError((err, c) => {
     if (err instanceof HTTPException) {

@@ -1,83 +0,0 @@
-import * as path from "node:path";
-import { cache, MediaFile } from "../db";
-
-// Function to get file extension statistics
-function getExtensionStats() {
-  // Get all files (not directories) from the database
-  const query = `
-    SELECT path FROM media_files
-    WHERE kind = ${MediaFile.Kind.file}
-  `;
-
-  // Use raw query to get all file paths
-  const rows = cache.query(query).all() as { path: string }[];
-
-  // Count extensions
-  const extensionCounts: Record<string, number> = {};
-
-  for (const row of rows) {
-    const extension = path.extname(row.path).toLowerCase();
-    extensionCounts[extension] = (extensionCounts[extension] || 0) + 1;
-  }
-
-  // Sort extensions by count (descending)
-  const sortedExtensions = Object.entries(extensionCounts)
-    .sort((a, b) => b[1] - a[1]);
-
-  return {
-    totalFiles: rows.length,
-    extensions: sortedExtensions,
-  };
-}
-
-// Function to print a visual table
-function printExtensionTable() {
-  const stats = getExtensionStats();
-
-  // Calculate column widths
-  const extensionColWidth = Math.max(
-    ...stats.extensions.map(([ext]) => ext.length),
-    "Extension".length,
-  ) + 2;
-
-  const countColWidth = Math.max(
-    ...stats.extensions.map(([_, count]) => count.toString().length),
-    "Count".length,
-  ) + 2;
-
-  const percentColWidth = "Percentage".length + 2;
-
-  // Print header
-  console.log("MediaFile Extension Statistics");
-  console.log(`Total files: ${stats.totalFiles}`);
-  console.log();
-
-  // Print table header
-  console.log(
-    "Extension".padEnd(extensionColWidth) +
-      "Count".padEnd(countColWidth) +
-      "Percentage".padEnd(percentColWidth),
-  );
-
-  // Print separator
-  console.log(
-    "-".repeat(extensionColWidth) +
-      "-".repeat(countColWidth) +
-      "-".repeat(percentColWidth),
-  );
-
-  // Print rows
-  for (const [extension, count] of stats.extensions) {
-    const percentage = ((count / stats.totalFiles) * 100).toFixed(2);
-    const ext = extension || "(no extension)";
-
-    console.log(
-      ext.padEnd(extensionColWidth) +
-        count.toString().padEnd(countColWidth) +
-        `${percentage}%`.padEnd(percentColWidth),
-    );
-  }
-}
-
-// Run the program
-printExtensionTable();

src/file-viewer/bin/list.ts | 8 (new file)
@@ -0,0 +1,8 @@
+export function main() {
+  const meows = MediaFile.db.prepare(`
+    select * from media_files;
+  `).as(MediaFile).array();
+  console.log(meows);
+}
+
+import { MediaFile } from "@/file-viewer/models/MediaFile.ts";

@@ -1,2 +1,589 @@
-import "@/file-viewer/models/MediaFile.ts";
-import "@/file-viewer/models/BlobAsset.ts";
+const root = path.resolve("C:/media");
+const workDir = path.resolve(".clover/file-assets");
+
+export async function main() {
+  const start = performance.now();
+  const timerSpinner = new Spinner({
+    text: () =>
+      `paper clover's scan3 [${
+        ((performance.now() - start) / 1000).toFixed(1)
+      }s]`,
+    fps: 10,
+  });
+  using _endTimerSpinner = { [Symbol.dispose]: () => timerSpinner.stop() };
+
+  // Read a directory or file stat and queue up changed files.
+  using qList = new async.Queue({
+    name: "Discover Tree",
+    async fn(absPath: string, spin) {
+      const stat = await fs.stat(absPath);
+
+      const publicPath = toPublicPath(absPath);
+      const mediaFile = MediaFile.getByPath(publicPath);
+
+      if (stat.isDirectory()) {
+        const items = await fs.readdir(absPath);
+        qList.addMany(items.map((subPath) => path.join(absPath, subPath)));
+
+        if (mediaFile) {
+          const deleted = mediaFile.getChildren()
+            .filter((child) => !items.includes(child.basename))
+            .flatMap((child) =>
+              child.kind === MediaFileKind.directory
+                ? child.getRecursiveFileChildren()
+                : child
+            );
+
+          qMeta.addMany(deleted.map((mediaFile) => ({
+            absPath: path.join(root, mediaFile.path),
+            publicPath: mediaFile.path,
+            stat: null,
+            mediaFile,
+          })));
+        }
+
+        return;
+      }
+
+      // All processes must be performed again if there is no file.
+      if (
+        !mediaFile ||
+        stat.size !== mediaFile.size ||
+        stat.mtime.getTime() !== mediaFile.date.getTime()
+      ) {
+        qMeta.add({ absPath, publicPath, stat, mediaFile });
+        return;
+      }
+
+      // If the scanners changed, it may mean more processes should be run.
+      queueProcessors({ absPath, stat, mediaFile });
+    },
+    maxJobs: 24,
+  });
+  using qMeta = new async.Queue({
+    name: "Update Metadata",
+    async fn({ absPath, publicPath, stat, mediaFile }: UpdateMetadataJob) {
+      if (!stat) {
+        // File was deleted.
+        await runUndoProcessors(UNWRAP(mediaFile));
+        return;
+      }
+      // TODO: run scrubLocationMetadata first
+
+      const hash = await new Promise<string>((resolve, reject) => {
+        const reader = fs.createReadStream(absPath);
+        reader.on("error", reject);
+
+        const hasher = crypto.createHash("sha1").setEncoding("hex");
+        hasher.on("error", reject);
+        hasher.on("readable", () => resolve(hasher.read()));
+
+        reader.pipe(hasher);
+      });
+      let date = stat.mtime;
+      if (
+        mediaFile &&
+        mediaFile.date.getTime() < stat.mtime.getTime() &&
+        (Date.now() - stat.mtime.getTime()) < monthMilliseconds
+      ) {
+        date = mediaFile.date;
+        console.warn(
+          `M-time on ${publicPath} was likely corrupted. ${
+            formatDate(mediaFile.date)
+          } -> ${formatDate(stat.mtime)}`,
+        );
+      }
+      mediaFile = MediaFile.createFile({
+        path: publicPath,
+        date,
+        hash,
+        size: stat.size,
+        duration: mediaFile?.duration ?? 0,
+        dimensions: mediaFile?.dimensions ?? "",
+        contents: mediaFile?.contents ?? "",
+      });
+      await queueProcessors({ absPath, stat, mediaFile });
+    },
+    getItemText: (job) =>
+      job.publicPath.slice(1) + (job.stat ? "" : " (deleted)"),
+    maxJobs: 2,
+  });
+  using qProcess = new async.Queue({
+    name: "Process Contents",
+    async fn(
+      { absPath, stat, mediaFile, processor, index, after }: ProcessJob,
+      spin,
+    ) {
+      await processor.run({ absPath, stat, mediaFile, spin });
+      mediaFile.setProcessed(mediaFile.processed | (1 << (16 + index)));
+      for (const dependantJob of after) {
+        ASSERT(dependantJob.needs > 0);
+        dependantJob.needs -= 1;
+        if (dependantJob.needs == 0) qProcess.add(dependantJob);
+      }
+    },
+    getItemText: ({ mediaFile, processor }) =>
+      `${mediaFile.path.slice(1)} - ${processor.name}`,
+    maxJobs: 2,
+  });
+
+  function decodeProcessors(input: string) {
+    return input
+      .split(";")
+      .filter(Boolean)
+      .map(([a, b, c]) => ({
+        id: a,
+        hash: (b.charCodeAt(0) << 8) + c.charCodeAt(0),
+      }));
+  }
+
+  async function queueProcessors(
+    { absPath, stat, mediaFile }: Omit<ProcessFileArgs, "spin">,
+  ) {
+    const ext = mediaFile.extension.toLowerCase();
+    let possible = processors.filter((p) => p.include.has(ext));
+    if (possible.length === 0) return;
+
+    const hash = possible.reduce((a, b) => a ^ b.hash, 0) | 1;
+    ASSERT(hash <= 0xFFFF);
+    let processed = mediaFile.processed;
+
+    // If the hash has changed, migrate the bitfield over.
+    // This also runs when the processor hash is in it's initial 0 state.
+    const order = decodeProcessors(mediaFile.processors);
+    if ((processed & 0xFFFF) !== hash) {
+      const previous = order.filter((_, i) =>
+        (processed & (1 << (16 + i))) !== 0
+      );
+      processed = hash;
+      for (const { id, hash } of previous) {
+        const p = processors.find((p) => p.id === id);
+        if (!p) continue;
+        const index = possible.indexOf(p);
+        if (index !== -1 && p.hash === hash) {
+          processed |= 1 << (16 + index);
+        } else {
+          if (p.undo) await p.undo(mediaFile);
+        }
+      }
+      mediaFile.setProcessors(
+        processed,
+        possible.map((p) =>
+          p.id + String.fromCharCode(p.hash >> 8, p.hash & 0xFF)
+        ).join(";"),
+      );
+    } else {
+      possible = order.map(({ id }) =>
+        UNWRAP(possible.find((p) => p.id === id))
+      );
+    }
+
+    // Queue needed processors.
+    const jobs: ProcessJob[] = [];
+    for (let i = 0, { length } = possible; i < length; i += 1) {
+      if ((processed & (1 << (16 + i))) === 0) {
+        const job: ProcessJob = {
+          absPath,
+          stat,
+          mediaFile,
+          processor: possible[i],
+          index: i,
+          after: [],
+          needs: possible[i].depends.length,
+        };
+        jobs.push(job);
+        if (job.needs === 0) qProcess.add(job);
+      }
+    }
+    for (const job of jobs) {
+      for (const dependId of job.processor.depends) {
+        const dependJob = jobs.find((j) => j.processor.id === dependId);
+        if (dependJob) {
+          dependJob.after.push(job);
+        } else {
+          ASSERT(job.needs > 0);
+          job.needs -= 1;
+          if (job.needs === 0) qProcess.add(job);
+        }
+      }
+    }
+  }
+
+  async function runUndoProcessors(mediaFile: MediaFile) {
+    const { processed } = mediaFile;
+    const previous = decodeProcessors(mediaFile.processors)
+      .filter((_, i) => (processed & (1 << (16 + i))) !== 0);
+    for (const { id } of previous) {
+      const p = processors.find((p) => p.id === id);
+      if (!p) continue;
+      if (p.undo) {
+        await p.undo(mediaFile);
+      }
+    }
+    mediaFile.delete();
+  }
+
+  // Add the root & recursively iterate!
+  qList.add(root);
+  await qList.done();
+  await qMeta.done();
+  await qProcess.done();
+
+  console.info(
+    "Updated file viewer index in " +
+      ((performance.now() - start) / 1000).toFixed(1) + "s",
+  );
+}
+
+interface Process {
+  name: string;
+  enable?: boolean;
+  include: Set<string>;
+  depends?: string[];
+  /* Perform an action. */
+  run(args: ProcessFileArgs): Promise<void>;
+  /* Should detect if `run` was never even run before before undoing state */
+  undo?(mediaFile: MediaFile): Promise<void>;
+}
+
+const execFileRaw = util.promisify(child_process.execFile);
+const execFile: typeof execFileRaw = ((
+  ...args: Parameters<typeof execFileRaw>
+) =>
+  execFileRaw(...args).catch((e: any) => {
+    if (e?.message?.startsWith?.("Command failed")) {
+      if (e.code > (2 ** 31)) e.code |= 0;
+      const code = e.signal ? `signal ${e.signal}` : `code ${e.code}`;
+      e.message = `${e.cmd.split(" ")[0]} failed with ${code}`;
+    }
+    throw e;
+  })) as any;
+const ffprobe = testProgram("ffprobe", "--help");
+const ffmpeg = testProgram("ffmpeg", "--help");
+
+const ffmpegOptions = [
+  "-hide_banner",
+  "-loglevel",
+  "warning",
+];
+
+const imageSizes = [64, 128, 256, 512, 1024, 2048];
+
+const procDuration: Process = {
+  name: "calculate duration",
+  enable: ffprobe !== null,
+  include: rules.extsDuration,
+  async run({ absPath, mediaFile }) {
+    const { stdout } = await execFile(ffprobe!, [
+      "-v",
+      "error",
+      "-show_entries",
+      "format=duration",
+      "-of",
+      "default=noprint_wrappers=1:nokey=1",
+      absPath,
+    ]);
+
+    const duration = parseFloat(stdout.trim());
+    if (Number.isNaN(duration)) {
+      throw new Error("Could not extract duration from " + stdout);
+    }
+    mediaFile.setDuration(Math.ceil(duration));
+  },
+};
+
+// NOTE: Never re-order the processors. Add new ones at the end.
+const procDimensions: Process = {
+  name: "calculate dimensions",
+  enable: ffprobe != null,
+  include: rules.extsDimensions,
+  async run({ absPath, mediaFile }) {
+    const ext = path.extname(absPath);
+
+    let dimensions;
+
+    if (ext === ".svg") {
+      // Parse out of text data
+      const content = await fs.readFile(absPath, "utf8");
+      const widthMatch = content.match(/width="(\d+)"/);
+      const heightMatch = content.match(/height="(\d+)"/);
+
+      if (widthMatch && heightMatch) {
+        dimensions = `${widthMatch[1]}x${heightMatch[1]}`;
+      }
+    } else {
+      // Use ffprobe to observe streams
+      const { stdout } = await execFile("ffprobe", [
+        "-v",
+        "error",
+        "-select_streams",
+        "v:0",
+        "-show_entries",
+        "stream=width,height",
+        "-of",
+        "csv=s=x:p=0",
+        absPath,
+      ]);
+      if (stdout.includes("x")) {
+        dimensions = stdout.trim();
+      }
+    }
+
+    mediaFile.setDimensions(dimensions ?? "");
+  },
+};
+
+const procLoadTextContents: Process = {
+  name: "load text content",
+  include: rules.extsReadContents,
+  async run({ absPath, mediaFile, stat }) {
+    if (stat.size > 1_000_000) return;
+    const text = await fs.readFile(absPath, "utf-8");
+    mediaFile.setContents(text);
+  },
+};
+
+const procHighlightCode: Process = {
+  name: "highlight source code",
+  include: new Set(rules.extsCode.keys()),
+  async run({ absPath, mediaFile, stat }) {
+    const language = UNWRAP(
+      rules.extsCode.get(path.extname(absPath).toLowerCase()),
+    );
+    // An issue is that .ts is an overloaded extension, shared between
+    // 'transport stream' and 'typescript'.
+    //
+    // Filter used here is:
+    // - more than 1mb
+    // - invalid UTF-8
+    if (stat.size > 1_000_000) return;
+    let code;
+    const buf = await fs.readFile(absPath);
+    try {
+      code = new TextDecoder("utf-8", { fatal: true }).decode(buf);
+    } catch (error) {
+      mediaFile.setContents("");
+      return;
+    }
+    const content = await highlight.highlightCode(code, language);
+    mediaFile.setContents(content);
+  },
+};
+
+const imageSubsets = [
+  {
+    ext: ".webp",
+    // deno-fmt-disable-line
+    args: [
+      "-lossless",
+      "0",
+      "-compression_level",
+      "6",
+      "-quality",
+      "95",
+      "-method",
+      "6",
+    ],
+  },
+  {
+    ext: ".jxl",
+    args: ["-c:v", "libjxl", "-distance", "0.8", "-effort", "9"],
+  },
+];
+
+const procImageSubsets: Process = {
+  name: "encode image subsets",
+  include: rules.extsImage,
+  enable: false,
+  depends: ["calculate dimensions"],
+  async run({ absPath, mediaFile, stat, spin }) {
+    const { width, height } = UNWRAP(mediaFile.parseDimensions());
+    const targetSizes = imageSizes.filter((w) => w < width);
+    const baseStatus = spin.text;
+
+    using stack = new DisposableStack();
+    for (const size of targetSizes) {
+      const { w, h } = resizeDimensions(width, height, size);
+      for (const { ext, args } of imageSubsets) {
+        spin.text = baseStatus +
+          ` (${w}x${h}, ${ext.slice(1).toUpperCase()})`;
+
+        stack.use(
+          await produceAsset(
+            `${mediaFile.hash}/${size}${ext}`,
+            async (out) => {
+              await fs.mkdir(path.dirname(out));
+              await fs.rm(out, { force: true });
+              await execFile(ffmpeg!, [
+                ...ffmpegOptions,
+                "-i",
+                absPath,
+                "-vf",
+                `scale=${w}:${h}:force_original_aspect_ratio=increase,crop=${w}:${h}`,
+                ...args,
+                out,
+              ]);
+              return [out];
+            },
+          ),
+        );
+      }
+    }
+
+    stack.move();
+  },
+  async undo(mediaFile) {
+    const { width } = UNWRAP(mediaFile.parseDimensions());
+    const targetSizes = imageSizes.filter((w) => w < width);
+    for (const size of targetSizes) {
+      for (const { ext } of imageSubsets) {
+        unproduceAsset(`${mediaFile.hash}/${size}${ext}`);
+      }
+    }
+  },
+};
+
+const videoFormats = [
+  {
+    name: "webm",
+  },
+];
+
+const processors = [
+  procDimensions,
+  procDuration,
+  procLoadTextContents,
+  procHighlightCode,
+  procImageSubsets,
+]
+  .map((process, id, all) => {
+    const strIndex = (id: number) =>
+      String.fromCharCode("a".charCodeAt(0) + id);
+    return {
+      ...process as Process,
+      id: strIndex(id),
+      // Create a unique key.
+      hash: new Uint16Array(
+        crypto.createHash("sha1")
+          .update(process.run.toString())
+          .digest().buffer,
+      ).reduce((a, b) => a ^ b),
+      depends: (process.depends ?? []).map((depend) => {
+        const index = all.findIndex((p) => p.name === depend);
+        if (index === -1) throw new Error(`Cannot find depend '${depend}'`);
+        if (index === id) throw new Error(`Cannot depend on self: '${depend}'`);
+        return strIndex(index);
+      }),
+    };
+  });
+
+function resizeDimensions(w: number, h: number, desiredWidth: number) {
+  ASSERT(desiredWidth < w, `${desiredWidth} < ${w}`);
+  return { w: desiredWidth, h: Math.floor((h / w) * desiredWidth) };
+}
+
+async function produceAsset(
+  key: string,
+  builder: (prefix: string) => Promise<string[]>,
+) {
+  const asset = AssetRef.putOrIncrement(key);
+  try {
+    if (asset.refs === 1) {
+      const paths = await builder(path.join(workDir, key));
+      asset.addFiles(
+        paths.map((file) =>
+          path.relative(workDir, file)
+            .replaceAll("\\", "/")
+        ),
+      );
+    }
+    return {
+      [Symbol.dispose]: () => asset.unref(),
+    };
+  } catch (err: any) {
+    if (err && typeof err === "object") err.assetKey = key;
+    asset.unref();
+    throw err;
+  }
+}
+
+async function unproduceAsset(key: string) {
+  const ref = AssetRef.get(key);
+  if (ref) {
+    ref.unref();
+    console.log(`unref ${key}`);
+    // TODO: remove associated files from target
+  }
+}
+
+interface UpdateMetadataJob {
+  absPath: string;
+  publicPath: string;
+  stat: fs.Stats | null;
+  mediaFile: MediaFile | null;
+}
+
+interface ProcessFileArgs {
+  absPath: string;
+  stat: fs.Stats;
+  mediaFile: MediaFile;
+  spin: Spinner;
+}
+
+interface ProcessJob {
+  absPath: string;
+  stat: fs.Stats;
+  mediaFile: MediaFile;
+  processor: typeof processors[0];
+  index: number;
+  after: ProcessJob[];
+  needs: number;
+}
+
+export function skipBasename(basename: string): boolean {
+  // dot files must be incrementally tracked
+  if (basename === ".dirsort") return true;
+  if (basename === ".friends") return true;
+
+  return (
+    basename.startsWith(".") ||
+    basename.startsWith("._") ||
+    basename.startsWith(".tmp") ||
+    basename === ".DS_Store" ||
+    basename.toLowerCase() === "thumbs.db" ||
+    basename.toLowerCase() === "desktop.ini"
+  );
+}
+
+export function toPublicPath(absPath: string) {
+  ASSERT(path.isAbsolute(absPath));
+  if (absPath === root) return "/";
+  return "/" + path.relative(root, absPath).replaceAll("\\", "/");
+}
+
+export function testProgram(name: string, helpArgument: string) {
+  try {
+    child_process.spawnSync(name, [helpArgument]);
+    return name;
+  } catch (err) {
+    console.warn(`Missing or corrupt executable '${name}'`);
+  }
+  return null;
+}
+
+const monthMilliseconds = 30 * 24 * 60 * 60 * 1000;
+
+import { Spinner } from "@paperclover/console/Spinner";
+import * as async from "#sitegen/async";
+import * as fs from "#sitegen/fs";
+
+import * as path from "node:path";
+import * as child_process from "node:child_process";
+import * as util from "node:util";
+import * as crypto from "node:crypto";
+
+import { MediaFile, MediaFileKind } from "@/file-viewer/models/MediaFile.ts";
+import { AssetRef } from "@/file-viewer/models/AssetRef.ts";
+import { formatDate } from "@/file-viewer/format.ts";
+import * as rules from "@/file-viewer/rules.ts";
+import * as highlight from "@/file-viewer/highlight.ts";
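
A note on the processor bookkeeping in this file: `processed` packs two
things into one integer (low 16 bits hold the XOR of all applicable
processor hashes, the upper bits hold one completion bit per processor),
and `processors` stores the ordering as three characters per entry. A
worked example with assumed values:

  // encode: id "a" with hash 0x1234 -> "a" + "\u0012" + "\u0034"
  const entry = "a" + String.fromCharCode(0x1234 >> 8, 0x1234 & 0xFF);
  // decode, same as decodeProcessors:
  const [a, b, c] = entry;
  ({ id: a, hash: (b.charCodeAt(0) << 8) + c.charCodeAt(0) }); // 0x1234
  // completion bit for the processor at index 2:
  const isDone = (processed & (1 << (16 + 2))) !== 0;
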

@@ -1,3 +1,48 @@
+// WARNING
+// -------
+// This file contains spoilers for COTYLEDON
+// Consider reading through the entire archive before picking apart this
+// code, as this contains the beginning AND the ending sections, which
+// contains very percise storytelling. You've been warned...
+//
+// --> https://paperclover.net/file/cotyledon <--
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+//
+
+// SPEEDBUMP
 export function Speedbump() {
   return (
     <div class="panel last">
@@ -80,6 +125,7 @@ export function Speedbump() {
   );
 }
 
+// OPENING
 export function Readme() {
   return (
     <div class="panel last">
@@ -136,6 +182,7 @@ export function Readme() {
   );
 }
 
+// TRUE ENDING. Written in Apple Notes.
 export function ForEveryone() {
   return (
     <>

@@ -1,10 +1,3 @@
-import { onceAsync } from "../lib.ts";
-import * as fs from "node:fs/promises";
-import * as path from "node:path";
-import * as oniguruma from "vscode-oniguruma";
-import * as textmate from "vscode-textmate";
-import { escapeHTML } from "../framework/bun-polyfill.ts";
-
 const languages = [
   "ts",
   "tsx",
@@ -88,7 +81,6 @@ interface HighlightLinesOptions {
 }
 
 export function getStyle(scopesToCheck: string[], langugage: Language) {
-  if (import.meta.main) console.log(scopesToCheck);
   for (const scope of scopes) {
     if (scope[2] && scope[2] !== langugage) continue;
     const find = scopesToCheck.find((s) => s.startsWith(scope[0]));
@@ -98,6 +90,7 @@ export function getStyle(scopesToCheck: string[], langugage: Language) {
   }
   return null;
 }
+
 function highlightLines({
   lines,
   grammar,
@@ -120,7 +113,7 @@ function highlightLines({
       const str = lines[i].slice(token.startIndex, token.endIndex);
       if (str.trim().length === 0) {
         // Emit but do not consider scope changes
-        html += escapeHTML(str);
+        html += ssr.escapeHtml(str);
         continue;
       }
@@ -129,7 +122,7 @@ function highlightLines({
         if (lastHtmlStyle) html += "</span>";
         if (style) html += `<span class='${style}'>`;
       }
-      html += escapeHTML(str);
+      html += ssr.escapeHtml(str);
       lastHtmlStyle = style;
     }
     html += "\n";
@@ -140,7 +133,7 @@ function highlightLines({
   return { state, html };
 }
 
-export const getRegistry = onceAsync(async () => {
+export const getRegistry = async.once(async () => {
   const wasmBin = await fs.readFile(
     path.join(
       import.meta.dirname,
@@ -187,18 +180,24 @@ export async function highlightCode(code: string, language: Language) {
   return html;
 }
 
-import { existsSync } from "node:fs";
-if (import.meta.main) {
+export async function main() {
   // validate exts
   for (const ext of languages) {
     if (
-      !existsSync(
+      !fs.existsSync(
         path.join(import.meta.dirname, `highlight-grammar/${ext}.plist`),
       )
     ) {
       console.error(`Missing grammar for ${ext}`);
     }
-    const html = await highlightCode("wwwwwwwwwwwaaaaaaaaaaaaaaaa", ext);
+    // Sanity check
+    await highlightCode("wwwwwwwwwwwaaaaaaaaaaaaaaaa", ext);
   }
-  console.log(await highlightCode(`{"maps":"damn"`, "json"));
 }
+
+import * as async from "#sitegen/async";
+import * as fs from "#sitegen/fs";
+import * as path from "node:path";
+import * as oniguruma from "vscode-oniguruma";
+import * as textmate from "vscode-textmate";
+import * as ssr from "#ssr";
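
One rename worth noting: getRegistry moves from onceAsync (the deleted
../lib.ts import) to async.once from #sitegen/async. Assuming the semantics
implied by both names, the wrapper caches the first call's promise so the
wasm and grammar setup runs once and every caller shares it; a minimal
sketch of that assumed behavior:

  function once<T>(fn: () => Promise<T>): () => Promise<T> {
    let p: Promise<T> | undefined;
    return () => (p ??= fn()); // all callers share one promise
  }
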

src/file-viewer/models/AssetRef.ts | 73 (new file)
@@ -0,0 +1,73 @@
+const db = getDb("cache.sqlite");
+db.table(
+  "asset_refs",
+  /* SQL */ `
+    create table if not exists asset_refs (
+      id integer primary key autoincrement,
+      key text not null UNIQUE,
+      refs integer not null
+    );
+    create table if not exists asset_ref_files (
+      file text not null,
+      id integer not null,
+      foreign key (id) references asset_refs(id)
+    );
+    create index asset_ref_files_id on asset_ref_files(id);
+  `,
+);
+
+/**
+ * Uncompressed files are read directly from the media store root. Derivied
+ * assets like compressed files, optimized images, and streamable video are
+ * stored in the `derived` folder. After scanning, the derived assets are
+ * uploaded into the store (storage1/clofi-derived dataset on NAS). Since
+ * multiple files can share the same hash, the number of references is
+ * tracked, and the derived content is only produced once. This means if a
+ * file is deleted, it should only decrement a reference count; deleting it
+ * once all references are removed.
+ */
+export class AssetRef {
+  /** Key which aws referenced */
+  id!: number;
+  key!: string;
+  refs!: number;
+
+  unref() {
+    decrementQuery.run(this.key);
+    deleteUnreferencedQuery.run().changes > 0;
+  }
+
+  addFiles(files: string[]) {
+    for (const file of files) {
+      addFileQuery.run({ id: this.id, file });
+    }
+  }
+
+  static get(key: string) {
+    return getQuery.get(key);
+  }
+
+  static putOrIncrement(key: string) {
+    putOrIncrementQuery.get(key);
+    return UNWRAP(AssetRef.get(key));
+  }
+}
+
+const getQuery = db.prepare<[key: string]>(/* SQL */ `
+  select * from asset_refs where key = ?;
+`).as(AssetRef);
+const putOrIncrementQuery = db.prepare<[key: string]>(/* SQL */ `
+  insert into asset_refs (key, refs) values (?, 1)
+  on conflict(key) do update set refs = refs + 1;
+`);
+const decrementQuery = db.prepare<[key: string]>(/* SQL */ `
+  update asset_refs set refs = refs - 1 where key = ? and refs > 0;
+`);
+const deleteUnreferencedQuery = db.prepare(/* SQL */ `
+  delete from asset_refs where refs <= 0;
+`);
+const addFileQuery = db.prepare<[{ id: number; file: string }]>(/* SQL */ `
+  insert into asset_ref_files (id, file) values ($id, $file);
+`);
+
+import { getDb } from "#sitegen/sqlite";
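
The doc comment above describes reference counting; in the scan code it
pairs with produceAsset: the first putOrIncrement (refs === 1) triggers the
builder, later ones reuse the derived files. A hedged lifecycle sketch with
a hypothetical key:

  const a = AssetRef.putOrIncrement("deadbeef/256.webp"); // refs 1: build
  const b = AssetRef.putOrIncrement("deadbeef/256.webp"); // refs 2: reuse
  b.unref(); // refs 1, derived files kept
  a.unref(); // refs 0, row removed by deleteUnreferencedQuery
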

@@ -1,57 +0,0 @@
-const db = getDb("cache.sqlite");
-db.table(
-  "blob_assets",
-  /* SQL */ `
-    CREATE TABLE IF NOT EXISTS blob_assets (
-      hash TEXT PRIMARY KEY,
-      refs INTEGER NOT NULL DEFAULT 0
-    );
-  `,
-);
-
-/**
- * Uncompressed files are read directly from the media store root. Compressed
- * files are stored as `<compress store>/<first 2 chars of hash>/<hash>` Since
- * multiple files can share the same hash, the number of references is tracked
- * so that when a file is deleted, the compressed data is only removed when all
- * references are gone.
- */
-export class BlobAsset {
-  /** sha1 of the contents */
-  hash!: string;
-  refs!: number;
-
-  decrementOrDelete() {
-    BlobAsset.decrementOrDelete(this.hash);
-  }
-
-  static get(hash: string) {
-    return getQuery.get(hash);
-  }
-  static putOrIncrement(hash: string) {
-    ASSERT(hash.length === 40);
-    putOrIncrementQuery.get(hash);
-    return BlobAsset.get(hash)!;
-  }
-  static decrementOrDelete(hash: string) {
-    ASSERT(hash.length === 40);
-    decrementQuery.run(hash);
-    return deleteQuery.run(hash).changes > 0;
-  }
-}
-
-const getQuery = db.prepare<[hash: string]>(/* SQL */ `
-  SELECT * FROM blob_assets WHERE hash = ?;
-`).as(BlobAsset);
-const putOrIncrementQuery = db.prepare<[hash: string]>(/* SQL */ `
-  INSERT INTO blob_assets (hash, refs) VALUES (?, 1)
-  ON CONFLICT(hash) DO UPDATE SET refs = refs + 1;
-`);
-const decrementQuery = db.prepare<[hash: string]>(/* SQL */ `
-  UPDATE blob_assets SET refs = refs - 1 WHERE hash = ? AND refs > 0;
-`);
-const deleteQuery = db.prepare<[hash: string]>(/* SQL */ `
-  DELETE FROM blob_assets WHERE hash = ? AND refs <= 0;
-`);
-
-import { getDb } from "#sitegen/sqlite";
@ -2,26 +2,31 @@ const db = getDb("cache.sqlite");
|
||||||
db.table(
|
db.table(
|
||||||
"media_files",
|
"media_files",
|
||||||
/* SQL */ `
|
/* SQL */ `
|
||||||
CREATE TABLE IF NOT EXISTS media_files (
|
create table media_files (
|
||||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
id integer primary key autoincrement,
|
||||||
parent_id INTEGER,
|
parent_id integer,
|
||||||
path TEXT UNIQUE,
|
path text unique,
|
||||||
kind INTEGER NOT NULL,
|
kind integer not null,
|
||||||
timestamp INTEGER NOT NULL,
|
timestamp integer not null,
|
||||||
timestamp_updated INTEGER NOT NULL DEFAULT NOW,
|
timestamp_updated integer not null default current_timestamp,
|
||||||
hash TEXT NOT NULL,
|
hash text not null,
|
||||||
size INTEGER NOT NULL,
|
size integer not null,
|
||||||
duration INTEGER NOT NULL DEFAULT 0,
|
duration integer not null default 0,
|
||||||
dimensions TEXT NOT NULL DEFAULT "",
|
dimensions text not null default "",
|
||||||
contents TEXT NOT NULL,
|
contents text not null,
|
||||||
dirsort TEXT,
|
dirsort text,
|
||||||
processed INTEGER NOT NULL,
|
processed integer not null,
|
||||||
FOREIGN KEY (parent_id) REFERENCES media_files(id)
|
processors text not null default "",
|
||||||
|
foreign key (parent_id) references media_files(id)
|
||||||
);
|
);
|
||||||
-- Index for quickly looking up files by path
|
-- index for quickly looking up files by path
|
||||||
CREATE INDEX IF NOT EXISTS media_files_path ON media_files (path);
|
create index media_files_path on media_files (path);
|
||||||
-- Index for finding directories that need to be processed
|
-- index for quickly looking up children
|
||||||
CREATE INDEX IF NOT EXISTS media_files_directory_processed ON media_files (kind, processed);
|
create index media_files_parent_id on media_files (parent_id);
|
||||||
|
-- index for quickly looking up recursive file children
|
||||||
|
create index media_files_file_children on media_files (kind, path);
|
||||||
|
-- index for finding directories that need to be processed
|
||||||
|
create index media_files_directory_processed on media_files (kind, processed);
|
||||||
`,
|
`,
|
||||||
);
|
);
|
||||||
|
|
||||||
|
@ -31,7 +36,7 @@ export enum MediaFileKind {
|
||||||
}
|
}
|
||||||
export class MediaFile {
|
export class MediaFile {
|
||||||
id!: number;
|
id!: number;
|
||||||
parent_id!: number;
|
parent_id!: number | null;
|
||||||
/**
|
/**
|
||||||
* Has leading slash, does not have `/file` prefix.
|
* Has leading slash, does not have `/file` prefix.
|
||||||
* @example "/2025/waterfalls/waterfalls.mp3"
|
* @example "/2025/waterfalls/waterfalls.mp3"
|
||||||
|
@ -69,12 +74,13 @@ export class MediaFile {
|
||||||
size!: number;
|
size!: number;
|
||||||
/**
|
/**
|
||||||
* 0 - not processed
|
* 0 - not processed
|
||||||
* 1 - processed
|
* non-zero - processed
|
||||||
*
|
*
|
||||||
* file: this is for compression
|
* file: a bit-field of the processors.
|
||||||
* directory: this is for re-indexing contents
|
* directory: this is for re-indexing contents
|
||||||
*/
|
*/
|
||||||
processed!: number;
|
processed!: number;
|
||||||
|
processors!: string;
|
||||||
|
|
||||||
// -- instance ops --
|
// -- instance ops --
|
||||||
get date() {
|
get date() {
|
||||||
|
@ -119,8 +125,33 @@ export class MediaFile {
|
||||||
ASSERT(result.kind === MediaFileKind.directory);
|
ASSERT(result.kind === MediaFileKind.directory);
|
||||||
return result;
|
return result;
|
||||||
}
|
}
|
||||||
setCompressed(compressed: boolean) {
|
setProcessed(processed: number) {
|
||||||
MediaFile.markCompressed(this.id, compressed);
|
setProcessedQuery.run({ id: this.id, processed });
|
||||||
|
this.processed = processed;
|
||||||
|
}
|
||||||
|
setProcessors(processed: number, processors: string) {
|
||||||
|
setProcessorsQuery.run({ id: this.id, processed, processors });
|
||||||
|
this.processed = processed;
|
||||||
|
this.processors = processors;
|
||||||
|
}
|
||||||
|
setDuration(duration: number) {
|
||||||
|
setDurationQuery.run({ id: this.id, duration });
|
||||||
|
this.duration = duration;
|
||||||
|
}
|
||||||
|
setDimensions(dimensions: string) {
|
||||||
|
setDimensionsQuery.run({ id: this.id, dimensions });
|
||||||
|
this.dimensions = dimensions;
|
||||||
|
}
|
||||||
|
setContents(contents: string) {
|
||||||
|
setContentsQuery.run({ id: this.id, contents });
|
||||||
|
this.contents = contents;
|
||||||
|
}
|
||||||
|
getRecursiveFileChildren() {
|
||||||
|
if (this.kind !== MediaFileKind.directory) return [];
|
||||||
|
return getChildrenFilesRecursiveQuery.array(this.path + "/");
|
||||||
|
}
|
||||||
|
delete() {
|
||||||
|
deleteCascadeQuery.run({ id: this.id });
|
||||||
}
|
}
|
||||||
|
|
||||||
// -- static ops --
|
// -- static ops --
|
||||||
|
@ -130,7 +161,7 @@ export class MediaFile {
|
||||||
if (filePath === "/") {
|
if (filePath === "/") {
|
||||||
return Object.assign(new MediaFile(), {
|
return Object.assign(new MediaFile(), {
|
||||||
id: 0,
|
id: 0,
|
||||||
parent_id: 0,
|
parent_id: null,
|
||||||
path: "/",
|
path: "/",
|
||||||
kind: MediaFileKind.directory,
|
kind: MediaFileKind.directory,
|
||||||
timestamp: 0,
|
timestamp: 0,
|
||||||
|
@ -149,11 +180,15 @@ export class MediaFile {
|
||||||
date,
|
date,
|
||||||
hash,
|
hash,
|
||||||
size,
|
size,
|
||||||
duration = 0,
|
duration,
|
||||||
dimensions = "",
|
dimensions,
|
||||||
content = "",
|
contents,
|
||||||
}: CreateFile) {
|
}: CreateFile) {
|
||||||
createFileQuery.get({
|
ASSERT(
|
||||||
|
!filePath.includes("\\") && filePath.startsWith("/"),
|
||||||
|
`Invalid path: ${filePath}`,
|
||||||
|
);
|
||||||
|
return createFileQuery.getNonNull({
|
||||||
path: filePath,
|
path: filePath,
|
||||||
parentId: MediaFile.getOrPutDirectoryId(path.dirname(filePath)),
|
parentId: MediaFile.getOrPutDirectoryId(path.dirname(filePath)),
|
||||||
timestamp: date.getTime(),
|
timestamp: date.getTime(),
|
||||||
|
@@ -162,37 +197,46 @@ export class MediaFile {
       size,
       duration,
       dimensions,
-      contents: content,
+      contents,
     });
   }
   static getOrPutDirectoryId(filePath: string) {
-    filePath = path.posix.normalize(filePath);
-    const row = getDirectoryIdQuery.get(filePath) as { id: number };
+    ASSERT(
+      !filePath.includes("\\") && filePath.startsWith("/"),
+      `Invalid path: ${filePath}`,
+    );
+    filePath = path.normalize(filePath);
+    const row = getDirectoryIdQuery.get(filePath);
     if (row) return row.id;
     let current = filePath;
     let parts = [];
-    let parentId: null | number = 0;
+    let parentId: null | number = null;
     if (filePath === "/") {
-      return createDirectoryQuery.run(filePath, 0).lastInsertRowid as number;
+      return createDirectoryQuery.getNonNull({
+        path: filePath,
+        parentId,
+      }).id;
     }
-    // walk down the path until we find a directory that exists
+    // walk up the path until we find a directory that exists
     do {
       parts.unshift(path.basename(current));
       current = path.dirname(current);
-      parentId = (getDirectoryIdQuery.get(current) as { id: number })?.id;
+      parentId = getDirectoryIdQuery.get(current)?.id ?? null;
     } while (parentId == undefined && current !== "/");
     if (parentId == undefined) {
-      parentId = createDirectoryQuery.run({
+      parentId = createDirectoryQuery.getNonNull({
         path: current,
-        parentId: 0,
-      }).lastInsertRowid as number;
+        parentId,
+      }).id;
     }
-    // walk back up the path, creating directories as needed
+    // walk back down the path, creating directories as needed
     for (const part of parts) {
       current = path.join(current, part);
       ASSERT(parentId != undefined);
-      parentId = createDirectoryQuery.run({ path: current, parentId })
-        .lastInsertRowid as number;
+      parentId = createDirectoryQuery.getNonNull({
+        path: current,
+        parentId,
+      }).id;
     }
     return parentId;
   }
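The rewritten `getOrPutDirectoryId` is a two-phase walk: climb toward the root until a known ancestor is found, then descend again creating the missing rows. A trace of the behavior this code implies, on an empty table:

// Illustrative trace, assuming media_files starts empty:
const id = MediaFile.getOrPutDirectoryId("/a/b/c");
// 1. walk up:   "/a/b/c" -> "/a/b" -> "/a" -> "/"; none exist, parts = ["a", "b", "c"]
// 2. "/" is inserted with parentId = null (the new nullable root convention)
// 3. walk down: "/a", "/a/b", "/a/b/c" are inserted, each row returning its id
// `id` is the freshly created row id for "/a/b/c"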
@@ -213,8 +257,8 @@ export class MediaFile {
       size,
     });
   }
-  static markCompressed(id: number, compressed: boolean) {
-    markCompressedQuery.run({ id, processed: compressed ? 2 : 1 });
+  static setProcessed(id: number, processed: number) {
+    setProcessedQuery.run({ id, processed });
   }
   static createOrUpdateDirectory(dirPath: string) {
     const id = MediaFile.getOrPutDirectoryId(dirPath);
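`markCompressed` collapsed the flag into a boolean; `setProcessed` stores the number directly, which suits a multi-step pipeline where `processed` is a small state value rather than done/not-done. Equivalent calls under the old mapping:

const id = 1; // hypothetical row id
MediaFile.setProcessed(id, 1); // was markCompressed(id, false)
MediaFile.setProcessed(id, 2); // was markCompressed(id, true)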
@@ -223,6 +267,7 @@ export class MediaFile {
   static getChildren(id: number) {
     return getChildrenQuery.array(id);
   }
+  static db = db;
 }

 // Create a `file` entry with a given path, date, file hash, size, and duration
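Exposing the handle as `MediaFile.db` lets callers share the same connection for one-off statements. A minimal sketch, assuming the wrapper's `prepare(...).get()` behaves like the typed queries in this file:

// Hypothetical one-off query through the shared handle:
const row = MediaFile.db.prepare<[], { n: number }>(/* SQL */ `
  select count(*) as n from media_files;
`).get();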
@@ -233,9 +278,9 @@ interface CreateFile {
   date: Date;
   hash: string;
   size: number;
-  duration?: number;
-  dimensions?: string;
-  content?: string;
+  duration: number;
+  dimensions: string;
+  contents: string;
 }

 // Set the `processed` flag true and update the metadata for a directory
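With the optional markers dropped, every `CreateFile` field must be spelled out at the call site; the defaults no longer hide in the type. A sketch (the `path` field sits above this hunk and is assumed unchanged):

const entry: CreateFile = {
  path: "/notes/todo.txt", // assumed field, not shown in this hunk
  date: new Date(),
  hash: "deadbeef",
  size: 120,
  duration: 0,    // was `duration?: number` with an implicit default of 0
  dimensions: "", // was optional
  contents: "",   // renamed from `content?: string`
};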
@@ -256,14 +301,18 @@ export interface DirConfig {
 // -- queries --

 // Get a directory ID by path, creating it if it doesn't exist
-const createDirectoryQuery = db.prepare<[{ path: string; parentId: number }]>(
+const createDirectoryQuery = db.prepare<
+  [{ path: string; parentId: number | null }],
+  { id: number }
+>(
   /* SQL */ `
     insert into media_files (
       path, parent_id, kind, timestamp, hash, size,
       duration, dimensions, contents, dirsort, processed)
     values (
       $path, $parentId, ${MediaFileKind.directory}, 0, '', 0,
-      0, '', '', '', 0);
+      0, '', '', '', 0)
+    returning id;
   `,
);
const getDirectoryIdQuery = db.prepare<[string], { id: number }>(/* SQL */ `
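The switch from `run().lastInsertRowid` to `insert ... returning id` means the statement itself yields a typed row; `getNonNull` (this codebase's helper, judging by its use throughout the patch) presumably throws when no row comes back. A minimal sketch of the difference:

// Before: an untyped rowid cast at every call site.
// const id = createDirectoryQuery.run({ ... }).lastInsertRowid as number;
// After: the row shape is declared once in the prepare<> generics.
const { id } = createDirectoryQuery.getNonNull({ path: "/demo", parentId: null });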
@@ -295,14 +344,43 @@ const createFileQuery = db.prepare<[{
     processed = case
       when media_files.hash != excluded.hash then 0
       else media_files.processed
-    end;
-`);
-const markCompressedQuery = db.prepare<[{
+    end
+  returning *;
+`).as(MediaFile);
+const setProcessedQuery = db.prepare<[{
   id: number;
   processed: number;
 }]>(/* SQL */ `
   update media_files set processed = $processed where id = $id;
 `);
+const setProcessorsQuery = db.prepare<[{
+  id: number;
+  processed: number;
+  processors: string;
+}]>(/* SQL */ `
+  update media_files set
+    processed = $processed,
+    processors = $processors
+  where id = $id;
+`);
+const setDurationQuery = db.prepare<[{
+  id: number;
+  duration: number;
+}]>(/* SQL */ `
+  update media_files set duration = $duration where id = $id;
+`);
+const setDimensionsQuery = db.prepare<[{
+  id: number;
+  dimensions: string;
+}]>(/* SQL */ `
+  update media_files set dimensions = $dimensions where id = $id;
+`);
+const setContentsQuery = db.prepare<[{
+  id: number;
+  contents: string;
+}]>(/* SQL */ `
+  update media_files set contents = $contents where id = $id;
+`);
 const getByPathQuery = db.prepare<[string]>(/* SQL */ `
   select * from media_files where path = ?;
 `).as(MediaFile);
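Splitting the update into one statement per column keeps each write narrow, so independent processors never clobber each other's fields. An illustrative sequence (all values hypothetical):

setDurationQuery.run({ id: 1, duration: 12.5 });
setDimensionsQuery.run({ id: 1, dimensions: "1280x720" });
setContentsQuery.run({ id: 1, contents: "extracted text" });
setProcessorsQuery.run({ id: 1, processed: 2, processors: "ffmpeg" }); // value format assumed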
@@ -330,7 +408,29 @@ const updateDirectoryQuery = db.prepare<[id: number]>(/* SQL */ `
 const getChildrenQuery = db.prepare<[id: number]>(/* SQL */ `
   select * from media_files where parent_id = ?;
 `).as(MediaFile);
+const getChildrenFilesRecursiveQuery = db.prepare<[dir: string]>(/* SQL */ `
+  select * from media_files
+  where path like ? || '%'
+  and kind = ${MediaFileKind.file}
+`).as(MediaFile);
+const deleteCascadeQuery = db.prepare<[{ id: number }]>(/* SQL */ `
+  with recursive items as (
+    select id, parent_id from media_files where id = $id
+    union all
+    select p.id, p.parent_id
+    from media_files p
+    join items c on p.id = c.parent_id
+    where p.parent_id is not null
+    and not exists (
+      select 1 from media_files child
+      where child.parent_id = p.id
+      and child.id <> c.id
+    )
+  )
+  delete from media_files
+  where id in (select id from items)
+`);

 import { getDb } from "#sitegen/sqlite";
-import * as path from "node:path";
+import * as path from "node:path/posix";
 import { FilePermissions } from "./FilePermissions.ts";
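`deleteCascadeQuery` deletes a row and then prunes upward: the recursive CTE adds a parent directory only when the child already marked for deletion is its sole child, so the walk stops at the first ancestor that still has other contents (or at the root, whose `parent_id` is null). A sketch of the resulting behavior:

// Illustrative: /a and /a/b contain nothing besides the file being removed.
const fileId = 42; // hypothetical id of /a/b/c.txt
deleteCascadeQuery.run({ id: fileId });
// Deletes c.txt, then /a/b (now empty), then /a (now empty); "/" survives.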
@@ -1,7 +1,7 @@
 // -- file extension rules --

 /** Extensions that must have EXIF/etc data stripped */
-const extScrubExif = new Set([
+export const extScrubExif = new Set([
   ".jpg",
   ".jpeg",
   ".png",
@@ -10,7 +10,7 @@ const extScrubExif = new Set([
   ".m4a",
 ]);
 /** Extensions that rendered syntax-highlighted code */
-const extsCode = new Map<string, highlight.Language>(Object.entries({
+export const extsCode = new Map<string, highlight.Language>(Object.entries({
   ".json": "json",
   ".toml": "toml",
   ".ts": "ts",
@@ -36,7 +36,7 @@ const extsCode = new Map<string, highlight.Language>(Object.entries({
   ".diff": "diff",
 }));
 /** These files show an audio embed. */
-const extsAudio = new Set([
+export const extsAudio = new Set([
   ".mp3",
   ".flac",
   ".wav",
@@ -44,7 +44,7 @@ const extsAudio = new Set([
   ".m4a",
 ]);
 /** These files show a video embed. */
-const extsVideo = new Set([
+export const extsVideo = new Set([
   ".mp4",
   ".mkv",
   ".webm",
@@ -52,7 +52,7 @@ const extsVideo = new Set([
   ".mov",
 ]);
 /** These files show an image embed */
-const extsImage = new Set([
+export const extsImage = new Set([
   ".jpg",
   ".jpeg",
   ".png",
@@ -85,7 +85,7 @@ export const extsArchive = new Set([
  * Formats which are already compression formats, meaning a pass
  * through zstd would offer little to negative benefits
  */
-export const extsHaveCompression = new Set([
+export const extsPreCompressed = new Set([
   ...extsAudio,
   ...extsVideo,
   ...extsImage,
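Exporting the extension sets (and renaming `extsHaveCompression` to the clearer `extsPreCompressed`) lets other modules reuse the classification. A downstream sketch; the module path is an assumption, since this view does not name the file:

import * as path from "node:path";
// module path assumed; the diff view does not show the file name:
import { extsPreCompressed } from "./file_extensions.ts";

function shouldZstdCompress(filePath: string): boolean {
  // skip formats that are already compressed (audio/video/images, ...)
  return !extsPreCompressed.has(path.extname(filePath).toLowerCase());
}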
src/pages/resume.css (new file, 47 lines)
@@ -0,0 +1,47 @@
+body,html {
+  overflow: hidden;
+}
+h1 {
+  color: #f09;
+  margin-bottom: 0;
+}
+.job {
+  padding: 18px;
+  margin: 1em -18px;
+  border: 1px solid black;
+}
+.job *, footer * {
+  margin: 0;
+  padding: 0;
+}
+.job ul {
+  margin-left: 1em;
+}
+.job li {
+  line-height: 1.5em;
+}
+.job header, footer {
+  display: grid;
+  grid-template-columns: auto max-content;
+  grid-template-rows: 1fr 1fr;
+}
+footer {
+  margin-top: 1.5em;
+}
+footer h2 {
+  font-size: 1em;
+  margin-bottom: 0.5em;
+}
+
+.job header > em, footer > em {
+  margin-top: 2px;
+  font-size: 1.25em;
+}
+
+header h2, header em, footer h2, footer em {
+  display: inline-block;
+}
+header em, footer em {
+  margin-left: 16px!important;
+  text-align: right;
+}
src/pages/resume.marko (new file, 50 lines)
@@ -0,0 +1,50 @@
+import "./resume.css";
+
+export const meta = { title: 'clover\'s resume' };
+
+<main>
+  <h1>clover's resume</h1>
+  <div>last updated: 2025</>
+
+  <article.job>
+    <header>
+      <h2>web/backend engineer</h2>
+      <em>2025-now</em>
+    </>
+    <ul>
+      <i>(more details added as time goes on...)</i>
+    </ul>
+  </>
+
+  <article.job>
+    <header>
+      <h2>runtime/systems engineer</h2>
+      <em>2023-2025</em>
+      <p>developer tools company</p>
+    </>
+    <ul>
+      <li>hardcore engineering, elegant solutions</>
+      <li>platform compatibility & stability</>
+      <li>debugging and profiling across platforms</>
+    </ul>
+  </>
+
+  <article.job>
+    <header>
+      <h2>technician</h2>
+      <em>2023; part time</em>
+      <p>automotive maintenance company</p>
+    </>
+    <ul>
+      <li>pressed buttons on a computer</>
+    </ul>
+  </>
+
+  <footer>
+    <h2>education</h2> <em>2004-now</em>
+    <p>
+      my life on earth has taught me more than i expected. i <br/>
+      continue to learn new things daily, as if it was magic.
+    </p>
+  </footer>
+</main>