commit 399ccec226 (parent 92ddecc37e)

    incremental generator

12 changed files with 435 additions and 337 deletions
@@ -38,6 +38,7 @@ export async function bundleClientJavaScript(
     plugins,
     splitting: true,
     write: false,
+    metafile: true,
   });
   if (bundle.errors.length || bundle.warnings.length) {
     throw new AggregateError(
@@ -45,29 +46,34 @@ export async function bundleClientJavaScript(
       "JS bundle failed",
     );
   }
-  incr.invalidate("bundle-script");
   const publicScriptRoutes = extraPublicScripts.map((file) =>
     path.basename(file).replace(/\.client\.[tj]sx?/, "")
   );
+  const { metafile } = bundle;
+  console.log(metafile);
   const promises: Promise<void>[] = [];
   // TODO: add a shared build hash to entrypoints, derived from all the chunk hashes.
   for (const file of bundle.outputFiles) {
+    const { text } = file;
     let route = file.path.replace(/^.*!/, "").replaceAll("\\", "/");
-    const text = file.text;
+    const { inputs } = UNWRAP(metafile.outputs["out!" + route]);
+    const sources = Object.keys(inputs);
+
     // Register non-chunks as script entries.
     const chunk = route.startsWith("/js/c.");
     if (!chunk) {
       route = route.replace(".client.js", ".js");
       incr.put({
-        srcId: "bundle-script",
+        sources,
         type: "script",
         key: route.slice("/js/".length, -".js".length),
        value: text,
       });
     }
+    // Register chunks and public scripts as assets.
     if (chunk || publicScriptRoutes.includes(route)) {
       promises.push(incr.putAsset({
-        srcId: "bundle-script",
+        sources,
         key: route,
         body: text,
       }));
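The `sources` arrays above come straight out of esbuild's metafile. A minimal, self-contained sketch of that mapping, with a hypothetical entry point and outdir (not paths from this commit):

```ts
import * as esbuild from "esbuild";

const bundle = await esbuild.build({
  entryPoints: ["src/pages/index.client.ts"], // hypothetical entry point
  bundle: true,
  splitting: true,
  format: "esm", // code splitting requires esm output
  outdir: "out",
  write: false,
  metafile: true,
});

// metafile.outputs maps each emitted file to the inputs merged into it;
// the commit feeds Object.keys(output.inputs) to incr.put as `sources`.
for (const [outPath, output] of Object.entries(bundle.metafile.outputs)) {
  console.log(outPath, "<-", Object.keys(output.inputs));
}
```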
@@ -34,11 +34,19 @@ export function preprocess(css: string, theme: Theme): string {
   );
 }
 
+export interface Output {
+  text: string;
+  sources: string[];
+}
+
 export async function bundleCssFiles(
   cssImports: string[],
   theme: Theme,
   dev: boolean = false,
-): Promise<string> {
+): Promise<Output> {
+  cssImports = cssImports.map((file) =>
+    path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
+  );
   const plugin = {
     name: "clover",
     setup(b) {
@@ -68,13 +76,14 @@ export async function bundleCssFiles(
   const build = await esbuild.build({
     bundle: true,
     entryPoints: ["$input$"],
-    write: false,
-    external: ["*.woff2"],
-    target: ["ie11"],
-    plugins: [plugin],
+    external: ["*.woff2", "*.ttf", "*.png", "*.jpeg"],
+    metafile: true,
     minify: !dev,
+    plugins: [plugin],
+    target: ["ie11"],
+    write: false,
   });
-  const { errors, warnings, outputFiles } = build;
+  const { errors, warnings, outputFiles, metafile } = build;
   if (errors.length > 0) {
     throw new AggregateError(errors, "CSS Build Failed");
   }
@@ -82,8 +91,15 @@ export async function bundleCssFiles(
     throw new AggregateError(warnings, "CSS Build Failed");
   }
   if (outputFiles.length > 1) throw new Error("Too many output files");
-  return outputFiles[0].text;
+  return {
+    text: outputFiles[0].text,
+    sources: Object.keys(metafile.outputs["$input$.css"].inputs)
+      .filter((x) => x !== "input:."),
+  };
 }
 
 import * as esbuild from "esbuild";
 import * as fs from "#sitegen/fs";
+import * as hot from "./hot.ts";
+import * as path from "node:path";
+import { Incremental } from "./incremental.ts";
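Why filter out `"input:."`? The bundle is driven through the synthetic `$input$` entry point (see `entryPoints: ["$input$"]` above), so the metafile lists that virtual module alongside the real files; only the real files are useful as invalidation sources. A hedged usage sketch of the new return shape, assuming `theme` and `incr` are in scope and with a made-up import list and key:

```ts
const { text, sources } = await bundleCssFiles(
  ["src/global.css"], // hypothetical import list
  theme,              // a css.Theme provided by the caller
  true,               // dev mode: skip minification
);
// `text` is inlined into the page shell; `sources` drives invalidation.
await incr.putAsset({ sources, key: "/css/global", body: text });
```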
@@ -21,7 +21,8 @@ async function sitegen(status: Spinner) {
 
   let root = path.resolve(import.meta.dirname, "../src");
   const join = (...sub: string[]) => path.join(root, ...sub);
-  const incr = new Incremental();
+  const incr = Incremental.fromDisk();
+  await incr.statAllFiles();
 
   // Sitegen reviews every defined section for resources to process
   const sections: sg.Section[] =
@@ -68,20 +69,24 @@ async function sitegen(status: Spinner) {
   ];
   for (const { dir, list, prefix, exclude = [], ext = false } of kinds) {
     const items = fs.readDirRecOptionalSync(dir);
-    item: for (const item of items) {
-      if (item.isDirectory()) continue;
+    item: for (const subPath of items) {
+      const file = path.join(dir, subPath);
+      const stat = fs.statSync(file);
+      if (stat.isDirectory()) continue;
       for (const e of exclude) {
-        if (item.name.endsWith(e)) continue item;
+        if (subPath.endsWith(e)) continue item;
       }
-      const file = path.relative(dir, item.parentPath + "/" + item.name);
       const trim = ext
-        ? file
-        : file.slice(0, -path.extname(file).length).replaceAll(".", "/");
+        ? subPath
+        : subPath.slice(0, -path.extname(subPath).length).replaceAll(
+          ".",
+          "/",
+        );
       let id = prefix + trim.replaceAll("\\", "/");
       if (prefix === "/" && id.endsWith("/index")) {
         id = id.slice(0, -"/index".length) || "/";
       }
-      list.push({ id, file: path.join(item.parentPath, item.name) });
+      list.push({ id, file: file });
     }
   }
 }
@@ -92,8 +97,8 @@ async function sitegen(status: Spinner) {
 
   // -- server side render --
   status.text = "Building";
-  const cssOnce = new OnceMap<string>();
-  const cssQueue = new Queue<[string, string[], css.Theme], string>({
+  const cssOnce = new OnceMap<css.Output>();
+  const cssQueue = new Queue<[string, string[], css.Theme], css.Output>({
     name: "Bundle",
     fn: ([, files, theme]) => css.bundleCssFiles(files, theme),
     passive: true,
@@ -103,7 +108,7 @@ async function sitegen(status: Spinner) {
   interface RenderResult {
     body: string;
     head: string;
-    inlineCss: string;
+    css: css.Output;
     scriptFiles: string[];
     item: FileItem;
   }
@@ -139,11 +144,16 @@ async function sitegen(status: Spinner) {
       () => cssQueue.add([item.id, cssImports, theme]),
     );
     // -- html --
-    const bodyPromise = await ssr.ssrAsync(<Page />, {
+    let page = <Page />;
+    if (layout?.default) {
+      const Layout = layout.default;
+      page = <Layout>{page}</Layout>;
+    }
+    const bodyPromise = await ssr.ssrAsync(page, {
       sitegen: sg.initRender(),
     });
 
-    const [{ text, addon }, inlineCss, renderedMeta] = await Promise.all([
+    const [{ text, addon }, cssBundle, renderedMeta] = await Promise.all([
      bodyPromise,
      cssPromise,
      renderedMetaPromise,
@@ -160,13 +170,14 @@ async function sitegen(status: Spinner) {
     renderResults.push({
       body: text,
       head: renderedMeta,
-      inlineCss,
+      css: cssBundle,
       scriptFiles: Array.from(addon.sitegen.scripts),
       item: item,
     });
   }
   // This is done in two passes so that a page that throws during evaluation
   // will report "Load Render Module" instead of "Render Static Page".
+  const neededPages = pages.filter((page) => incr.needsBuild("asset", page.id));
   const spinnerFormat = status.format;
   status.format = () => "";
   const moduleLoadQueue = new Queue({
@@ -175,7 +186,7 @@ async function sitegen(status: Spinner) {
     getItemText,
     maxJobs: 1,
   });
-  moduleLoadQueue.addMany(pages);
+  moduleLoadQueue.addMany(neededPages);
   await moduleLoadQueue.done({ method: "stop" });
   const pageQueue = new Queue({
     name: "Render Static Page",
@@ -183,7 +194,7 @@ async function sitegen(status: Spinner) {
     getItemText,
     maxJobs: 2,
   });
-  pageQueue.addMany(pages);
+  pageQueue.addMany(neededPages);
   await pageQueue.done({ method: "stop" });
   status.format = spinnerFormat;
 
@@ -207,7 +218,7 @@ async function sitegen(status: Spinner) {
   async function doStaticFile(item: FileItem) {
     const body = await fs.readFile(item.file);
     await incr.putAsset({
-      srcId: "static:" + item.file,
+      sources: [item.file],
       key: item.id,
       body,
     });
@@ -219,7 +230,9 @@ async function sitegen(status: Spinner) {
     maxJobs: 16,
   });
   status.format = () => "";
-  staticQueue.addMany(staticFiles);
+  staticQueue.addMany(
+    staticFiles.filter((file) => incr.needsBuild("asset", file.id)),
+  );
   await staticQueue.done({ method: "stop" });
   status.format = spinnerFormat;
 
@@ -227,11 +240,13 @@ async function sitegen(status: Spinner) {
   status.text = `Concat ${renderResults.length} Pages`;
   await Promise.all(
     renderResults.map(
-      async ({ item: page, body, head, inlineCss, scriptFiles }) => {
+      async (
+        { item: page, body, head, css, scriptFiles },
+      ) => {
        const doc = wrapDocument({
          body,
          head,
-          inlineCss,
+          inlineCss: css.text,
          scripts: scriptFiles.map(
            (id) =>
              UNWRAP(
@@ -242,7 +257,7 @@ async function sitegen(status: Spinner) {
         ).map((x) => `{${x}}`).join("\n"),
       });
       await incr.putAsset({
-        srcId: "page:" + page.file,
+        sources: [page.file, ...css.sources],
         key: page.id,
         body: doc,
         headers: {
@@ -262,7 +277,7 @@ async function sitegen(status: Spinner) {
   status.format = spinnerFormat;
   status.text = `Incremental Flush`;
   incr.flush();
-  incr.serializeToDisk(); // Allows picking up this state again
+  incr.toDisk(); // Allows picking up this state again
   return { elapsed: (performance.now() - startTime) / 1000 };
 }
 
@@ -281,9 +296,11 @@ function wrapDocument({
   inlineCss: string;
   scripts: string;
 }) {
-  return `<!doctype html><head>${head}${
+  return `<!doctype html><html lang=en><head>${head}${
     inlineCss ? `<style>${inlineCss}</style>` : ""
-  }</head><body>${body}${scripts ? `<script>${scripts}</script>` : ""}</body>`;
+  }</head><body>${body}${
+    scripts ? `<script>${scripts}</script>` : ""
+  }</body></html>`;
 }
 
 import { OnceMap, Queue } from "./queue.ts";
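Consolidated, the new document shell reads as below; this is a self-contained restatement of `wrapDocument` after the change (the sample values are hypothetical):

```ts
function wrapDocument(
  { body, head, inlineCss, scripts }: {
    body: string;
    head: string;
    inlineCss: string;
    scripts: string;
  },
) {
  // The commit adds <html lang=en> and a matching </html>,
  // closing the previously unbalanced document.
  return `<!doctype html><html lang=en><head>${head}${
    inlineCss ? `<style>${inlineCss}</style>` : ""
  }</head><body>${body}${
    scripts ? `<script>${scripts}</script>` : ""
  }</body></html>`;
}

console.log(wrapDocument({
  body: "<h1>hi</h1>",
  head: "<title>example</title>",
  inlineCss: "h1{color:pink}",
  scripts: "",
}));
```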
@@ -34,8 +34,6 @@ export interface FileStat {
   cssImportsRecursive: string[] | null;
   lastModified: number;
   imports: string[];
-  /* Used by 'incremental.ts' */
-  srcIds: string[];
 }
 let fsGraph = new Map<string, FileStat>();
 export function setFsGraph(g: Map<string, FileStat>) {
@@ -60,14 +58,6 @@ Module.prototype._compile = function (
   filename: string,
   format: "module" | "commonjs",
 ) {
-  fs.writeMkdirSync(
-    ".clover/debug-transpilation/" +
-      path.relative(projectRoot, filename).replaceAll("\\", "/").replaceAll(
-        "../",
-        "_/",
-      ).replaceAll("/", "."),
-    content,
-  );
   const result = ModulePrototypeUnderscoreCompile.call(
     this,
     content,
@@ -96,7 +86,6 @@ Module.prototype._compile = function (
       : null,
     imports,
     lastModified: stat.mtimeMs,
-    srcIds: [],
   });
 }
 return result;
@@ -1,54 +1,50 @@
 // `Incremental` contains multiple maps for the different parts of a site
 // build, and tracks reused items across builds. It also handles emitting and
 // updating the built site. This structure is self contained and serializable.
-//
-// Tracking is simple: Files map to one or more 'source IDs', which map to one
-// or more 'artifact'. This two layer approach allows many files (say a page +
-// all its imports) to map to the build of a page, which produces an HTML file
-// plus a list of scripts.
+type SourceId = string; // relative to project root, e.g. 'src/global.css'
+type ArtifactId = string; // `${ArtifactType}#${string}`
+type Sha1Id = string; // Sha1 hex string
 
 interface ArtifactMap {
   asset: Asset;
   script: string;
 }
-type AllArtifactMaps = {
-  [K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
-};
 type ArtifactType = keyof ArtifactMap;
 
 interface Asset {
   buffer: Buffer;
   headers: Record<string, string | undefined>;
   hash: string;
 }
 
 export interface PutBase {
-  srcTag?: string; // deprecated
-  srcId: string;
+  sources: SourceId[];
   key: string;
 }
 
 export interface Put<T extends ArtifactType> extends PutBase {
   type: T;
   value: ArtifactMap[T];
 }
 
-export interface Output {
-  type: ArtifactType;
-  key: string;
+export interface Invalidations {
+  lastModified: number;
+  outputs: Set<ArtifactId>;
+  files: Set<SourceId>;
 }
 
-const gzip = util.promisify(zlib.gzip);
-const zstd = util.promisify(zlib.zstdCompress);
-
 export class Incremental {
   /** The generated artifacts */
-  out: AllArtifactMaps = {
+  out: {
+    [K in keyof ArtifactMap]: Map<string, ArtifactMap[K]>;
+  } = {
     asset: new Map(),
     script: new Map(),
   };
+  /** Tracking filesystem entries to `srcId` */
+  invals = new Map<SourceId, Invalidations>();
+  /** Tracking output keys to files */
+  sources = new Map<ArtifactId, SourceId[]>();
+
   /** Compressed resources */
-  compress = new Map<string, Compressed>();
+  compress = new Map<Sha1Id, Compressed>();
   compressQueue = new Queue<CompressJob, void>({
     name: "Compress",
     maxJobs: 5,
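A minimal sketch of the bookkeeping these types set up, with hypothetical IDs: every artifact records which sources built it, and every source records which artifacts to throw away when it changes.

```ts
type SourceId = string;   // e.g. "src/global.css", relative to the project root
type ArtifactId = string; // e.g. "asset#/file", i.e. `${type}#${key}`

interface Invalidations {
  lastModified: number;
  outputs: Set<ArtifactId>; // artifacts deleted when this file changes
  files: Set<SourceId>;     // files that import this one (reverse import edges)
}

const sources = new Map<ArtifactId, SourceId[]>();
const invals = new Map<SourceId, Invalidations>();

// One page artifact built from two files (example values, not from the commit):
sources.set("asset#/file", ["src/pages/file.marko", "src/global.css"]);
invals.set("src/global.css", {
  lastModified: Date.now(),
  outputs: new Set(["asset#/file"]),
  files: new Set(), // filled in by #followImports for importers
});
```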
@@ -56,59 +52,114 @@ export class Incremental {
     passive: true,
     getItemText: (job) => `${job.algo.toUpperCase()} ${job.label}`,
   });
-  /** Tracking filesystem entries to `srcId` */
-  files = new Map<string, hot.FileStat>();
-  srcIds = new Map<string, Output[]>();
 
-  static fromSerialized() {
-  }
-  serialize() {
-    const writer = new BufferWriter();
-
-    const asset = Array.from(
-      this.out.asset,
-      ([key, { buffer, hash, headers }]) => {
-        const raw = writer.write(buffer, hash);
-        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
-        const gzip = gzipBuf ? writer.write(gzipBuf, hash) : null;
-        const zstd = zstdBuf ? writer.write(zstdBuf, hash) : null;
-        return [key, {
-          raw,
-          gzip,
-          zstd,
-          hash,
-          headers,
-        }];
-      },
-    );
-    const script = Array.from(this.out.script);
-
-    const meta = Buffer.from(
-      JSON.stringify({
-        asset,
-        script,
-      }),
-      "utf-8",
-    );
-
-    const lengthBuffer = Buffer.alloc(4);
-    lengthBuffer.writeUInt32LE(meta.byteLength, 0);
-
-    return Buffer.concat([meta, lengthBuffer, ...writer.buffers]);
-  }
-
-  serializeToDisk(file = ".clover/incr.state") {
-    const buffer = this.serialize();
-    fs.writeFileSync(file, buffer);
+  /** Invalidation deletes build artifacts so the check is trivial. */
+  needsBuild(type: ArtifactType, key: string) {
+    return !this.out[type].has(key);
   }
 
+  /*
+   * Put built artifacts into the incremental cache. The entry points
+   * used to build this must be provided. 'Incremental' will trace JS
+   * imports and file modification times tracked by 'hot.ts'.
+   */
   put<T extends ArtifactType>({
-    srcId,
+    sources,
     type,
     key,
     value,
   }: Put<T>) {
     this.out[type].set(key, value);
+
+    // Update sources information
+    ASSERT(sources.length > 0, "Missing sources for " + type + " " + key);
+    sources = sources.map((src) => path.normalize(src));
+    const fullKey = `${type}#${key}`;
+    const prevSources = this.sources.get(fullKey);
+    const newSources = new Set(
+      sources.map((file) =>
+        path.isAbsolute(file) ? path.relative(hot.projectRoot, file) : file
+      ),
+    );
+    this.sources.set(fullKey, [...newSources]);
+    for (const source of prevSources ?? []) {
+      if (sources.includes(source)) {
+        newSources.delete(source);
+        continue;
+      }
+      const invals = UNWRAP(this.invals.get(source));
+      ASSERT(invals.outputs.has(fullKey));
+      invals.outputs.delete(fullKey);
+    }
+    // Use reflection from the plugin system to get imports.
+    for (const source of newSources) {
+      const invals = this.#getOrInitInvals(source);
+      invals.outputs.add(fullKey);
+      this.#followImports(source);
+    }
+  }
+
+  // TODO: this doesn't remove stuff when it disappears
+  #getOrInitInvals(source: string) {
+    let invals = this.invals.get(source);
+    if (!invals) {
+      const g = hot.getFsGraph().get(source);
+      const lastModified = g?.lastModified ??
+        fs.statSync(path.resolve(hot.projectRoot, source)).mtimeMs;
+      this.invals.set(
+        source,
+        invals = {
+          lastModified,
+          files: new Set(),
+          outputs: new Set(),
+        },
+      );
+    }
+    return invals;
+  }
+
+  #followImports(file: string) {
+    const graph = hot.getFsGraph();
+    const stat = graph.get(file);
+    if (!stat) return;
+    for (const i of stat.imports) {
+      const invals = this.#getOrInitInvals(i);
+      invals.files.add(file);
+      this.#followImports(i);
+    }
+  }
+
+  async statAllFiles() {
+    for (const file of this.invals.keys()) {
+      const mtime = fs.statSync(file).mtimeMs;
+      this.updateStat(file, mtime);
+    }
+  }
+
+  updateStat(fileKey: string, newLastModified: number) {
+    const stat = this.invals.get(fileKey);
+    ASSERT(stat, "Updated stat on untracked file " + fileKey);
+    if (stat.lastModified < newLastModified) {
+      // Invalidate
+      console.log(fileKey + " updated");
+      const invalidQueue = [fileKey];
+      let currentInvalid;
+      while (currentInvalid = invalidQueue.pop()) {
+        const invalidations = this.invals.get(currentInvalid);
+        ASSERT(
+          invalidations,
+          "No reason to track file '" + currentInvalid +
+            "' if it has no invalidations",
+        );
+        const { files, outputs } = invalidations;
+        for (const out of outputs) {
+          const [type, artifactKey] = out.split("#", 2);
+          this.out[type as ArtifactType].delete(artifactKey);
+        }
+        invalidQueue.push(...files);
+      }
+    }
+    stat.lastModified = newLastModified;
   }
 
   async putAsset(info: PutAsset) {
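The core of `updateStat`, pulled out as a standalone sketch: walk the reverse-dependency graph from the changed file, deleting artifacts as you go. Deletion is the whole invalidation story, which is why `needsBuild` is just a `Map` lookup. Types are simplified here, and the real code asserts where this sketch skips.

```ts
function invalidate(
  changed: string,
  invals: Map<string, { files: Set<string>; outputs: Set<string> }>,
  out: Record<string, Map<string, unknown>>,
) {
  const queue = [changed];
  let current: string | undefined;
  while ((current = queue.pop()) !== undefined) {
    const entry = invals.get(current);
    if (!entry) continue; // the real code ASSERTs instead of skipping
    for (const artifact of entry.outputs) {
      // ArtifactId is `${type}#${key}`; deleting the entry flips needsBuild.
      const [type, key] = artifact.split("#", 2);
      out[type]?.delete(key);
    }
    // Everything that imports the changed file is now suspect too.
    queue.push(...entry.files);
  }
}
```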
@@ -161,7 +212,99 @@ export class Incremental {
     entry![algo] = out;
   }
 
-  invalidate(srcId: string) {
+  serialize() {
+    const writer = new BufferWriter();
+
+    const asset = Array.from(
+      this.out.asset,
+      ([key, { buffer, hash, headers }]) => {
+        const raw = writer.write(buffer, hash);
+        const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
+        const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
+        const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
+        return [key, {
+          raw,
+          gzip,
+          zstd,
+          hash,
+          headers: headers as Record<string, string>,
+        }] satisfies SerializedMeta["asset"][0];
+      },
+    );
+    const script = Array.from(this.out.script);
+    const invals = Array.from(this.invals, ([key, value]) => {
+      const { lastModified, files, outputs } = value;
+      return [key, {
+        m: lastModified,
+        f: [...files],
+        o: [...outputs],
+      }] satisfies SerializedMeta["invals"][0];
+    });
+    const sources = Array.from(this.sources, ([key, value]) => {
+      return [key, ...value] as [string, ...string[]];
+    });
+    const json = {
+      asset,
+      script,
+      invals,
+      sources,
+    } satisfies SerializedMeta;
+    const meta = Buffer.from(JSON.stringify(json), "utf-8");
+
+    const lengthBuffer = Buffer.alloc(4);
+    lengthBuffer.writeUInt32LE(meta.byteLength, 0);
+
+    return Buffer.concat([lengthBuffer, meta, ...writer.buffers]);
+  }
+
+  static fromSerialized(buffer: Buffer): Incremental {
+    const metaLength = buffer.readUint32LE(0);
+    const meta: SerializedMeta = JSON.parse(
+      buffer.subarray(4, 4 + metaLength).toString("utf8"),
+    );
+    const view = ([start, end]: View) =>
+      buffer.subarray(4 + metaLength + start, 4 + metaLength + end);
+
+    const incr = new Incremental();
+    incr.out.asset = new Map<string, Asset>(meta.asset.map(([key, value]) => {
+      const { hash, raw, gzip, zstd, headers } = value;
+      if ((gzip || zstd) && !incr.compress.has(hash)) {
+        incr.compress.set(hash, {
+          gzip: gzip ? view(gzip) : undefined,
+          zstd: zstd ? view(zstd) : undefined,
+        });
+      }
+      return [key, {
+        buffer: view(raw),
+        headers: headers,
+        hash: hash,
+      }];
+    }));
+    incr.out.script = new Map(meta.script);
+    incr.invals = new Map(meta.invals.map(([key, { m, f, o }]) => {
+      return [key, {
+        lastModified: m,
+        files: new Set(f),
+        outputs: new Set(o),
+      }];
+    }));
+    incr.sources = new Map(meta.sources.map(([key, ...value]) => [key, value]));
+    return incr;
+  }
+
+  toDisk(file = ".clover/incr.state") {
+    const buffer = this.serialize();
+    fs.writeFileSync(file, buffer);
+  }
+
+  static fromDisk(file = ".clover/incr.state"): Incremental {
+    try {
+      const buffer = fs.readFileSync(file);
+      return Incremental.fromSerialized(buffer);
+    } catch (err: any) {
+      if (err.code === "ENOENT") return new Incremental();
+      throw err;
+    }
   }
 
   async wait() {
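The new serialization format, sketched end to end: a little-endian u32 meta length, the JSON metadata, then concatenated asset bytes, with each `View` being a `[start, end)` pair relative to the end of the JSON. (The old `serialize` wrote the length *after* the meta and paired it with an empty `fromSerialized` stub; the rewrite puts the length first so the reader can find the JSON.) Sample contents below are hypothetical:

```ts
import { Buffer } from "node:buffer";

// Writing, as the new serialize() does:
const meta = Buffer.from(JSON.stringify({ example: true }), "utf-8"); // hypothetical meta
const blobs = [Buffer.from("asset bytes")]; // hypothetical asset data
const lengthBuffer = Buffer.alloc(4);
lengthBuffer.writeUInt32LE(meta.byteLength, 0);
const state = Buffer.concat([lengthBuffer, meta, ...blobs]);

// Reading, as fromSerialized() does:
const metaLength = state.readUInt32LE(0);
const parsed = JSON.parse(state.subarray(4, 4 + metaLength).toString("utf8"));
const view = ([start, end]: [number, number]) =>
  state.subarray(4 + metaLength + start, 4 + metaLength + end);
console.log(parsed, view([0, 5]).toString()); // { example: true } "asset"
```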
@@ -171,18 +314,14 @@ export class Incremental {
   async flush() {
     ASSERT(!this.compressQueue.active);
     const writer = new BufferWriter();
+    // TODO: ensure all assets are actually compressed and not fake lying.
     const asset = Object.fromEntries(
       Array.from(this.out.asset, ([key, { buffer, hash, headers }]) => {
         const raw = writer.write(buffer, hash);
         const { gzip: gzipBuf, zstd: zstdBuf } = this.compress.get(hash) ?? {};
         const gzip = gzipBuf ? writer.write(gzipBuf, hash + ".gz") : null;
         const zstd = zstdBuf ? writer.write(zstdBuf, hash + ".zstd") : null;
-        return [key, {
-          raw,
-          gzip,
-          zstd,
-          headers,
-        }];
+        return [key, { raw, gzip, zstd, headers }];
       }),
     );
     await Promise.all([
@@ -230,10 +369,10 @@ class BufferWriter {
 
 export type View = [start: number, end: number];
 
-// Alongside this type is a byte buffer, containing all the assets.
 export interface BuiltAssetMap {
   [route: string]: BuiltAsset;
 }
 
 export interface BuiltAsset {
   raw: View;
   gzip: View;
@@ -249,13 +388,24 @@ export interface SerializedMeta {
     hash: string;
     headers: Record<string, string>;
   }]>;
-  script: [key: string, value: string][];
+  script: Array<[key: string, value: string]>;
+  invals: Array<[key: string, {
+    /** Modified */
+    m: number;
+    f: SourceId[];
+    o: ArtifactId[];
+  }]>;
+  sources: Array<[string, ...string[]]>;
 }
 
+const gzip = util.promisify(zlib.gzip);
+const zstd = util.promisify(zlib.zstdCompress);
+
 import * as fs from "#sitegen/fs";
 import * as zlib from "node:zlib";
 import * as util from "node:util";
 import { Queue } from "./queue.ts";
 import * as hot from "./hot.ts";
 import * as mime from "#sitegen/mime";
+import * as path from "node:path";
 import { Buffer } from "node:buffer";
@@ -34,7 +34,7 @@ export function writeMkdirSync(file: string, contents: Buffer | string) {
 
 export function readDirRecOptionalSync(dir: string) {
   try {
-    return readdirSync(dir, { recursive: true, withFileTypes: true });
+    return readdirSync(dir, { recursive: true, encoding: "utf8" });
   } catch (err: any) {
     if (err.code === "ENOENT") return [];
     throw err;
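The swap from `withFileTypes: true` to `encoding: "utf8"` changes the element type of the recursive listing from `Dirent` objects to plain relative path strings, which is what the new walker in the sitegen scan joins back onto `dir`. A small sketch of the consuming pattern, with a hypothetical directory name:

```ts
import { readdirSync, statSync } from "node:fs";
import * as path from "node:path";

const dir = "src"; // hypothetical root
// Node >= 20: with `recursive: true`, entries are paths relative to `dir`.
for (const subPath of readdirSync(dir, { recursive: true, encoding: "utf8" })) {
  const file = path.join(dir, subPath);
  if (statSync(file).isDirectory()) continue;
  console.log(file);
}
```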
@@ -33,6 +33,11 @@ Included is `src`, which contains `paperclover.net`. Website highlights:
 
 ## Development
 
+minimum system requirements:
+- a cpu with at least 1 core.
+- random access memory.
+- windows 7 or later, macos, or linux operating system.
+
 ```
 npm install
src/q+a/components/QuestionForm.marko (new file, +13)
@@ -0,0 +1,13 @@
+<form action="/q+a" method="POST">
+  <textarea
+    name="text"
+    placeholder="ask clover a question..."
+    required
+    minlength="1"
+    maxlength="10000"
+  />
+  <div aria-hidden class="title">ask a question</div>
+  <button type="submit">send</button>
+  <div class="disabled-button">send</div>
+</form>
@@ -3,7 +3,6 @@ export * as layout from "../layout.tsx";
 export interface Input {
   admin?: boolean;
 }
 
 export const meta: Metadata = {
   title: "paper clover q+a",
   description: "ask clover a question",
@@ -12,24 +11,16 @@ export const meta: Metadata = {
 <const/{ admin = false } = input />
 <const/questions = [...Question.getAll()] />
 
-<form action="/q+a" method="POST">
-  <textarea
-    name="text"
-    placeholder="ask clover a question..."
-    required
-    minlength="1"
-    maxlength="10000"
-  />
-  <div aria-hidden class="title">ask a question</div>
-  <button type="submit">send</button>
-  <div class="disabled-button">send</div>
-</form>
+<if=!admin>
+  <QuestionForm />
+</>
 <for|q| of=questions>
-  <QuestionRender question=q admin=admin />;
+  <QuestionRender question=q admin=admin />
 </>
 <footer>
   fun fact: clover has answered ${questions.length} questions
 </footer>
 
 import { Question } from "@/q+a/models/Question.ts";
+import { QuestionForm } from "@/q+a/components/QuestionForm.marko";
 import QuestionRender from '@/q+a/components/Question.marko';
@@ -1,59 +1,6 @@
-// __esModule
-true
-
-// getEnd
-function getEnd(file, node) {
-  // Restore if merged: https://github.com/babel/babel/pull/16849
-  // if (node.end != null) {
-  //   return node.end;
-  // }
-
-  if (node.loc) {
-    return locToIndex(file, node.loc.end);
-  }
-
-  return null;
-}
-
-// getLoc
-function getLoc(file, index) {
-  return findLoc(getLineIndexes(file), 0, index);
-}
-
-// getLocRange
-function getLocRange(file, start, end) {
-  const lineIndexes = getLineIndexes(file);
-  const startLoc = findLoc(lineIndexes, 0, start);
-
-  if (startLoc) {
-    const endLoc =
-      start === end ? startLoc : findLoc(lineIndexes, startLoc.line - 1, end);
-
-    return {
-      start: startLoc,
-      end: endLoc
-    };
-  }
-}
-
-// getStart
-function getStart(file, node) {
-  // Restore if merged: https://github.com/babel/babel/pull/16849
-  // if (node.start != null) {
-  //   return node.start;
-  // }
-
-  if (node.loc) {
-    return locToIndex(file, node.loc.start);
-  }
-
-  return null;
-}
-
-// withLoc
-function withLoc(file, node, start, end) {
-  node.loc = getLocRange(file, start, end);
-  node.start = start;
-  node.end = end;
-  return node;
-}
+User-agent: *
+Allow: /
+
+# Restrict access to media files to ensure bots do not unintentionally
+# break their license agreement (https://paperclover.net/license)
+Disallow: /file
@@ -1,115 +1,43 @@
-// __esModule
-true
-
-// parseArgs
-function parseArgs(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(file, `_(${str})`, sourceStart, sourceEnd, 2);
-
-  if (parsed.type === "CallExpression") {
-    return parsed.arguments;
-  }
-
-  return [ensureParseError(file, parsed, sourceStart, sourceEnd)];
-}
-
-// parseExpression
-function parseExpression(
-  file,
-  str,
-  sourceStart,
-  sourceEnd,
-  sourceOffset)
-{
-  return tryParse(file, true, str, sourceStart, sourceEnd, sourceOffset);
-}
-
-// parseParams
-function parseParams(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(
-    file,
-    `(${str})=>{}`,
-    sourceStart,
-    sourceEnd,
-    1
-  );
-
-  if (parsed.type === "ArrowFunctionExpression") {
-    return parsed.params;
-  }
-
-  return [ensureParseError(file, parsed, sourceStart, sourceEnd)];
-}
-
-// parseStatements
-function parseStatements(
-  file,
-  str,
-  sourceStart,
-  sourceEnd,
-  sourceOffset)
-{
-  return tryParse(file, false, str, sourceStart, sourceEnd, sourceOffset);
-}
-
-// parseTemplateLiteral
-function parseTemplateLiteral(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(
-    file,
-    "`" + str + "`",
-    sourceStart,
-    sourceEnd,
-    1
-  );
-
-  if (parsed.type === "TemplateLiteral") {
-    return _compiler.types.templateLiteral(parsed.quasis, parsed.expressions);
-  }
-
-  return ensureParseError(file, parsed, sourceStart, sourceEnd);
-}
-
-// parseTypeArgs
-function parseTypeArgs(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(file, `_<${str}>`, sourceStart, sourceEnd, 2);
-
-  if (parsed.type === "TSInstantiationExpression") {
-    // typeArguments is Flow only (not TS), we need to use typeParameters
-    return parsed.typeParameters;
-  }
-
-  return [ensureParseError(file, parsed, sourceStart, sourceEnd)];
-}
-
-// parseTypeParams
-function parseTypeParams(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(
-    file,
-    `<${str}>()=>{}`,
-    sourceStart,
-    sourceEnd,
-    1
-  );
-
-  if (parsed.type === "ArrowFunctionExpression") {
-    return parsed.typeParameters;
-  }
-
-  return [ensureParseError(file, parsed, sourceStart, sourceEnd)];
-}
-
-// parseVar
-function parseVar(file, str, sourceStart, sourceEnd) {
-  const parsed = parseExpression(
-    file,
-    `(${str})=>{}`,
-    sourceStart,
-    sourceEnd,
-    1
-  );
-
-  if (parsed.type === "ArrowFunctionExpression" && parsed.params.length === 1) {
-    return parsed.params[0];
-  }
-
-  return ensureParseError(file, parsed, sourceStart, sourceEnd);
-}
+<?xml version="1.0" encoding="UTF-8" ?>
+<rss version="2.0" xmlns:media="http://search.yahoo.com/mrss/">
+
+<channel>
+  <title>paper clover</title>
+  <description>art</description>
+  <link>https://paperclover.net</link>
+  <item>
+    <title>clo's files &amp; cotyledon</title>
+    <link>https://paperclover.net/file</link>
+    <description>
+      i rewrote the file viewer to be nicer to use. as well as uploading about
+      10gb of old content. there's over a dozen hours worth of shit to explore.
+    </description>
+    <pubDate>Wed, 30 Apr 2025 00:00:00 -0000</pubDate>
+    <guid>https://paperclover.net/file</guid>
+  </item>
+  <item>
+    <title>waterfalls and gender identity update</title>
+    <link>https://paperclover.net/waterfalls</link>
+    <description>
+      this is a song about identity, jealousness, and the seasons. more
+      specifically, it's a personal account of how it felt realizing i
+      was a transgender woman. enjoy.
+    </description>
+    <pubDate>Wed, 01 Jan 2025 00:00:00 -0000</pubDate>
+    <guid>https://paperclover.net/waterfalls</guid>
+
+    <enclosure url="https://paperclover.net/file/2025/waterfalls/waterfalls.mp4"
+      type="video/mp4"/>
+    <media:content url="https://paperclover.net/file/2025/waterfalls/waterfalls.mp4"
+      type="video/mp4"
+      width="1920"
+      height="1080">
+    </media:content>
+    <media:thumbnail url="https://paperclover.net/file/2025/waterfalls/thumbnail.jpeg"
+      width="1920"
+      height="1080"
+    />
+  </item>
+</channel>
+
+</rss>
@@ -1,30 +1,66 @@
-// __esModule
-true
-
-// getTagDefForTagName
-function getTagDefForTagName(file, tagName) {
-  const tagDef = getTaglibLookup(file).getTag(tagName);
-
-  if (tagDef) {
-    let seen = file.metadata.marko[SEEN_TAGS_KEY];
-    if (!seen) {
-      seen = file.metadata.marko[SEEN_TAGS_KEY] = new Set();
-    }
-
-    if (!seen.has(tagDef)) {
-      seen.add(tagName);
-      const { filePath } = tagDef;
-      const len = filePath.length;
-
-      if (filePath[len - 14] === "m" && filePath.endsWith("marko-tag.json")) {
-        file.metadata.marko.watchFiles.push(filePath);
-      }
-    }
-  }
-  return tagDef;
-}
-
-// getTaglibLookup
-function getTaglibLookup(file) {
-  return file.___taglibLookup;
-}
+<html lang="en">
+  <head>
+    <title>paper clover</title>
+  </head>
+  <body bgcolor="black" style="word-wrap: initial">
+    <main style="display: flex; flex-direction: column; justify-content: center; align-items: center; height: 100vh">
+      <div>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/waterfalls"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#FFB5E8">water</font><font color="#FFE3F6">falls</font>
+          </a>
+        </p>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/q+a"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#97E5B9">questions</font>
+            <font color="#D4F5E2">and answers</font>
+          </a>
+        </p>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/file"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#B28DFF">file</font>
+            <font color="#DDC6FF">browser</font>
+          </a>
+        </p>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/discord"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#5865F2">dis</font><font color="#7289DA">cord</font>
+          </a>
+        </p>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/github"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#ffffff">git</font><font color="#959DA5">hub</font>
+          </a>
+        </p>
+        <p style="margin: 0.5rem 0">
+          <a
+            href="/rss.xml"
+            style="text-decoration: none; font-family: times; font-size: 48px"
+          >
+            <font color="#F26522">rss</font>
+            <font color="#FF8147">feed</font>
+          </a>
+        </p>
+        <h1 style="margin: -1.5rem 0 3rem 0; font-size: 7rem; font-weight: 400; font-family: times">
+          <font color="#B8E1FF">paper</font>
+          <font color="#E8F4FF">clover</font>
+        </h1>
+      </div>
+    </main>
+  </body>
+</html>