import esbuild from "esbuild"
import remarkParse from "remark-parse"
import remarkRehype from "remark-rehype"
import { Processor, unified } from "unified"
import { Root as MDRoot } from "remark-parse/lib"
import { Root as HTMLRoot } from "hast"
import { ProcessedContent } from "../plugins/vfile"
import { PerfTimer } from "../perf"
import { read } from "to-vfile"
import { FilePath, QUARTZ, ServerSlug, slugifyFilePath } from "../path"
import path from "path"
import os from "os"
import workerpool, { Promise as WorkerPromise } from "workerpool"
import { QuartzTransformerPluginInstance } from "../plugins/types"
import { QuartzLogger } from "../log"
import { trace } from "../trace"
import { BuildCtx } from "../ctx"

export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void>
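
// compose a unified processor (Markdown parsing -> transforms -> HTML) from the configured transformer plugins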
export function createProcessor(transformers: QuartzTransformerPluginInstance[]): QuartzProcessor {
  // base Markdown -> MD AST
  let processor = unified().use(remarkParse)

  // MD AST -> MD AST transforms
  for (const plugin of transformers.filter((p) => p.markdownPlugins)) {
    processor = processor.use(plugin.markdownPlugins!())
  }

  // MD AST -> HTML AST
  processor = processor.use(remarkRehype, { allowDangerousHtml: true })

  // HTML AST -> HTML AST transforms
  for (const plugin of transformers.filter((p) => p.htmlPlugins)) {
    processor = processor.use(plugin.htmlPlugins!())
  }

  return processor
}
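
// yield consecutive slices of `arr`, each with at most `n` elements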
function* chunks<T>(arr: T[], n: number) {
  for (let i = 0; i < arr.length; i += n) {
    yield arr.slice(i, i + n)
  }
}

async function transpileWorkerScript() {
  // transpile worker script
  const cacheFile = "./.quartz-cache/transpiled-worker.mjs"
  const fp = "./quartz/worker.ts"
  return esbuild.build({
    entryPoints: [fp],
    outfile: path.join(QUARTZ, cacheFile),
    bundle: true,
    keepNames: true,
    platform: "node",
    format: "esm",
    packages: "external",
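    // stub out .scss and .inline.ts/js imports as empty text so their contents aren't bundled into the worker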
plugins: [
|
|
|
|
{
|
2023-07-23 07:27:41 +07:00
|
|
|
name: "css-and-scripts-as-text",
|
2023-06-04 23:35:45 +07:00
|
|
|
setup(build) {
|
|
|
|
build.onLoad({ filter: /\.scss$/ }, (_) => ({
|
2023-07-23 07:27:41 +07:00
|
|
|
contents: "",
|
|
|
|
loader: "text",
|
2023-06-04 23:35:45 +07:00
|
|
|
}))
|
|
|
|
build.onLoad({ filter: /\.inline\.(ts|js)$/ }, (_) => ({
|
2023-07-23 07:27:41 +07:00
|
|
|
contents: "",
|
|
|
|
loader: "text",
|
2023-06-04 23:35:45 +07:00
|
|
|
}))
|
2023-07-23 07:27:41 +07:00
|
|
|
},
|
|
|
|
},
|
|
|
|
],
|
2023-06-04 23:35:45 +07:00
|
|
|
})
|
|
|
|
}
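
// build an async file parser: reads each file, applies text transforms, sets base vfile data, and runs the processor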
export function createFileParser({ argv, cfg }: BuildCtx, fps: FilePath[], allSlugs: ServerSlug[]) {
  return async (processor: QuartzProcessor) => {
    const res: ProcessedContent[] = []
    for (const fp of fps) {
      try {
        const file = await read(fp)

        // strip leading and trailing whitespace
        file.value = file.value.toString().trim()

        // Text -> Text transforms
        for (const plugin of cfg.plugins.transformers.filter((p) => p.textTransform)) {
          file.value = plugin.textTransform!(file.value)
        }

        // base data properties that plugins may use
        file.data.slug = slugifyFilePath(path.relative(argv.directory, file.path) as FilePath)
        file.data.allSlugs = allSlugs
        file.data.filePath = fp
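
        // text -> MD AST, then run the attached transforms to produce the final HTML AST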
        const ast = processor.parse(file)
        const newAst = await processor.run(ast, file)
        res.push([newAst, file])

        if (argv.verbose) {
          console.log(`[process] ${fp} -> ${file.data.slug}`)
        }
      } catch (err) {
        trace(`\nFailed to process \`${fp}\``, err as Error)
        throw err
      }
    }

    return res
  }
}

export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> {
  const { argv, cfg } = ctx
  const perf = new PerfTimer()
  const log = new QuartzLogger(argv.verbose)
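
  // parse in-process when there are fewer files than one chunk; otherwise use one worker thread per available core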
  const CHUNK_SIZE = 128
  let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()

  // get all slugs ahead of time as each thread needs a copy
  const allSlugs = fps.map((fp) =>
    slugifyFilePath(path.relative(argv.directory, path.resolve(fp)) as FilePath),
  )

  let res: ProcessedContent[] = []
  log.start(`Parsing input files using ${concurrency} threads`)
  if (concurrency === 1) {
    try {
      const processor = createProcessor(cfg.plugins.transformers)
      const parse = createFileParser(ctx, fps, allSlugs)
      res = await parse(processor)
    } catch (error) {
      log.end()
      throw error
    }
  } else {
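    // build the worker script first, then fan chunks of files out to a thread pool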
    await transpileWorkerScript()
    const pool = workerpool.pool("./quartz/bootstrap-worker.mjs", {
      minWorkers: "max",
      maxWorkers: concurrency,
      workerType: "thread",
    })

    const childPromises: WorkerPromise<ProcessedContent[]>[] = []
    for (const chunk of chunks(fps, CHUNK_SIZE)) {
      childPromises.push(pool.exec("parseFiles", [argv, chunk, allSlugs]))
    }

    const results: ProcessedContent[][] = await WorkerPromise.all(childPromises)
    res = results.flat()
    await pool.terminate()
  }

  log.end(`Parsed ${res.length} Markdown files in ${perf.timeSince()}`)
  return res
}