import sourceMapSupport from "source-map-support"
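// remap stack traces to the original TypeScript sources; `options` is imported from "./sourcemap"
// further down (ESM import declarations are hoisted, so the binding exists before this call runs)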
sourceMapSupport.install(options)
import path from "path"
import { PerfTimer } from "./perf"
import { rimraf } from "rimraf"
import { isGitIgnored } from "globby"
import chalk from "chalk"
import { parseMarkdown } from "./processors/parse"
import { filterContent } from "./processors/filter"
import { emitContent } from "./processors/emit"
import cfg from "../quartz.config"
import { FilePath, joinSegments, slugifyFilePath } from "./path"
import chokidar from "chokidar"
import { ProcessedContent } from "./plugins/vfile"
import { Argv, BuildCtx } from "./ctx"
import { glob, toPosixPath } from "./glob"
import { trace } from "./trace"
import { options } from "./sourcemap"

async function buildQuartz(argv: Argv, clientRefresh: () => void) {
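  // shared build context threaded through every processing stage; allSlugs is filled in
  // once the content directory has been globbed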
  const ctx: BuildCtx = {
    argv,
    cfg,
    allSlugs: [],
  }

  const perf = new PerfTimer()
  const output = argv.output

  const pluginCount = Object.values(cfg.plugins).flat().length
  const pluginNames = (key: "transformers" | "filters" | "emitters") =>
    cfg.plugins[key].map((plugin) => plugin.name)
  if (argv.verbose) {
    console.log(`Loaded ${pluginCount} plugins`)
    console.log(`  Transformers: ${pluginNames("transformers").join(", ")}`)
    console.log(`  Filters: ${pluginNames("filters").join(", ")}`)
    console.log(`  Emitters: ${pluginNames("emitters").join(", ")}`)
  }

  perf.addEvent("clean")
  await rimraf(output)
  console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)

  perf.addEvent("glob")
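  // collect every file under the content directory (honoring the configured ignore patterns);
  // only markdown files are fed to the parser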
  const allFiles = await glob("**/*.*", argv.directory, cfg.configuration.ignorePatterns)
  const fps = allFiles.filter((fp) => fp.endsWith(".md"))
  console.log(
    `Found ${fps.length} input files from \`${argv.directory}\` in ${perf.timeSince("glob")}`,
  )

  const filePaths = fps.map((fp) => joinSegments(argv.directory, fp) as FilePath)
  ctx.allSlugs = allFiles.map((fp) => slugifyFilePath(fp as FilePath))

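  // plugin pipeline: transformers run while parsing, filters prune unwanted content,
  // emitters write the final site to the output directory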
  const parsedFiles = await parseMarkdown(ctx, filePaths)
  const filteredContent = filterContent(ctx, parsedFiles)
  await emitContent(ctx, filteredContent)
  console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))

  if (argv.serve) {
    return startServing(ctx, parsedFiles, clientRefresh)
  }
}

// setup watcher for rebuilds
async function startServing(
  ctx: BuildCtx,
  initialContent: ProcessedContent[],
  clientRefresh: () => void,
) {
  const { argv } = ctx

  const ignored = await isGitIgnored()
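  // keep parsed content keyed by source file path so the watcher can re-parse only changed files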
  const contentMap = new Map<FilePath, ProcessedContent>()
  for (const content of initialContent) {
    const [_tree, vfile] = content
    contentMap.set(vfile.data.filePath!, content)
  }

  const initialSlugs = ctx.allSlugs
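  // file events are batched into these sets and flushed by the debounced rebuild below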
  let timeoutId: ReturnType<typeof setTimeout> | null = null
  let toRebuild: Set<FilePath> = new Set()
  let toRemove: Set<FilePath> = new Set()
  let trackedAssets: Set<FilePath> = new Set()
  async function rebuild(fp: string, action: "add" | "change" | "delete") {
    // don't do anything for gitignored files
    if (ignored(fp)) {
      return
    }

    // don't bother rebuilding for non-content files, just track and refresh
    fp = toPosixPath(fp)
    const filePath = joinSegments(argv.directory, fp) as FilePath
    if (path.extname(fp) !== ".md") {
      if (action === "add" || action === "change") {
        trackedAssets.add(filePath)
      } else if (action === "delete") {
        trackedAssets.delete(filePath)
      }
      clientRefresh()
      return
    }

    if (action === "add" || action === "change") {
      toRebuild.add(filePath)
    } else if (action === "delete") {
      toRemove.add(filePath)
    }

    if (timeoutId) {
      clearTimeout(timeoutId)
    }

    // debounce rebuilds every 250ms
    timeoutId = setTimeout(async () => {
      const perf = new PerfTimer()
      console.log(chalk.yellow("Detected change, rebuilding..."))
      try {
        const filesToRebuild = [...toRebuild].filter((fp) => !toRemove.has(fp))

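        // recompute the full slug list from everything currently tracked (content and assets),
        // dropping files queued for removal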
        const trackedSlugs = [...new Set([...contentMap.keys(), ...toRebuild, ...trackedAssets])]
          .filter((fp) => !toRemove.has(fp))
          .map((fp) => slugifyFilePath(path.posix.relative(argv.directory, fp) as FilePath))

        ctx.allSlugs = [...new Set([...initialSlugs, ...trackedSlugs])]
        const parsedContent = await parseMarkdown(ctx, filesToRebuild)
        for (const content of parsedContent) {
          const [_tree, vfile] = content
          contentMap.set(vfile.data.filePath!, content)
        }

        for (const fp of toRemove) {
          contentMap.delete(fp)
        }

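        // wipe the output directory and re-emit the whole site from the in-memory content map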
        await rimraf(argv.output)
        const parsedFiles = [...contentMap.values()]
        const filteredContent = filterContent(ctx, parsedFiles)
        await emitContent(ctx, filteredContent)
        console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
      } catch {
        console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
      }

      clientRefresh()
      toRebuild.clear()
      toRemove.clear()
    }, 250)
  }

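  // watch the content directory and translate file events into add/change/delete rebuild actions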
  const watcher = chokidar.watch(".", {
    persistent: true,
    cwd: argv.directory,
    ignoreInitial: true,
  })

  watcher
    .on("add", (fp) => rebuild(fp, "add"))
    .on("change", (fp) => rebuild(fp, "change"))
    .on("unlink", (fp) => rebuild(fp, "delete"))
}

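// entry point: run the build and surface fatal errors through trace() instead of an unhandled rejection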
export default async (argv: Argv, clientRefresh: () => void) => {
  try {
    return await buildQuartz(argv, clientRefresh)
  } catch (err) {
    trace("\nExiting Quartz due to a fatal error", err as Error)
  }
}