refactor static and asset emission to be actual emitter plugins
commit 9e83af04a7 (parent 000eb4c3c0)
@@ -3,6 +3,7 @@ draft: true
 ---

 ## high priority

 - attachments path
 - https://help.obsidian.md/Editing+and+formatting/Tags#Nested+tags nested tags??
 - watch mode for config/source code
@@ -13,6 +14,7 @@ draft: true
 - note/header/block transcludes: https://help.obsidian.md/Linking+notes+and+files/Embedding+files

 ## misc

 - breadcrumbs component
 - filetree component
 - recent notes component
@@ -25,6 +27,6 @@ draft: true
 - audio/video embed styling
 - Canvas
 - mermaid styling: https://mermaid.js.org/config/theming.html#theme-variables-reference-table
 - https://github.com/jackyzha0/quartz/issues/331
 - parse all images in page: use this for page lists if applicable?
 - CV mode? with print stylesheet
@@ -114,6 +114,8 @@ const config: QuartzConfig = {
 enableSiteMap: true,
 enableRSS: true,
 }),
+Plugin.Assets(),
+Plugin.Static(),
 ],
 },
 }
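
The two new entries wire the Assets and Static emitters (added later in this commit) into the emitters array. For orientation, here is a minimal sketch of a custom emitter that could sit in the same array under the new context-based signature; the plugin name, the slug, and the import paths are illustrative and not part of the commit:

    import { QuartzEmitterPlugin } from "./quartz/plugins/types"
    import { FilePath, ServerSlug } from "./quartz/path"

    export const HelloWorld: QuartzEmitterPlugin = () => ({
      name: "HelloWorld",
      getQuartzComponents() {
        return []
      },
      async emit(_ctx, _content, _resources, emit): Promise<FilePath[]> {
        // write one extra file into the output folder through the shared emit callback
        await emit({ slug: "hello" as ServerSlug, ext: ".txt", content: "hello world" })
        return ["hello.txt" as FilePath]
      },
    })
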
quartz/build.ts (156 lines changed)
@@ -14,16 +14,14 @@ import { FilePath } from "./path"
 import chokidar from "chokidar"
 import { ProcessedContent } from "./plugins/vfile"
 import WebSocket, { WebSocketServer } from "ws"
-interface Argv {
-directory: string
-verbose: boolean
-output: string
-serve: boolean
-port: number
-}
+import { Argv, BuildCtx } from "./ctx"

 async function buildQuartz(argv: Argv, version: string) {
+const ctx: BuildCtx = {
+argv,
+cfg,
+}

 console.log(chalk.bgGreen.black(`\n Quartz v${version} \n`))
 const perf = new PerfTimer()
 const output = argv.output
@@ -38,12 +36,10 @@ async function buildQuartz(argv: Argv, version: string) {
 console.log(` Emitters: ${pluginNames("emitters").join(", ")}`)
 }

-// clean
 perf.addEvent("clean")
 await rimraf(output)
 console.log(`Cleaned output directory \`${output}\` in ${perf.timeSince("clean")}`)

-// glob
 perf.addEvent("glob")
 const fps = await globby("**/*.md", {
 cwd: argv.directory,
@@ -55,89 +51,87 @@ async function buildQuartz(argv: Argv, version: string) {
 )

 const filePaths = fps.map((fp) => `${argv.directory}${path.sep}${fp}` as FilePath)
-const parsedFiles = await parseMarkdown(
-cfg.plugins.transformers,
-argv.directory,
-filePaths,
-argv.verbose,
-)
-const filteredContent = filterContent(cfg.plugins.filters, parsedFiles, argv.verbose)
-await emitContent(argv.directory, output, cfg, filteredContent, argv.serve, argv.verbose)
+const parsedFiles = await parseMarkdown(ctx, filePaths)
+const filteredContent = filterContent(ctx, parsedFiles)
+await emitContent(ctx, filteredContent)
 console.log(chalk.green(`Done processing ${fps.length} files in ${perf.timeSince()}`))

 if (argv.serve) {
-const wss = new WebSocketServer({ port: 3001 })
-const connections: WebSocket[] = []
-wss.on("connection", (ws) => connections.push(ws))
+await startServing(ctx, parsedFiles)
+}
+}

-const ignored = await isGitIgnored()
-const contentMap = new Map<FilePath, ProcessedContent>()
-for (const content of parsedFiles) {
-const [_tree, vfile] = content
-contentMap.set(vfile.data.filePath!, content)
-}
+async function startServing(ctx: BuildCtx, initialContent: ProcessedContent[]) {
+const { argv } = ctx
+const wss = new WebSocketServer({ port: 3001 })
+const connections: WebSocket[] = []
+wss.on("connection", (ws) => connections.push(ws))

-async function rebuild(fp: string, action: "add" | "change" | "unlink") {
-perf.addEvent("rebuild")
-if (!ignored(fp)) {
-console.log(chalk.yellow(`Detected change in ${fp}, rebuilding...`))
-const fullPath = `${argv.directory}${path.sep}${fp}` as FilePath
+const ignored = await isGitIgnored()
+const contentMap = new Map<FilePath, ProcessedContent>()
+for (const content of initialContent) {
+const [_tree, vfile] = content
+contentMap.set(vfile.data.filePath!, content)
+}

-try {
-if (action === "add" || action === "change") {
-const [parsedContent] = await parseMarkdown(
-cfg.plugins.transformers,
-argv.directory,
-[fullPath],
-argv.verbose,
-)
-contentMap.set(fullPath, parsedContent)
-} else if (action === "unlink") {
-contentMap.delete(fullPath)
-}
+async function rebuild(fp: string, action: "add" | "change" | "unlink") {
+const perf = new PerfTimer()
+if (!ignored(fp)) {
+console.log(chalk.yellow(`Detected change in ${fp}, rebuilding...`))
+const fullPath = `${argv.directory}${path.sep}${fp}` as FilePath

-await rimraf(output)
-const parsedFiles = [...contentMap.values()]
-const filteredContent = filterContent(cfg.plugins.filters, parsedFiles, argv.verbose)
-await emitContent(argv.directory, output, cfg, filteredContent, argv.serve, argv.verbose)
-console.log(chalk.green(`Done rebuilding in ${perf.timeSince("rebuild")}`))
-} catch {
-console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
+try {
+if (action === "add" || action === "change") {
+const [parsedContent] = await parseMarkdown(ctx, [fullPath])
+contentMap.set(fullPath, parsedContent)
+} else if (action === "unlink") {
+contentMap.delete(fullPath)
 }

-connections.forEach((conn) => conn.send("rebuild"))
+await rimraf(argv.output)
+const parsedFiles = [...contentMap.values()]
+const filteredContent = filterContent(ctx, parsedFiles)
+await emitContent(
+ctx,
+filteredContent,
+)
+console.log(chalk.green(`Done rebuilding in ${perf.timeSince()}`))
+} catch {
+console.log(chalk.yellow(`Rebuild failed. Waiting on a change to fix the error...`))
 }

+connections.forEach((conn) => conn.send("rebuild"))
 }

-const watcher = chokidar.watch(".", {
-persistent: true,
-cwd: argv.directory,
-ignoreInitial: true,
-})
-
-watcher
-.on("add", (fp) => rebuild(fp, "add"))
-.on("change", (fp) => rebuild(fp, "change"))
-.on("unlink", (fp) => rebuild(fp, "unlink"))
-
-const server = http.createServer(async (req, res) => {
-await serveHandler(req, res, {
-public: output,
-directoryListing: false,
-})
-const status = res.statusCode
-const statusString =
-status >= 200 && status < 300
-? chalk.green(`[${status}]`)
-: status >= 300 && status < 400
-? chalk.yellow(`[${status}]`)
-: chalk.red(`[${status}]`)
-console.log(statusString + chalk.grey(` ${req.url}`))
-})
-server.listen(argv.port)
-console.log(chalk.cyan(`Started a Quartz server listening at http://localhost:${argv.port}`))
-console.log("hint: exit with ctrl+c")
 }

+const watcher = chokidar.watch(".", {
+persistent: true,
+cwd: argv.directory,
+ignoreInitial: true,
+})
+
+watcher
+.on("add", (fp) => rebuild(fp, "add"))
+.on("change", (fp) => rebuild(fp, "change"))
+.on("unlink", (fp) => rebuild(fp, "unlink"))
+
+const server = http.createServer(async (req, res) => {
+await serveHandler(req, res, {
+public: argv.output,
+directoryListing: false,
+})
+const status = res.statusCode
+const statusString =
+status >= 200 && status < 300
+? chalk.green(`[${status}]`)
+: status >= 300 && status < 400
+? chalk.yellow(`[${status}]`)
+: chalk.red(`[${status}]`)
+console.log(statusString + chalk.grey(` ${req.url}`))
+})
+server.listen(argv.port)
+console.log(chalk.cyan(`Started a Quartz server listening at http://localhost:${argv.port}`))
+console.log("hint: exit with ctrl+c")
 }

 export default async (argv: Argv, version: string) => {
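
startServing pushes the literal string "rebuild" to every open WebSocket connection after each successful incremental build. The browser side of that channel is not part of this diff; a rough sketch of what such a client could look like, assuming it simply reloads on any message:

    // hypothetical live-reload client for the ws://localhost:3001 channel above
    const socket = new WebSocket("ws://localhost:3001")
    socket.addEventListener("message", () => {
      // any message (the server only ever sends "rebuild") means fresh output exists
      location.reload()
    })
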
@@ -10,6 +10,5 @@ export interface Argv {

 export interface BuildCtx {
 argv: Argv
-version: string
 cfg: QuartzConfig
 }
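
Pieced together from this commit (the Argv interface removed from quartz/build.ts plus the hunk above), the shared context types now look roughly like this; the exact file layout and the import path are assumptions, not copied from the repository:

    import { QuartzConfig } from "./cfg"

    export interface Argv {
      directory: string
      verbose: boolean
      output: string
      serve: boolean
      port: number
    }

    export interface BuildCtx {
      argv: Argv
      cfg: QuartzConfig
    }
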
@@ -13,12 +13,12 @@ export const AliasRedirects: QuartzEmitterPlugin = () => ({
 getQuartzComponents() {
 return []
 },
-async emit(contentFolder, _cfg, content, _resources, emit): Promise<FilePath[]> {
+async emit({argv}, content, _resources, emit): Promise<FilePath[]> {
 const fps: FilePath[] = []

 for (const [_tree, file] of content) {
 const ogSlug = canonicalizeServer(file.data.slug!)
-const dir = path.relative(contentFolder, file.dirname ?? contentFolder)
+const dir = path.relative(argv.directory, file.dirname ?? argv.directory)

 let aliases: CanonicalSlug[] = []
 if (file.data.frontmatter?.aliases) {
@@ -0,0 +1,36 @@
+import { globbyStream } from "globby"
+import {
+FilePath, slugifyFilePath,
+} from "../../path"
+import { QuartzEmitterPlugin } from "../types"
+import path from "path"
+import fs from "fs"
+
+export const Assets: QuartzEmitterPlugin = () => ({
+name: "Assets",
+getQuartzComponents() {
+return []
+},
+async emit({ argv }, _content, _resources, _emit): Promise<FilePath[]> {
+// glob all non MD/MDX/HTML files in content folder and copy it over
+const assetsPath = path.join(argv.output, "assets")
+
+const fps: FilePath[] = []
+for await (const rawFp of globbyStream("**", {
+ignore: ["**/*.md"],
+cwd: argv.directory,
+})) {
+const fp = rawFp as FilePath
+const ext = path.extname(fp)
+const src = path.join(argv.directory, fp) as FilePath
+const name = (slugifyFilePath(fp as FilePath) + ext) as FilePath
+const dest = path.join(assetsPath, name) as FilePath
+const dir = path.dirname(dest) as FilePath
+await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
+await fs.promises.copyFile(src, dest)
+fps.push(path.join("assets", fp) as FilePath)
+}
+
+return fps
+},
+})
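
The new Assets emitter walks the vault with globby's streaming API rather than collecting every match up front. A standalone sketch of that pattern (the directory name is an example):

    import { globbyStream } from "globby"

    // lazily yields every path under `content/` that is not a markdown file
    for await (const rawFp of globbyStream("**", { ignore: ["**/*.md"], cwd: "content" })) {
      console.log(String(rawFp))
    }
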
@@ -68,7 +68,8 @@ export const ContentIndex: QuartzEmitterPlugin<Partial<Options>> = (opts) => {
 opts = { ...defaultOptions, ...opts }
 return {
 name: "ContentIndex",
-async emit(_contentDir, cfg, content, _resources, emit) {
+async emit(ctx, content, _resources, emit) {
+const cfg = ctx.cfg.configuration
 const emitted: FilePath[] = []
 const linkIndex: ContentIndex = new Map()
 for (const [_tree, file] of content) {
@@ -22,7 +22,8 @@ export const ContentPage: QuartzEmitterPlugin<FullPageLayout> = (opts) => {
 getQuartzComponents() {
 return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer]
 },
-async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> {
+async emit(ctx, content, resources, emit): Promise<FilePath[]> {
+const cfg = ctx.cfg.configuration
 const fps: FilePath[] = []
 const allFiles = content.map((c) => c[1].data)
 for (const [tree, file] of content) {
@@ -22,9 +22,10 @@ export const FolderPage: QuartzEmitterPlugin<FullPageLayout> = (opts) => {
 getQuartzComponents() {
 return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer]
 },
-async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> {
+async emit(ctx, content, resources, emit): Promise<FilePath[]> {
 const fps: FilePath[] = []
 const allFiles = content.map((c) => c[1].data)
+const cfg = ctx.cfg.configuration

 const folders: Set<CanonicalSlug> = new Set(
 allFiles.flatMap((data) => {
@@ -3,3 +3,5 @@ export { TagPage } from "./tagPage"
 export { FolderPage } from "./folderPage"
 export { ContentIndex } from "./contentIndex"
 export { AliasRedirects } from "./aliases"
+export { Assets } from "./assets"
+export { Static } from "./static"
quartz/plugins/emitters/static.ts (new file, 21 lines)
@@ -0,0 +1,21 @@
+import { globby } from "globby"
+import {
+FilePath, QUARTZ
+} from "../../path"
+import { QuartzEmitterPlugin } from "../types"
+import path from "path"
+import fs from "fs"
+
+
+export const Static: QuartzEmitterPlugin = () => ({
+name: "Static",
+getQuartzComponents() {
+return []
+},
+async emit({ argv }, _content, _resources, _emit): Promise<FilePath[]> {
+const staticPath = path.join(QUARTZ, "static")
+const fps = await globby("*", { cwd: staticPath })
+await fs.promises.cp(staticPath, path.join(argv.output, "static"), { recursive: true })
+return fps.map(fp => path.join("static", fp)) as FilePath[]
+},
+})
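
The Static emitter replaces the hard-coded copy that used to live in the emit processor (removed further down): it mirrors quartz/static into <output>/static with Node's recursive fs.cp and reports the top-level file names it found. A minimal standalone equivalent of that copy, with example paths (fs.promises.cp requires Node 16.7 or newer):

    import fs from "fs"

    // copy the whole static tree into the build output in one call
    await fs.promises.cp("quartz/static", "public/static", { recursive: true })
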
@@ -21,9 +21,10 @@ export const TagPage: QuartzEmitterPlugin<FullPageLayout> = (opts) => {
 getQuartzComponents() {
 return [Head, Header, Body, ...header, ...beforeBody, Content, ...left, ...right, Footer]
 },
-async emit(_contentDir, cfg, content, resources, emit): Promise<FilePath[]> {
+async emit(ctx, content, resources, emit): Promise<FilePath[]> {
 const fps: FilePath[] = []
 const allFiles = content.map((c) => c[1].data)
+const cfg = ctx.cfg.configuration

 const tags: Set<string> = new Set(allFiles.flatMap((data) => data.frontmatter?.tags ?? []))
 const tagDescriptions: Record<string, ProcessedContent> = Object.fromEntries(
@@ -110,7 +110,7 @@ const commentRegex = new RegExp(/%%(.+)%%/, "g")
 // from https://github.com/escwxyz/remark-obsidian-callout/blob/main/src/index.ts
 const calloutRegex = new RegExp(/^\[\!(\w+)\]([+-]?)/)
 // (?:^| ) -> non-capturing group, tag should start be separated by a space or be the start of the line
 // #(\w+) -> tag itself is # followed by a string of alpha-numeric characters
 const tagRegex = new RegExp(/(?:^| )#(\w+)/, "g")

 export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options> | undefined> = (
@@ -225,7 +225,7 @@ export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options>
 findAndReplace(tree, commentRegex, (_value: string, ..._capture: string[]) => {
 return {
 type: "text",
-value: ""
+value: "",
 }
 })
 }
@@ -296,8 +296,9 @@ export const ObsidianFlavoredMarkdown: QuartzTransformerPlugin<Partial<Options>
 node.data = {
 hProperties: {
 ...(node.data?.hProperties ?? {}),
-className: `callout ${collapse ? "is-collapsible" : ""} ${defaultState === "collapsed" ? "is-collapsed" : ""
-}`,
+className: `callout ${collapse ? "is-collapsible" : ""} ${
+defaultState === "collapsed" ? "is-collapsed" : ""
+}`,
 "data-callout": calloutType,
 "data-callout-fold": collapse,
 },
@@ -4,6 +4,7 @@ import { ProcessedContent } from "./vfile"
 import { GlobalConfiguration } from "../cfg"
 import { QuartzComponent } from "../components/types"
 import { FilePath, ServerSlug } from "../path"
+import { BuildCtx } from "../ctx"

 export interface PluginTypes {
 transformers: QuartzTransformerPluginInstance[]
@@ -37,8 +38,7 @@ export type QuartzEmitterPlugin<Options extends OptionType = undefined> = (
 export type QuartzEmitterPluginInstance = {
 name: string
 emit(
-contentDir: string,
-cfg: GlobalConfiguration,
+ctx: BuildCtx,
 content: ProcessedContent[],
 resources: StaticResources,
 emitCallback: EmitCallback,
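
Putting the two hunks together, the emitter contract that every plugin in this commit implements is roughly the following; the return types are inferred from the emitter implementations elsewhere in the diff rather than shown in this hunk:

    export type QuartzEmitterPluginInstance = {
      name: string
      getQuartzComponents(): QuartzComponent[]
      emit(
        ctx: BuildCtx,
        content: ProcessedContent[],
        resources: StaticResources,
        emitCallback: EmitCallback,
      ): Promise<FilePath[]>
    }
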
@@ -1,6 +1,5 @@
 import path from "path"
 import fs from "fs"
-import { GlobalConfiguration, QuartzConfig } from "../cfg"
 import { PerfTimer } from "../perf"
 import {
 ComponentResources,
@@ -10,8 +9,7 @@ import {
 } from "../plugins"
 import { EmitCallback } from "../plugins/types"
 import { ProcessedContent } from "../plugins/vfile"
-import { FilePath, QUARTZ, slugifyFilePath } from "../path"
-import { globbyStream } from "globby"
+import { FilePath } from "../path"

 // @ts-ignore
 import spaRouterScript from "../components/scripts/spa.inline"
@@ -24,13 +22,15 @@ import { StaticResources } from "../resources"
 import { QuartzLogger } from "../log"
 import { googleFontHref } from "../theme"
 import { trace } from "../trace"
+import { BuildCtx } from "../ctx"

 function addGlobalPageResources(
-cfg: GlobalConfiguration,
-reloadScript: boolean,
+ctx: BuildCtx,
 staticResources: StaticResources,
 componentResources: ComponentResources,
 ) {
+const cfg = ctx.cfg.configuration
+const reloadScript = ctx.argv.serve
 staticResources.css.push(googleFontHref(cfg.theme))

 // popovers
@@ -85,19 +85,17 @@ function addGlobalPageResources(
 }

 export async function emitContent(
-contentFolder: string,
-output: string,
-cfg: QuartzConfig,
+ctx: BuildCtx,
 content: ProcessedContent[],
-reloadScript: boolean,
-verbose: boolean,
 ) {
+const { argv, cfg } = ctx
+const contentFolder = argv.directory
 const perf = new PerfTimer()
-const log = new QuartzLogger(verbose)
+const log = new QuartzLogger(ctx.argv.verbose)

 log.start(`Emitting output files`)
 const emit: EmitCallback = async ({ slug, ext, content }) => {
-const pathToPage = path.join(output, slug + ext) as FilePath
+const pathToPage = path.join(argv.output, slug + ext) as FilePath
 const dir = path.dirname(pathToPage)
 await fs.promises.mkdir(dir, { recursive: true })
 await fs.promises.writeFile(pathToPage, content)
@@ -113,11 +111,11 @@ export async function emitContent(
 // important that this goes *after* component scripts
 // as the "nav" event gets triggered here and we should make sure
 // that everyone else had the chance to register a listener for it
-addGlobalPageResources(cfg.configuration, reloadScript, staticResources, componentResources)
+addGlobalPageResources(ctx, staticResources, componentResources)

 let emittedFiles = 0
 const emittedResources = await emitComponentResources(cfg.configuration, componentResources, emit)
-if (verbose) {
+if (argv.verbose) {
 for (const file of emittedResources) {
 emittedFiles += 1
 console.log(`[emit:Resources] ${file}`)
@@ -128,15 +126,14 @@ export async function emitContent(
 for (const emitter of cfg.plugins.emitters) {
 try {
 const emitted = await emitter.emit(
-contentFolder,
-cfg.configuration,
+ctx,
 content,
 staticResources,
 emit,
 )
 emittedFiles += emitted.length

-if (verbose) {
+if (ctx.argv.verbose) {
 for (const file of emitted) {
 console.log(`[emit:${emitter.name}] ${file}`)
 }
@@ -147,31 +144,5 @@ export async function emitContent(
 }
 }

-const staticPath = path.join(QUARTZ, "static")
-await fs.promises.cp(staticPath, path.join(output, "static"), { recursive: true })
-if (verbose) {
-console.log(`[emit:Static] ${path.join("static", "**")}`)
-}
-
-// glob all non MD/MDX/HTML files in content folder and copy it over
-const assetsPath = path.join(output, "assets")
-for await (const rawFp of globbyStream("**", {
-ignore: ["**/*.md"],
-cwd: contentFolder,
-})) {
-const fp = rawFp as FilePath
-const ext = path.extname(fp)
-const src = path.join(contentFolder, fp) as FilePath
-const name = (slugifyFilePath(fp as FilePath) + ext) as FilePath
-const dest = path.join(assetsPath, name) as FilePath
-const dir = path.dirname(dest) as FilePath
-await fs.promises.mkdir(dir, { recursive: true }) // ensure dir exists
-await fs.promises.copyFile(src, dest)
-emittedFiles += 1
-if (verbose) {
-console.log(`[emit:Assets] ${path.join("assets", name)}`)
-}
-}
-
-log.end(`Emitted ${emittedFiles} files to \`${output}\` in ${perf.timeSince()}`)
+log.end(`Emitted ${emittedFiles} files to \`${argv.output}\` in ${perf.timeSince()}`)
 }
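
Every emitter now receives the same EmitCallback built here: it resolves a slug plus extension against argv.output, creates the directory, and writes the content. A hedged sketch of a plugin-side call and the file it would produce (the slug and output folder are illustrative):

    // inside some emitter's emit(ctx, content, resources, emit):
    await emit({
      slug: "tags/recipes" as ServerSlug, // hypothetical page slug
      ext: ".html",
      content: "<html></html>",
    })
    // with argv.output set to "public", this writes public/tags/recipes.html
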
@@ -1,18 +1,18 @@
+import { BuildCtx } from "../ctx"
 import { PerfTimer } from "../perf"
 import { QuartzFilterPluginInstance } from "../plugins/types"
 import { ProcessedContent } from "../plugins/vfile"

 export function filterContent(
-plugins: QuartzFilterPluginInstance[],
+{ cfg, argv }: BuildCtx,
 content: ProcessedContent[],
-verbose: boolean,
 ): ProcessedContent[] {
 const perf = new PerfTimer()
 const initialLength = content.length
-for (const plugin of plugins) {
+for (const plugin of cfg.plugins.filters) {
 const updatedContent = content.filter(plugin.shouldPublish)

-if (verbose) {
+if (argv.verbose) {
 const diff = content.filter((x) => !updatedContent.includes(x))
 for (const file of diff) {
 console.log(`[filter:${plugin.name}] ${file[1].data.slug}`)
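
filterContent now reaches into ctx.cfg.plugins.filters instead of taking the plugin list and verbose flag as separate arguments. For context, a sketch of what one of those filter plugins looks like; the name and the draft-based rule are illustrative, loosely patterned on Quartz's drafts filter:

    import { QuartzFilterPlugin } from "../types"

    export const RemoveDrafts: QuartzFilterPlugin = () => ({
      name: "RemoveDrafts",
      shouldPublish([_tree, vfile]) {
        // drop anything explicitly marked draft: true in its frontmatter
        return vfile.data.frontmatter?.draft !== true
      },
    })
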
@@ -14,6 +14,7 @@ import workerpool, { Promise as WorkerPromise } from "workerpool"
 import { QuartzTransformerPluginInstance } from "../plugins/types"
 import { QuartzLogger } from "../log"
 import { trace } from "../trace"
+import { BuildCtx } from "../ctx"

 export type QuartzProcessor = Processor<MDRoot, HTMLRoot, void>
 export function createProcessor(transformers: QuartzTransformerPluginInstance[]): QuartzProcessor {
@@ -72,13 +73,7 @@ async function transpileWorkerScript() {
 })
 }

-export function createFileParser(
-transformers: QuartzTransformerPluginInstance[],
-baseDir: string,
-fps: FilePath[],
-allSlugs: ServerSlug[],
-verbose: boolean,
-) {
+export function createFileParser({ argv, cfg }: BuildCtx, fps: FilePath[], allSlugs: ServerSlug[]) {
 return async (processor: QuartzProcessor) => {
 const res: ProcessedContent[] = []
 for (const fp of fps) {
@@ -89,12 +84,12 @@ export function createFileParser(
 file.value = file.value.toString().trim()

 // Text -> Text transforms
-for (const plugin of transformers.filter((p) => p.textTransform)) {
+for (const plugin of cfg.plugins.transformers.filter((p) => p.textTransform)) {
 file.value = plugin.textTransform!(file.value)
 }

 // base data properties that plugins may use
-file.data.slug = slugifyFilePath(path.relative(baseDir, file.path) as FilePath)
+file.data.slug = slugifyFilePath(path.relative(argv.directory, file.path) as FilePath)
 file.data.allSlugs = allSlugs
 file.data.filePath = fp

@@ -102,7 +97,7 @@ export function createFileParser(
 const newAst = await processor.run(ast, file)
 res.push([newAst, file])

-if (verbose) {
+if (argv.verbose) {
 console.log(`[process] ${fp} -> ${file.data.slug}`)
 }
 } catch (err) {
@@ -115,29 +110,25 @@ export function createFileParser(
 }
 }

-export async function parseMarkdown(
-transformers: QuartzTransformerPluginInstance[],
-baseDir: string,
-fps: FilePath[],
-verbose: boolean,
-): Promise<ProcessedContent[]> {
+export async function parseMarkdown(ctx: BuildCtx, fps: FilePath[]): Promise<ProcessedContent[]> {
+const { argv, cfg } = ctx
 const perf = new PerfTimer()
-const log = new QuartzLogger(verbose)
+const log = new QuartzLogger(argv.verbose)

 const CHUNK_SIZE = 128
 let concurrency = fps.length < CHUNK_SIZE ? 1 : os.availableParallelism()

 // get all slugs ahead of time as each thread needs a copy
 const allSlugs = fps.map((fp) =>
-slugifyFilePath(path.relative(baseDir, path.resolve(fp)) as FilePath),
+slugifyFilePath(path.relative(argv.directory, path.resolve(fp)) as FilePath),
 )

 let res: ProcessedContent[] = []
 log.start(`Parsing input files using ${concurrency} threads`)
 if (concurrency === 1) {
 try {
-const processor = createProcessor(transformers)
-const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose)
+const processor = createProcessor(cfg.plugins.transformers)
+const parse = createFileParser(ctx, fps, allSlugs)
 res = await parse(processor)
 } catch (error) {
 log.end()
@@ -153,7 +144,7 @@ export async function parseMarkdown(

 const childPromises: WorkerPromise<ProcessedContent[]>[] = []
 for (const chunk of chunks(fps, CHUNK_SIZE)) {
-childPromises.push(pool.exec("parseFiles", [baseDir, chunk, allSlugs, verbose]))
+childPromises.push(pool.exec("parseFiles", [argv, chunk, allSlugs]))
 }

 const results: ProcessedContent[][] = await WorkerPromise.all(childPromises)
@@ -1,17 +1,18 @@
-import config from "../quartz.config"
+import cfg from "../quartz.config"
+import { Argv, BuildCtx } from "./ctx"
 import { FilePath, ServerSlug } from "./path"
 import { createFileParser, createProcessor } from "./processors/parse"

-const transformers = config.plugins.transformers
+const transformers = cfg.plugins.transformers
 const processor = createProcessor(transformers)

 // only called from worker thread
-export async function parseFiles(
-baseDir: string,
-fps: FilePath[],
-allSlugs: ServerSlug[],
-verbose: boolean,
-) {
-const parse = createFileParser(transformers, baseDir, fps, allSlugs, verbose)
+export async function parseFiles(argv: Argv, fps: FilePath[], allSlugs: ServerSlug[]) {
+const ctx: BuildCtx = {
+cfg,
+argv,
+}
+
+const parse = createFileParser(ctx, fps, allSlugs)
 return parse(processor)
 }