// Trash - a stupid, simple website compiler.
// Licensing information at the bottom of this file.
package main
import (
"bufio"
"bytes"
"context"
"encoding/json"
"fmt"
"html/template"
"io"
"maps"
"math"
"math/rand/v2"
"net/http"
"net/url"
"os"
"os/exec"
"path/filepath"
"reflect"
"regexp"
"slices"
"sort"
"strconv"
"strings"
"sync"
textTemplate "text/template"
"time"
_ "embed"
d2 "github.com/FurqanSoftware/goldmark-d2"
chromahtml "github.com/alecthomas/chroma/v2/formatters/html"
"github.com/davecgh/go-spew/spew"
"github.com/expr-lang/expr"
"github.com/fatih/color"
"github.com/fsnotify/fsnotify"
"github.com/gorilla/websocket"
pikchr "github.com/jchenry/goldmark-pikchr"
figure "github.com/mangoumbrella/goldmark-figure"
"github.com/pelletier/go-toml/v2"
fences "github.com/stefanfritsch/goldmark-fences"
"github.com/tdewolff/minify/v2"
"github.com/tdewolff/minify/v2/css"
minifyHtml "github.com/tdewolff/minify/v2/html"
"github.com/tdewolff/minify/v2/js"
minifyJson "github.com/tdewolff/minify/v2/json"
"github.com/tdewolff/minify/v2/svg"
"github.com/tdewolff/minify/v2/xml"
treeblood "github.com/wyatt915/goldmark-treeblood"
"github.com/yuin/goldmark"
emoji "github.com/yuin/goldmark-emoji"
east "github.com/yuin/goldmark-emoji/ast"
highlighting "github.com/yuin/goldmark-highlighting/v2"
gast "github.com/yuin/goldmark/ast"
"github.com/yuin/goldmark/extension"
"github.com/yuin/goldmark/parser"
"github.com/yuin/goldmark/renderer/html"
"github.com/yuin/goldmark/text"
"github.com/yuin/goldmark/util"
enclave "github.com/zeozeozeo/goldmark-enclave"
enclaveCallout "github.com/zeozeozeo/goldmark-enclave/callout"
enclaveCore "github.com/zeozeozeo/goldmark-enclave/core"
enclaveMark "github.com/zeozeozeo/goldmark-enclave/mark"
subtext "github.com/zeozeozeo/goldmark-subtext"
"go.abhg.dev/goldmark/anchor"
"go.abhg.dev/goldmark/frontmatter"
"go.abhg.dev/goldmark/mermaid"
"go.abhg.dev/goldmark/mermaid/mermaidcdp"
"go.abhg.dev/goldmark/toc"
"golang.org/x/text/cases"
"golang.org/x/text/language"
)
var (
//go:embed mermaid.min.js
mermaidJSSource string
mermaidCompiler *mermaidcdp.Compiler
noSandbox = os.Getenv("TRASH_NO_SANDBOX") != ""
)
func usage() {
programName := getProgramName()
fmt.Printf("Usage: %s <command> [directory]\n\n", programName)
fmt.Println("A stupid, simple website compiler.")
fmt.Println("\nCommands:")
fmt.Println(" init Initialize a new site in the directory (default: current).")
fmt.Println(" build Build the site.")
fmt.Println(" watch Watch for changes and rebuild.")
fmt.Println(" serve Serve the site with live reload.")
fmt.Println(" help Show this help message.")
}
func printerr(format string, a ...any) {
fmt.Print(color.HiRedString("error"))
fmt.Print(": ")
fmt.Printf(format, a...)
fmt.Print("\n")
}
func printwarn(format string, a ...any) {
fmt.Print(color.YellowString("warn"))
fmt.Print(": ")
fmt.Printf(format, a...)
fmt.Print("\n")
}
func main() {
if len(os.Args) == 2 &&
(os.Args[1] == "help" ||
os.Args[1] == "-h" ||
os.Args[1] == "--help" ||
os.Args[1] == "?") {
usage()
return
}
defer func() {
if mermaidCompiler != nil {
mermaidCompiler.Close()
}
}()
var cmd string
var targetDir string
if len(os.Args) > 1 {
cmd = os.Args[1]
}
if len(os.Args) > 2 {
targetDir = os.Args[2]
}
if targetDir != "" {
originalDir, err := os.Getwd()
if err != nil {
printerr("Failed to get current directory: %v", err)
os.Exit(1)
}
if err := os.Chdir(targetDir); err != nil {
printerr("Failed to change to directory `%s`: %v", targetDir, err)
os.Exit(1)
}
defer func() {
if err := os.Chdir(originalDir); err != nil {
printerr("Failed to change back to original directory: %v", err)
}
}()
}
switch cmd {
case "", "build":
build(false, true)
case "init":
initCmd()
case "watch":
watchCmd()
case "serve":
serveCmd()
default:
printerr("No such command `%s`.\n", cmd)
usage()
os.Exit(1)
}
}
// -- util --
func checkAllDirsExist(dirs ...string) bool {
for _, dir := range dirs {
if stat, err := os.Stat(dir); err != nil || !stat.IsDir() {
return false
}
}
return true
}
func isEmptyDir() bool {
entries, err := os.ReadDir(".")
if err != nil {
return false
}
return len(entries) == 0
}
func ask(prompt string) bool {
	reader := bufio.NewReader(os.Stdin)
	for {
		fmt.Print(prompt)
		line, err := reader.ReadString('\n')
		if err != nil {
			printerr("%v", err)
			os.Exit(1)
		}
		switch strings.TrimSpace(line) {
		case "", "y", "Y":
			return true
		case "n", "N":
			return false
		default:
			fmt.Println("enter y or n.")
		}
	}
}
func getProgramName() string {
basename := filepath.Base(os.Args[0])
return strings.TrimSuffix(basename, filepath.Ext(basename))
}
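// writefile joins elem into a path and writes content to it, but only if the
// file does not already exist. A trailing newline is appended when missing.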
func writefile(content string, elem ...string) {
if !strings.HasSuffix(content, "\n") {
content += "\n"
}
path := filepath.Join(elem...)
	if _, err := os.Stat(path); os.IsNotExist(err) {
		if err := os.WriteFile(path, []byte(content), 0o644); err != nil {
			printerr("Write %s: %v", path, err)
			return
		}
		fmt.Printf("%s file: %s\n", color.HiGreenString("Created"), filepath.ToSlash(path))
	}
}
func makedirs(dirs ...string) {
for _, d := range dirs {
if err := os.MkdirAll(d, 0o755); err != nil {
printerr("Mkdir %s: %v", d, err)
os.Exit(1)
}
fmt.Printf("%s directory: %s\n", color.HiGreenString("Created"), d)
}
}
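// queryMap walks a nested map[string]any along path and returns the value at
// the final key, plus whether it exists.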
func queryMap(m map[string]any, path ...string) (any, bool) {
for i, key := range path {
if m == nil {
return nil, false
}
if i == len(path)-1 {
val, exists := m[key]
return val, exists
}
if next, ok := m[key]; ok {
if nextMap, ok := next.(map[string]any); ok {
m = nextMap
} else {
return nil, false
}
} else {
return nil, false
}
}
return nil, false
}
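// queryMapOrDefault is like queryMap but returns fallback when the key is
// missing or the value is not of type T.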
func queryMapOrDefault[T any](m map[string]any, fallback T, path ...string) T {
val, exists := queryMap(m, path...)
if !exists {
return fallback
}
if converted, ok := val.(T); ok {
return converted
}
return fallback
}
// -- init --
const (
pagesDir = "pages"
staticDir = "static"
layoutsDir = "layouts"
outputDir = "out"
trashConfigFilename = "Trash.toml"
)
func initCmd() {
if !isEmptyDir() {
if !ask("The current directory is not empty. Are you sure you want to continue? (Y/n): ") {
fmt.Println("Aborting.")
return
}
}
makedirs(pagesDir+"/posts", staticDir, layoutsDir)
// layouts/base.html
writefile(`<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>{{ .Page.Metadata.title }}</title>
<link rel="stylesheet" href="/static/style.css">
</head>
<body>
<div class="container">
<h1>{{ .Page.Metadata.title }}</h1>
<main>
{{ .Page.Content }}
</main>
</div>
{{ if .IsServing }}
<script>
const socket = new WebSocket("ws://" + window.location.host + "/ws");
socket.addEventListener("message", (event) => {
if (event.data === "reload") {
window.location.reload();
}
});
socket.addEventListener("close", () => {
console.log("Live reload connection lost. Please refresh manually.");
});
</script>
{{ end }}
</body>
</html>`, layoutsDir, "base.html")
// static/style.css
writefile(`body {
font-family: sans-serif;
color: #333;
}
.container {
max-width: 800px;
margin: 2rem auto;
padding: 0 1rem;
}`, staticDir, "style.css")
	// pages/index.md
writefile(`---
title: "Welcome!"
---
This is the home page.
## Blog Posts
{{ $posts := readDir "posts" | sortBy "date" "desc" }}
<ul>
{{- range $posts }}
<li><a href="{{ .Permalink }}">{{ .Metadata.title }}</a> - {{ .Metadata.date }}</li>
{{- end }}
</ul>`, pagesDir, "index.md")
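	// pages/posts/first-post.md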
writefile(`---
title: "My First Post"
date: "2025-08-24"
---
Hello, world! This is my first post.`, pagesDir, "posts", "first-post.md")
// .gitignore
writefile("/out", ".gitignore")
// Trash.toml
	writefile(`# The structure of this config file is not forced upon you; it is just a convenient
# place to keep the permalink and other configuration so you can access it in templates.
[site]
url = "https://example.com/" # Access this like {{ .Config.site.url }}`, trashConfigFilename)
programName := getProgramName()
fmt.Printf("You can now do %s to build your site, %s to rebuild on file changes, or %s to start a server with live reloading.\n", color.HiBlueString(programName), color.HiBlueString(programName+" watch"), color.HiBlueString(programName+" serve"))
}
// -- build --
type Page struct {
// SourcePath is the path to the original .md file relative to the project root.
SourcePath string
// IsMarkdown is true when the file is a markdown file.
IsMarkdown bool
// Permalink is the final URL path for the page.
Permalink string
// RawContent is the raw file content.
RawContent []byte
// Content is the final HTML content after all processing.
Content template.HTML
// Metadata is the parsed YAML/TOML front matter.
Metadata map[string]any
	// Doc is the markdown document parsed during the first discovery pass (front matter, TOC).
Doc gast.Node
}
type Site struct {
Pages []*Page
}
type TemplateData struct {
Site Site
Page *Page
Config map[string]any
IsServing bool
}
var hasMmdc bool
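// maybeInitMermaidCDP prepares Mermaid rendering: if the mmdc CLI is available
// it is preferred; otherwise a headless-browser (CDP) compiler is started with
// the embedded mermaid.min.js.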
func maybeInitMermaidCDP(theme string) {
if hasMmdc {
return
}
_, err := exec.LookPath("mmdc")
if err == nil {
hasMmdc = true
return
}
mermaidCompiler, err = mermaidcdp.New(&mermaidcdp.Config{
JSSource: mermaidJSSource,
NoSandbox: noSandbox,
Theme: theme,
})
if err != nil {
printerr("Failed to initialize Mermaid with CDP: %v; falling back to clientside JS", err)
}
}
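// newMinifier builds a minify.M with handlers for CSS, HTML, SVG, JS, JSON and
// XML, plus template-aware HTML minifiers for ASP and EJS delimiters.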
func newMinifier() *minify.M {
m := minify.New()
m.AddFunc("text/css", css.Minify)
m.AddFunc("text/html", minifyHtml.Minify)
m.AddFunc("image/svg+xml", svg.Minify)
m.AddFuncRegexp(regexp.MustCompile("^(application|text)/(x-)?(java|ecma)script$"), js.Minify)
m.AddFuncRegexp(regexp.MustCompile("[/+]json$"), minifyJson.Minify)
m.AddFuncRegexp(regexp.MustCompile("[/+]xml$"), xml.Minify)
m.AddFunc("importmap", minifyJson.Minify)
m.AddFunc("speculationrules", minifyJson.Minify)
aspMinifier := &minifyHtml.Minifier{}
aspMinifier.TemplateDelims = [2]string{"<%", "%>"}
m.Add("text/asp", aspMinifier)
m.Add("text/x-ejs-template", aspMinifier)
return m
}
func minifyBuf(m *minify.M, text bytes.Buffer, mime string) bytes.Buffer {
var buf bytes.Buffer
err := m.Minify(mime, &buf, &text)
if err != nil {
printerr("Failed to minify %s: %v", mime, err)
return text
}
return buf
}
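// minifyTypes maps file extensions to the media type used to pick a minifier.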
var minifyTypes = map[string]string{
".css": "text/css",
".html": "text/html",
".htm": "text/html",
".svg": "image/svg+xml",
".js": "application/javascript",
".mjs": "application/javascript",
".cjs": "application/javascript",
".json": "application/json",
".xml": "application/xml",
".importmap": "importmap",
".speculationrules": "speculationrules",
".asp": "text/asp",
".ejs": "text/x-ejs-template",
}
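// minifyStaticFile copies srcPath to dstPath, minifying it when a minifier is
// registered for its extension, and preserves the source file mode.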
func minifyStaticFile(m *minify.M, srcPath, dstPath string, info os.FileInfo) error {
ext := strings.ToLower(filepath.Ext(srcPath))
if err := os.MkdirAll(filepath.Dir(dstPath), 0o755); err != nil {
return err
}
srcFile, err := os.Open(srcPath)
if err != nil {
return err
}
defer srcFile.Close()
outFile, err := os.Create(dstPath)
if err != nil {
return err
}
defer outFile.Close()
if mediaType, ok := minifyTypes[ext]; ok {
// a minifier is registered for the extension
writer := bufio.NewWriter(outFile)
if err := m.Minify(mediaType, writer, srcFile); err != nil {
return err
}
if err := writer.Flush(); err != nil {
return err
}
} else {
// just copy
if _, err := io.Copy(outFile, srcFile); err != nil {
return err
}
}
return os.Chmod(dstPath, info.Mode())
}
var timeFormats = map[string]string{
"Layout": time.Layout,
"ANSIC": time.ANSIC,
"UnixDate": time.UnixDate,
"RubyDate": time.RubyDate,
"RFC822": time.RFC822,
"RFC822Z": time.RFC822Z,
"RFC850": time.RFC850,
"RFC1123": time.RFC1123,
"RFC1123Z": time.RFC1123Z,
"RFC3339": time.RFC3339,
"RFC3339Nano": time.RFC3339Nano,
"Kitchen": time.Kitchen,
"Stamp": time.Stamp,
"StampMilli": time.StampMilli,
"StampMicro": time.StampMicro,
"StampNano": time.StampNano,
"DateTime": time.DateTime,
"DateOnly": time.DateOnly,
"TimeOnly": time.TimeOnly,
}
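// getValueByPath resolves a dotted key path against a map, a *Page's metadata,
// or exported struct fields, returning nil when the path cannot be resolved.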
func getValueByPath(item any, path string) any {
if item == nil {
return nil
}
// handle map
if m, ok := item.(map[string]any); ok {
return queryMapOrDefault[any](m, nil, strings.Split(path, ".")...)
}
// handle *Page
if page, ok := item.(*Page); ok {
return queryMapOrDefault[any](page.Metadata, nil, strings.Split(path, ".")...)
}
// handle struct
val := reflect.ValueOf(item)
for val.Kind() == reflect.Ptr {
val = val.Elem()
}
if val.Kind() != reflect.Struct {
return nil
}
parts := strings.Split(path, ".")
current := val
for _, part := range parts {
if current.Kind() == reflect.Ptr {
current = current.Elem()
}
if current.Kind() != reflect.Struct {
return nil
}
field := current.FieldByName(strings.Title(part))
if !field.IsValid() {
return nil
}
current = field
}
if current.IsValid() {
return current.Interface()
}
return nil
}
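// mathOp applies intOp or floatOp to a and b depending on their kind; both
// arguments must share the same kind and be int or float64.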
func mathOp(a, b any, intOp func(int, int) int, floatOp func(float64, float64) float64) (any, error) {
av := reflect.ValueOf(a)
bv := reflect.ValueOf(b)
if av.Kind() != bv.Kind() {
return nil, fmt.Errorf("type mismatch")
}
switch av.Kind() {
case reflect.Int:
return intOp(av.Interface().(int), bv.Interface().(int)), nil
case reflect.Float64:
return floatOp(av.Interface().(float64), bv.Interface().(float64)), nil
default:
return nil, fmt.Errorf("unsupported type")
}
}
type DirEntry struct {
Name string
IsDir bool
}
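// stdFuncMap returns the template function map available to pages and layouts:
// filesystem, time, random, string/URL, math, collection, JSON and query
// helpers, expression evaluation, debug printing, and toc for the current page.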
func (ctx *buildContext) stdFuncMap(page *Page, allPages []*Page, markdown goldmark.Markdown) textTemplate.FuncMap {
return textTemplate.FuncMap{
// FS utilities
"readDir": func(dir string) []*Page {
var results []*Page
for _, p := range allPages {
relPath, _ := filepath.Rel(pagesDir, p.SourcePath)
if strings.HasPrefix(filepath.ToSlash(relPath), dir+"/") {
results = append(results, p)
}
}
return results
},
"listDir": func(dir string) ([]DirEntry, error) {
entries, err := os.ReadDir(dir)
if err != nil {
return nil, err
}
results := make([]DirEntry, 0, len(entries))
for _, entry := range entries {
results = append(results, DirEntry{
Name: entry.Name(),
IsDir: entry.IsDir(),
})
}
return results, nil
},
"readFile": func(path string) (string, error) {
content, err := os.ReadFile(filepath.Clean(path))
return string(content), err
},
"pathExists": func(path string) bool {
_, err := os.Stat(filepath.Clean(path))
return err == nil
},
// time utilities
"formatTime": func(format string, v any) string {
var t time.Time
var err error
switch val := v.(type) {
case time.Time:
t = val
case string:
for _, layout := range timeFormats {
t, err = time.Parse(layout, val)
if err == nil {
break
}
}
default:
return fmt.Sprintf("%v", v) // IDK the type, return the original
}
if err != nil {
return fmt.Sprintf("%v", v)
}
realFormat, ok := timeFormats[format]
if !ok {
realFormat = format
}
return t.UTC().Format(realFormat)
},
"now": func() time.Time { return time.Now().UTC() },
// random utilities
"rand": func(a, b any) (any, error) {
av := reflect.ValueOf(a)
bv := reflect.ValueOf(b)
switch av.Kind() {
case reflect.Int:
if bv.Kind() != reflect.Int {
return nil, fmt.Errorf("rand expects both arguments to be same type")
}
min, max := av.Int(), bv.Int()
return rand.IntN(int(max-min+1)) + int(min), nil
case reflect.Float64:
if bv.Kind() != reflect.Float64 {
return nil, fmt.Errorf("rand expects both arguments to be same type")
}
min, max := av.Float(), bv.Float()
return min + rand.Float64()*(max-min), nil
default:
return nil, fmt.Errorf("rand requires int or float64 arguments")
}
},
"choice": func(choices ...any) any {
if len(choices) == 0 {
return nil
}
return choices[rand.IntN(len(choices))]
},
"shuffle": func(slice []any) []any {
shuffled := make([]any, len(slice))
copy(shuffled, slice)
rand.Shuffle(len(shuffled), func(i, j int) {
shuffled[i], shuffled[j] = shuffled[j], shuffled[i]
})
return shuffled
},
// string & URL utilities
"concatURL": func(base string, elements ...string) string {
u, err := url.Parse(base)
if err != nil {
allSegments := make([]string, 1, 1+len(elements))
allSegments[0] = base
allSegments = append(allSegments, elements...)
return strings.Join(allSegments, "/")
}
u = u.JoinPath(elements...)
return u.String()
},
"joinPath": func(elem ...string) string { return filepath.Join(elem...) },
"truncate": func(maxLength int, s string) string {
if len(s) <= maxLength {
return s
}
return s[:maxLength] + "…"
},
"pluralize": func(count int, singular, plural string) string {
if count == 1 {
return fmt.Sprintf("%d %s", count, singular)
}
return fmt.Sprintf("%d %s", count, plural)
},
"markdownify": func(s string) (string, error) {
var buf bytes.Buffer
if err := ctx.MarkdownParser.Convert([]byte(s), &buf); err != nil {
return "", err
}
return buf.String(), nil
},
"replace": func(old, new, s string) string { return strings.ReplaceAll(s, old, new) },
"startsWith": func(prefix, s string) bool { return strings.HasPrefix(s, prefix) },
"endsWith": func(suffix, s string) bool { return strings.HasSuffix(s, suffix) },
"repeat": func(count int, s string) string { return strings.Repeat(s, count) },
"toUpper": func(s string) string { return strings.ToUpper(s) },
"toLower": func(s string) string { return strings.ToLower(s) },
"title": func(s string) string { return cases.Title(language.English).String(s) },
"strip": func(s string) string { return strings.TrimSpace(s) },
"split": func(sep, s string) []string { return strings.Split(s, sep) },
"fields": func(s string) []string { return strings.Fields(s) },
"count": func(substr, s string) int { return strings.Count(s, substr) },
"regexMatch": func(pattern, s string) (bool, error) {
reg, err := regexp.Compile(pattern)
if err != nil {
return false, err
}
return reg.MatchString(s), nil
},
"regexReplace": func(pattern, new, s string) (string, error) {
reg, err := regexp.Compile(pattern)
if err != nil {
return "", err
}
return reg.ReplaceAllString(s, new), nil
},
// math utilities
"add": func(a, b any) (any, error) {
return mathOp(a, b, func(ai, bi int) int { return ai + bi }, func(af, bf float64) float64 { return af + bf })
},
"subtract": func(a, b any) (any, error) {
return mathOp(a, b, func(ai, bi int) int { return ai - bi }, func(af, bf float64) float64 { return af - bf })
},
"multiply": func(a, b any) (any, error) {
return mathOp(a, b, func(ai, bi int) int { return ai * bi }, func(af, bf float64) float64 { return af * bf })
},
"divide": func(a, b any) (any, error) {
return mathOp(a, b,
func(ai, bi int) int {
if bi == 0 {
return 0
}
return ai / bi
},
func(af, bf float64) float64 {
if bf == 0 {
return 0
}
return af / bf
})
},
"max": func(a, b any) (any, error) {
return mathOp(a, b,
func(ai, bi int) int { return max(ai, bi) },
func(af, bf float64) float64 { return math.Max(af, bf) })
},
"min": func(a, b any) (any, error) {
return mathOp(a, b,
func(ai, bi int) int { return min(ai, bi) },
func(af, bf float64) float64 { return math.Min(af, bf) })
},
// array utilities
"contains": func(value, item any) (bool, error) {
switch val := value.(type) {
case []any:
return slices.Contains(val, item), nil
case string:
str, ok := item.(string)
if !ok {
return false, fmt.Errorf("contains: item must be a string but got %T", item)
}
return strings.Contains(val, str), nil
default:
return false, fmt.Errorf("contains: unsupported type %T for value %v", value, value)
}
},
"first": func(slice []any) any {
if len(slice) == 0 {
return nil
}
return slice[0]
},
"last": func(slice []any) any {
if len(slice) == 0 {
return nil
}
return slice[len(slice)-1]
},
"reverse": func(slice []any) []any {
result := make([]any, len(slice))
for i, v := range slice {
result[len(slice)-1-i] = v
}
return result
},
// type conversion utilities
"toString": func(v any) string { return fmt.Sprintf("%v", v) },
"toInt": func(v any) int {
switch val := v.(type) {
case int:
return val
case float64:
return int(val)
case string:
if i, err := strconv.Atoi(val); err == nil {
return i
}
}
return 0
},
"toFloat": func(v any) float64 {
switch val := v.(type) {
case float64:
return val
case int:
return float64(val)
case string:
if f, err := strconv.ParseFloat(val, 64); err == nil {
return f
}
}
return 0
},
// conditional utilities
"default": func(def, val any) any {
if val == nil || val == "" || val == false {
return def
}
return val
},
"ternary": func(condition bool, trueVal, falseVal any) any {
if condition {
return trueVal
}
return falseVal
},
// json utilities
"toJSON": func(v any) string {
b, err := json.Marshal(v)
if err != nil {
return fmt.Sprintf("error: %v", err)
}
return string(b)
},
"fromJSON": func(s string) any {
var result any
if err := json.Unmarshal([]byte(s), &result); err != nil {
return nil
}
return result
},
// query utilities
"sortBy": func(key string, order string, collection any) any {
val := reflect.ValueOf(collection)
if val.Kind() != reflect.Slice {
return collection
}
length := val.Len()
items := make([]any, length)
for i := range length {
items[i] = val.Index(i).Interface()
}
sort.SliceStable(items, func(i, j int) bool {
valI := getValueByPath(items[i], key)
valJ := getValueByPath(items[j], key)
strI := fmt.Sprintf("%v", valI)
strJ := fmt.Sprintf("%v", valJ)
if strings.ToLower(order) == "desc" {
return strI > strJ
}
return strI < strJ
})
return items
},
"where": func(key string, value any, collection any) any {
val := reflect.ValueOf(collection)
if val.Kind() != reflect.Slice {
return collection
}
length := val.Len()
var result []any
for i := range length {
item := val.Index(i).Interface()
if itemVal := getValueByPath(item, key); itemVal == value {
result = append(result, item)
}
}
return result
},
"groupBy": func(key string, collection any) map[string][]any {
val := reflect.ValueOf(collection)
if val.Kind() != reflect.Slice {
return map[string][]any{"": {collection}}
}
groups := make(map[string][]any)
length := val.Len()
for i := range length {
item := val.Index(i).Interface()
groupKey := fmt.Sprintf("%v", getValueByPath(item, key))
groups[groupKey] = append(groups[groupKey], item)
}
return groups
},
"select": func(key string, collection any) []any {
val := reflect.ValueOf(collection)
if val.Kind() != reflect.Slice {
return []any{getValueByPath(collection, key)}
}
var result []any
length := val.Len()
for i := range length {
item := val.Index(i).Interface()
if val := getValueByPath(item, key); val != nil {
result = append(result, val)
}
}
return result
},
"has": func(key string, item any) bool {
return getValueByPath(item, key) != nil
},
// print & formatting utilities
"print": func(a ...any) string {
s := spew.Sdump(a)
fmt.Println(s)
return s
},
"sprint": func(format string, a ...any) string {
return spew.Sprintf(format, a)
},
// expression evaluation
"expr": func(expression string, data ...any) (any, error) {
env := map[string]any{
"Site": ctx.Site,
"Config": ctx.Config,
}
if len(data) > 0 {
if userEnv, ok := data[0].(map[string]any); ok {
maps.Copy(env, userEnv)
}
}
program, err := expr.Compile(expression, expr.Env(env))
if err != nil {
return nil, err
}
result, err := expr.Run(program, env)
if err != nil {
return nil, err
}
return result, nil
},
// other
"dict": func(values ...any) (map[string]any, error) {
if len(values)%2 != 0 {
return nil, fmt.Errorf("dict expects an even number of arguments")
}
m := make(map[string]any)
for i := 0; i < len(values); i += 2 {
key, ok := values[i].(string)
if !ok {
return nil, fmt.Errorf("dict keys must be strings")
}
m[key] = values[i+1]
}
return m, nil
},
"toc": func() string {
if page == nil || page.Doc == nil {
return "> [!CAUTION]\n> Couldn't generate table of contents: failed to parse this file"
}
tree, err := toc.Inspect(page.Doc, page.RawContent, toc.Compact(true))
if err != nil {
return "> [!CAUTION]\n> Couldn't generate table of contents: " + err.Error()
}
list := toc.RenderList(tree)
if list == nil {
return ""
}
var out bytes.Buffer
if err := markdown.Renderer().Render(&out, page.RawContent, list); err != nil {
return "> [!CAUTION]\n> Couldn't generate table of contents: " + err.Error()
}
return out.String()
},
}
}
type buildContext struct {
Config map[string]any
Templates *template.Template
MarkdownParser goldmark.Markdown
Minifier *minify.M
Site Site
IsServing bool
}
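// build compiles the whole site: it parses the config, discovers pages, loads
// layouts, renders every page into outputDir and (optionally) minifies and
// copies static files. When isServing is false the output directory is wiped
// first.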
func build(isServing, copyStatic bool) {
if !checkAllDirsExist(pagesDir, layoutsDir) {
printerr("No project files in current directory\n")
usage()
os.Exit(1)
}
if !isServing {
_ = os.RemoveAll(outputDir)
}
if err := os.MkdirAll(outputDir, 0o755); err != nil {
printerr("Failed to create output directory: %v", err)
os.Exit(1)
}
start := time.Now()
ctx := &buildContext{
Config: parseConfig(),
IsServing: isServing,
Minifier: newMinifier(),
}
if err := ctx.discoverAndParsePages(); err != nil {
printerr("Failed to discover pages: %v", err)
}
if err := ctx.loadTemplates(); err != nil {
printerr("Failed to load templates: %v", err)
}
for _, page := range ctx.Site.Pages {
if err := ctx.compileAndRenderPage(page); err != nil {
printerr("Failed to process page %s: %v", page.SourcePath, err)
}
}
// copy static files
if copyStatic {
if err := ctx.copyStaticFiles(); err != nil {
printerr("Failed to copy static files: %v", err)
}
}
fmt.Printf("%s in %s.\n", color.HiGreenString("Build complete"), time.Since(start))
}
func (ctx *buildContext) loadTemplates() error {
t := template.New("").Funcs(ctx.stdFuncMap(nil, ctx.Site.Pages, ctx.MarkdownParser))
var layoutFiles []string
err := filepath.Walk(layoutsDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if !info.IsDir() && strings.HasSuffix(path, ".html") {
layoutFiles = append(layoutFiles, path)
}
return nil
})
if err != nil {
return fmt.Errorf("could not walk layouts directory: %w", err)
}
if len(layoutFiles) > 0 {
t, err = t.ParseFiles(layoutFiles...)
if err != nil {
return fmt.Errorf("could not parse layout files: %w", err)
}
}
ctx.Templates = t
return nil
}
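// discoverAndParsePages walks pagesDir, reads every file, parses front matter
// with a lightweight markdown parser, and computes each page's permalink:
// index.md files map to their directory, other markdown files to an .html
// path, and everything else keeps its relative path. It also lazily starts the
// Mermaid compiler when a page contains a mermaid code fence.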
func (ctx *buildContext) discoverAndParsePages() error {
var allPages []*Page
discoverParser := goldmark.New(
goldmark.WithRendererOptions(html.WithUnsafe()),
goldmark.WithParserOptions(parser.WithAutoHeadingID(), parser.WithAttribute()),
goldmark.WithExtensions(
extension.Typographer,
extension.CJK,
emoji.Emoji,
extension.GFM,
enclaveMark.New(),
&frontmatter.Extender{Mode: frontmatter.SetMetadata},
),
)
err := filepath.Walk(pagesDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
fileContent, err := os.ReadFile(path)
if err != nil {
return fmt.Errorf("failed to read %s: %w", path, err)
}
page := &Page{
SourcePath: path,
IsMarkdown: strings.HasSuffix(path, ".md"),
RawContent: fileContent,
Metadata: make(map[string]any),
}
page.Doc = discoverParser.Parser().Parse(text.NewReader(page.RawContent))
page.Metadata = page.Doc.OwnerDocument().Meta()
if _, ok := page.Metadata["title"]; !ok && page.IsMarkdown {
page.Metadata["title"] = "Untitled"
}
relPath, _ := filepath.Rel(pagesDir, path)
if page.IsMarkdown {
if strings.HasSuffix(relPath, "index.md") {
dir := filepath.ToSlash(filepath.Dir(relPath))
if dir == "." {
dir = ""
}
page.Permalink = dir + "/"
} else {
outPath := strings.TrimSuffix(relPath, filepath.Ext(relPath))
page.Permalink = filepath.ToSlash(outPath) + ".html"
}
} else {
page.Permalink = filepath.ToSlash(relPath)
}
if mermaidCompiler == nil && strings.Contains(string(page.RawContent), "```mermaid") {
maybeInitMermaidCDP(queryMapOrDefault(ctx.Config, "", "mermaid", "theme"))
}
allPages = append(allPages, page)
return nil
})
if err != nil {
return fmt.Errorf("error during page discovery: %w", err)
}
ctx.Site.Pages = allPages
return nil
}
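// compileAndRenderPage executes the page's raw content as a text/template with
// the standard function map, converts markdown to HTML, and hands the result
// to renderWithLayout for the final write.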
func (ctx *buildContext) compileAndRenderPage(page *Page) error {
if ctx.MarkdownParser == nil {
ctx.MarkdownParser = createMarkdownParser(ctx.Config)
}
tmpl, err := textTemplate.New(page.SourcePath).Funcs(ctx.stdFuncMap(page, ctx.Site.Pages, ctx.MarkdownParser)).Parse(string(page.RawContent))
if err != nil {
return fmt.Errorf("failed to parse markdown template: %w", err)
}
var processedContent bytes.Buffer
templateData := TemplateData{Site: ctx.Site, Page: page, Config: ctx.Config, IsServing: ctx.IsServing}
if err := tmpl.Execute(&processedContent, templateData); err != nil {
return fmt.Errorf("failed to execute markdown template: %w", err)
}
if page.IsMarkdown {
var finalContent bytes.Buffer
if err := ctx.MarkdownParser.Convert(processedContent.Bytes(), &finalContent); err != nil {
return fmt.Errorf("failed to convert markdown: %w", err)
}
page.Content = template.HTML(finalContent.String())
} else {
page.Content = template.HTML(processedContent.String())
}
outputPath := filepath.Join(outputDir, strings.TrimPrefix(page.Permalink, "/"))
if strings.HasSuffix(page.Permalink, "/") {
outputPath = filepath.Join(outputPath, "index.html")
}
return ctx.renderWithLayout(page, outputPath)
}
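// renderWithLayout wraps markdown pages in their layout (front matter key
// "layout", default base.html, "none" to skip), minifies the output when the
// extension has a registered minifier, and writes it to outputPath.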
func (ctx *buildContext) renderWithLayout(page *Page, outputPath string) error {
layoutName := "base.html"
useLayout := true
if customLayout, ok := page.Metadata["layout"].(string); ok {
if customLayout == "none" {
useLayout = false
} else {
layoutName = customLayout + ".html"
}
}
var finalBuf bytes.Buffer
if useLayout && page.IsMarkdown {
templateData := TemplateData{Site: ctx.Site, Page: page, Config: ctx.Config, IsServing: ctx.IsServing}
err := ctx.Templates.ExecuteTemplate(&finalBuf, layoutName, templateData)
if err != nil {
if ctx.Templates.Lookup(layoutName) == nil {
printwarn("Layout `%s` not found, falling back to base.html", layoutName)
err = ctx.Templates.ExecuteTemplate(&finalBuf, "base.html", templateData)
}
if err != nil {
return fmt.Errorf("failed to render layout for %s: %w", page.SourcePath, err)
}
}
} else {
finalBuf.WriteString(string(page.Content))
}
// write output
if err := os.MkdirAll(filepath.Dir(outputPath), 0o755); err != nil {
return fmt.Errorf("failed to create directory for %s: %w", outputPath, err)
}
file, err := os.Create(outputPath)
if err != nil {
return fmt.Errorf("failed to create file %s: %w", outputPath, err)
}
defer file.Close()
// minify based on file extension
ext := filepath.Ext(outputPath)
if mime, ok := minifyTypes[ext]; ok {
minified := minifyBuf(ctx.Minifier, finalBuf, mime)
_, err = io.Copy(file, &minified)
} else {
_, err = io.Copy(file, &finalBuf)
}
return err
}
func (ctx *buildContext) copyStaticFiles() error {
return filepath.Walk(staticDir, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
dstPath := filepath.Join(outputDir, path)
return minifyStaticFile(ctx.Minifier, path, dstPath, info)
})
}
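// parseConfig loads Trash.toml into a map, returning an empty map (with a
// warning) when the file is missing.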
func parseConfig() map[string]any {
cfg := make(map[string]any)
cfgFile, err := os.Open(trashConfigFilename)
if err != nil {
printwarn("No `%s` config file found", trashConfigFilename)
return cfg
}
defer cfgFile.Close()
if err := toml.NewDecoder(cfgFile).Decode(&cfg); err != nil {
printerr("Error parsing config file: %v", err)
}
return cfg
}
type anchorTexter struct {
text []byte
}
func (a *anchorTexter) AnchorText(*anchor.HeaderInfo) []byte {
if a == nil {
return nil
}
return a.text
}
type mermaidCliBuilder struct{}
func (cli mermaidCliBuilder) CommandContext(ctx context.Context, args ...string) *exec.Cmd {
// workaround for https://github.com/abhinav/goldmark-mermaid/issues/141
if noSandbox {
const puppeteerConfig = "puppeteer-config.json"
_, err := os.Stat(puppeteerConfig)
if err != nil {
if os.IsNotExist(err) {
f, err := os.Create(puppeteerConfig)
if err != nil {
printerr("Error creating %s: %v", puppeteerConfig, err)
}
defer f.Close()
_, err = f.WriteString(`{
"args": [
"--no-sandbox"
]
}`)
if err != nil {
printerr("Error writing to %s: %v", puppeteerConfig, err)
}
}
}
args = append(args, "-p"+puppeteerConfig)
}
return exec.CommandContext(ctx, "mmdc", args...)
}
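// emojiRenderer renders an emoji node as a Twemoji <img> using
// config.TwemojiTemplate. ZWJ sequences keep every code point in the asset
// filename; for other emoji, variation selectors (U+FE0E/U+FE0F) are dropped.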
func emojiRenderer(w util.BufWriter, source []byte, node *east.Emoji, config *emoji.RendererConfig) {
s := " /"
if !config.XHTML {
s = ""
}
hasZWJ := slices.Contains(node.Value.Unicode, 0x200d)
var filename string
if hasZWJ {
values := make([]string, len(node.Value.Unicode))
for i, r := range node.Value.Unicode {
values[i] = fmt.Sprintf("%x", r)
}
filename = strings.Join(values, "-")
} else {
		// for non-ZWJ sequences, filter out variation selectors
filenameValues := make([]string, 0, len(node.Value.Unicode))
for _, r := range node.Value.Unicode {
if r != '\uFE0E' && r != '\uFE0F' {
filenameValues = append(filenameValues, fmt.Sprintf("%x", r))
}
}
filename = strings.Join(filenameValues, "-")
}
var emojiText strings.Builder
emojiText.Grow(len(node.Value.Unicode))
for _, r := range node.Value.Unicode {
emojiText.WriteRune(r)
}
fmt.Fprintf(
w,
config.TwemojiTemplate,
util.EscapeHTML(util.StringToReadOnlyBytes(node.Value.Name)), // 1: name
filename, // 2: filename
s, // 3: XHTML suffix
util.EscapeHTML(util.StringToReadOnlyBytes(emojiText.String())), // 4: unicode emoji
)
}
const defaultTwemojiTemplate = `<img class="emoji" draggable="false" alt="%[4]s" style="height:1em;vertical-align:middle;" src="https://cdn.jsdelivr.net/gh/jdecked/twemoji@latest/assets/72x72/%[2]s.png"%[3]s>`
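// createMarkdownParser builds the full goldmark pipeline used for rendering:
// GFM, footnotes, typographer, emoji, MathML, D2, Mermaid, pikchr, callouts,
// figures, heading anchors and optional syntax highlighting, all tunable via
// Trash.toml.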
func createMarkdownParser(config map[string]any) goldmark.Markdown {
	var anchorText *string
	if text, exists := queryMap(config, "anchor", "text"); exists {
		if str, ok := text.(string); ok {
			anchorText = &str
		}
	}
anchorPosition := anchor.After
if queryMapOrDefault(config, "", "anchor", "position") == "before" {
anchorPosition = anchor.Before
}
d2Theme := queryMapOrDefault[int64](config, -1, "d2", "theme")
var d2ThemeId *int64
if d2Theme >= 0 {
d2ThemeId = &d2Theme
}
var texter anchor.Texter
if anchorText != nil {
texter = &anchorTexter{text: []byte(*anchorText)}
}
var compiler mermaid.Compiler
if mermaidCompiler != nil {
compiler = mermaidCompiler
}
var emojiExt goldmark.Extender = emoji.Emoji
if queryMapOrDefault(config, false, "emoji", "custom") {
emojiExt = emoji.New(
emoji.WithRenderingMethod(emoji.Func),
emoji.WithTwemojiTemplate(queryMapOrDefault(config, defaultTwemojiTemplate, "emoji", "template")),
emoji.WithRendererFunc(emojiRenderer),
)
}
extensions := []goldmark.Extender{
extension.GFM,
extension.DefinitionList,
extension.Footnote,
extension.Typographer,
extension.CJK,
emojiExt,
treeblood.MathML(),
&frontmatter.Extender{},
&d2.Extender{Sketch: queryMapOrDefault(config, true, "d2", "sketch"), ThemeID: d2ThemeId},
&mermaid.Extender{
Compiler: compiler,
CLI: &mermaidCliBuilder{},
Theme: queryMapOrDefault(config, "", "mermaid", "theme"),
},
&pikchr.Extender{DarkMode: queryMapOrDefault(config, false, "pikchr", "dark")},
enclave.New(&enclaveCore.Config{}),
enclaveCallout.New(),
enclaveMark.New(),
&fences.Extender{},
figure.Figure,
&anchor.Extender{
Texter: texter,
Position: anchorPosition,
},
subtext.New(),
}
if queryMapOrDefault(config, true, "highlight", "enabled") {
extensions = append(extensions, highlighting.NewHighlighting(
highlighting.WithFormatOptions(
chromahtml.ClassPrefix(queryMapOrDefault(config, "highlight-", "highlight", "prefix")),
chromahtml.WithClasses(true),
chromahtml.WithLineNumbers(queryMapOrDefault(config, false, "highlight", "gutter", "enabled")),
chromahtml.LineNumbersInTable(queryMapOrDefault(config, false, "highlight", "gutter", "table")),
),
))
}
return goldmark.New(
goldmark.WithRendererOptions(html.WithUnsafe()),
goldmark.WithParserOptions(parser.WithAutoHeadingID(), parser.WithAttribute()),
goldmark.WithExtensions(extensions...),
)
}
// -- watch --
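// watchAndRebuild watches the pages, static and layouts directories (and
// Trash.toml) with fsnotify and triggers a debounced rebuild on changes,
// calling onRebuild after each build.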
func watchAndRebuild(onRebuild func()) {
watcher, err := fsnotify.NewWatcher()
if err != nil {
printerr("Failed to create file watcher: %v", err)
os.Exit(1)
}
defer watcher.Close()
for _, path := range []string{pagesDir, staticDir, layoutsDir, trashConfigFilename} {
		filepath.Walk(path, func(path string, info os.FileInfo, err error) error {
			if err != nil {
				// the path may not exist (e.g. an optional Trash.toml); skip it
				return nil
			}
			if info.IsDir() || filepath.Base(path) == trashConfigFilename {
				if err := watcher.Add(path); err != nil {
					printerr("Failed to watch `%s`: %v", path, err)
				}
			}
			return nil
		})
}
var (
rebuildTimer *time.Timer
mu sync.Mutex
)
debounceDuration := 250 * time.Millisecond
for {
select {
case event, ok := <-watcher.Events:
if !ok {
return
}
if event.Has(fsnotify.Write) || event.Has(fsnotify.Create) || event.Has(fsnotify.Remove) || event.Has(fsnotify.Rename) {
mu.Lock()
if rebuildTimer != nil {
rebuildTimer.Stop()
}
rebuildTimer = time.AfterFunc(debounceDuration, func() {
build(true, strings.HasPrefix(event.Name, staticDir+string(filepath.Separator)))
onRebuild()
})
mu.Unlock()
}
case err, ok := <-watcher.Errors:
if !ok {
return
}
printerr("Watcher error: %v", err)
}
}
}
func watchCmd() {
build(false, true)
fmt.Println("Watching for changes...")
watchAndRebuild(func() {})
}
// -- serve --
var upgrader = websocket.Upgrader{
ReadBufferSize: 1024,
WriteBufferSize: 1024,
CheckOrigin: func(r *http.Request) bool { return true },
}
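// Hub tracks connected live-reload WebSocket clients and broadcasts reload
// messages to all of them.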
type Hub struct {
clients map[*websocket.Conn]bool
broadcast chan []byte
register chan *websocket.Conn
unregister chan *websocket.Conn
mu sync.Mutex
}
func newHub() *Hub {
return &Hub{
clients: make(map[*websocket.Conn]bool),
broadcast: make(chan []byte),
register: make(chan *websocket.Conn),
unregister: make(chan *websocket.Conn),
}
}
func (h *Hub) run() {
for {
select {
case conn := <-h.register:
h.mu.Lock()
h.clients[conn] = true
h.mu.Unlock()
case conn := <-h.unregister:
h.mu.Lock()
if _, ok := h.clients[conn]; ok {
delete(h.clients, conn)
conn.Close()
}
h.mu.Unlock()
case message := <-h.broadcast:
h.mu.Lock()
for conn := range h.clients {
if err := conn.WriteMessage(websocket.TextMessage, message); err != nil {
printerr("Error broadcasting to client: %v", err)
}
}
h.mu.Unlock()
}
}
}
func serveCmd() {
// initial build
build(true, true)
hub := newHub()
go hub.run()
go watchAndRebuild(func() {
hub.broadcast <- []byte("reload")
})
// live reload websocket handler
http.HandleFunc("/ws", func(w http.ResponseWriter, r *http.Request) {
conn, err := upgrader.Upgrade(w, r, nil)
if err != nil {
printerr("Failed to upgrade WebSocket connection: %v", err)
return
}
hub.register <- conn
go func() {
defer func() {
hub.unregister <- conn
}()
for {
if _, _, err := conn.NextReader(); err != nil {
break
}
}
}()
})
	fs := http.FileServer(http.Dir(outputDir))
http.Handle("/", fs)
port := "8080"
fmt.Printf("%s on http://localhost:%s\n", color.HiCyanString("Server starting"), port)
fmt.Println("Watching for changes...")
if err := http.ListenAndServe(":"+port, nil); err != nil {
printerr("Failed to start server: %v", err)
os.Exit(1)
}
}
/*
This is free and unencumbered software released into the public domain.
Anyone is free to copy, modify, publish, use, compile, sell, or
distribute this software, either in source code form or as a compiled
binary, for any purpose, commercial or non-commercial, and by any
means.
In jurisdictions that recognize copyright laws, the author or authors
of this software dedicate any and all copyright interest in the
software to the public domain. We make this dedication for the benefit
of the public at large and to the detriment of our heirs and
successors. We intend this dedication to be an overt act of
relinquishment in perpetuity of all present and future rights to this
software under copyright law.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
For more information, please refer to <https://unlicense.org/>
*/