diff qwb.go @ 5:125e599b1217

agbubu
author Atarwn Gard <a@qwa.su>
date Tue, 17 Mar 2026 22:18:02 +0500
parents ce2b6dde4c10
children bd0d3a189f5b
line wrap: on
line diff
--- a/qwb.go	Sat Mar 14 14:01:51 2026 +0500
+++ b/qwb.go	Tue Mar 17 22:18:02 2026 +0500
@@ -16,248 +16,69 @@
 <meta charset="UTF-8">
 <meta name="viewport" content="width=device-width, initial-scale=1">
 <title>{{TITLE}}</title>
-<link rel="stylesheet" href="{{CSS}}">
+<link rel="stylesheet" href="/x.css"> 
 </head>
 <body>
 <nav>{{NAV}}</nav>
-<header><h1>{{SITE_TITLE}}</h1></header>
+<header><h1>{{PAGE_TITLE}}</h1></header>
 <main>{{CONTENT}}</main>
-<footer>
-<p>{{FOOTER_TEXT}}</p>
-<p>Built with <a href="https://hg.reactionary.software/qwb">qwb</a></p>
-</footer>
+<footer><p>{{FOOTER_TEXT}}</p></footer>
 </body>
 </html>`
 
// config holds the site-wide settings read from qwb.ini (see loadcfg).
type config struct {
	// SiteTitle feeds the "{{TITLE}}" slot ("Site | Page"); FooterText fills
	// "{{FOOTER_TEXT}}". Defaults are "My Site" and "© 2026" (set in loadcfg).
	SiteTitle, FooterText string
}
 
-type section struct {
// sect is one directory of the source tree: a navigable section that may
// contain pages, nested sub-sections, and an optional index.md.
type sect struct {
	title string // display name derived from the directory name

	href string // nav link target, e.g. "/dir/index.html", or an external URL override

	pages []page // non-index *.md files directly in this directory

	hasIndex bool // true when the directory has index.md; only then is href rendered as a link

	children []sect // nested subdirectories, in os.ReadDir (lexical) order
}
 
// page is a single rendered markdown page reachable from the navigation.
type page struct {
	title string // display title derived from the file name
	href  string // site-absolute link, e.g. "/dir/name.html", or an external URL override
}
 
-func ParseINI(r io.Reader) (map[string]map[string]string, error) {
-	res := make(map[string]map[string]string)
-	sec := "default"
-	scanner := bufio.NewScanner(r)
-	for scanner.Scan() {
-		line := strings.TrimSpace(scanner.Text())
-		if line == "" || strings.HasPrefix(line, ";") || strings.HasPrefix(line, "#") {
-			continue
-		}
-		if strings.HasPrefix(line, "[") && strings.HasSuffix(line, "]") {
-			sec = line[1 : len(line)-1]
-			if res[sec] == nil {
-				res[sec] = make(map[string]string)
-			}
+func parseini(r io.Reader) map[string]string {
+	res := make(map[string]string)
+	sc := bufio.NewScanner(r)
+	for sc.Scan() {
+		line := strings.TrimSpace(sc.Text())
+		if line == "" || strings.HasPrefix(line, ";") || strings.HasPrefix(line, "#") || strings.HasPrefix(line, "[") {
 			continue
 		}
 		if k, v, ok := strings.Cut(line, "="); ok {
-			if res[sec] == nil {
-				res[sec] = make(map[string]string)
-			}
-			res[sec][strings.TrimSpace(k)] = strings.TrimSpace(v)
+			res[strings.TrimSpace(k)] = strings.TrimSpace(v)
 		}
 	}
-	return res, scanner.Err()
+	return res
 }
 
-func loadconfig(src string) config {
-	cfg := config{
-		headertext: "My Site",
-		footertext: "&copy; Me",
-		cssfile:    "/style.css",
-	}
+func loadcfg(src string) config {
+	cfg := config{SiteTitle: "My Site", FooterText: "© 2026"}
 	f, err := os.Open(filepath.Join(src, "qwb.ini"))
-	if err != nil {
-		return cfg
-	}
-	defer f.Close()
-	ini, err := ParseINI(f)
-	if err != nil {
-		return cfg
-	}
-	s := ini["site"]
-	set := func(key string, target *string) {
-		if v, ok := s[key]; ok {
-			*target = v
+	if err == nil {
+		defer f.Close()
+		ini := parseini(f)
+		if v, ok := ini["SiteTitle"]; ok {
+			cfg.SiteTitle = v
+		}
+		if v, ok := ini["FooterText"]; ok {
+			cfg.FooterText = v
 		}
 	}
-	set("header",   &cfg.headertext)
-	set("footer",   &cfg.footertext)
-	set("style",    &cfg.cssfile)
-	set("template", &cfg.tplfile)
 	return cfg
 }
 
-func loadtemplate(cfg config) string {
-	if cfg.tplfile == "" {
-		return tpldefault
-	}
-	b, err := os.ReadFile(cfg.tplfile)
-	if err != nil {
-		fmt.Fprintf(os.Stderr, "cannot read template %s: %v", cfg.tplfile, err)
-		return tpldefault
-	}
-	return string(b)
-}
-
-func collectsections(root, siteTitle string) (section, []section) {
-	var subs []section
-	root_ := section{title: siteTitle}
-	entries, _ := os.ReadDir(root)
-	for _, e := range entries {
-		full := filepath.Join(root, e.Name())
-		if e.IsDir() {
-			s := scansection(full, root)
-			if s.index != "" || len(s.pages) > 0 {
-				subs = append(subs, s)
-			}
-			continue
-		}
-		if !strings.HasSuffix(e.Name(), ".md") {
-			continue
-		}
-		if e.Name() == "index.md" {
-			root_.index = "/index.html"
-		} else {
-			rel, _ := filepath.Rel(root, full)
-			root_.pages = append(root_.pages, page{
-				title: titlefromname(e.Name()),
-				path:  "/" + strings.TrimSuffix(rel, ".md") + ".html",
-			})
-		}
-	}
-	return root_, subs
-}
-
-func scansection(dir, root string) section {
-	s := section{title: titlefromname(filepath.Base(dir))}
-	entries, _ := os.ReadDir(dir)
-	for _, e := range entries {
-		if e.IsDir() || !strings.HasSuffix(e.Name(), ".md") {
-			continue
-		}
-		rel, _ := filepath.Rel(root, filepath.Join(dir, e.Name()))
-		htmlpath := "/" + strings.TrimSuffix(rel, ".md") + ".html"
-		if e.Name() == "index.md" {
-			s.index = htmlpath
-		} else {
-			s.pages = append(s.pages, page{titlefromname(e.Name()), htmlpath})
-		}
-	}
-	return s
-}
-
-func navlink(b *strings.Builder, p page, cur string) {
-	if p.path == cur {
-		fmt.Fprintf(b, "<li><b><a href=\"%s\">%s</a></b></li>\n", p.path, p.title)
-	} else {
-		fmt.Fprintf(b, "<li><a href=\"%s\">%s</a></li>\n", p.path, p.title)
-	}
-}
-
-func buildnav(root section, subs []section, cur string) string {
-	var b strings.Builder
-	b.WriteString("<ul>\n")
-	if root.index != "" {
-		navlink(&b, page{"Home", root.index}, cur)
-	}
-	for _, p := range root.pages {
-		navlink(&b, p, cur)
-	}
-	for _, s := range subs {
-		link := s.index
-		if link == "" && len(s.pages) > 0 {
-			link = s.pages[0].path
-		}
-		if link == "" {
-			continue
-		}
-		navlink(&b, page{s.title, link}, cur)
-	}
-	b.WriteString("</ul>\n")
-	for _, s := range subs {
-		if !sectioncontains(s, cur) {
-			continue
-		}
-		total := len(s.pages)
-		if s.index != "" {
-			total++
-		}
-		b.WriteString("<ul>\n")
-		if s.index != "" {
-			navlink(&b, page{"Index", s.index}, cur)
-		}
-		for _, p := range s.pages {
-			navlink(&b, p, cur)
-		}
-		b.WriteString("</ul>\n")
-		break
-	}
-	return b.String()
-}
-
-func sectioncontains(s section, cur string) bool {
-	if s.index == cur {
-		return true
-	}
-	for _, p := range s.pages {
-		if p.path == cur {
-			return true
-		}
-	}
-	return false
-}
-
-func extracth1(html string) (title, rest string) {
-	start := strings.Index(html, "<h1")
-	if start == -1 {
-		return "", html
-	}
-	close := strings.Index(html[start:], ">")
-	if close == -1 {
-		return "", html
-	}
-	content := start + close + 1
-	end := strings.Index(html[content:], "</h1>")
-	if end == -1 {
-		return "", html
-	}
-	title = html[content : content+end]
-	rest = html[:start] + html[content+end+len("</h1>"):]
-	return
-}
-
-func mdtohtml(path string) (string, error) {
-	cmd := exec.Command("lowdown", "-T", "html", "--html-no-skiphtml", "--html-no-escapehtml")
-	f, err := os.Open(path)
-	if err != nil {
-		return "", err
-	}
-	defer f.Close()
-	var buf strings.Builder
-	cmd.Stdin = f
-	cmd.Stdout = &buf
-	cmd.Stderr = os.Stderr
-	if err := cmd.Run(); err != nil {
-		return "", err
-	}
-	return buf.String(), nil
-}
-
-func titlefromname(name string) string {
+func gentitle(name string) string {
 	name = strings.TrimSuffix(name, ".md")
 	name = strings.ReplaceAll(name, "-", " ")
 	if len(name) > 0 {
@@ -266,95 +87,383 @@
 	return name
 }
 
-func fixlinks(s string) string {
-	return strings.NewReplacer(
-		".md)", ".html)",
-		".md\"", ".html\"",
-		".md'", ".html'",
-		".md#", ".html#",
-		".md>", ".html>",
-		".md ", ".html ",
-		".md,", ".html,",
-	).Replace(s)
// md2html converts the markdown file at path to an HTML fragment by piping
// it through the external "lowdown" tool (must be on PATH). A leading
// <h1>…</h1> is stripped from the output, since the title is rendered
// separately via the {{PAGE_TITLE}} template slot. lowdown's stderr is not
// captured, so diagnostics from the tool are lost.
func md2html(path string) (string, error) {
	cmd := exec.Command("lowdown", "-Thtml", "--html-no-skiphtml")
	f, err := os.Open(path)
	if err != nil {
		return "", err
	}
	defer f.Close()
	var buf strings.Builder
	cmd.Stdin, cmd.Stdout = f, &buf
	if err := cmd.Run(); err != nil {
		return "", err
	}
	html := buf.String()

	// Drop a leading <h1> heading so the body does not repeat the page title.
	// "<h1" also matches attributes, e.g. `<h1 id="x">`; 5 == len("</h1>").
	if trimmed := strings.TrimSpace(html); strings.HasPrefix(trimmed, "<h1") {
		if end := strings.Index(trimmed, "</h1>"); end >= 0 {
			html = strings.TrimSpace(trimmed[end+5:])
		}
	}
	return html, nil
}
+
+func mdtitle(path string) string {
+	f, err := os.Open(path)
+	if err != nil {
+		return ""
+	}
+	defer f.Close()
+	sc := bufio.NewScanner(f)
+	for sc.Scan() {
+		line := strings.TrimSpace(sc.Text())
+		if strings.HasPrefix(line, "# ") {
+			return strings.TrimSpace(line[2:])
+		}
+	}
+	return ""
+}
+
+//,
+
+func mdnavhref(path string) (string, bool) {
+	f, err := os.Open(path)
+	if err != nil {
+		return "", false
+	}
+	defer f.Close()
+	sc := bufio.NewScanner(f)
+	if !sc.Scan() {
+		return "", false
+	}
+	line := strings.TrimSpace(sc.Text())
+	if strings.Contains(line, "://") {
+		return line, true
+	}
+	return "", false
+}
+
// scansrc builds the navigation tree from the markdown sources under src.
// It returns a one-element slice holding a root sect titled "Home" whose
// children mirror the directory structure. Per directory: index.md marks
// the section linkable (hasIndex) and may replace its href with an external
// URL (mdnavhref); every other *.md becomes a page (again with an optional
// external-URL override); subdirectories recurse into children. Entries
// keep os.ReadDir order (lexical), so nav output is deterministic.
func scansrc(src string) ([]sect, error) {
	root := sect{title: "Home", href: "/index.html"}

	// walk fills s from dir; rel is dir's path relative to src and is used
	// to compute site-absolute, slash-separated hrefs.
	var walk func(dir, rel string, s *sect) error
	walk = func(dir, rel string, s *sect) error {
		entries, err := os.ReadDir(dir)
		if err != nil {
			return err
		}
		for _, e := range entries {
			name := e.Name()
			childRel := filepath.Join(rel, name)
			childAbs := filepath.Join(dir, name)
			if e.IsDir() {
				// Subdirectory → child section; default href points at its
				// (possibly absent) rendered index page.
				child := sect{
					title: gentitle(name),
					href:  "/" + filepath.ToSlash(childRel) + "/index.html",
				}
				if err := walk(childAbs, childRel, &child); err != nil {
					return err
				}
				// NOTE(review): empty directories still become (unlinkable)
				// sections here — the pre-rewrite code filtered them out;
				// confirm this is intended.
				s.children = append(s.children, child)
				continue
			}
			if !strings.HasSuffix(name, ".md") {
				continue // non-markdown assets are copied by main, not listed in nav
			}
			if name == "index.md" {
				s.hasIndex = true
				// A URL on the first line redirects the section link externally.
				if ext, ok := mdnavhref(childAbs); ok {
					s.href = ext
				}
			} else {
				href := "/" + filepath.ToSlash(strings.TrimSuffix(childRel, ".md")+".html")
				if ext, ok := mdnavhref(childAbs); ok {
					href = ext
				}
				s.pages = append(s.pages, page{
					title: gentitle(name),
					href:  href,
				})
			}
		}
		return nil
	}

	if err := walk(src, "", &root); err != nil {
		return nil, err
	}
	return []sect{root}, nil
}
+
+func findCurSect(sects []sect, cur string) *sect {
+	for i := range sects {
+		s := &sects[i]
+		if s.href == cur {
+			return s
+		}
+		for _, p := range s.pages {
+			if p.href == cur {
+				return s
+			}
+		}
+		if found := findCurSect(s.children, cur); found != nil {
+			return found
+		}
+	}
+	return nil
 }
 
-func copyfile(src, dst string) error {
-	in, err := os.Open(src)
-	if err != nil {
// navX renders the "breadcrumb" navigation style: one flat <ul> per level
// of the path from the root section down to the section containing cur.
// The first <ul> lists Home plus all top-level sections; each deeper <ul>
// lists the children and pages of the next section on the path. Returns ""
// when cur is not reachable in the tree.
func navX(roots []sect, cur string) string {
	if len(roots) == 0 {
		return ""
	}
	root := &roots[0] // scansrc always returns a single root

	// Build the chain of sections root → … → the section containing cur.
	var path []*sect
	var findPath func(s *sect) bool
	findPath = func(s *sect) bool {
		path = append(path, s)
		if s.href == cur {
			return true
		}
		for _, p := range s.pages {
			if p.href == cur {
				return true
			}
		}
		for i := range s.children {
			if findPath(&s.children[i]) {
				return true
			}
		}
		path = path[:len(path)-1] // backtrack: cur is not under s
		return false
	}
	if !findPath(root) {
		return ""
	}

	// sectLI writes one section <li>; sections without index.md are plain
	// text, not links. NOTE(review): the active marker is a bare "current"
	// attribute (<a current href=…>), not class="current" — presumably
	// matched by an a[current] selector in /x.css; confirm.
	sectLI := func(b *strings.Builder, s *sect, active bool) {
		b.WriteString(" <li>")
		if s.hasIndex {
			cls := ""
			if active {
				cls = " current"
			}
			b.WriteString(`<a` + cls + ` href="` + s.href + `">` + s.title + `</a>`)
		} else {
			b.WriteString(s.title)
		}
		b.WriteString("</li>\n")
	}

	var b strings.Builder

	// Top row: Home plus every top-level section. Home is "active" only
	// when cur lives directly in the root section (path has length 1).
	{
		var active1 *sect

		if len(path) > 1 {
			active1 = path[1]
		}
		b.WriteString("<ul>\n")
		sectLI(&b, root, len(path) == 1)

		for i := range root.children {
			c := &root.children[i]
			sectLI(&b, c, c == active1)
		}
		b.WriteString("</ul>\n")
	}

	// One additional row per level of the path below the root: the
	// sub-sections and pages of that level's section.
	for depth := 1; depth < len(path); depth++ {
		s := path[depth]
		if len(s.children) == 0 && len(s.pages) == 0 {
			continue
		}
		var activeChild *sect
		if depth+1 < len(path) {
			activeChild = path[depth+1]
		}
		b.WriteString("<ul>\n")
		for i := range s.children {
			c := &s.children[i]
			sectLI(&b, c, c == activeChild)
		}
		for _, p := range s.pages {
			cls := ""
			if p.href == cur {
				cls = " current"
			}
			b.WriteString(` <li><a` + cls + ` href="` + p.href + `">` + p.title + `</a></li>` + "\n")
		}
		b.WriteString("</ul>\n")
	}

	return b.String()
}
+
// navY renders the "tree" navigation style: the whole section tree as
// nested <ul>s, but only the branch containing cur is expanded — the
// children and pages of sections off that branch are omitted. Output is
// indented by one space per nesting depth for readable HTML.
func navY(roots []sect, cur string) string {
	curSect := findCurSect(roots, cur) // may be nil if cur is not in the tree

	// isAncestor reports whether s is the current section or contains it
	// somewhere among its descendants (pointer identity against curSect).
	var isAncestor func(s *sect) bool
	isAncestor = func(s *sect) bool {
		if s == curSect {
			return true
		}
		for i := range s.children {
			if isAncestor(&s.children[i]) {
				return true
			}
		}
		return false
	}

	var b strings.Builder
	var renderSects func(sects []sect, depth int)
	renderSects = func(sects []sect, depth int) {
		indent := strings.Repeat(" ", depth)
		b.WriteString(indent + "<ul>\n")
		for i := range sects {
			s := &sects[i]
			b.WriteString(indent + " <li>")
			// Sections without index.md render as plain text, not links.
			// NOTE(review): bare "current" attribute, not class="current" —
			// presumably styled via a[current] in /x.css; confirm.
			if s.hasIndex {
				cls := ""
				if s == curSect {
					cls = " current"
				}
				b.WriteString(`<a` + cls + ` href="` + s.href + `">` + s.title + `</a>`)
			} else {
				b.WriteString(s.title)
			}

			// Expand only the branch leading to the current section.
			if isAncestor(s) {
				if len(s.pages) > 0 || len(s.children) > 0 {
					b.WriteString("\n")
					if len(s.children) > 0 {
						renderSects(s.children, depth+2)
					}
					if len(s.pages) > 0 {
						b.WriteString(indent + "  <ul>\n")
						for _, p := range s.pages {
							cls := ""
							if p.href == cur {
								cls = " current"
							}
							b.WriteString(indent + `   <li><a` + cls + ` href="` + p.href + `">` + p.title + `</a></li>` + "\n")
						}
						b.WriteString(indent + "  </ul>\n")
					}
					b.WriteString(indent + " ")
				}
			}
			b.WriteString("</li>\n")
		}
		b.WriteString(indent + "</ul>\n")
	}
	renderSects(roots, 0)
	return b.String()
}
+
+func render(tpl, pageTitle, nav, content string, cfg config) string {
+	compositeTitle := cfg.SiteTitle + " | " + pageTitle
+	r := strings.NewReplacer(
+		"{{TITLE}}", compositeTitle,
+		"{{NAV}}", nav,
+		"{{PAGE_TITLE}}", pageTitle,
+		"{{CONTENT}}", content,
+		"{{FOOTER_TEXT}}", cfg.FooterText,
+	)
+	return r.Replace(tpl)
+}
+
+func writepage(outpath, html string) error {
+	if err := os.MkdirAll(filepath.Dir(outpath), 0755); err != nil {
 		return err
 	}
-	defer in.Close()
-	out, err := os.Create(dst)
-	if err != nil {
-		return err
-	}
-	defer out.Close()
-	_, err = io.Copy(out, in)
-	return err
+	return os.WriteFile(outpath, []byte(html), 0644)
 }
 
 func main() {
-	if len(os.Args) != 3 {
-		fmt.Fprintln(os.Stderr, "usage: qwb <in> <out>")
-		os.Exit(1)
+	if len(os.Args) < 3 {
+		fmt.Println("usage: qwb <src> <out> [-x|-y]")
+		return
 	}
 	src, out := os.Args[1], os.Args[2]
-
-	if entries, err := os.ReadDir(out); err == nil && len(entries) > 0 {
-		fmt.Fprintf(os.Stderr, "qwb: %s is not empty, overwrite? [y/N] ", out)
-		s, _ := bufio.NewReader(os.Stdin).ReadString('\n')
-		if strings.TrimSpace(strings.ToLower(s)) != "y" {
-			fmt.Fprintln(os.Stderr, "aborted")
-			os.Exit(1)
-		}
-		if err := os.RemoveAll(out); err != nil {
-			fmt.Fprintln(os.Stderr, err)
-			os.Exit(1)
+	mode := "x"
+	if len(os.Args) == 4 {
+		switch os.Args[3] {
+		case "-x":
+			mode = "x"
+		case "-y":
+			mode = "y"
+		default:
+			fmt.Println("usage: qwb <src> <out> [-x|-y]")
+			return
 		}
 	}
 
-	cfg := loadconfig(src)
-	tmpl := loadtemplate(cfg)
-	rootsec, subs := collectsections(src, cfg.headertext)
+	cfg := loadcfg(src)
 
-	err := filepath.WalkDir(src, func(path string, d os.DirEntry, err error) error {
-		if err != nil {
-			return err
-		}
-		rel, _ := filepath.Rel(src, path)
-		outpath := filepath.Join(out, rel)
-		if d.IsDir() {
-			return os.MkdirAll(outpath, 0755)
-		}
-		if !strings.HasSuffix(path, ".md") {
-			return copyfile(path, outpath)
-		}
-		body, err := mdtohtml(path)
-		if err != nil {
-			return err
-		}
-		body = fixlinks(body)
-		pageTitle, body := extracth1(body)
-		if pageTitle == "" {
-			pageTitle = cfg.headertext
-		}
-		cur := "/" + strings.TrimSuffix(rel, ".md") + ".html"
-		title := cfg.headertext
-		if filepath.Base(path) != "index.md" {
-			title = cfg.headertext + " | " + pageTitle
-		}
-		pg := strings.ReplaceAll(tmpl, "{{TITLE}}", title)
-		pg = strings.ReplaceAll(pg, "{{SITE_TITLE}}", pageTitle)
-		pg = strings.ReplaceAll(pg, "{{FOOTER_TEXT}}", cfg.footertext)
-		pg = strings.ReplaceAll(pg, "{{CSS}}", cfg.cssfile)
-		pg = strings.ReplaceAll(pg, "{{NAV}}", buildnav(rootsec, subs, cur))
-		pg = strings.ReplaceAll(pg, "{{CONTENT}}", body)
-		outpath = strings.TrimSuffix(outpath, ".md") + ".html"
-		return os.WriteFile(outpath, []byte(pg), 0644)
-	})
-
+	sects, err := scansrc(src)
 	if err != nil {
-		fmt.Fprintln(os.Stderr, err)
+		fmt.Fprintln(os.Stderr, "scan:", err)
 		os.Exit(1)
 	}
-}
\ No newline at end of file
+
+	var process func(dir, rel string)
+	process = func(dir, rel string) {
+		entries, err := os.ReadDir(dir)
+		if err != nil {
+			return
+		}
+		for _, e := range entries {
+			name := e.Name()
+			childRel := filepath.Join(rel, name)
+			childAbs := filepath.Join(dir, name)
+			if e.IsDir() {
+				process(childAbs, childRel)
+				continue
+			}
+			if !strings.HasSuffix(name, ".md") {
+
+				dst := filepath.Join(out, childRel)
+				_ = os.MkdirAll(filepath.Dir(dst), 0755)
+				if data, err := os.ReadFile(childAbs); err == nil {
+					_ = os.WriteFile(dst, data, 0644)
+				}
+				continue
+			}
+			htmlRel := strings.TrimSuffix(childRel, ".md") + ".html"
+			href := "/" + filepath.ToSlash(htmlRel)
+
+			var nav string
+			if mode == "y" {
+				nav = navY(sects, href)
+			} else {
+				nav = navX(sects, href)
+			}
+
+			pageTitle := mdtitle(childAbs)
+			if pageTitle == "" {
+				pageTitle = gentitle(name)
+			}
+
+			content, err := md2html(childAbs)
+			if err != nil {
+				fmt.Fprintf(os.Stderr, "md2html %s: %v\n", childAbs, err)
+				content = "<p>render error</p>"
+			}
+
+			html := render(tpldefault, pageTitle, nav, content, cfg)
+			outpath := filepath.Join(out, htmlRel)
+			if err := writepage(outpath, html); err != nil {
+				fmt.Fprintf(os.Stderr, "write %s: %v\n", outpath, err)
+			} else {
+				fmt.Println("→", outpath)
+			}
+		}
+	}
+
+	process(src, "")
+}