Proper XML header
diff --git a/cmd/sgblog/http.go b/cmd/sgblog/http.go
index 968555826bba8621a28e59578b0b859e6b626359..527f51eacec80cfc7aad4e1286603b4153691bda 100644
--- a/cmd/sgblog/http.go
+++ b/cmd/sgblog/http.go
@@ -1,6 +1,6 @@
 /*
-SGBlog -- Git-based CGI blogging engine
-Copyright (C) 2020 Sergey Matveev <stargrave@stargrave.org>
+SGBlog -- Git-backed CGI/UCSPI blogging/phlogging/gemlogging engine
+Copyright (C) 2020-2022 Sergey Matveev <stargrave@stargrave.org>
 
 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU Affero General Public License as
@@ -15,56 +15,82 @@ You should have received a copy of the GNU Affero General Public License
 along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
 
-// Git-based CGI blogging engine
 package main
 
 import (
        "bytes"
        "compress/gzip"
+       "crypto/sha1"
+       _ "embed"
        "encoding/hex"
-       "encoding/json"
        "encoding/xml"
        "errors"
        "fmt"
        "hash"
        "html"
        "io"
-       "io/ioutil"
        "log"
+       "net/http"
        "net/url"
        "os"
        "strconv"
        "strings"
+       "text/template"
        "time"
 
-       "github.com/hjson/hjson-go"
+       "github.com/go-git/go-git/v5"
+       "github.com/go-git/go-git/v5/plumbing"
+       "github.com/go-git/go-git/v5/plumbing/object"
        "go.stargrave.org/sgblog"
+       "go.stargrave.org/sgblog/cmd/sgblog/atom"
        "golang.org/x/crypto/blake2b"
-       "golang.org/x/tools/blog/atom"
-       "gopkg.in/src-d/go-git.v4"
-       "gopkg.in/src-d/go-git.v4/plumbing"
-       "gopkg.in/src-d/go-git.v4/plumbing/object"
 )
 
 const (
-       AtomFeed = "feed.atom"
+       AtomPostsFeed    = "feed.atom"
+       AtomCommentsFeed = "comments.atom"
 )
 
 var (
-       defaultLinks = []string{}
-
        renderableSchemes = map[string]struct{}{
-               "ftp":    struct{}{},
-               "gopher": struct{}{},
-               "http":   struct{}{},
-               "https":  struct{}{},
-               "telnet": struct{}{},
+               "finger": {},
+               "ftp":    {},
+               "gemini": {},
+               "gopher": {},
+               "http":   {},
+               "https":  {},
+               "irc":    {},
+               "ircs":   {},
+               "news":   {},
+               "telnet": {},
        }
+
+       //go:embed http-index.tmpl
+       TmplHTMLIndexRaw string
+       TmplHTMLIndex    = template.Must(template.New("http-index").Parse(TmplHTMLIndexRaw))
+
+       //go:embed http-entry.tmpl
+       TmplHTMLEntryRaw string
+       TmplHTMLEntry    = template.Must(template.New("http-entry").Funcs(
+               template.FuncMap{"lineURLize": lineURLizeInTemplate},
+       ).Parse(TmplHTMLEntryRaw))
 )
 
 type TableEntry struct {
-       commit      *object.Commit
-       commentsRaw []byte
+       Commit      *object.Commit
+       CommentsRaw []byte
+       TopicsRaw   []byte
+       Num         int
+       Title       string
+       LinesNum    int
+       CommentsNum int
+       DomainURLs  []string
+       Topics      []string
+}
+
+type CommentEntry struct {
+       HeaderLines []string
+       BodyLines   []string
 }
 
 func makeA(href, text string) string {
@@ -91,16 +117,18 @@ func lineURLize(urlPrefix, line string) string {
                        cols[i] = makeA(col, col)
                        continue
                }
-               cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
-                       urlPrefix+"/$1", "$1",
-               ))
+               cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(urlPrefix+"/$1", "$1"))
        }
        return strings.Join(cols, " ")
 }
 
+func lineURLizeInTemplate(urlPrefix, line interface{}) string {
+       return lineURLize(urlPrefix.(string), line.(string))
+}
+
 func startHeader(etag hash.Hash, gziped bool) string {
        lines := []string{
-               "Content-Type: text/html; charset=UTF-8",
+               "Content-Type: text/html; charset=utf-8",
                "ETag: " + etagString(etag),
        }
        if gziped {
@@ -111,25 +139,11 @@ func startHeader(etag hash.Hash, gziped bool) string {
        return strings.Join(lines, "\n")
 }
 
-func startHTML(title string, additional []string) string {
-       return fmt.Sprintf(`<html>
-<head>
-       <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
-       <meta name="generator" content="SGBlog %s">
-       <title>%s</title>
-       %s
-</head>
-<body>
-`,
-               sgblog.Version, title,
-               strings.Join(append(defaultLinks, additional...), "\n   "),
-       )
-}
-
-func makeErr(err error) {
-       fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
+func makeErr(err error, status int) {
+       fmt.Println("Status:", status)
+       fmt.Print("Content-Type: text/plain; charset=utf-8\n\n")
        fmt.Println(err)
-       panic(err)
+       log.Fatalln(err)
 }
 
 func checkETag(etag hash.Hash) {
@@ -140,80 +154,69 @@ func checkETag(etag hash.Hash) {
        }
 }
 
+func bytes2uuid(b []byte) string {
+       raw := new([16]byte)
+       copy(raw[:], b)
+       raw[6] = (raw[6] & 0x0F) | uint8(4<<4) // version 4
+       return fmt.Sprintf("%x-%x-%x-%x-%x", raw[0:4], raw[4:6], raw[6:8], raw[8:10], raw[10:])
+}
+
+type CommitIterNext interface {
+       Next() (*object.Commit, error)
+}
+
 func serveHTTP() {
        cfgPath := os.Getenv("SGBLOG_CFG")
        if cfgPath == "" {
                log.Fatalln("SGBLOG_CFG is not set")
        }
-       cfgRaw, err := ioutil.ReadFile(cfgPath)
-       if err != nil {
-               makeErr(err)
-       }
-       var cfgGeneral map[string]interface{}
-       if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
-               makeErr(err)
-       }
-       cfgRaw, err = json.Marshal(cfgGeneral)
+       cfg, err := readCfg(cfgPath)
        if err != nil {
-               makeErr(err)
+               log.Fatalln(err)
        }
-       var cfg *Cfg
-       if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
-               makeErr(err)
-       }
-       pathInfo, exists := os.LookupEnv("PATH_INFO")
-       if !exists {
+
+       pathInfo := os.Getenv("PATH_INFO")
+       if len(pathInfo) == 0 {
                pathInfo = "/"
        }
        queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
        if err != nil {
-               makeErr(err)
+               makeErr(err, http.StatusBadRequest)
        }
 
        etagHash, err := blake2b.New256(nil)
        if err != nil {
                panic(err)
        }
-       etagHash.Write([]byte("SGBLOG"))
-       etagHash.Write([]byte(sgblog.Version))
-       etagHash.Write([]byte(cfg.GitPath))
-       etagHash.Write([]byte(cfg.Branch))
-       etagHash.Write([]byte(cfg.Title))
-       etagHash.Write([]byte(cfg.URLPrefix))
-       etagHash.Write([]byte(cfg.AtomBaseURL))
-       etagHash.Write([]byte(cfg.AtomId))
-       etagHash.Write([]byte(cfg.AtomAuthor))
-
-       etagHashForWeb := [][]byte{}
-       if cfg.CSS != "" {
-               defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
-               etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
-       }
-       if cfg.Webmaster != "" {
-               defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
-               etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
+       for _, s := range []string{
+               "SGBLOG",
+               sgblog.Version,
+               cfg.GitPath,
+               cfg.Branch,
+               cfg.Title,
+               cfg.URLPrefix,
+               cfg.AtomBaseURL,
+               cfg.AtomId,
+               cfg.AtomAuthor,
+       } {
+               if _, err = etagHash.Write([]byte(s)); err != nil {
+                       panic(err)
+               }
        }
-       if cfg.AboutURL != "" {
-               etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
+       etagHashForWeb := []string{
+               cfg.CSS,
+               cfg.Webmaster,
+               cfg.AboutURL,
+               cfg.CommentsNotesRef,
+               cfg.CommentsEmail,
        }
        for _, gitURL := range cfg.GitURLs {
-               defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
-               etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
-       }
-       if cfg.CommentsNotesRef != "" {
-               etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
+               etagHashForWeb = append(etagHashForWeb, gitURL)
        }
-       if cfg.CommentsEmail != "" {
-               etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
-       }
-
-       defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
-       atomURL := cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomFeed
-       defaultLinks = append(defaultLinks, `<link rel="alternate" title="Atom feed" href="`+atomURL+`" type="application/atom+xml">`)
 
        headHash, err := initRepo(cfg)
        if err != nil {
-               makeErr(err)
+               makeErr(err, http.StatusInternalServerError)
        }
 
        if notes, err := repo.Notes(); err == nil {
@@ -257,276 +260,531 @@ func serveHTTP() {
                if offsetRaw, exists := queryValues["offset"]; exists {
                        offset, err = strconv.Atoi(offsetRaw[0])
                        if err != nil {
-                               makeErr(err)
+                               makeErr(err, http.StatusBadRequest)
                        }
                }
                repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
                if err != nil {
-                       makeErr(err)
+                       makeErr(err, http.StatusInternalServerError)
+               }
+               topicsCache, err := getTopicsCache(cfg, repoLog)
+               if err != nil {
+                       makeErr(err, http.StatusInternalServerError)
+               }
+               repoLog, err = repo.Log(&git.LogOptions{From: *headHash})
+               if err != nil {
+                       makeErr(err, http.StatusInternalServerError)
                }
+
                commitN := 0
-               for i := 0; i < offset; i++ {
-                       if _, err = repoLog.Next(); err != nil {
-                               break
+               var commits CommitIterNext
+               var topic string
+               if t, exists := queryValues["topic"]; exists {
+                       topic = t[0]
+                       hashes := topicsCache[topic]
+                       if hashes == nil {
+                               makeErr(errors.New("no posts with that topic"), http.StatusBadRequest)
                        }
-                       commitN++
+                       if len(hashes) > offset {
+                               hashes = hashes[offset:]
+                               commitN += offset
+                       }
+                       commits = &HashesIter{hashes}
+               } else {
+                       for i := 0; i < offset; i++ {
+                               if _, err = repoLog.Next(); err != nil {
+                                       break
+                               }
+                               commitN++
+                       }
+                       commits = repoLog
                }
 
                entries := make([]TableEntry, 0, PageEntries)
                logEnded := false
                for _, data := range etagHashForWeb {
-                       etagHash.Write(data)
+                       etagHash.Write([]byte(data))
                }
                etagHash.Write([]byte("INDEX"))
+               etagHash.Write([]byte(topic))
                for i := 0; i < PageEntries; i++ {
-                       commit, err := repoLog.Next()
+                       commit, err := commits.Next()
                        if err != nil {
                                logEnded = true
                                break
                        }
                        etagHash.Write(commit.Hash[:])
-                       commentsRaw := getNote(commentsTree, commit.Hash)
+                       commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
                        etagHash.Write(commentsRaw)
-                       entries = append(entries, TableEntry{commit, commentsRaw})
+                       topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
+                       etagHash.Write(topicsRaw)
+                       entries = append(entries, TableEntry{
+                               Commit:      commit,
+                               CommentsRaw: commentsRaw,
+                               TopicsRaw:   topicsRaw,
+                       })
                }
                checkETag(etagHash)
 
-               var table bytes.Buffer
-               table.WriteString(
-                       "<table border=1>\n" +
-                               "<caption>Comments</caption>\n<tr>" +
-                               "<th>N</th>" +
-                               "<th>When</th>" +
-                               "<th>Title</th>" +
-                               `<th size="5%"><a title="Lines">L</a></th>` +
-                               `<th size="5%"><a title="Comments">C</a></th>` +
-                               "<th>Linked to</th></tr>\n")
-               var yearPrev int
-               var monthPrev time.Month
-               var dayPrev int
-               for _, entry := range entries {
-                       yearCur, monthCur, dayCur := entry.commit.Author.When.Date()
-                       if dayCur != dayPrev || monthCur != monthPrev || yearCur != yearPrev {
-                               table.WriteString(fmt.Sprintf(
-                                       "<tr><td colspan=6><center><tt>%04d-%02d-%02d</tt></center></td></tr>\n",
-                                       yearCur, monthCur, dayCur,
-                               ))
-                               yearPrev, monthPrev, dayPrev = yearCur, monthCur, dayCur
-                       }
+               for i, entry := range entries {
                        commitN++
-                       lines := msgSplit(entry.commit.Message)
-                       domains := []string{}
+                       entry.Num = commitN
+                       lines := msgSplit(entry.Commit.Message)
+                       entry.Title = lines[0]
+                       entry.LinesNum = len(lines) - 2
                        for _, line := range lines[2:] {
-                               if u := urlParse(line); u == nil {
+                               u := urlParse(line)
+                               if u == nil {
                                        break
-                               } else {
-                                       domains = append(domains, makeA(line, u.Host))
                                }
+                               entry.DomainURLs = append(entry.DomainURLs, makeA(line, u.Host))
                        }
-                       var commentsValue string
-                       if l := len(parseComments(entry.commentsRaw)); l > 0 {
-                               commentsValue = strconv.Itoa(l)
-                       } else {
-                               commentsValue = "&nbsp;"
-                       }
-                       table.WriteString(fmt.Sprintf(
-                               "<tr><td>%d</td><td><tt>%02d:%02d</tt></td>"+
-                                       "<td>%s</td>"+
-                                       "<td>%d</td><td>%s</td>"+
-                                       "<td>%s</td></tr>\n",
-                               commitN,
-                               entry.commit.Author.When.Hour(),
-                               entry.commit.Author.When.Minute(),
-                               makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
-                               len(lines)-2,
-                               commentsValue,
-                               strings.Join(domains, " "),
-                       ))
+                       entry.CommentsNum = len(sgblog.ParseComments(entry.CommentsRaw))
+                       entry.Topics = sgblog.ParseTopics(entry.TopicsRaw)
+                       entries[i] = entry
                }
-               table.WriteString("</table>")
-
-               var href string
-               var links []string
-               var refs bytes.Buffer
-               if offset > 0 {
-                       if offsetPrev := offset - PageEntries; offsetPrev > 0 {
-                               href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
-                       } else {
-                               href = cfg.URLPrefix + "/"
+               offsetPrev := offset - PageEntries
+               if offsetPrev < 0 {
+                       offsetPrev = 0
+               }
+               os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
+               err = TmplHTMLIndex.Execute(out, struct {
+                       Version          string
+                       Cfg              *Cfg
+                       Topic            string
+                       TopicsEnabled    bool
+                       Topics           []string
+                       CommentsEnabled  bool
+                       AtomPostsFeed    string
+                       AtomCommentsFeed string
+                       Offset           int
+                       OffsetPrev       int
+                       OffsetNext       int
+                       LogEnded         bool
+                       Entries          []TableEntry
+               }{
+                       Version:          sgblog.Version,
+                       Cfg:              cfg,
+                       Topic:            topic,
+                       TopicsEnabled:    topicsTree != nil,
+                       Topics:           topicsCache.Topics(),
+                       CommentsEnabled:  commentsTree != nil,
+                       AtomPostsFeed:    AtomPostsFeed,
+                       AtomCommentsFeed: AtomCommentsFeed,
+                       Offset:           offset,
+                       OffsetPrev:       offsetPrev,
+                       OffsetNext:       offset + PageEntries,
+                       LogEnded:         logEnded,
+                       Entries:          entries,
+               })
+               if err != nil {
+                       makeErr(err, http.StatusInternalServerError)
+               }
+       } else if pathInfo == "/twtxt.txt" {
+               commit, err := repo.CommitObject(*headHash)
+               if err != nil {
+                       makeErr(err, http.StatusInternalServerError)
+               }
+               etagHash.Write([]byte("TWTXT POSTS"))
+               etagHash.Write(commit.Hash[:])
+               checkETag(etagHash)
+               repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
+               if err != nil {
+                       makeErr(err, http.StatusInternalServerError)
+               }
+               for i := 0; i < PageEntries; i++ {
+                       commit, err = repoLog.Next()
+                       if err != nil {
+                               break
                        }
-                       links = append(links, `<link rel="prev" href="`+href+`" title="newer">`)
-                       refs.WriteString("\n" + makeA(href, "[prev]"))
+                       fmt.Fprintf(
+                               out, "%s\t%s\n",
+                               commit.Author.When.Format(time.RFC3339),
+                               msgSplit(commit.Message)[0],
+                       )
                }
-               if !logEnded {
-                       href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
-                       links = append(links, `<link rel="next" href="`+href+`" title="older">`)
-                       refs.WriteString("\n" + makeA(href, "[next]"))
+               os.Stdout.WriteString("Content-Type: text/plain; charset=utf-8\n")
+               os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
+               if gzipWriter != nil {
+                       os.Stdout.WriteString("Content-Encoding: gzip\n")
+                       gzipWriter.Close()
                }
-
-               os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
-               out.Write([]byte(startHTML(
-                       fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
-                       links,
-               )))
-               if cfg.AboutURL != "" {
-                       out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
-               }
-               out.Write(refs.Bytes())
-               out.Write(table.Bytes())
-               out.Write(refs.Bytes())
-               out.Write([]byte("\n"))
-       } else if pathInfo == "/"+AtomFeed {
+               os.Stdout.WriteString("\n")
+               os.Stdout.Write(outBuf.Bytes())
+               return
+       } else if pathInfo == "/"+AtomPostsFeed {
                commit, err := repo.CommitObject(*headHash)
                if err != nil {
-                       makeErr(err)
+                       makeErr(err, http.StatusInternalServerError)
+               }
+
+               var topic string
+               if t, exists := queryValues["topic"]; exists {
+                       topic = t[0]
                }
-               etagHash.Write([]byte("ATOM"))
+
+               etagHash.Write([]byte("ATOM POSTS"))
+               etagHash.Write([]byte(topic))
                etagHash.Write(commit.Hash[:])
                checkETag(etagHash)
+               var title string
+               if topic == "" {
+                       title = cfg.Title
+               } else {
+                       title = fmt.Sprintf("%s (topic: %s)", cfg.Title, topic)
+               }
+               idHasher, err := blake2b.New256(nil)
+               if err != nil {
+                       panic(err)
+               }
+               idHasher.Write([]byte("ATOM POSTS"))
+               idHasher.Write([]byte(cfg.AtomId))
+               idHasher.Write([]byte(topic))
                feed := atom.Feed{
-                       Title:   cfg.Title,
-                       ID:      cfg.AtomId,
+                       Title:   title,
+                       ID:      "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
                        Updated: atom.Time(commit.Author.When),
                        Link: []atom.Link{{
                                Rel:  "self",
-                               Href: atomURL,
+                               Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomPostsFeed,
                        }},
                        Author: &atom.Person{Name: cfg.AtomAuthor},
                }
+
                repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
                if err != nil {
-                       makeErr(err)
+                       makeErr(err, http.StatusInternalServerError)
+               }
+               var commits CommitIterNext
+               if topic == "" {
+                       commits = repoLog
+               } else {
+                       topicsCache, err := getTopicsCache(cfg, repoLog)
+                       if err != nil {
+                               makeErr(err, http.StatusInternalServerError)
+                       }
+                       hashes := topicsCache[topic]
+                       if hashes == nil {
+                               makeErr(errors.New("no posts with that topic"), http.StatusBadRequest)
+                       }
+                       commits = &HashesIter{hashes}
                }
+
                for i := 0; i < PageEntries; i++ {
-                       commit, err = repoLog.Next()
+                       commit, err = commits.Next()
                        if err != nil {
                                break
                        }
-
-                       feedIdRaw := new([16]byte)
-                       copy(feedIdRaw[:], commit.Hash[:])
-                       feedIdRaw[6] = (feedIdRaw[6] & 0x0F) | uint8(4<<4) // version 4
-                       feedId := fmt.Sprintf(
-                               "%x-%x-%x-%x-%x",
-                               feedIdRaw[0:4],
-                               feedIdRaw[4:6],
-                               feedIdRaw[6:8],
-                               feedIdRaw[8:10],
-                               feedIdRaw[10:],
-                       )
-
                        lines := msgSplit(commit.Message)
+                       var categories []atom.Category
+                       for _, topic := range sgblog.ParseTopics(sgblog.GetNote(
+                               repo, topicsTree, commit.Hash,
+                       )) {
+                               categories = append(categories, atom.Category{Term: topic})
+                       }
+                       htmlized := make([]string, 0, len(lines))
+                       htmlized = append(htmlized, "<pre>")
+                       for _, l := range lines[2:] {
+                               htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
+                       }
+                       htmlized = append(htmlized, "</pre>")
                        feed.Entry = append(feed.Entry, &atom.Entry{
                                Title: lines[0],
-                               ID:    "urn:uuid:" + feedId,
+                               ID:    "urn:uuid:" + bytes2uuid(commit.Hash[:]),
                                Link: []atom.Link{{
                                        Rel:  "alternate",
                                        Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
                                }},
                                Published: atom.Time(commit.Author.When),
                                Updated:   atom.Time(commit.Author.When),
-                               Summary: &atom.Text{
-                                       Type: "text",
-                                       Body: lines[0],
-                               },
+                               Summary:   &atom.Text{Type: "text", Body: lines[0]},
                                Content: &atom.Text{
-                                       Type: "text",
-                                       Body: strings.Join(lines[2:], "\n"),
+                                       Type: "html",
+                                       Body: strings.Join(htmlized, "\n"),
                                },
+                               Category: categories,
                        })
                }
                data, err := xml.MarshalIndent(&feed, "", "  ")
                if err != nil {
-                       makeErr(err)
+                       makeErr(err, http.StatusInternalServerError)
                }
+               out.Write([]byte(xml.Header))
                out.Write(data)
-               os.Stdout.WriteString("Content-Type: text/xml; charset=UTF-8\n")
-               os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
-               if gzipWriter != nil {
-                       os.Stdout.WriteString("Content-Encoding: gzip\n")
-                       gzipWriter.Close()
+               goto AtomFinish
+       } else if pathInfo == "/"+AtomCommentsFeed {
+               commit, err := repo.CommitObject(commentsRef.Hash())
+               if err != nil {
+                       makeErr(err, http.StatusInternalServerError)
                }
-               os.Stdout.WriteString("\n")
-               os.Stdout.Write(outBuf.Bytes())
-               return
+               etagHash.Write([]byte("ATOM COMMENTS"))
+               etagHash.Write(commit.Hash[:])
+               checkETag(etagHash)
+               idHasher, err := blake2b.New256(nil)
+               if err != nil {
+                       panic(err)
+               }
+               idHasher.Write([]byte("ATOM COMMENTS"))
+               idHasher.Write([]byte(cfg.AtomId))
+               feed := atom.Feed{
+                       Title:   cfg.Title + " comments",
+                       ID:      "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
+                       Updated: atom.Time(commit.Author.When),
+                       Link: []atom.Link{{
+                               Rel:  "self",
+                               Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomCommentsFeed,
+                       }},
+                       Author: &atom.Person{Name: cfg.AtomAuthor},
+               }
+               repoLog, err := repo.Log(&git.LogOptions{From: commentsRef.Hash()})
+               if err != nil {
+                       makeErr(err, http.StatusInternalServerError)
+               }
+               for i := 0; i < PageEntries; i++ {
+                       commit, err = repoLog.Next()
+                       if err != nil {
+                               break
+                       }
+                       fileStats, err := commit.Stats()
+                       if err != nil {
+                               makeErr(err, http.StatusInternalServerError)
+                       }
+                       t, err := commit.Tree()
+                       if err != nil {
+                               makeErr(err, http.StatusInternalServerError)
+                       }
+                       commentedHash := plumbing.NewHash(strings.ReplaceAll(
+                               fileStats[0].Name, "/", "",
+                       ))
+                       commit, err = repo.CommitObject(commentedHash)
+                       if err != nil {
+                               continue
+                       }
+                       comments := sgblog.ParseComments(sgblog.GetNote(repo, t, commentedHash))
+                       if len(comments) == 0 {
+                               continue
+                       }
+                       commentN := strconv.Itoa(len(comments) - 1)
+                       lines := strings.Split(comments[len(comments)-1], "\n")
+                       from := strings.TrimPrefix(lines[0], "From: ")
+                       date := strings.TrimPrefix(lines[1], "Date: ")
+                       htmlized := make([]string, 0, len(lines))
+                       htmlized = append(htmlized, "<pre>")
+                       for _, l := range lines[2:] {
+                               htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
+                       }
+                       htmlized = append(htmlized, "</pre>")
+                       idHasher.Reset()
+                       idHasher.Write([]byte("COMMENT"))
+                       idHasher.Write(commit.Hash[:])
+                       idHasher.Write([]byte(commentN))
+                       feed.Entry = append(feed.Entry, &atom.Entry{
+                               Title: fmt.Sprintf(
+                                       "Comment %s for \"%s\" by %s",
+                                       commentN, msgSplit(commit.Message)[0], from,
+                               ),
+                               Author: &atom.Person{Name: from},
+                               ID:     "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
+                               Link: []atom.Link{{
+                                       Rel: "alternate",
+                                       Href: strings.Join([]string{
+                                               cfg.AtomBaseURL, cfg.URLPrefix, "/",
+                                               commit.Hash.String(), "#comment", commentN,
+                                       }, ""),
+                               }},
+                               Published: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
+                               Updated:   atom.TimeStr(strings.Replace(date, " ", "T", -1)),
+                               Content: &atom.Text{
+                                       Type: "html",
+                                       Body: strings.Join(htmlized, "\n"),
+                               },
+                       })
+               }
+               data, err := xml.MarshalIndent(&feed, "", "  ")
+               if err != nil {
+                       makeErr(err, http.StatusInternalServerError)
+               }
+               out.Write([]byte(xml.Header))
+               out.Write(data)
+               goto AtomFinish
        } else if sha1DigestRe.MatchString(pathInfo[1:]) {
-               commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
+               commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1 : 1+sha1.Size*2]))
                if err != nil {
-                       makeErr(err)
+                       makeErr(err, http.StatusBadRequest)
                }
                for _, data := range etagHashForWeb {
-                       etagHash.Write(data)
+                       etagHash.Write([]byte(data))
                }
                etagHash.Write([]byte("ENTRY"))
                etagHash.Write(commit.Hash[:])
-               notesRaw := getNote(notesTree, commit.Hash)
-               etagHash.Write(notesRaw)
-               commentsRaw := getNote(commentsTree, commit.Hash)
+               atomCommentsURL := strings.Join([]string{
+                       cfg.AtomBaseURL, cfg.URLPrefix, "/",
+                       commit.Hash.String(), "/", AtomCommentsFeed,
+               }, "")
+               commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
                etagHash.Write(commentsRaw)
+               topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
+               etagHash.Write(topicsRaw)
+               if strings.HasSuffix(pathInfo, AtomCommentsFeed) {
+                       etagHash.Write([]byte("ATOM COMMENTS"))
+                       checkETag(etagHash)
+                       type Comment struct {
+                               n    string
+                               from string
+                               date string
+                               body []string
+                       }
+                       commentsRaw := sgblog.ParseComments(commentsRaw)
+                       var toSkip int
+                       if len(commentsRaw) > PageEntries {
+                               toSkip = len(commentsRaw) - PageEntries
+                       }
+                       comments := make([]Comment, 0, len(commentsRaw)-toSkip)
+                       for i := len(commentsRaw) - 1; i >= toSkip; i-- {
+                               lines := strings.Split(commentsRaw[i], "\n")
+                               from := strings.TrimPrefix(lines[0], "From: ")
+                               date := strings.TrimPrefix(lines[1], "Date: ")
+                               comments = append(comments, Comment{
+                                       n:    strconv.Itoa(i),
+                                       from: from,
+                                       date: strings.Replace(date, " ", "T", 1),
+                                       body: lines[3:],
+                               })
+                       }
+                       idHasher, err := blake2b.New256(nil)
+                       if err != nil {
+                               panic(err)
+                       }
+                       idHasher.Write([]byte("ATOM COMMENTS"))
+                       idHasher.Write(commit.Hash[:])
+                       feed := atom.Feed{
+                               Title:  fmt.Sprintf("\"%s\" comments", msgSplit(commit.Message)[0]),
+                               ID:     "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
+                               Link:   []atom.Link{{Rel: "self", Href: atomCommentsURL}},
+                               Author: &atom.Person{Name: cfg.AtomAuthor},
+                       }
+                       if len(comments) > 0 {
+                               feed.Updated = atom.TimeStr(comments[0].date)
+                       } else {
+                               feed.Updated = atom.Time(commit.Author.When)
+                       }
+                       for _, comment := range comments {
+                               idHasher.Reset()
+                               idHasher.Write([]byte("COMMENT"))
+                               idHasher.Write(commit.Hash[:])
+                               idHasher.Write([]byte(comment.n))
+                               htmlized := make([]string, 0, len(comment.body))
+                               htmlized = append(htmlized, "<pre>")
+                               for _, l := range comment.body {
+                                       htmlized = append(
+                                               htmlized,
+                                               lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l),
+                                       )
+                               }
+                               htmlized = append(htmlized, "</pre>")
+                               feed.Entry = append(feed.Entry, &atom.Entry{
+                                       Title:  fmt.Sprintf("Comment %s by %s", comment.n, comment.from),
+                                       Author: &atom.Person{Name: comment.from},
+                                       ID:     "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
+                                       Link: []atom.Link{{
+                                               Rel: "alternate",
+                                               Href: strings.Join([]string{
+                                                       cfg.AtomBaseURL,
+                                                       cfg.URLPrefix, "/",
+                                                       commit.Hash.String(),
+                                                       "#comment", comment.n,
+                                               }, ""),
+                                       }},
+                                       Published: atom.TimeStr(
+                                               strings.Replace(comment.date, " ", "T", -1),
+                                       ),
+                                       Updated: atom.TimeStr(
+                                               strings.Replace(comment.date, " ", "T", -1),
+                                       ),
+                                       Content: &atom.Text{
+                                               Type: "html",
+                                               Body: strings.Join(htmlized, "\n"),
+                                       },
+                               })
+                       }
+                       data, err := xml.MarshalIndent(&feed, "", "  ")
+                       if err != nil {
+                               makeErr(err, http.StatusInternalServerError)
+                       }
+                       out.Write([]byte(xml.Header))
+                       out.Write(data)
+                       goto AtomFinish
+               }
+               notesRaw := sgblog.GetNote(repo, notesTree, commit.Hash)
+               etagHash.Write(notesRaw)
                checkETag(etagHash)
+
                lines := msgSplit(commit.Message)
                title := lines[0]
                when := commit.Author.When.Format(sgblog.WhenFmt)
-               os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
-               links := []string{}
                var parent string
                if len(commit.ParentHashes) > 0 {
                        parent = commit.ParentHashes[0].String()
-                       links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="older">`)
-               }
-               out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
-               if cfg.AboutURL != "" {
-                       out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.AboutURL, "about"))))
-               }
-               out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.URLPrefix+"/", "index"))))
-               if parent != "" {
-                       out.Write([]byte(fmt.Sprintf(
-                               "[%s]\n",
-                               makeA(cfg.URLPrefix+"/"+parent, "older"),
-                       )))
-               }
-               out.Write([]byte(fmt.Sprintf(
-                       "[<tt><a title=\"When\">%s</a></tt>]\n"+
-                               "[<tt><a title=\"Hash\">%s</a></tt>]\n"+
-                               "<hr/>\n<h2>%s</h2>\n<pre>\n",
-                       when, commit.Hash.String(), title,
-               )))
-               for _, line := range lines[2:] {
-                       out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
-               }
-               out.Write([]byte("</pre>\n<hr/>\n"))
+               }
+               commentsParsed := sgblog.ParseComments(commentsRaw)
+               comments := make([]CommentEntry, 0, len(commentsParsed))
+               for _, comment := range commentsParsed {
+                       lines := strings.Split(comment, "\n")
+                       comments = append(comments, CommentEntry{lines[:3], lines[3:]})
+               }
+               var notesLines []string
                if len(notesRaw) > 0 {
-                       out.Write([]byte("Note:<pre>\n" + string(notesRaw) + "\n</pre>\n<hr/>\n"))
-               }
-               if cfg.CommentsEmail != "" {
-                       out.Write([]byte("[" + makeA(
-                               "mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
-                               "write comment",
-                       ) + "]\n"))
-               }
-               out.Write([]byte("<dl>\n"))
-               for i, comment := range parseComments(commentsRaw) {
-                       out.Write([]byte(fmt.Sprintf(
-                               "<dt><a name=\"comment%d\"><a href=\"#comment%d\">comment %d</a>:"+
-                                       "</dt>\n<dd><pre>\n",
-                               i, i, i,
-                       )))
-                       lines = strings.Split(comment, "\n")
-                       for _, line := range lines[:3] {
-                               out.Write([]byte(line + "\n"))
-                       }
-                       for _, line := range lines[3:] {
-                               out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
-                       }
-                       out.Write([]byte("</pre></dd>\n"))
+                       notesLines = strings.Split(string(notesRaw), "\n")
+               }
+
+               os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
+               err = TmplHTMLEntry.Execute(out, struct {
+                       Version         string
+                       Cfg             *Cfg
+                       Title           string
+                       TitleEscaped    string
+                       When            string
+                       AtomCommentsURL string
+                       Parent          string
+                       Commit          *object.Commit
+                       Lines           []string
+                       NoteLines       []string
+                       Comments        []CommentEntry
+                       Topics          []string
+               }{
+                       Version:         sgblog.Version,
+                       Cfg:             cfg,
+                       Title:           title,
+                       TitleEscaped:    url.PathEscape(fmt.Sprintf("Re: %s (%s)", title, commit.Hash)),
+                       When:            when,
+                       AtomCommentsURL: atomCommentsURL,
+                       Parent:          parent,
+                       Commit:          commit,
+                       Lines:           lines[2:],
+                       NoteLines:       notesLines,
+                       Comments:        comments,
+                       Topics:          sgblog.ParseTopics(topicsRaw),
+               })
+               if err != nil {
+                       makeErr(err, http.StatusInternalServerError)
                }
-               out.Write([]byte("</dl>\n"))
        } else {
-               makeErr(errors.New("unknown URL action"))
+               makeErr(errors.New("unknown URL action"), http.StatusNotFound)
        }
        out.Write([]byte("</body></html>\n"))
        if gzipWriter != nil {
                gzipWriter.Close()
        }
        os.Stdout.Write(outBuf.Bytes())
+       return
+
+AtomFinish:
+       os.Stdout.WriteString("Content-Type: application/atom+xml; charset=utf-8\n")
+       os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
+       if gzipWriter != nil {
+               os.Stdout.WriteString("Content-Encoding: gzip\n")
+               gzipWriter.Close()
+       }
+       os.Stdout.WriteString("\n")
+       os.Stdout.Write(outBuf.Bytes())
 }