--- /dev/null
+/*
+SGBlog -- Git-based CGI blogging engine
+Copyright (C) 2020 Sergey Matveev <stargrave@stargrave.org>
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU Affero General Public License as
+published by the Free Software Foundation, version 3 of the License.
+
+This program is distributed in the hope that it will be useful,
+but WITHOUT ANY WARRANTY; without even the implied warranty of
+MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+GNU Affero General Public License for more details.
+
+You should have received a copy of the GNU Affero General Public License
+along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+package main
+
+import (
+ "bytes"
+ "encoding/base64"
+ "errors"
+ "io"
+ "io/ioutil"
+ "mime"
+ "mime/multipart"
+ "mime/quotedprintable"
+ "net/mail"
+ "strings"
+)
+
+const (
+ CT = "Content-Type"
+ CTE = "Content-Transfer-Encoding"
+ TP = "text/plain"
+)
+
+// processTP wraps the body of a text/plain MIME part in a reader yielding
+// its decoded content.  ct is the full Content-Type header value, cte the
+// Content-Transfer-Encoding header value, body the still-encoded part body.
+func processTP(ct, cte string, body io.Reader) (io.Reader, error) {
+ _, params, err := mime.ParseMediaType(ct)
+ if err != nil {
+ return nil, err
+ }
+ // Accept only a small charset whitelist; an absent charset also passes.
+ // NOTE(review): comparisons are case-sensitive, so e.g. "UTF-8" is
+ // rejected — confirm senders always use lowercase charset values.
+ if c := params["charset"]; !(c == "" || c == "utf-8" || c == "iso-8859-1" || c == "us-ascii") {
+ return nil, errors.New("only utf-8/iso-8859-1/us-ascii charsets supported")
+ }
+ switch cte {
+ case "quoted-printable":
+ return quotedprintable.NewReader(body), nil
+ case "base64":
+ return base64.NewDecoder(base64.StdEncoding, body), nil
+ }
+ // Any other (or empty) transfer encoding is passed through unchanged.
+ return body, nil
+}
+
+// parseEmail extracts the Subject and a reader over the text/plain body of
+// an incoming comment e-mail.  Accepted layouts: a plain text/plain message,
+// or multipart/signed whose first text/plain part (possibly one level deep
+// inside multipart/mixed) is used.  Anything else yields an error.
+func parseEmail(msg *mail.Message) (subj string, body io.Reader, err error) {
+ subj = msg.Header.Get("Subject")
+ if subj == "" {
+ err = errors.New("no Subject")
+ return
+ }
+ // Decode RFC 2047 encoded-words in the Subject one whitespace-separated
+ // token at a time; an undecodable token aborts with the decode error.
+ words := strings.Fields(subj)
+ for i, word := range words {
+ if strings.HasPrefix(word, "=?") && strings.HasSuffix(word, "?=") {
+ word, err = new(mime.WordDecoder).Decode(word)
+ if err != nil {
+ return
+ }
+ words[i] = word
+ }
+ }
+ subj = strings.Join(words, " ")
+
+ // A missing Content-Type header defaults to text/plain.
+ ct := msg.Header.Get(CT)
+ if ct == "" {
+ ct = "text/plain"
+ }
+ if strings.HasPrefix(ct, TP) {
+ body, err = processTP(ct, msg.Header.Get(CTE), msg.Body)
+ return
+ }
+ // NOTE(review): a ParseMediaType failure is masked here — err is
+ // overwritten by the generic unsupported-Content-Type error below.
+ ct, params, err := mime.ParseMediaType(ct)
+ if ct != "multipart/signed" {
+ err = errors.New("only text/plain and multipart/signed+text/plain Content-Type supported")
+ return
+ }
+ boundary := params["boundary"]
+ if len(boundary) == 0 {
+ err = errors.New("no boundary string")
+ return
+ }
+ data, err := ioutil.ReadAll(msg.Body)
+ if err != nil {
+ return
+ }
+ // Advance to the first boundary marker — presumably to skip any leading
+ // preamble/garbage before the multipart content; TODO confirm why, since
+ // multipart.Reader itself tolerates a preamble.
+ boundaryIdx := bytes.Index(data, []byte("--"+boundary))
+ if boundaryIdx == -1 {
+ err = errors.New("no boundary found")
+ return
+ }
+ mpr := multipart.NewReader(bytes.NewReader(data[boundaryIdx:]), boundary)
+ var part *multipart.Part
+ for {
+ part, err = mpr.NextPart()
+ if err != nil {
+ if err == io.EOF {
+ break
+ }
+ return
+ }
+ ct = part.Header.Get(CT)
+ if strings.HasPrefix(ct, TP) {
+ body, err = processTP(ct, part.Header.Get(CTE), part)
+ return
+ }
+ // A signed message may wrap its text in multipart/mixed; descend
+ // exactly one level looking for the first text/plain part.
+ if strings.HasPrefix(ct, "multipart/mixed") {
+ // NOTE(review): err from ParseMediaType is not checked here; a
+ // malformed Content-Type surfaces as "no boundary string" instead.
+ ct, params, err = mime.ParseMediaType(ct)
+ boundary = params["boundary"]
+ if len(boundary) == 0 {
+ err = errors.New("no boundary string")
+ return
+ }
+ mpr := multipart.NewReader(part, boundary)
+ for {
+ part, err = mpr.NextPart()
+ if err != nil {
+ if err == io.EOF {
+ break
+ }
+ return
+ }
+ ct = part.Header.Get(CT)
+ if strings.HasPrefix(ct, TP) {
+ body, err = processTP(ct, part.Header.Get(CTE), part)
+ return
+ }
+ }
+ }
+ }
+ err = errors.New("no text/plain part found")
+ return
+}
"bytes"
"compress/gzip"
"encoding/hex"
+ "encoding/json"
"encoding/xml"
"errors"
"fmt"
"hash"
+ "html"
"io"
"io/ioutil"
+ "log"
"net/url"
"os"
"regexp"
"strconv"
"strings"
- "github.com/google/uuid"
+ "github.com/hjson/hjson-go"
+ "go.cypherpunks.ru/netstring/v2"
"golang.org/x/crypto/blake2b"
"golang.org/x/tools/blog/atom"
"gopkg.in/src-d/go-git.v4"
)
+// Package-level state shared by the request handling code below.
var (
- Version = "0.0.2"
- ETagVersion = []byte("2")
+ Version = "0.1.0"
+ // sha1DigestRe matches a full 40-hex-digit SHA-1 commit digest.
 sha1DigestRe = regexp.MustCompilePOSIX("([0-9a-f]{40,40})")
 defaultLinks = []string{}
 repo *git.Repository
 commentsTree *object.Tree
+ // renderableSchemes lists URL schemes rendered as clickable links.
 renderableSchemes = map[string]struct{}{
- "http": struct{}{},
- "https": struct{}{},
 "ftp": struct{}{},
 "gopher": struct{}{},
+ "http": struct{}{},
+ "https": struct{}{},
 }
)
+// TableEntry is one row of the index page: a blog-post commit together
+// with its raw comments blob (netstring-encoded; nil when absent).
+type TableEntry struct {
+ commit *object.Commit
+ commentsRaw []byte
+}
+
+// Cfg is the blog configuration loaded from the Hjson file pointed to by
+// the SGBLOG_CFG environment variable.
+type Cfg struct {
+ // Repository path and branch reference whose log forms the blog.
+ GitPath string
+ Branch string
+ Title string
+
+ // Public URL pieces used when building links.
+ BaseURL string
+ URLPrefix string
+
+ // Atom feed metadata.
+ AtomId string
+ AtomAuthor string
+
+ // Optional page decorations (empty values disable each feature).
+ CSS string
+ Webmaster string
+ AboutURL string
+ GitURLs []string
+
+ // Optional comments support: the git-notes ref holding comments and
+ // the e-mail address for submitting new ones.
+ CommentsNotesRef string
+ CommentsEmail string
+}
+
+// makeA renders an HTML anchor: <a href="href">text</a>.
+// Neither argument is HTML-escaped here.
 func makeA(href, text string) string {
- return fmt.Sprintf(`<a href="%s">%s</a>`, href, text)
+ return `<a href="` + href + `">` + text + `</a>`
 }
func etagString(etag hash.Hash) string {
return lines
}
+// getCommentsRaw returns the contents of the git-notes blob attached to the
+// given commit in commentsTree, with a single trailing newline stripped.
+// nil is returned when comments are disabled (commentsTree is nil) or on
+// any lookup/read failure — errors are deliberately swallowed.
-func getNote(what plumbing.Hash) string {
+func getCommentsRaw(what plumbing.Hash) []byte {
 if commentsTree == nil {
- return ""
+ return nil
 }
 entry, err := commentsTree.FindEntry(what.String())
 if err != nil {
- return ""
+ return nil
 }
 blob, err := repo.BlobObject(entry.Hash)
 if err != nil {
- return ""
+ return nil
 }
 r, err := blob.Reader()
 if err != nil {
- return ""
+ return nil
 }
 data, err := ioutil.ReadAll(r)
 if err != nil {
- return ""
+ return nil
 }
- return string(data)
+ return bytes.TrimSuffix(data, []byte{'\n'})
+}
+
+// parseComments splits a raw comments blob (as returned by getCommentsRaw)
+// into individual comment strings.  Each comment is a netstring; decoding
+// stops silently at the first malformed/terminating entry, and unreadable
+// entries are skipped.
+func parseComments(data []byte) []string {
+ comments := []string{}
+ nsr := netstring.NewReader(bytes.NewReader(data))
+ for {
+ if _, err := nsr.Next(); err != nil {
+ break
+ }
+ if comment, err := ioutil.ReadAll(nsr); err == nil {
+ comments = append(comments, string(comment))
+ }
+ }
+ return comments
}
func startHeader(etag hash.Hash, gziped bool) string {
}
+// makeErr reports a fatal error to the CGI client as text/plain and then
+// panics with the same error, aborting the request.
 func makeErr(err error) {
- fmt.Println("Content-Type: text/plain; charset=UTF-8\n")
+ fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
 fmt.Println(err)
 panic(err)
 }
+// checkETag compares the client's If-None-Match header against the computed
+// ETag and, on a match, replies 304 Not Modified and terminates the process.
 func checkETag(etag hash.Hash) {
 ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
 if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
- fmt.Println("Status: 304\nETag:", ifNoneMatch, "\n")
+ fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
 os.Exit(0)
 }
 }
func main() {
- gitPath, exists := os.LookupEnv("SGBLOG_GIT_PATH")
- if !exists {
- makeErr(errors.New("SGBLOG_GIT_PATH is unset"))
+ cfgPath := os.Getenv("SGBLOG_CFG")
+ if cfgPath == "" {
+ log.Fatalln("SGBLOG_CFG is not set")
}
- branchName, exists := os.LookupEnv("SGBLOG_BRANCH")
+ pathInfo, exists := os.LookupEnv("PATH_INFO")
if !exists {
- makeErr(errors.New("SGBLOG_BRANCH is unset"))
+ pathInfo = "/"
}
- blogBaseURL, exists := os.LookupEnv("SGBLOG_BASE_URL")
- if !exists {
- makeErr(errors.New("SGBLOG_BASE_URL is unset"))
+ queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
+ if err != nil {
+ makeErr(err)
}
- blogTitle, exists := os.LookupEnv("SGBLOG_TITLE")
- if !exists {
- makeErr(errors.New("SGBLOG_TITLE is unset"))
+
+ cfgRaw, err := ioutil.ReadFile(cfgPath)
+ if err != nil {
+ makeErr(err)
}
- atomId, exists := os.LookupEnv("SGBLOG_ATOM_ID")
- if !exists {
- makeErr(errors.New("SGBLOG_ATOM_ID is unset"))
+ var cfgGeneral map[string]interface{}
+ if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
+ makeErr(err)
}
- atomAuthorName, exists := os.LookupEnv("SGBLOG_ATOM_AUTHOR")
- if !exists {
- makeErr(errors.New("SGBLOG_ATOM_AUTHOR is unset"))
+ cfgRaw, err = json.Marshal(cfgGeneral)
+ if err != nil {
+ makeErr(err)
+ }
+ var cfg *Cfg
+ if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
+ makeErr(err)
}
etagHash, err := blake2b.New256(nil)
if err != nil {
panic(err)
}
- etagHash.Write(ETagVersion)
- etagHash.Write([]byte(gitPath))
- etagHash.Write([]byte(branchName))
- etagHash.Write([]byte(blogBaseURL))
- etagHash.Write([]byte(blogTitle))
- etagHash.Write([]byte(atomId))
- etagHash.Write([]byte(atomAuthorName))
-
- // SGBLOG_URL_PREFIX
- urlPrefix := os.Getenv("SGBLOG_URL_PREFIX")
- etagHash.Write([]byte(urlPrefix))
-
- // SGBLOG_CSS
- if cssUrl, exists := os.LookupEnv("SGBLOG_CSS"); exists {
- defaultLinks = append(defaultLinks, fmt.Sprintf(
- `<link rel="stylesheet" type="text/css" href="%s">`,
- cssUrl,
- ))
- etagHash.Write([]byte(cssUrl))
- }
-
- // SGBLOG_WEBMASTER
- if webmaster, exists := os.LookupEnv("SGBLOG_WEBMASTER"); exists {
- defaultLinks = append(defaultLinks, fmt.Sprintf(
- `<link rev="made" href="mailto:%s">`,
- webmaster,
- ))
- etagHash.Write([]byte(webmaster))
- }
-
- // SGBLOG_ABOUT
- aboutUrl := os.Getenv("SGBLOG_ABOUT")
- etagHash.Write([]byte(aboutUrl))
-
- // SGBLOG_GIT_URLS
- if gitUrls, exists := os.LookupEnv("SGBLOG_GIT_URLS"); exists {
- for _, gitUrl := range strings.Split(gitUrls, " ") {
- defaultLinks = append(defaultLinks, fmt.Sprintf(
- `<link rel="vcs-git" href="%s" title="Git repository">`,
- gitUrl,
- ))
- }
- etagHash.Write([]byte(gitUrls))
- }
-
- defaultLinks = append(defaultLinks, fmt.Sprintf(
- `<link rel="top" href="%s/" title="top">`,
- urlPrefix,
- ))
- atomUrl := blogBaseURL + urlPrefix + "/" + AtomFeed
- defaultLinks = append(defaultLinks, fmt.Sprintf(
- `<link rel="alternate" title="Atom feed" href="%s" type="application/atom+xml">`,
- atomUrl,
- ))
+ etagHash.Write([]byte("SGBLOG"))
+ etagHash.Write([]byte(cfg.GitPath))
+ etagHash.Write([]byte(cfg.Branch))
+ etagHash.Write([]byte(cfg.Title))
+ etagHash.Write([]byte(cfg.BaseURL))
+ etagHash.Write([]byte(cfg.URLPrefix))
+ etagHash.Write([]byte(cfg.AtomId))
+ etagHash.Write([]byte(cfg.AtomAuthor))
- pathInfo, exists := os.LookupEnv("PATH_INFO")
- if !exists {
- pathInfo = "/"
+ etagHashForWeb := [][]byte{}
+ if cfg.CSS != "" {
+ defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
+ etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
}
- queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
- if err != nil {
- makeErr(err)
+ if cfg.Webmaster != "" {
+ defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
+ etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
}
+ if cfg.AboutURL != "" {
+ etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
+ }
+ for _, gitURL := range cfg.GitURLs {
+ defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
+ etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
+ }
+ if cfg.CommentsNotesRef != "" {
+ etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
+ }
+ if cfg.CommentsEmail != "" {
+ etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
+ }
+
+ defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
+ atomURL := cfg.BaseURL + cfg.URLPrefix + "/" + AtomFeed
+ defaultLinks = append(defaultLinks, `<link rel="alternate" title="Atom feed" href="`+atomURL+`" type="application/atom+xml">`)
- repo, err = git.PlainOpen(gitPath)
+ repo, err = git.PlainOpen(cfg.GitPath)
if err != nil {
makeErr(err)
}
- head, err := repo.Reference(plumbing.ReferenceName(branchName), false)
+ head, err := repo.Reference(plumbing.ReferenceName(cfg.Branch), false)
if err != nil {
makeErr(err)
}
- if notes, err := repo.Notes(); err == nil {
- var comments *plumbing.Reference
- notes.ForEach(func(ref *plumbing.Reference) error {
- if ref.Name() == "refs/notes/commits" {
- comments = ref
- }
- return nil
- })
- if comments != nil {
- if commentsCommit, err := repo.CommitObject(comments.Hash()); err == nil {
- commentsTree, _ = commentsCommit.Tree()
+
+ if cfg.CommentsNotesRef != "" {
+ if notes, err := repo.Notes(); err == nil {
+ var comments *plumbing.Reference
+ notes.ForEach(func(ref *plumbing.Reference) error {
+ if string(ref.Name()) == cfg.CommentsNotesRef {
+ comments = ref
+ }
+ return nil
+ })
+ if comments != nil {
+ if commentsCommit, err := repo.CommitObject(comments.Hash()); err == nil {
+ commentsTree, _ = commentsCommit.Tree()
+ }
}
}
}
}
}
- var commit *object.Commit
if pathInfo == "/" {
offset := 0
if offsetRaw, exists := queryValues["offset"]; exists {
makeErr(err)
}
}
- var table bytes.Buffer
- table.WriteString("<table border=1>\n<tr><th>When</th><th>Title</th><th>Comment of</th></tr>\n")
log, err := repo.Log(&git.LogOptions{From: head.Hash()})
if err != nil {
makeErr(err)
}
- errOccured := false
+ commentN := 0
for i := 0; i < offset; i++ {
- commit, err = log.Next()
- if err != nil {
+ if _, err = log.Next(); err != nil {
break
}
+ commentN++
+ }
+
+ entries := make([]TableEntry, 0, PageEntries)
+ logEnded := false
+ for _, data := range etagHashForWeb {
+ etagHash.Write(data)
}
+ etagHash.Write([]byte("INDEX"))
for i := 0; i < PageEntries; i++ {
- commit, err = log.Next()
+ commit, err := log.Next()
if err != nil {
- errOccured = true
+ logEnded = true
break
}
- if i == 0 {
- etagHash.Write(commit.Hash[:])
- checkETag(etagHash)
- }
- lines := msgSplit(commit.Message)
+ etagHash.Write(commit.Hash[:])
+ commentsRaw := getCommentsRaw(commit.Hash)
+ etagHash.Write(commentsRaw)
+ entries = append(entries, TableEntry{commit, commentsRaw})
+ }
+ checkETag(etagHash)
+
+ var table bytes.Buffer
+ table.WriteString(
+ "<table border=1>\n<tr>" +
+ "<th>N</th>" +
+ "<th>When</th>" +
+ "<th>Title</th>" +
+ "<th size=\"5%\">L</th>" +
+ "<th size=\"5%\">C</th>" +
+ "<th>Linked to</th></tr>\n")
+ for _, entry := range entries {
+ commentN++
+ lines := msgSplit(entry.commit.Message)
domains := []string{}
for _, line := range lines[2:] {
if u := urlParse(line); u == nil {
domains = append(domains, makeA(line, u.Host))
}
}
- entry := []string{
- makeA(urlPrefix+"/"+commit.Hash.String(), lines[0]),
- fmt.Sprintf("(%dL)", len(lines)-2),
- }
- if note := getNote(commit.Hash); note != "" {
- entry = append(entry, "(N)")
+ var commentsValue string
+ if l := len(parseComments(entry.commentsRaw)); l > 0 {
+ commentsValue = strconv.Itoa(l)
+ } else {
+ commentsValue = " "
}
table.WriteString(fmt.Sprintf(
- "<tr><td><tt>%s</tt></td><td>%s</td><td>%s</td></tr>\n",
- commit.Author.When.Format(WhenFmt),
- strings.Join(entry, " "),
+ "<tr><td>%d</td><td><tt>%s</tt></td>"+
+ "<td>%s</td>"+
+ "<td>%d</td><td>%s</td>"+
+ "<td>%s</td></tr>\n",
+ commentN, entry.commit.Author.When.Format(WhenFmt),
+ makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
+ len(lines)-2,
+ commentsValue,
strings.Join(domains, " "),
))
}
table.WriteString("</table>")
+
+ var href string
var links []string
var refs bytes.Buffer
if offset > 0 {
- offsetPrev := offset - PageEntries
- if offsetPrev < 0 {
- offsetPrev = 0
+ if offsetPrev := offset - PageEntries; offsetPrev > 0 {
+ href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
+ } else {
+ href = cfg.URLPrefix + "/"
}
- href := urlPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
- links = append(links, fmt.Sprintf(
- `<link rel="prev" href="%s" title="newer">`, href,
- ))
- refs.WriteString(makeA(href, "[prev]"))
+ links = append(links, `<link rel="prev" href="`+href+`" title="newer">`)
+ refs.WriteString(makeA(href, " [prev]"))
}
- if !errOccured {
- href := urlPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
- links = append(links, fmt.Sprintf(
- `<link rel="next" href="%s" title="older">`, href,
- ))
- refs.WriteString(makeA(href, "[next]"))
+ if !logEnded {
+ href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
+ links = append(links, `<link rel="next" href="`+href+`" title="older">`)
+ refs.WriteString(makeA(href, " [next]"))
}
+
os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
out.Write([]byte(startHTML(
- fmt.Sprintf("%s (%d-%d)", blogTitle, offset, offset+PageEntries),
+ fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
links,
)))
+ if cfg.AboutURL != "" {
+ out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
+ }
out.Write(refs.Bytes())
out.Write(table.Bytes())
out.Write(refs.Bytes())
out.Write([]byte("\n"))
} else if pathInfo == "/"+AtomFeed {
- commit, err = repo.CommitObject(head.Hash())
+ commit, err := repo.CommitObject(head.Hash())
if err != nil {
makeErr(err)
}
- etagHash.Write(commit.Hash[:])
etagHash.Write([]byte("ATOM"))
+ etagHash.Write(commit.Hash[:])
checkETag(etagHash)
feed := atom.Feed{
- Title: blogTitle,
- ID: atomId,
+ Title: cfg.Title,
+ ID: cfg.AtomId,
Updated: atom.Time(commit.Author.When),
Link: []atom.Link{{
Rel: "self",
- Href: atomUrl,
+ Href: atomURL,
}},
- Author: &atom.Person{Name: atomAuthorName},
+ Author: &atom.Person{Name: cfg.AtomAuthor},
}
log, err := repo.Log(&git.LogOptions{From: head.Hash()})
if err != nil {
if err != nil {
break
}
+
+ feedIdRaw := new([16]byte)
+ copy(feedIdRaw[:], commit.Hash[:])
+ feedIdRaw[6] = (feedIdRaw[6] & 0x0F) | uint8(4<<4) // version 4
+ feedId := fmt.Sprintf(
+ "%x-%x-%x-%x-%x",
+ feedIdRaw[0:4],
+ feedIdRaw[4:6],
+ feedIdRaw[6:8],
+ feedIdRaw[8:10],
+ feedIdRaw[10:],
+ )
+
lines := msgSplit(commit.Message)
- feedId, err := uuid.FromBytes(commit.Hash[:16])
- if err != nil {
- panic(err)
- }
feed.Entry = append(feed.Entry, &atom.Entry{
Title: lines[0],
- ID: "urn:uuid:" + feedId.String(),
+ ID: "urn:uuid:" + feedId,
Link: []atom.Link{{
Rel: "alternate",
- Href: blogBaseURL + urlPrefix + "/" + commit.Hash.String(),
+ Href: cfg.BaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
}},
Published: atom.Time(commit.Author.When),
Updated: atom.Time(commit.Author.When),
os.Stdout.Write(outBuf.Bytes())
return
} else if sha1DigestRe.MatchString(pathInfo[1:]) {
- commit, err = repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
+ commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
if err != nil {
makeErr(err)
}
+ for _, data := range etagHashForWeb {
+ etagHash.Write(data)
+ }
+ etagHash.Write([]byte("ENTRY"))
etagHash.Write(commit.Hash[:])
+ commentsRaw := getCommentsRaw(commit.Hash)
+ etagHash.Write(commentsRaw)
checkETag(etagHash)
lines := msgSplit(commit.Message)
title := lines[0]
var parent string
if len(commit.ParentHashes) > 0 {
parent = commit.ParentHashes[0].String()
- links = append(links, fmt.Sprintf(
- `<link rel="prev" href="%s" title="older">`,
- urlPrefix+"/"+parent,
- ))
+ links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="older">`)
}
out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
+ if cfg.AboutURL != "" {
+ out.Write([]byte(fmt.Sprintf("[%s] ", makeA(cfg.AboutURL, "about"))))
+ }
if parent != "" {
out.Write([]byte(fmt.Sprintf(
- "[%s] [<tt>%s</tt>]\n<hr/>\n",
- makeA(urlPrefix+"/"+parent, "older"),
- when,
+ "[%s] ",
+ makeA(cfg.URLPrefix+"/"+parent, "older"),
)))
}
- out.Write([]byte(fmt.Sprintf("<h2>%s</h2>\n<pre>\n", title)))
+ out.Write([]byte(fmt.Sprintf(
+ "[<tt>%s</tt>] [<tt>%s</tt>]<hr/>\n<h2>%s</h2>\n<pre>\n",
+ when, commit.Hash.String(), title,
+ )))
for _, line := range lines[2:] {
- line = strings.ReplaceAll(line, "&", "&")
- line = strings.ReplaceAll(line, "<", "<")
- line = strings.ReplaceAll(line, ">", ">")
+ line = html.EscapeString(line)
cols := strings.Split(line, " ")
for i, col := range cols {
if u := urlParse(col); u != nil {
cols[i] = makeA(col, col)
continue
}
- cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(urlPrefix+"/$1", "$1"))
+ cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
+ cfg.URLPrefix+"/$1", "$1",
+ ))
}
line = strings.Join(cols, " ")
out.Write([]byte(line + "\n"))
}
- out.Write([]byte("</pre>\n"))
- if note := getNote(commit.Hash); note != "" {
- out.Write([]byte(fmt.Sprintf("Note:\n<pre>\n%s</pre>\n", note)))
+ out.Write([]byte("</pre>\n<hr/>\n"))
+ if cfg.CommentsEmail != "" {
+ out.Write([]byte("[" + makeA(
+ "mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
+ "write comment",
+ ) + "]\n"))
}
+ out.Write([]byte("<dl>\n"))
+ for i, comment := range parseComments(commentsRaw) {
+ out.Write([]byte(fmt.Sprintf(
+ "<dt><a name=\"comment%d\"><a href=\"#comment%d\">comment %d</a>:"+
+ "</dt>\n<dd><pre>\n%s\n</pre></dd>\n",
+ i, i, i, html.EscapeString(comment),
+ )))
+ }
+ out.Write([]byte("</dl>\n"))
} else {
makeErr(errors.New("unknown URL action"))
}
- if aboutUrl != "" {
- out.Write([]byte(fmt.Sprintf(
- "<hr/>%s %s\n",
- makeA(aboutUrl, "About"),
- blogTitle,
- )))
- }
out.Write([]byte("</body></html>\n"))
if gzipWriter != nil {
gzipWriter.Close()