/*
-SGBlog -- Git-based CGI blogging engine
+SGBlog -- Git-backed CGI/inetd blogging/phlogging engine
Copyright (C) 2020 Sergey Matveev <stargrave@stargrave.org>
This program is free software: you can redistribute it and/or modify
along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
-// Git-based CGI blogging engine
+// Git-backed CGI/inetd blogging/phlogging engine
package main
import (
"bytes"
- "compress/gzip"
- "encoding/hex"
- "encoding/json"
- "encoding/xml"
- "errors"
+ "crypto/sha1"
"fmt"
- "hash"
- "html"
- "io"
"io/ioutil"
- "log"
- "net/url"
"os"
"regexp"
- "strconv"
+ "sort"
"strings"
+ "text/scanner"
- "github.com/hjson/hjson-go"
- "go.cypherpunks.ru/netstring/v2"
- "go.stargrave.org/sgblog"
- "golang.org/x/crypto/blake2b"
- "golang.org/x/tools/blog/atom"
- "gopkg.in/src-d/go-git.v4"
- "gopkg.in/src-d/go-git.v4/plumbing"
- "gopkg.in/src-d/go-git.v4/plumbing/object"
+ "github.com/go-git/go-git/v5"
+ "github.com/go-git/go-git/v5/plumbing"
+ "github.com/go-git/go-git/v5/plumbing/object"
)
const (
PageEntries = 50
- AtomFeed = "feed.atom"
)
var (
- sha1DigestRe = regexp.MustCompilePOSIX("([0-9a-f]{40,40})")
- defaultLinks = []string{}
+ sha1DigestRe = regexp.MustCompilePOSIX(fmt.Sprintf("([0-9a-f]{%d,%d})", sha1.Size*2, sha1.Size*2))
repo *git.Repository
notesTree *object.Tree
+ commentsRef *plumbing.Reference
commentsTree *object.Tree
-
- renderableSchemes = map[string]struct{}{
- "ftp": struct{}{},
- "gopher": struct{}{},
- "http": struct{}{},
- "https": struct{}{},
- "telnet": struct{}{},
- }
+ topicsRef *plumbing.Reference
+ topicsTree *object.Tree
)
-type TableEntry struct {
- commit *object.Commit
- commentsRaw []byte
-}
-
// Cfg is the engine's configuration, decoded from the HJSON file
// pointed to by the environment/configuration path.
// NOTE(review): reconstructed from a diff; additional (web-related)
// fields may be declared in hunks not visible here — confirm against
// the full file.
type Cfg struct {
	GitPath string // path to the Git repository holding the blog
	Branch  string // reference name of the branch with posts

	CommentsNotesRef string // Git notes reference storing comments
	CommentsEmail    string // email address for submitting new comments

	TopicsNotesRef  string // Git notes reference storing per-post topics
	TopicsCachePath string // path to the topics cache

	GopherDomain string // domain advertised when serving gopher
}
func msgSplit(msg string) []string {
return lines
}
-func lineURLize(urlPrefix, line string) string {
- cols := strings.Split(html.EscapeString(line), " ")
- for i, col := range cols {
- if u := urlParse(col); u != nil {
- cols[i] = makeA(col, col)
- continue
- }
- cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
- urlPrefix+"/$1", "$1",
- ))
- }
- return strings.Join(cols, " ")
-}
-
func getNote(tree *object.Tree, what plumbing.Hash) []byte {
if tree == nil {
return nil
// parseComments decodes the comments blob attached to a commit as a
// Git note. The format is line-oriented: comments are separated by an
// empty line; each comment starts with header lines (e.g. "From:",
// "Date:"), then a literal "Body:" line, after which every body line
// is prefixed with "+ " (a lone "+" encodes an empty body line).
// It returns one string per comment with headers and body joined by
// newlines, the "Body:" marker replaced by an empty separator line.
func parseComments(data []byte) []string {
	comments := []string{}
	isBody := false
	comment := make([]string, 0, 4)
	lines := strings.Split(strings.TrimSuffix(string(data), "\n"), "\n")
	// A single line can not form a valid comment (headers + body are
	// always multiple lines), so treat it as "no comments".
	if len(lines) == 1 {
		return comments
	}
	for _, s := range lines {
		if s == "" {
			// Blank separator: the current comment is complete.
			comments = append(comments, strings.Join(comment, "\n"))
			comment = make([]string, 0, 4)
			isBody = false
			continue
		}
		if s == "Body:" {
			// Switch to body mode; the empty string visually separates
			// headers from body in the rendered comment.
			isBody = true
			comment = append(comment, "")
			continue
		}
		if isBody {
			if s == "+" {
				comment = append(comment, "")
			} else {
				comment = append(comment, strings.TrimPrefix(s, "+ "))
			}
			continue
		}
		comment = append(comment, s)
	}
	// Flush the trailing comment (no terminating blank line) if it
	// actually accumulated more than a single line.
	if len(comment) > 1 {
		comments = append(comments, strings.Join(comment, "\n"))
	}
	return comments
}
-func checkETag(etag hash.Hash) {
- ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
- if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
- fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
- os.Exit(0)
// parseTopics tokenizes the topics note attached to a commit.  Topics
// are whitespace-separated words; text/scanner splits them into tokens
// and the result is returned sorted lexicographically so callers get a
// deterministic, ordered topic list.
func parseTopics(data []byte) []string {
	var s scanner.Scanner
	s.Init(bytes.NewBuffer(data))
	topics := []string{}
	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
		topics = append(topics, s.TokenText())
	}
	sort.Strings(topics)
	return topics
}
-func main() {
- cfgPath := os.Getenv("SGBLOG_CFG")
- if cfgPath == "" {
- log.Fatalln("SGBLOG_CFG is not set")
- }
- pathInfo, exists := os.LookupEnv("PATH_INFO")
- if !exists {
- pathInfo = "/"
- }
- queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
- if err != nil {
- makeErr(err)
- }
-
- cfgRaw, err := ioutil.ReadFile(cfgPath)
- if err != nil {
- makeErr(err)
- }
- var cfgGeneral map[string]interface{}
- if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
- makeErr(err)
- }
- cfgRaw, err = json.Marshal(cfgGeneral)
- if err != nil {
- makeErr(err)
- }
- var cfg *Cfg
- if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
- makeErr(err)
- }
-
- etagHash, err := blake2b.New256(nil)
- if err != nil {
- panic(err)
- }
- etagHash.Write([]byte("SGBLOG"))
- etagHash.Write([]byte(sgblog.Version))
- etagHash.Write([]byte(cfg.GitPath))
- etagHash.Write([]byte(cfg.Branch))
- etagHash.Write([]byte(cfg.Title))
- etagHash.Write([]byte(cfg.URLPrefix))
- etagHash.Write([]byte(cfg.AtomBaseURL))
- etagHash.Write([]byte(cfg.AtomId))
- etagHash.Write([]byte(cfg.AtomAuthor))
-
- etagHashForWeb := [][]byte{}
- if cfg.CSS != "" {
- defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
- etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
- }
- if cfg.Webmaster != "" {
- defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
- etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
- }
- if cfg.AboutURL != "" {
- etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
- }
- for _, gitURL := range cfg.GitURLs {
- defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
- etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
- }
- if cfg.CommentsNotesRef != "" {
- etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
- }
- if cfg.CommentsEmail != "" {
- etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
- }
-
- defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
- atomURL := cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomFeed
- defaultLinks = append(defaultLinks, `<link rel="alternate" title="Atom feed" href="`+atomURL+`" type="application/atom+xml">`)
-
+func initRepo(cfg *Cfg) (*plumbing.Hash, error) {
+ var err error
repo, err = git.PlainOpen(cfg.GitPath)
if err != nil {
- makeErr(err)
+ return nil, err
}
head, err := repo.Reference(plumbing.ReferenceName(cfg.Branch), false)
if err != nil {
- makeErr(err)
+ return nil, err
}
-
+ headHash := head.Hash()
if notes, err := repo.Notes(); err == nil {
var notesRef *plumbing.Reference
- var commentsRef *plumbing.Reference
notes.ForEach(func(ref *plumbing.Reference) error {
switch string(ref.Name()) {
case "refs/notes/commits":
notesRef = ref
case cfg.CommentsNotesRef:
commentsRef = ref
+ case cfg.TopicsNotesRef:
+ topicsRef = ref
}
return nil
})
commentsTree, _ = commentsCommit.Tree()
}
}
- }
-
- var outBuf bytes.Buffer
- var out io.Writer
- out = &outBuf
- var gzipWriter *gzip.Writer
- acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
- for _, encoding := range strings.Split(acceptEncoding, ", ") {
- if encoding == "gzip" {
- gzipWriter = gzip.NewWriter(&outBuf)
- out = gzipWriter
- }
- }
-
- if pathInfo == "/" {
- offset := 0
- if offsetRaw, exists := queryValues["offset"]; exists {
- offset, err = strconv.Atoi(offsetRaw[0])
- if err != nil {
- makeErr(err)
- }
- }
- log, err := repo.Log(&git.LogOptions{From: head.Hash()})
- if err != nil {
- makeErr(err)
- }
- commentN := 0
- for i := 0; i < offset; i++ {
- if _, err = log.Next(); err != nil {
- break
- }
- commentN++
- }
-
- entries := make([]TableEntry, 0, PageEntries)
- logEnded := false
- for _, data := range etagHashForWeb {
- etagHash.Write(data)
- }
- etagHash.Write([]byte("INDEX"))
- for i := 0; i < PageEntries; i++ {
- commit, err := log.Next()
- if err != nil {
- logEnded = true
- break
- }
- etagHash.Write(commit.Hash[:])
- commentsRaw := getNote(commentsTree, commit.Hash)
- etagHash.Write(commentsRaw)
- entries = append(entries, TableEntry{commit, commentsRaw})
- }
- checkETag(etagHash)
-
- var table bytes.Buffer
- table.WriteString(
- "<table border=1>\n" +
- "<caption>Comments</caption>\n<tr>" +
- "<th>N</th>" +
- "<th>When</th>" +
- "<th>Title</th>" +
- `<th size="5%"><a title="Lines">L</a></th>` +
- `<th size="5%"><a title="Comments">C</a></th>` +
- "<th>Linked to</th></tr>\n")
- for _, entry := range entries {
- commentN++
- lines := msgSplit(entry.commit.Message)
- domains := []string{}
- for _, line := range lines[2:] {
- if u := urlParse(line); u == nil {
- break
- } else {
- domains = append(domains, makeA(line, u.Host))
- }
- }
- var commentsValue string
- if l := len(parseComments(entry.commentsRaw)); l > 0 {
- commentsValue = strconv.Itoa(l)
- } else {
- commentsValue = " "
- }
- table.WriteString(fmt.Sprintf(
- "<tr><td>%d</td><td><tt>%s</tt></td>"+
- "<td>%s</td>"+
- "<td>%d</td><td>%s</td>"+
- "<td>%s</td></tr>\n",
- commentN, entry.commit.Author.When.Format(sgblog.WhenFmt),
- makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
- len(lines)-2,
- commentsValue,
- strings.Join(domains, " "),
- ))
- }
- table.WriteString("</table>")
-
- var href string
- var links []string
- var refs bytes.Buffer
- if offset > 0 {
- if offsetPrev := offset - PageEntries; offsetPrev > 0 {
- href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
- } else {
- href = cfg.URLPrefix + "/"
+ if topicsRef != nil {
+ if topicsCommit, err := repo.CommitObject(topicsRef.Hash()); err == nil {
+ topicsTree, _ = topicsCommit.Tree()
}
- links = append(links, `<link rel="prev" href="`+href+`" title="newer">`)
- refs.WriteString("\n" + makeA(href, "[prev]"))
}
- if !logEnded {
- href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
- links = append(links, `<link rel="next" href="`+href+`" title="older">`)
- refs.WriteString("\n" + makeA(href, "[next]"))
- }
-
- os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
- out.Write([]byte(startHTML(
- fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
- links,
- )))
- if cfg.AboutURL != "" {
- out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
- }
- out.Write(refs.Bytes())
- out.Write(table.Bytes())
- out.Write(refs.Bytes())
- out.Write([]byte("\n"))
- } else if pathInfo == "/"+AtomFeed {
- commit, err := repo.CommitObject(head.Hash())
- if err != nil {
- makeErr(err)
- }
- etagHash.Write([]byte("ATOM"))
- etagHash.Write(commit.Hash[:])
- checkETag(etagHash)
- feed := atom.Feed{
- Title: cfg.Title,
- ID: cfg.AtomId,
- Updated: atom.Time(commit.Author.When),
- Link: []atom.Link{{
- Rel: "self",
- Href: atomURL,
- }},
- Author: &atom.Person{Name: cfg.AtomAuthor},
- }
- log, err := repo.Log(&git.LogOptions{From: head.Hash()})
- if err != nil {
- makeErr(err)
- }
- for i := 0; i < PageEntries; i++ {
- commit, err = log.Next()
- if err != nil {
- break
- }
-
- feedIdRaw := new([16]byte)
- copy(feedIdRaw[:], commit.Hash[:])
- feedIdRaw[6] = (feedIdRaw[6] & 0x0F) | uint8(4<<4) // version 4
- feedId := fmt.Sprintf(
- "%x-%x-%x-%x-%x",
- feedIdRaw[0:4],
- feedIdRaw[4:6],
- feedIdRaw[6:8],
- feedIdRaw[8:10],
- feedIdRaw[10:],
- )
+ }
+ return &headHash, nil
+}
- lines := msgSplit(commit.Message)
- feed.Entry = append(feed.Entry, &atom.Entry{
- Title: lines[0],
- ID: "urn:uuid:" + feedId,
- Link: []atom.Link{{
- Rel: "alternate",
- Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
- }},
- Published: atom.Time(commit.Author.When),
- Updated: atom.Time(commit.Author.When),
- Summary: &atom.Text{
- Type: "text",
- Body: lines[0],
- },
- Content: &atom.Text{
- Type: "text",
- Body: strings.Join(lines[2:], "\n"),
- },
- })
- }
- data, err := xml.MarshalIndent(&feed, "", " ")
- if err != nil {
- makeErr(err)
- }
- out.Write(data)
- os.Stdout.WriteString("Content-Type: text/xml; charset=UTF-8\n")
- os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
- if gzipWriter != nil {
- os.Stdout.WriteString("Content-Encoding: gzip\n")
- gzipWriter.Close()
- }
- os.Stdout.WriteString("\n")
- os.Stdout.Write(outBuf.Bytes())
- return
- } else if sha1DigestRe.MatchString(pathInfo[1:]) {
- commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
- if err != nil {
- makeErr(err)
- }
- for _, data := range etagHashForWeb {
- etagHash.Write(data)
- }
- etagHash.Write([]byte("ENTRY"))
- etagHash.Write(commit.Hash[:])
- notesRaw := getNote(notesTree, commit.Hash)
- etagHash.Write(notesRaw)
- commentsRaw := getNote(commentsTree, commit.Hash)
- etagHash.Write(commentsRaw)
- checkETag(etagHash)
- lines := msgSplit(commit.Message)
- title := lines[0]
- when := commit.Author.When.Format(sgblog.WhenFmt)
- os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
- links := []string{}
- var parent string
- if len(commit.ParentHashes) > 0 {
- parent = commit.ParentHashes[0].String()
- links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="older">`)
- }
- out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
- if cfg.AboutURL != "" {
- out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.AboutURL, "about"))))
- }
- out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.URLPrefix+"/", "index"))))
- if parent != "" {
- out.Write([]byte(fmt.Sprintf(
- "[%s]\n",
- makeA(cfg.URLPrefix+"/"+parent, "older"),
- )))
- }
- out.Write([]byte(fmt.Sprintf(
- "[<tt><a title=\"When\">%s</a></tt>]\n"+
- "[<tt><a title=\"Hash\">%s</a></tt>]\n"+
- "<hr/>\n<h2>%s</h2>\n<pre>\n",
- when, commit.Hash.String(), title,
- )))
- for _, line := range lines[2:] {
- out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
- }
- out.Write([]byte("</pre>\n<hr/>\n"))
- if len(notesRaw) > 0 {
- out.Write([]byte("Note:<pre>\n" + string(notesRaw) + "\n</pre>\n<hr/>\n"))
- }
- if cfg.CommentsEmail != "" {
- out.Write([]byte("[" + makeA(
- "mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
- "write comment",
- ) + "]\n"))
- }
- out.Write([]byte("<dl>\n"))
- for i, comment := range parseComments(commentsRaw) {
- out.Write([]byte(fmt.Sprintf(
- "<dt><a name=\"comment%d\"><a href=\"#comment%d\">comment %d</a>:"+
- "</dt>\n<dd><pre>\n",
- i, i, i,
- )))
- lines = strings.Split(comment, "\n")
- for _, line := range lines[:3] {
- out.Write([]byte(line + "\n"))
- }
- for _, line := range lines[3:] {
- out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
- }
- out.Write([]byte("</pre></dd>\n"))
- }
- out.Write([]byte("</dl>\n"))
+func main() {
+ if len(os.Args) == 3 && os.Args[1] == "-gopher" {
+ serveGopher()
} else {
- makeErr(errors.New("unknown URL action"))
- }
- out.Write([]byte("</body></html>\n"))
- if gzipWriter != nil {
- gzipWriter.Close()
+ serveHTTP()
}
- os.Stdout.Write(outBuf.Bytes())
}