+/*
+SGBlog -- Git-based CGI blogging engine
+Copyright (C) 2020 Sergey Matveev <stargrave@stargrave.org>
+
+This program is free software: you can redistribute it and/or modify
+it under the terms of the GNU Affero General Public License as
+published by the Free Software Foundation, version 3 of the License.
+
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
+
+You should have received a copy of the GNU Affero General Public License
+along with this program. If not, see <http://www.gnu.org/licenses/>.
+*/
+
+// Git-based CGI blogging engine
+package main
+
+import (
+ "bytes"
+ "compress/gzip"
+ "encoding/hex"
+ "encoding/json"
+ "encoding/xml"
+ "errors"
+ "fmt"
+ "hash"
+ "html"
+ "io"
+ "io/ioutil"
+ "log"
+ "net/url"
+ "os"
+ "strconv"
+ "strings"
+
+ "github.com/hjson/hjson-go"
+ "go.stargrave.org/sgblog"
+ "golang.org/x/crypto/blake2b"
+ "golang.org/x/tools/blog/atom"
+ "gopkg.in/src-d/go-git.v4"
+ "gopkg.in/src-d/go-git.v4/plumbing"
+ "gopkg.in/src-d/go-git.v4/plumbing/object"
+)
+
const (
	// AtomFeed is the PATH_INFO value (under "/") that serves the Atom feed.
	AtomFeed = "feed.atom"
)
+
var (
	// defaultLinks accumulates the <link> elements shared by every
	// rendered page; serveHTTP appends to it based on configuration.
	defaultLinks = []string{}

	// renderableSchemes is the set of URL schemes that are turned into
	// clickable hyperlinks when found in post or comment text.
	// gofmt -s form: the redundant struct{}{} values are elided.
	renderableSchemes = map[string]struct{}{
		"ftp":    {},
		"gopher": {},
		"http":   {},
		"https":  {},
		"telnet": {},
	}
)
+
// TableEntry is one row of the index page table: a blog post (a Git
// commit) together with the raw, not-yet-parsed comments attached to it
// via Git notes (may be empty/nil when the post has no comments).
type TableEntry struct {
	commit      *object.Commit
	commentsRaw []byte
}
+
// makeA renders an HTML anchor element pointing at href with the given
// link text. No escaping is performed here; callers escape beforehand.
func makeA(href, text string) string {
	return fmt.Sprintf(`<a href="%s">%s</a>`, href, text)
}
+
// etagString formats the current digest of etag as a double-quoted hex
// string, the form required by the HTTP ETag header.
func etagString(etag hash.Hash) string {
	var b strings.Builder
	b.WriteByte('"')
	b.WriteString(hex.EncodeToString(etag.Sum(nil)))
	b.WriteByte('"')
	return b.String()
}
+
+func urlParse(what string) *url.URL {
+ if u, err := url.ParseRequestURI(what); err == nil {
+ if _, exists := renderableSchemes[u.Scheme]; exists {
+ return u
+ }
+ }
+ return nil
+}
+
+func lineURLize(urlPrefix, line string) string {
+ cols := strings.Split(html.EscapeString(line), " ")
+ for i, col := range cols {
+ if u := urlParse(col); u != nil {
+ cols[i] = makeA(col, col)
+ continue
+ }
+ cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
+ urlPrefix+"/$1", "$1",
+ ))
+ }
+ return strings.Join(cols, " ")
+}
+
+func startHeader(etag hash.Hash, gziped bool) string {
+ lines := []string{
+ "Content-Type: text/html; charset=UTF-8",
+ "ETag: " + etagString(etag),
+ }
+ if gziped {
+ lines = append(lines, "Content-Encoding: gzip")
+ }
+ lines = append(lines, "")
+ lines = append(lines, "")
+ return strings.Join(lines, "\n")
+}
+
// startHTML emits the HTML prologue: <html>/<head> with a generator
// meta tag, the page title, and the package-wide defaultLinks merged
// with any page-specific additional <link> elements.
func startHTML(title string, additional []string) string {
	return fmt.Sprintf(`<html>
<head>
 <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
 <meta name="generator" content="SGBlog %s">
 <title>%s</title>
 %s
</head>
<body>
`,
		sgblog.Version, title,
		strings.Join(append(defaultLinks, additional...), "\n "),
	)
}
+
// makeErr reports err to the CGI client as plain text, then panics to
// abort request processing.
func makeErr(err error) {
	os.Stdout.WriteString("Content-Type: text/plain; charset=UTF-8\n\n")
	fmt.Println(err)
	panic(err)
}
+
+func checkETag(etag hash.Hash) {
+ ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
+ if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
+ fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
+ os.Exit(0)
+ }
+}
+
// serveHTTP is the CGI entrypoint. It loads the Hjson configuration
// named by $SGBLOG_CFG, opens the Git repository and, depending on
// $PATH_INFO, renders one of:
//
//	"/"          -- paginated index of posts ("offset" query parameter)
//	"/feed.atom" -- Atom feed of the newest PageEntries posts
//	"/<sha1>"    -- a single post (commit) with its note and comments
//
// Output follows the CGI protocol on stdout and is gzip-compressed when
// the client advertises gzip support. Strong ETags (BLAKE2b over
// everything that influences the page) enable If-None-Match caching.
func serveHTTP() {
	cfgPath := os.Getenv("SGBLOG_CFG")
	if cfgPath == "" {
		log.Fatalln("SGBLOG_CFG is not set")
	}
	cfgRaw, err := ioutil.ReadFile(cfgPath)
	if err != nil {
		makeErr(err)
	}
	// hjson-go unmarshals into a generic map only, so round-trip the
	// configuration through encoding/json to fill the typed Cfg struct.
	var cfgGeneral map[string]interface{}
	if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
		makeErr(err)
	}
	cfgRaw, err = json.Marshal(cfgGeneral)
	if err != nil {
		makeErr(err)
	}
	var cfg *Cfg
	if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
		makeErr(err)
	}
	pathInfo, exists := os.LookupEnv("PATH_INFO")
	if !exists {
		pathInfo = "/"
	}
	queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
	if err != nil {
		makeErr(err)
	}

	// Seed the ETag with the engine version and every configuration
	// value that affects rendering; page-specific data is mixed in
	// later, just before checkETag.
	etagHash, err := blake2b.New256(nil)
	if err != nil {
		panic(err)
	}
	etagHash.Write([]byte("SGBLOG"))
	etagHash.Write([]byte(sgblog.Version))
	etagHash.Write([]byte(cfg.GitPath))
	etagHash.Write([]byte(cfg.Branch))
	etagHash.Write([]byte(cfg.Title))
	etagHash.Write([]byte(cfg.URLPrefix))
	etagHash.Write([]byte(cfg.AtomBaseURL))
	etagHash.Write([]byte(cfg.AtomId))
	etagHash.Write([]byte(cfg.AtomAuthor))

	// Web-page-only settings are collected separately so the Atom
	// feed's ETag is unaffected by them. NOTE(review): this block also
	// mutates the package-level defaultLinks on every request -- fine
	// for one-shot CGI, but not reentrant.
	etagHashForWeb := [][]byte{}
	if cfg.CSS != "" {
		defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
		etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
	}
	if cfg.Webmaster != "" {
		defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
		etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
	}
	if cfg.AboutURL != "" {
		etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
	}
	for _, gitURL := range cfg.GitURLs {
		defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
		etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
	}
	if cfg.CommentsNotesRef != "" {
		etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
	}
	if cfg.CommentsEmail != "" {
		etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
	}

	defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
	atomURL := cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomFeed
	defaultLinks = append(defaultLinks, `<link rel="alternate" title="Atom feed" href="`+atomURL+`" type="application/atom+xml">`)

	headHash, err := initRepo(cfg)
	if err != nil {
		makeErr(err)
	}

	// Resolve the notes trees: "refs/notes/commits" holds per-post
	// notes, cfg.CommentsNotesRef holds reader comments. Errors are
	// deliberately ignored -- the blog works without any notes.
	if notes, err := repo.Notes(); err == nil {
		var notesRef *plumbing.Reference
		var commentsRef *plumbing.Reference
		notes.ForEach(func(ref *plumbing.Reference) error {
			switch string(ref.Name()) {
			case "refs/notes/commits":
				notesRef = ref
			case cfg.CommentsNotesRef:
				commentsRef = ref
			}
			return nil
		})
		if notesRef != nil {
			if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
				notesTree, _ = commentsCommit.Tree()
			}
		}
		if commentsRef != nil {
			if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
				commentsTree, _ = commentsCommit.Tree()
			}
		}
	}

	// Render into a buffer (possibly through gzip) so that headers can
	// be written to stdout before the buffered body.
	// NOTE(review): Accept-Encoding parsing is naive -- it expects
	// exactly ", " separators and ignores qvalues, so "gzip;q=0" or
	// "gzip,deflate" are mishandled; confirm this is acceptable for the
	// target clients.
	var outBuf bytes.Buffer
	var out io.Writer
	out = &outBuf
	var gzipWriter *gzip.Writer
	acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
	for _, encoding := range strings.Split(acceptEncoding, ", ") {
		if encoding == "gzip" {
			gzipWriter = gzip.NewWriter(&outBuf)
			out = gzipWriter
		}
	}

	if pathInfo == "/" {
		// Index page: skip "offset" commits from HEAD, then render up
		// to PageEntries of them as a table.
		offset := 0
		if offsetRaw, exists := queryValues["offset"]; exists {
			offset, err = strconv.Atoi(offsetRaw[0])
			if err != nil {
				makeErr(err)
			}
		}
		repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
		if err != nil {
			makeErr(err)
		}
		commitN := 0
		for i := 0; i < offset; i++ {
			if _, err = repoLog.Next(); err != nil {
				break
			}
			commitN++
		}

		entries := make([]TableEntry, 0, PageEntries)
		logEnded := false
		for _, data := range etagHashForWeb {
			etagHash.Write(data)
		}
		etagHash.Write([]byte("INDEX"))
		for i := 0; i < PageEntries; i++ {
			commit, err := repoLog.Next()
			if err != nil {
				logEnded = true
				break
			}
			etagHash.Write(commit.Hash[:])
			commentsRaw := getNote(commentsTree, commit.Hash)
			etagHash.Write(commentsRaw)
			entries = append(entries, TableEntry{commit, commentsRaw})
		}
		// ETag now covers every commit and comment blob on this page.
		checkETag(etagHash)

		var table bytes.Buffer
		table.WriteString(
			"<table border=1>\n" +
				"<caption>Comments</caption>\n<tr>" +
				"<th>N</th>" +
				"<th>When</th>" +
				"<th>Title</th>" +
				`<th size="5%"><a title="Lines">L</a></th>` +
				`<th size="5%"><a title="Comments">C</a></th>` +
				"<th>Linked to</th></tr>\n")
		for _, entry := range entries {
			commitN++
			// msgSplit presumably yields [0] title, [1] empty
			// separator line, [2:] body lines -- TODO confirm.
			lines := msgSplit(entry.commit.Message)
			// Leading body lines that are bare renderable URLs become
			// the "Linked to" column, one link per host.
			domains := []string{}
			for _, line := range lines[2:] {
				if u := urlParse(line); u == nil {
					break
				} else {
					domains = append(domains, makeA(line, u.Host))
				}
			}
			var commentsValue string
			if l := len(parseComments(entry.commentsRaw)); l > 0 {
				commentsValue = strconv.Itoa(l)
			} else {
				commentsValue = " "
			}
			table.WriteString(fmt.Sprintf(
				"<tr><td>%d</td><td><tt>%s</tt></td>"+
					"<td>%s</td>"+
					"<td>%d</td><td>%s</td>"+
					"<td>%s</td></tr>\n",
				commitN, entry.commit.Author.When.Format(sgblog.WhenFmt),
				makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
				len(lines)-2,
				commentsValue,
				strings.Join(domains, " "),
			))
		}
		table.WriteString("</table>")

		// Pagination: "prev" points towards newer posts, "next"
		// towards older ones; "next" is omitted once the log ends.
		var href string
		var links []string
		var refs bytes.Buffer
		if offset > 0 {
			if offsetPrev := offset - PageEntries; offsetPrev > 0 {
				href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
			} else {
				href = cfg.URLPrefix + "/"
			}
			links = append(links, `<link rel="prev" href="`+href+`" title="newer">`)
			refs.WriteString("\n" + makeA(href, "[prev]"))
		}
		if !logEnded {
			href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
			links = append(links, `<link rel="next" href="`+href+`" title="older">`)
			refs.WriteString("\n" + makeA(href, "[next]"))
		}

		os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
		out.Write([]byte(startHTML(
			fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
			links,
		)))
		if cfg.AboutURL != "" {
			out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
		}
		out.Write(refs.Bytes())
		out.Write(table.Bytes())
		out.Write(refs.Bytes())
		out.Write([]byte("\n"))
	} else if pathInfo == "/"+AtomFeed {
		// Atom feed. etagHashForWeb is intentionally NOT mixed in:
		// the feed does not depend on web-only settings.
		commit, err := repo.CommitObject(*headHash)
		if err != nil {
			makeErr(err)
		}
		etagHash.Write([]byte("ATOM"))
		etagHash.Write(commit.Hash[:])
		checkETag(etagHash)
		feed := atom.Feed{
			Title:   cfg.Title,
			ID:      cfg.AtomId,
			Updated: atom.Time(commit.Author.When),
			Link: []atom.Link{{
				Rel:  "self",
				Href: atomURL,
			}},
			Author: &atom.Person{Name: cfg.AtomAuthor},
		}
		repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
		if err != nil {
			makeErr(err)
		}
		for i := 0; i < PageEntries; i++ {
			commit, err = repoLog.Next()
			if err != nil {
				break
			}

			// Derive a stable, RFC 4122 version-4-shaped UUID from the
			// first 16 bytes of the commit hash to use as the entry ID.
			feedIdRaw := new([16]byte)
			copy(feedIdRaw[:], commit.Hash[:])
			feedIdRaw[6] = (feedIdRaw[6] & 0x0F) | uint8(4<<4) // version 4
			feedId := fmt.Sprintf(
				"%x-%x-%x-%x-%x",
				feedIdRaw[0:4],
				feedIdRaw[4:6],
				feedIdRaw[6:8],
				feedIdRaw[8:10],
				feedIdRaw[10:],
			)

			lines := msgSplit(commit.Message)
			feed.Entry = append(feed.Entry, &atom.Entry{
				Title: lines[0],
				ID:    "urn:uuid:" + feedId,
				Link: []atom.Link{{
					Rel:  "alternate",
					Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
				}},
				Published: atom.Time(commit.Author.When),
				Updated:   atom.Time(commit.Author.When),
				Summary: &atom.Text{
					Type: "text",
					Body: lines[0],
				},
				Content: &atom.Text{
					Type: "text",
					Body: strings.Join(lines[2:], "\n"),
				},
			})
		}
		data, err := xml.MarshalIndent(&feed, "", " ")
		if err != nil {
			makeErr(err)
		}
		// Headers are emitted by hand here (not via startHeader) since
		// the content type is XML, not HTML.
		out.Write(data)
		os.Stdout.WriteString("Content-Type: text/xml; charset=UTF-8\n")
		os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
		if gzipWriter != nil {
			os.Stdout.WriteString("Content-Encoding: gzip\n")
			gzipWriter.Close()
		}
		os.Stdout.WriteString("\n")
		os.Stdout.Write(outBuf.Bytes())
		return
	} else if sha1DigestRe.MatchString(pathInfo[1:]) {
		// Single post page, addressed by its full commit SHA1.
		commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
		if err != nil {
			makeErr(err)
		}
		for _, data := range etagHashForWeb {
			etagHash.Write(data)
		}
		etagHash.Write([]byte("ENTRY"))
		etagHash.Write(commit.Hash[:])
		notesRaw := getNote(notesTree, commit.Hash)
		etagHash.Write(notesRaw)
		commentsRaw := getNote(commentsTree, commit.Hash)
		etagHash.Write(commentsRaw)
		checkETag(etagHash)
		lines := msgSplit(commit.Message)
		title := lines[0]
		when := commit.Author.When.Format(sgblog.WhenFmt)
		os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
		links := []string{}
		var parent string
		// The first parent commit (if any) is treated as the
		// previous, older post.
		if len(commit.ParentHashes) > 0 {
			parent = commit.ParentHashes[0].String()
			links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="older">`)
		}
		out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
		if cfg.AboutURL != "" {
			out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.AboutURL, "about"))))
		}
		out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.URLPrefix+"/", "index"))))
		if parent != "" {
			out.Write([]byte(fmt.Sprintf(
				"[%s]\n",
				makeA(cfg.URLPrefix+"/"+parent, "older"),
			)))
		}
		out.Write([]byte(fmt.Sprintf(
			"[<tt><a title=\"When\">%s</a></tt>]\n"+
				"[<tt><a title=\"Hash\">%s</a></tt>]\n"+
				"<hr/>\n<h2>%s</h2>\n<pre>\n",
			when, commit.Hash.String(), title,
		)))
		for _, line := range lines[2:] {
			out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
		}
		out.Write([]byte("</pre>\n<hr/>\n"))
		if len(notesRaw) > 0 {
			// NOTE(review): the note body is written unescaped --
			// acceptable only because notes are author-controlled;
			// confirm.
			out.Write([]byte("Note:<pre>\n" + string(notesRaw) + "\n</pre>\n<hr/>\n"))
		}
		if cfg.CommentsEmail != "" {
			out.Write([]byte("[" + makeA(
				"mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
				"write comment",
			) + "]\n"))
		}
		out.Write([]byte("<dl>\n"))
		for i, comment := range parseComments(commentsRaw) {
			out.Write([]byte(fmt.Sprintf(
				"<dt><a name=\"comment%d\"><a href=\"#comment%d\">comment %d</a>:"+
					"</dt>\n<dd><pre>\n",
				i, i, i,
			)))
			// The first three lines are presumably the comment's
			// metadata header (written verbatim); the rest is the
			// body, which gets URLized. NOTE(review): this panics if
			// parseComments ever yields a comment with fewer than
			// three lines -- confirm its contract.
			lines = strings.Split(comment, "\n")
			for _, line := range lines[:3] {
				out.Write([]byte(line + "\n"))
			}
			for _, line := range lines[3:] {
				out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
			}
			out.Write([]byte("</pre></dd>\n"))
		}
		out.Write([]byte("</dl>\n"))
	} else {
		makeErr(errors.New("unknown URL action"))
	}
	// Common epilogue for the HTML branches (the Atom branch returned
	// earlier): close the document, flush gzip if active, emit body.
	out.Write([]byte("</body></html>\n"))
	if gzipWriter != nil {
		gzipWriter.Close()
	}
	os.Stdout.Write(outBuf.Bytes())
}