/*
SGBlog -- Git-based CGI blogging engine
Copyright (C) 2020 Sergey Matveev <stargrave@stargrave.org>

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, version 3 of the License.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program.  If not, see <http://www.gnu.org/licenses/>.
*/

// Git-based CGI blogging engine
package main

import (
        "bytes"
        "compress/gzip"
        "encoding/hex"
        "encoding/json"
        "encoding/xml"
        "errors"
        "fmt"
        "hash"
        "html"
        "io"
        "io/ioutil"
        "log"
        "net/url"
        "os"
        "regexp"
        "strconv"
        "strings"

        "github.com/hjson/hjson-go"
        "go.cypherpunks.ru/netstring/v2"
        "go.stargrave.org/sgblog"
        "golang.org/x/crypto/blake2b"
        "golang.org/x/tools/blog/atom"
        "gopkg.in/src-d/go-git.v4"
        "gopkg.in/src-d/go-git.v4/plumbing"
        "gopkg.in/src-d/go-git.v4/plumbing/object"
)

const (
        PageEntries = 50
        AtomFeed    = "feed.atom"
)

var (
        sha1DigestRe = regexp.MustCompilePOSIX("([0-9a-f]{40,40})")
        defaultLinks = []string{}
        repo         *git.Repository
        notesTree    *object.Tree
        commentsTree *object.Tree

        renderableSchemes = map[string]struct{}{
                "ftp":    struct{}{},
                "gopher": struct{}{},
                "http":   struct{}{},
                "https":  struct{}{},
        }
)

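// TableEntry pairs a blog post commit with its raw comments note.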
type TableEntry struct {
        commit      *object.Commit
        commentsRaw []byte
}

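// Cfg is the blog's configuration, read from an HJSON file.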
type Cfg struct {
        GitPath string
        Branch  string
        Title   string

        BaseURL   string
        URLPrefix string

        AtomId     string
        AtomAuthor string

        CSS       string
        Webmaster string
        AboutURL  string
        GitURLs   []string

        CommentsNotesRef string
        CommentsEmail    string
}

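// makeA renders an HTML anchor element.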
func makeA(href, text string) string {
        return `<a href="` + href + `">` + text + `</a>`
}

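// etagString hex-encodes the hash sum and wraps it in double quotes, as expected in ETag headers.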
func etagString(etag hash.Hash) string {
        return `"` + hex.EncodeToString(etag.Sum(nil)) + `"`
}

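// urlParse returns the parsed URL if it uses one of the renderable schemes, nil otherwise.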
func urlParse(what string) *url.URL {
        if u, err := url.ParseRequestURI(what); err == nil {
                if _, exists := renderableSchemes[u.Scheme]; exists {
                        return u
                }
        }
        return nil
}

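// msgSplit splits a commit message into lines, dropping the trailing empty one
// and padding short messages to at least three lines: title, separator, body.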
func msgSplit(msg string) []string {
        lines := strings.Split(msg, "\n")
        lines = lines[:len(lines)-1]
        if len(lines) < 3 {
                lines = []string{lines[0], "", ""}
        }
        return lines
}

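// lineURLize HTML-escapes a line and turns URLs and 40-character SHA1 digests into hyperlinks.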
func lineURLize(urlPrefix, line string) string {
        cols := strings.Split(html.EscapeString(line), " ")
        for i, col := range cols {
                if u := urlParse(col); u != nil {
                        cols[i] = makeA(col, col)
                        continue
                }
                cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
                        urlPrefix+"/$1", "$1",
                ))
        }
        return strings.Join(cols, " ")
}

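// getNote reads the note attached to the given hash from a notes tree,
// trying the flat and the fanned-out (2/38, 2/2/36) path layouts used by git-notes.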
func getNote(tree *object.Tree, what plumbing.Hash) []byte {
        if tree == nil {
                return nil
        }
        var entry *object.TreeEntry
        var err error
        paths := make([]string, 3)
        paths[0] = what.String()
        paths[1] = paths[0][:2] + "/" + paths[0][2:]
        paths[2] = paths[1][:4+1] + "/" + paths[1][4+1:]
        for _, p := range paths {
                entry, err = tree.FindEntry(p)
                if err == nil {
                        break
                }
        }
        if entry == nil {
                return nil
        }
        blob, err := repo.BlobObject(entry.Hash)
        if err != nil {
                return nil
        }
        r, err := blob.Reader()
        if err != nil {
                return nil
        }
        data, err := ioutil.ReadAll(r)
        if err != nil {
                return nil
        }
        return bytes.TrimSuffix(data, []byte{'\n'})
}

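// parseComments splits a netstring-encoded notes blob into individual comments.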
func parseComments(data []byte) []string {
        comments := []string{}
        nsr := netstring.NewReader(bytes.NewReader(data))
        for {
                if _, err := nsr.Next(); err != nil {
                        break
                }
                if comment, err := ioutil.ReadAll(nsr); err == nil {
                        comments = append(comments, string(comment))
                }
        }
        return comments
}

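// startHeader builds the CGI response headers (Content-Type, ETag, optional
// gzip Content-Encoding) followed by the blank line terminating the header block.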
func startHeader(etag hash.Hash, gziped bool) string {
        lines := []string{
                "Content-Type: text/html; charset=UTF-8",
                "ETag: " + etagString(etag),
        }
        if gziped {
                lines = append(lines, "Content-Encoding: gzip")
        }
        lines = append(lines, "")
        lines = append(lines, "")
        return strings.Join(lines, "\n")
}

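// startHTML returns the HTML preamble with the given title and <head> links.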
func startHTML(title string, additional []string) string {
        return fmt.Sprintf(`<html>
<head>
        <meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
        <meta name="generator" content="SGBlog %s">
        <title>%s</title>
        %s
</head>
<body>
`,
                sgblog.Version, title,
                strings.Join(append(defaultLinks, additional...), "\n   "),
        )
}

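// makeErr reports the error to the client as plain text and panics.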
func makeErr(err error) {
        fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
        fmt.Println(err)
        panic(err)
}

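// checkETag replies with 304 Not Modified and exits if the client's
// If-None-Match header matches the computed ETag.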
func checkETag(etag hash.Hash) {
        ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
        if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
                fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
                os.Exit(0)
        }
}

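// main handles a single CGI request: it loads the configuration, opens the
// repository and dispatches on PATH_INFO between the index page, the Atom
// feed and single entry pages.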
func main() {
        cfgPath := os.Getenv("SGBLOG_CFG")
        if cfgPath == "" {
                log.Fatalln("SGBLOG_CFG is not set")
        }
        pathInfo, exists := os.LookupEnv("PATH_INFO")
        if !exists {
                pathInfo = "/"
        }
        queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
        if err != nil {
                makeErr(err)
        }

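        // Load the HJSON configuration, converting it through JSON into Cfg.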
        cfgRaw, err := ioutil.ReadFile(cfgPath)
        if err != nil {
                makeErr(err)
        }
        var cfgGeneral map[string]interface{}
        if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
                makeErr(err)
        }
        cfgRaw, err = json.Marshal(cfgGeneral)
        if err != nil {
                makeErr(err)
        }
        var cfg *Cfg
        if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
                makeErr(err)
        }

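        // The ETag is a BLAKE2b-256 hash over the configuration and, further below, the content being served.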
        etagHash, err := blake2b.New256(nil)
        if err != nil {
                panic(err)
        }
        etagHash.Write([]byte("SGBLOG"))
        etagHash.Write([]byte(sgblog.Version))
        etagHash.Write([]byte(cfg.GitPath))
        etagHash.Write([]byte(cfg.Branch))
        etagHash.Write([]byte(cfg.Title))
        etagHash.Write([]byte(cfg.BaseURL))
        etagHash.Write([]byte(cfg.URLPrefix))
        etagHash.Write([]byte(cfg.AtomId))
        etagHash.Write([]byte(cfg.AtomAuthor))

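        // etagHashForWeb collects web-facing options that are mixed into the
        // ETag of HTML pages only; the Atom feed ignores them.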
        etagHashForWeb := [][]byte{}
        if cfg.CSS != "" {
                defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
                etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
        }
        if cfg.Webmaster != "" {
                defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
                etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
        }
        if cfg.AboutURL != "" {
                etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
        }
        for _, gitURL := range cfg.GitURLs {
                defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
                etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
        }
        if cfg.CommentsNotesRef != "" {
                etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
        }
        if cfg.CommentsEmail != "" {
                etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
        }

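        // Links shared by every page: the blog's top page and the Atom feed.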
        defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
        atomURL := cfg.BaseURL + cfg.URLPrefix + "/" + AtomFeed
        defaultLinks = append(defaultLinks, `<link rel="alternate" title="Atom feed" href="`+atomURL+`" type="application/atom+xml">`)

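        // Open the repository and resolve the configured branch's head.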
        repo, err = git.PlainOpen(cfg.GitPath)
        if err != nil {
                makeErr(err)
        }
        head, err := repo.Reference(plumbing.ReferenceName(cfg.Branch), false)
        if err != nil {
                makeErr(err)
        }

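        // Resolve the notes refs holding per-post notes and comments, if any.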
        if notes, err := repo.Notes(); err == nil {
                var notesRef *plumbing.Reference
                var commentsRef *plumbing.Reference
                notes.ForEach(func(ref *plumbing.Reference) error {
                        switch string(ref.Name()) {
                        case "refs/notes/commits":
                                notesRef = ref
                        case cfg.CommentsNotesRef:
                                commentsRef = ref
                        }
                        return nil
                })
                if notesRef != nil {
                        if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
                                notesTree, _ = commentsCommit.Tree()
                        }
                }
                if commentsRef != nil {
                        if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
                                commentsTree, _ = commentsCommit.Tree()
                        }
                }
        }

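        // Render into a buffer, gzip-compressed if the client accepts it.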
        var outBuf bytes.Buffer
        var out io.Writer
        out = &outBuf
        var gzipWriter *gzip.Writer
        acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
        for _, encoding := range strings.Split(acceptEncoding, ", ") {
                if encoding == "gzip" {
                        gzipWriter = gzip.NewWriter(&outBuf)
                        out = gzipWriter
                }
        }

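        // Index page: a paginated table of the latest posts.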
        if pathInfo == "/" {
                offset := 0
                if offsetRaw, exists := queryValues["offset"]; exists {
                        offset, err = strconv.Atoi(offsetRaw[0])
                        if err != nil {
                                makeErr(err)
                        }
                }
                log, err := repo.Log(&git.LogOptions{From: head.Hash()})
                if err != nil {
                        makeErr(err)
                }
                commentN := 0
                for i := 0; i < offset; i++ {
                        if _, err = log.Next(); err != nil {
                                break
                        }
                        commentN++
                }

                entries := make([]TableEntry, 0, PageEntries)
                logEnded := false
                for _, data := range etagHashForWeb {
                        etagHash.Write(data)
                }
                etagHash.Write([]byte("INDEX"))
                for i := 0; i < PageEntries; i++ {
                        commit, err := log.Next()
                        if err != nil {
                                logEnded = true
                                break
                        }
                        etagHash.Write(commit.Hash[:])
                        commentsRaw := getNote(commentsTree, commit.Hash)
                        etagHash.Write(commentsRaw)
                        entries = append(entries, TableEntry{commit, commentsRaw})
                }
                checkETag(etagHash)

                var table bytes.Buffer
                table.WriteString(
                        "<table border=1>\n<tr>" +
                                "<th>N</th>" +
                                "<th>When</th>" +
                                "<th>Title</th>" +
                                "<th size=\"5%\">L</th>" +
                                "<th size=\"5%\">C</th>" +
                                "<th>Linked to</th></tr>\n")
                for _, entry := range entries {
                        commentN++
                        lines := msgSplit(entry.commit.Message)
                        domains := []string{}
                        for _, line := range lines[2:] {
                                if u := urlParse(line); u == nil {
                                        break
                                } else {
                                        domains = append(domains, makeA(line, u.Host))
                                }
                        }
                        var commentsValue string
                        if l := len(parseComments(entry.commentsRaw)); l > 0 {
                                commentsValue = strconv.Itoa(l)
                        } else {
                                commentsValue = "&nbsp;"
                        }
                        table.WriteString(fmt.Sprintf(
                                "<tr><td>%d</td><td><tt>%s</tt></td>"+
                                        "<td>%s</td>"+
                                        "<td>%d</td><td>%s</td>"+
                                        "<td>%s</td></tr>\n",
                                commentN, entry.commit.Author.When.Format(sgblog.WhenFmt),
                                makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
                                len(lines)-2,
                                commentsValue,
                                strings.Join(domains, " "),
                        ))
                }
                table.WriteString("</table>")

                var href string
                var links []string
                var refs bytes.Buffer
                if offset > 0 {
                        if offsetPrev := offset - PageEntries; offsetPrev > 0 {
                                href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
                        } else {
                                href = cfg.URLPrefix + "/"
                        }
                        links = append(links, `<link rel="prev" href="`+href+`" title="newer">`)
                        refs.WriteString(makeA(href, "&nbsp;[prev]"))
                }
                if !logEnded {
                        href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
                        links = append(links, `<link rel="next" href="`+href+`" title="older">`)
                        refs.WriteString(makeA(href, "&nbsp;[next]"))
                }

                os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
                out.Write([]byte(startHTML(
                        fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
                        links,
                )))
                if cfg.AboutURL != "" {
                        out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
                }
                out.Write(refs.Bytes())
                out.Write(table.Bytes())
                out.Write(refs.Bytes())
                out.Write([]byte("\n"))
        } else if pathInfo == "/"+AtomFeed {
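                // Atom feed with the latest PageEntries posts.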
                commit, err := repo.CommitObject(head.Hash())
                if err != nil {
                        makeErr(err)
                }
                etagHash.Write([]byte("ATOM"))
                etagHash.Write(commit.Hash[:])
                checkETag(etagHash)
                feed := atom.Feed{
                        Title:   cfg.Title,
                        ID:      cfg.AtomId,
                        Updated: atom.Time(commit.Author.When),
                        Link: []atom.Link{{
                                Rel:  "self",
                                Href: atomURL,
                        }},
                        Author: &atom.Person{Name: cfg.AtomAuthor},
                }
                log, err := repo.Log(&git.LogOptions{From: head.Hash()})
                if err != nil {
                        makeErr(err)
                }
                for i := 0; i < PageEntries; i++ {
                        commit, err = log.Next()
                        if err != nil {
                                break
                        }

                        feedIdRaw := new([16]byte)
                        copy(feedIdRaw[:], commit.Hash[:])
                        feedIdRaw[6] = (feedIdRaw[6] & 0x0F) | uint8(4<<4) // version 4
                        feedId := fmt.Sprintf(
                                "%x-%x-%x-%x-%x",
                                feedIdRaw[0:4],
                                feedIdRaw[4:6],
                                feedIdRaw[6:8],
                                feedIdRaw[8:10],
                                feedIdRaw[10:],
                        )

                        lines := msgSplit(commit.Message)
                        feed.Entry = append(feed.Entry, &atom.Entry{
                                Title: lines[0],
                                ID:    "urn:uuid:" + feedId,
                                Link: []atom.Link{{
                                        Rel:  "alternate",
                                        Href: cfg.BaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
                                }},
                                Published: atom.Time(commit.Author.When),
                                Updated:   atom.Time(commit.Author.When),
                                Summary: &atom.Text{
                                        Type: "text",
                                        Body: lines[0],
                                },
                                Content: &atom.Text{
                                        Type: "text",
                                        Body: strings.Join(lines[2:], "\n"),
                                },
                        })
                }
                data, err := xml.MarshalIndent(&feed, "", "  ")
                if err != nil {
                        makeErr(err)
                }
                out.Write(data)
                os.Stdout.WriteString("Content-Type: text/xml; charset=UTF-8\n")
                os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
                if gzipWriter != nil {
                        os.Stdout.WriteString("Content-Encoding: gzip\n")
                        gzipWriter.Close()
                }
                os.Stdout.WriteString("\n")
                os.Stdout.Write(outBuf.Bytes())
                return
        } else if sha1DigestRe.MatchString(pathInfo[1:]) {
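                // Single entry page: PATH_INFO holds the post's commit hash.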
                commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
                if err != nil {
                        makeErr(err)
                }
                for _, data := range etagHashForWeb {
                        etagHash.Write(data)
                }
                etagHash.Write([]byte("ENTRY"))
                etagHash.Write(commit.Hash[:])
                notesRaw := getNote(notesTree, commit.Hash)
                etagHash.Write(notesRaw)
                commentsRaw := getNote(commentsTree, commit.Hash)
                etagHash.Write(commentsRaw)
                checkETag(etagHash)
                lines := msgSplit(commit.Message)
                title := lines[0]
                when := commit.Author.When.Format(sgblog.WhenFmt)
                os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
                links := []string{}
                var parent string
                if len(commit.ParentHashes) > 0 {
                        parent = commit.ParentHashes[0].String()
                        links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="older">`)
                }
                out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
                if cfg.AboutURL != "" {
                        out.Write([]byte(fmt.Sprintf("[%s]&nbsp;", makeA(cfg.AboutURL, "about"))))
                }
                if parent != "" {
                        out.Write([]byte(fmt.Sprintf(
                                "[%s]&nbsp;",
                                makeA(cfg.URLPrefix+"/"+parent, "older"),
                        )))
                }
                out.Write([]byte(fmt.Sprintf(
                        "[<tt>%s</tt>]&nbsp;[<tt>%s</tt>]<hr/>\n<h2>%s</h2>\n<pre>\n",
                        when, commit.Hash.String(), title,
                )))
                for _, line := range lines[2:] {
                        out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
                }
                out.Write([]byte("</pre>\n<hr/>\n"))
                if len(notesRaw) > 0 {
                        out.Write([]byte("Note:<pre>\n" + string(notesRaw) + "\n</pre>\n<hr/>\n"))
                }
                if cfg.CommentsEmail != "" {
                        out.Write([]byte("[" + makeA(
                                "mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
                                "write comment",
                        ) + "]\n"))
                }
                out.Write([]byte("<dl>\n"))
                for i, comment := range parseComments(commentsRaw) {
                        out.Write([]byte(fmt.Sprintf(
                                "<dt><a name=\"comment%d\"><a href=\"#comment%d\">comment %d</a>:"+
                                        "</dt>\n<dd><pre>\n",
                                i, i, i,
                        )))
                        lines = strings.Split(comment, "\n")
                        for _, line := range lines[:3] {
                                out.Write([]byte(line + "\n"))
                        }
                        for _, line := range lines[3:] {
                                out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
                        }
                        out.Write([]byte("</pre></dd>\n"))
                }
                out.Write([]byte("</dl>\n"))
        } else {
                makeErr(errors.New("unknown URL action"))
        }
        out.Write([]byte("</body></html>\n"))
        if gzipWriter != nil {
                gzipWriter.Close()
        }
        os.Stdout.Write(outBuf.Bytes())
}