2 SGBlog -- Git-based CGI blogging engine
3 Copyright (C) 2020 Sergey Matveev <stargrave@stargrave.org>
5 This program is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Affero General Public License as
7 published by the Free Software Foundation, version 3 of the License.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
14 You should have received a copy of the GNU Affero General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>.
18 // Git-based CGI blogging engine
41 "github.com/hjson/hjson-go"
42 "go.cypherpunks.ru/netstring/v2"
43 "go.stargrave.org/sgblog"
44 "golang.org/x/crypto/blake2b"
45 "golang.org/x/tools/blog/atom"
46 "gopkg.in/src-d/go-git.v4"
47 "gopkg.in/src-d/go-git.v4/plumbing"
48 "gopkg.in/src-d/go-git.v4/plumbing/object"
// AtomFeed is the URL path component under which the Atom feed is served.
AtomFeed = "feed.atom"

// sha1DigestRe matches a 40-hex-digit SHA-1 digest (a git commit hash)
// anywhere in a string; the digest is capture group 1.
sha1DigestRe = regexp.MustCompilePOSIX("([0-9a-f]{40,40})")
// defaultLinks accumulates the <link> elements emitted on every HTML page.
defaultLinks = []string{}
// notesTree and commentsTree hold the tree objects of the git notes refs
// used for per-entry notes and comments; they stay nil when the refs are
// absent (see initRepo).
notesTree    *object.Tree
commentsTree *object.Tree
// renderableSchemes whitelists URL schemes that are rendered as clickable
// anchors (its entries are elided from this view).
renderableSchemes = map[string]struct{}{
// DashLine is a 72-character horizontal separator for plain-text (gopher)
// output.
DashLine = strings.Repeat("-", 72)

// TableEntry pairs a commit with associated per-commit data for the index
// table (its fields are elided from this view).
type TableEntry struct {
// CommentsNotesRef names the git notes ref holding comments — presumably a
// Cfg field; the enclosing struct declaration is elided from this view.
CommentsNotesRef string
// makeA renders an HTML anchor element with href as the link target and
// text as the displayed contents. No escaping is performed here: callers
// are expected to supply already-safe strings.
func makeA(href, text string) string {
	var b strings.Builder
	b.WriteString(`<a href="`)
	b.WriteString(href)
	b.WriteString(`">`)
	b.WriteString(text)
	b.WriteString(`</a>`)
	return b.String()
}
105 func etagString(etag hash.Hash) string {
106 return `"` + hex.EncodeToString(etag.Sum(nil)) + `"`
// urlParse parses what as an absolute request URI and checks its scheme
// against the renderableSchemes whitelist. Presumably it returns the parsed
// URL only for whitelisted schemes and nil otherwise — the return
// statements are elided from this view; confirm against the full source.
func urlParse(what string) *url.URL {
	if u, err := url.ParseRequestURI(what); err == nil {
		if _, exists := renderableSchemes[u.Scheme]; exists {
// msgSplit splits a commit message into lines, dropping the final empty
// element produced by the message's trailing newline, and pads short
// messages to at least three elements (subject, blank separator, body) so
// callers may index lines[0] and lines[2:] unconditionally.
// NOTE(review): lines[:len(lines)-1] panics on an empty message — verify
// every caller passes newline-terminated commit messages.
func msgSplit(msg string) []string {
	lines := strings.Split(msg, "\n")
	lines = lines[:len(lines)-1]
		lines = []string{lines[0], "", ""}
// lineURLize HTML-escapes line and rewrites each space-separated column:
// columns that parse as renderable URLs become self-referencing anchors,
// and bare 40-hex-digit commit digests become links to urlPrefix/<digest>.
func lineURLize(urlPrefix, line string) string {
	cols := strings.Split(html.EscapeString(line), " ")
	for i, col := range cols {
		// A renderable URL links to itself.
		if u := urlParse(col); u != nil {
			cols[i] = makeA(col, col)
		// A commit digest links into this blog.
		cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(
			urlPrefix+"/$1", "$1",
	return strings.Join(cols, " ")
// getNote looks up the git note attached to object what inside tree and
// returns its contents with one trailing newline stripped. Notes may be
// stored under a flat filename or under 2- and 4-character fan-out
// directories, so all three layouts are probed in order. The behavior on
// nil tree or missing note is elided from this view — presumably it
// returns empty data; confirm against the full source.
func getNote(tree *object.Tree, what plumbing.Hash) []byte {
	var entry *object.TreeEntry
	// Candidate paths: "abcd…", "ab/cd…" and "ab/cd/…" fan-outs.
	paths := make([]string, 3)
	paths[0] = what.String()
	paths[1] = paths[0][:2] + "/" + paths[0][2:]
	paths[2] = paths[1][:4+1] + "/" + paths[1][4+1:]
	for _, p := range paths {
		entry, err = tree.FindEntry(p)
	blob, err := repo.BlobObject(entry.Hash)
	r, err := blob.Reader()
	data, err := ioutil.ReadAll(r)
	return bytes.TrimSuffix(data, []byte{'\n'})
// parseComments decodes a raw comments note, stored as a sequence of
// netstring-framed comment bodies, into one string per comment. A decode
// error ends the scan (the loop frame and error path are elided from this
// view).
func parseComments(data []byte) []string {
	comments := []string{}
	nsr := netstring.NewReader(bytes.NewReader(data))
		if _, err := nsr.Next(); err != nil {
		if comment, err := ioutil.ReadAll(nsr); err == nil {
			comments = append(comments, string(comment))
// startHeader builds the CGI response header block: content type, the
// current ETag and, when gziped is true, a Content-Encoding: gzip line.
func startHeader(etag hash.Hash, gziped bool) string {
		"Content-Type: text/html; charset=UTF-8",
		"ETag: " + etagString(etag),
		lines = append(lines, "Content-Encoding: gzip")
	// Two empty strings join into the "\n\n" separating headers from body.
	lines = append(lines, "")
	lines = append(lines, "")
	return strings.Join(lines, "\n")
// startHTML renders the opening HTML boilerplate: head with charset and
// generator meta tags, the page title, and the default plus additional
// <link> elements joined into the template (which is a raw string literal
// partially elided from this view — do not edit its interior lines).
func startHTML(title string, additional []string) string {
	return fmt.Sprintf(`<html>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta name="generator" content="SGBlog %s">
		sgblog.Version, title,
		strings.Join(append(defaultLinks, additional...), "\n "),
// makeErr reports err to the CGI client as a plain-text response; the
// termination path after printing is elided from this view.
func makeErr(err error) {
	fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
// checkETag compares the client's If-None-Match header with the current
// ETag and, on an exact match, replies 304 Not Modified (the subsequent
// early exit is elided from this view — confirm against the full source).
func checkETag(etag hash.Hash) {
	ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
	if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
		fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
// initRepo opens the configured git repository, resolves the configured
// branch's head and initializes the package-level notesTree/commentsTree
// from the "refs/notes/commits" and cfg.CommentsNotesRef notes refs.
// Returns the head commit hash. Error-handling lines are elided from this
// view.
func initRepo(cfg *Cfg) (*plumbing.Hash, error) {
	repo, err = git.PlainOpen(cfg.GitPath)
	head, err := repo.Reference(plumbing.ReferenceName(cfg.Branch), false)
	headHash := head.Hash()
	if notes, err := repo.Notes(); err == nil {
		var notesRef *plumbing.Reference
		var commentsRef *plumbing.Reference
		// Walk all notes refs, remembering the standard commits notes
		// ref and the configured comments notes ref.
		notes.ForEach(func(ref *plumbing.Reference) error {
			switch string(ref.Name()) {
			case "refs/notes/commits":
			case cfg.CommentsNotesRef:
		// NOTE(review): the variable is named commentsCommit but it holds
		// the *notes* ref commit here — rename for clarity.
		if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
			notesTree, _ = commentsCommit.Tree()
		if commentsRef != nil {
			if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
				commentsTree, _ = commentsCommit.Tree()
	return &headHash, nil
	// Load configuration: SGBLOG_CFG names an HJSON file, which is decoded
	// generically, re-marshalled to JSON, then unmarshalled into Cfg.
	cfgPath := os.Getenv("SGBLOG_CFG")
		log.Fatalln("SGBLOG_CFG is not set")
	cfgRaw, err := ioutil.ReadFile(cfgPath)
	var cfgGeneral map[string]interface{}
	if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
	cfgRaw, err = json.Marshal(cfgGeneral)
	if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
	// CGI request parameters.
	pathInfo, exists := os.LookupEnv("PATH_INFO")
	queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
	// The ETag digests every input that can change the rendered output:
	// engine version, configuration values and (later) commit data.
	etagHash, err := blake2b.New256(nil)
	etagHash.Write([]byte("SGBLOG"))
	etagHash.Write([]byte(sgblog.Version))
	etagHash.Write([]byte(cfg.GitPath))
	etagHash.Write([]byte(cfg.Branch))
	etagHash.Write([]byte(cfg.Title))
	etagHash.Write([]byte(cfg.URLPrefix))
	etagHash.Write([]byte(cfg.AtomBaseURL))
	etagHash.Write([]byte(cfg.AtomId))
	etagHash.Write([]byte(cfg.AtomAuthor))
	// Optional config values collected here are hashed only for HTML
	// pages, not for the Atom feed (see the per-branch loops below).
	etagHashForWeb := [][]byte{}
	defaultLinks = append(defaultLinks, `<link rel="stylesheet" type="text/css" href="`+cfg.CSS+`">`)
	etagHashForWeb = append(etagHashForWeb, []byte(cfg.CSS))
	if cfg.Webmaster != "" {
		defaultLinks = append(defaultLinks, `<link rev="made" href="mailto:`+cfg.Webmaster+`">`)
		etagHashForWeb = append(etagHashForWeb, []byte(cfg.Webmaster))
	if cfg.AboutURL != "" {
		etagHashForWeb = append(etagHashForWeb, []byte(cfg.AboutURL))
	for _, gitURL := range cfg.GitURLs {
		defaultLinks = append(defaultLinks, `<link rel="vcs-git" href="`+gitURL+`" title="Git repository">`)
		etagHashForWeb = append(etagHashForWeb, []byte(gitURL))
	if cfg.CommentsNotesRef != "" {
		etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsNotesRef))
	if cfg.CommentsEmail != "" {
		etagHashForWeb = append(etagHashForWeb, []byte(cfg.CommentsEmail))
	// Navigation links present on every page.
	defaultLinks = append(defaultLinks, `<link rel="top" href="`+cfg.URLPrefix+`/" title="top">`)
	atomURL := cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomFeed
	defaultLinks = append(defaultLinks, `<link rel="alternate" title="Atom feed" href="`+atomURL+`" type="application/atom+xml">`)
	headHash, err := initRepo(cfg)
	// NOTE(review): this notes/comments-ref discovery duplicates the logic
	// initRepo just performed above — verify one of the copies can go.
	if notes, err := repo.Notes(); err == nil {
		var notesRef *plumbing.Reference
		var commentsRef *plumbing.Reference
		notes.ForEach(func(ref *plumbing.Reference) error {
			switch string(ref.Name()) {
			case "refs/notes/commits":
			case cfg.CommentsNotesRef:
		if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
			notesTree, _ = commentsCommit.Tree()
		if commentsRef != nil {
			if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
				commentsTree, _ = commentsCommit.Tree()
	// Buffer the response body; gzip-compress when the client advertises
	// support via Accept-Encoding.
	var outBuf bytes.Buffer
	var gzipWriter *gzip.Writer
	acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
	for _, encoding := range strings.Split(acceptEncoding, ", ") {
		if encoding == "gzip" {
			gzipWriter = gzip.NewWriter(&outBuf)
	// Index page: list PageEntries commits starting at ?offset=N.
	if offsetRaw, exists := queryValues["offset"]; exists {
		offset, err = strconv.Atoi(offsetRaw[0])
	repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
	// Skip the first offset commits.
	for i := 0; i < offset; i++ {
		if _, err = repoLog.Next(); err != nil {
	entries := make([]TableEntry, 0, PageEntries)
	for _, data := range etagHashForWeb {
	etagHash.Write([]byte("INDEX"))
	// Collect this page's commits, folding each hash and its raw comments
	// blob into the ETag so cached pages invalidate on new comments too.
	for i := 0; i < PageEntries; i++ {
		commit, err := repoLog.Next()
		etagHash.Write(commit.Hash[:])
		commentsRaw := getNote(commentsTree, commit.Hash)
		etagHash.Write(commentsRaw)
		entries = append(entries, TableEntry{commit, commentsRaw})
	// Render the index table: number, date, title link, line/comment
	// counts and the domains each entry links to.
	var table bytes.Buffer
		"<table border=1>\n" +
			"<caption>Comments</caption>\n<tr>" +
			`<th size="5%"><a title="Lines">L</a></th>` +
			`<th size="5%"><a title="Comments">C</a></th>` +
			"<th>Linked to</th></tr>\n")
	for _, entry := range entries {
		lines := msgSplit(entry.commit.Message)
		domains := []string{}
		// Body lines that are renderable URLs are shown as host links.
		for _, line := range lines[2:] {
			if u := urlParse(line); u == nil {
			domains = append(domains, makeA(line, u.Host))
		var commentsValue string
		if l := len(parseComments(entry.commentsRaw)); l > 0 {
			commentsValue = strconv.Itoa(l)
			commentsValue = " "
		table.WriteString(fmt.Sprintf(
			"<tr><td>%d</td><td><tt>%s</tt></td>"+
				"<td>%d</td><td>%s</td>"+
				"<td>%s</td></tr>\n",
			commitN, entry.commit.Author.When.Format(sgblog.WhenFmt),
			makeA(cfg.URLPrefix+"/"+entry.commit.Hash.String(), lines[0]),
			strings.Join(domains, " "),
	table.WriteString("</table>")
	// Prev/next pagination: both <link> header elements and inline
	// [prev]/[next] anchors.
	var refs bytes.Buffer
	if offsetPrev := offset - PageEntries; offsetPrev > 0 {
		href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offsetPrev)
		href = cfg.URLPrefix + "/"
	links = append(links, `<link rel="prev" href="`+href+`" title="newer">`)
	refs.WriteString("\n" + makeA(href, "[prev]"))
	href = cfg.URLPrefix + "/?offset=" + strconv.Itoa(offset+PageEntries)
	links = append(links, `<link rel="next" href="`+href+`" title="older">`)
	refs.WriteString("\n" + makeA(href, "[next]"))
	// Headers go straight to stdout; the page body goes through the
	// (possibly gzipping) buffered writer.
	os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
	out.Write([]byte(startHTML(
		fmt.Sprintf("%s (%d-%d)", cfg.Title, offset, offset+PageEntries),
	if cfg.AboutURL != "" {
		out.Write([]byte(fmt.Sprintf("[%s]", makeA(cfg.AboutURL, "about"))))
	out.Write(refs.Bytes())
	out.Write(table.Bytes())
	out.Write(refs.Bytes())
	out.Write([]byte("\n"))
	} else if pathInfo == "/"+AtomFeed {
		// Atom feed: feed-level metadata is taken from the head commit.
		commit, err := repo.CommitObject(*headHash)
		etagHash.Write([]byte("ATOM"))
		etagHash.Write(commit.Hash[:])
			Updated: atom.Time(commit.Author.When),
			Author: &atom.Person{Name: cfg.AtomAuthor},
		repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
		for i := 0; i < PageEntries; i++ {
			commit, err = repoLog.Next()
			// Derive a stable UUIDv4-shaped entry id from the first 16
			// bytes of the commit hash (version nibble forced to 4).
			feedIdRaw := new([16]byte)
			copy(feedIdRaw[:], commit.Hash[:])
			feedIdRaw[6] = (feedIdRaw[6] & 0x0F) | uint8(4<<4) // version 4
			feedId := fmt.Sprintf(
			lines := msgSplit(commit.Message)
			feed.Entry = append(feed.Entry, &atom.Entry{
				ID: "urn:uuid:" + feedId,
					Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
				Published: atom.Time(commit.Author.When),
				Updated: atom.Time(commit.Author.When),
					Body: strings.Join(lines[2:], "\n"),
		data, err := xml.MarshalIndent(&feed, "", " ")
		// Headers, then the (possibly gzipped) XML body.
		os.Stdout.WriteString("Content-Type: text/xml; charset=UTF-8\n")
		os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
		if gzipWriter != nil {
			os.Stdout.WriteString("Content-Encoding: gzip\n")
		os.Stdout.WriteString("\n")
		os.Stdout.Write(outBuf.Bytes())
	} else if sha1DigestRe.MatchString(pathInfo[1:]) {
		// Single entry page: pathInfo is "/<40-hex-digest>".
		commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1:]))
		for _, data := range etagHashForWeb {
		etagHash.Write([]byte("ENTRY"))
		etagHash.Write(commit.Hash[:])
		notesRaw := getNote(notesTree, commit.Hash)
		etagHash.Write(notesRaw)
		commentsRaw := getNote(commentsTree, commit.Hash)
		etagHash.Write(commentsRaw)
		lines := msgSplit(commit.Message)
		when := commit.Author.When.Format(sgblog.WhenFmt)
		os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
		// The first parent commit is the chronologically older entry.
		if len(commit.ParentHashes) > 0 {
			parent = commit.ParentHashes[0].String()
			links = append(links, `<link rel="prev" href="`+cfg.URLPrefix+"/"+parent+`" title="older">`)
		out.Write([]byte(startHTML(fmt.Sprintf("%s (%s)", title, when), links)))
		if cfg.AboutURL != "" {
			out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.AboutURL, "about"))))
		out.Write([]byte(fmt.Sprintf("[%s]\n", makeA(cfg.URLPrefix+"/", "index"))))
			out.Write([]byte(fmt.Sprintf(
				makeA(cfg.URLPrefix+"/"+parent, "older"),
		out.Write([]byte(fmt.Sprintf(
			"[<tt><a title=\"When\">%s</a></tt>]\n"+
				"[<tt><a title=\"Hash\">%s</a></tt>]\n"+
				"<hr/>\n<h2>%s</h2>\n<pre>\n",
			when, commit.Hash.String(), title,
		// Entry body with URLs and digests converted to anchors.
		for _, line := range lines[2:] {
			out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
		out.Write([]byte("</pre>\n<hr/>\n"))
		if len(notesRaw) > 0 {
			out.Write([]byte("Note:<pre>\n" + string(notesRaw) + "\n</pre>\n<hr/>\n"))
		// Mailto link for submitting comments, when configured.
		if cfg.CommentsEmail != "" {
			out.Write([]byte("[" + makeA(
				"mailto:"+cfg.CommentsEmail+"?subject="+commit.Hash.String(),
		// Comment list: the first three lines (headers) verbatim, the
		// remainder URLized.
		out.Write([]byte("<dl>\n"))
		for i, comment := range parseComments(commentsRaw) {
			out.Write([]byte(fmt.Sprintf(
				"<dt><a name=\"comment%d\"><a href=\"#comment%d\">comment %d</a>:"+
					"</dt>\n<dd><pre>\n",
			lines = strings.Split(comment, "\n")
			for _, line := range lines[:3] {
				out.Write([]byte(line + "\n"))
			for _, line := range lines[3:] {
				out.Write([]byte(lineURLize(cfg.URLPrefix, line) + "\n"))
			out.Write([]byte("</pre></dd>\n"))
		out.Write([]byte("</dl>\n"))
	// Any other path is rejected.
	makeErr(errors.New("unknown URL action"))
	out.Write([]byte("</body></html>\n"))
	// Flush the gzip stream (if any), then emit the buffered body.
	if gzipWriter != nil {
	os.Stdout.Write(outBuf.Bytes())
	// Gopher mode: configuration file path is the second CLI argument;
	// same HJSON → JSON → Cfg decoding as in CGI mode.
	cfgPath := os.Args[2]
	cfgRaw, err := ioutil.ReadFile(cfgPath)
	var cfgGeneral map[string]interface{}
	if err = hjson.Unmarshal(cfgRaw, &cfgGeneral); err != nil {
	cfgRaw, err = json.Marshal(cfgGeneral)
	if err = json.Unmarshal(cfgRaw, &cfg); err != nil {
	if cfg.GopherDomain == "" {
		log.Fatalln("GopherDomain is not configured")
	headHash, err := initRepo(cfg)
	// Read the client's selector line, capped at 256 bytes.
	scanner := bufio.NewScanner(io.LimitReader(os.Stdin, 1<<8))
		log.Fatalln(errors.New("no CRLF found"))
	selector := scanner.Text()
		selector = "offset/0"
	// "offset/N" selector: menu listing PageEntries commits starting at
	// the Nth most recent one.
	if strings.HasPrefix(selector, "offset/") {
		offset, err := strconv.Atoi(selector[len("offset/"):])
		repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
		for i := 0; i < offset; i++ {
			if _, err = repoLog.Next(); err != nil {
		// One gopher menu line per commit: date, title, line count and
		// optional comment count.
		var menu bytes.Buffer
		for i := 0; i < PageEntries; i++ {
			commit, err := repoLog.Next()
			lines := msgSplit(commit.Message)
			var commentsValue string
			if l := len(parseComments(getNote(commentsTree, commit.Hash))); l > 0 {
				commentsValue = fmt.Sprintf(" (%dC)", l)
			menu.WriteString(fmt.Sprintf(
				"0[%s] %s (%dL)%s\t/%s\t%s\t%d%s",
				commit.Author.When.Format(sgblog.WhenFmt),
				commit.Hash.String(),
				cfg.GopherDomain, 70, CRLF,
		// Prev/Next submenu items, when more history exists either way.
		var links bytes.Buffer
		offsetPrev := offset - PageEntries
		links.WriteString(fmt.Sprintf(
			"1Prev\toffset/%d\t%s\t%d%s",
			cfg.GopherDomain, 70, CRLF,
		links.WriteString(fmt.Sprintf(
			"1Next\toffset/%d\t%s\t%d%s",
			cfg.GopherDomain, 70, CRLF,
			"i%s (%d-%d)\t\tnull.host\t1%s",
		if cfg.AboutURL != "" {
			fmt.Printf("iAbout: %s\t\tnull.host\t1%s", cfg.AboutURL, CRLF)
		fmt.Print(links.String())
		fmt.Print(menu.String())
		// Gopher menus end with a lone dot line.
		fmt.Print("." + CRLF)
	} else if sha1DigestRe.MatchString(selector) {
		// Single entry: the selector carries the commit digest.
		// NOTE(review): the regexp above matched selector itself, yet
		// selector[1:] drops its first character before hashing — verify
		// against real selectors (the web branch strips a leading "/",
		// which gopher selectors may not have).
		commit, err := repo.CommitObject(plumbing.NewHash(selector[1:]))
			"What: %s\nWhen: %s\n%s\n%s",
			commit.Hash.String(),
			commit.Author.When.Format(sgblog.WhenFmt),
		notesRaw := getNote(notesTree, commit.Hash)
		if len(notesRaw) > 0 {
			fmt.Printf("%s\nNote:\n%s\n", DashLine, string(notesRaw))
		for i, comment := range parseComments(getNote(commentsTree, commit.Hash)) {
			fmt.Printf("%s\ncomment %d:\n%s\n", DashLine, i, comment)
		// Any other selector is rejected.
		log.Fatalln(errors.New("unknown selector"))
	// Entry-point dispatch: "-gopher <cfg>" serves gopher, anything else
	// presumably runs the CGI path.
	if len(os.Args) == 3 && os.Args[1] == "-gopher" {