2 SGBlog -- Git-backed CGI/UCSPI blogging/phlogging/gemlogging engine
3 Copyright (C) 2020-2021 Sergey Matveev <stargrave@stargrave.org>
5 This program is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Affero General Public License as
7 published by the Free Software Foundation, version 3 of the License.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU Affero General Public License for more details.
14 You should have received a copy of the GNU Affero General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>.
39 "github.com/go-git/go-git/v5"
40 "github.com/go-git/go-git/v5/plumbing"
41 "github.com/go-git/go-git/v5/plumbing/object"
42 "go.stargrave.org/sgblog"
43 "go.stargrave.org/sgblog/cmd/sgblog/atom"
44 "golang.org/x/crypto/blake2b"
48 AtomPostsFeed = "feed.atom"
49 AtomCommentsFeed = "comments.atom"
53 renderableSchemes = map[string]struct{}{
63 //go:embed http-index.tmpl
64 TmplHTMLIndexRaw string
65 TmplHTMLIndex = template.Must(template.New("http-index").Parse(TmplHTMLIndexRaw))
67 //go:embed http-entry.tmpl
68 TmplHTMLEntryRaw string
69 TmplHTMLEntry = template.Must(template.New("http-entry").Funcs(
70 template.FuncMap{"lineURLize": lineURLizeInTemplate},
71 ).Parse(TmplHTMLEntryRaw))
74 type TableEntry struct {
86 type CommentEntry struct {
// makeA renders an HTML anchor element pointing at href with the given
// visible text. Neither argument is escaped here: callers are expected
// to pass already HTML-safe strings (lineURLize escapes beforehand).
func makeA(href, text string) string {
	var b strings.Builder
	b.WriteString(`<a href="`)
	b.WriteString(href)
	b.WriteString(`">`)
	b.WriteString(text)
	b.WriteString(`</a>`)
	return b.String()
}
// etagString formats the current digest of etag as a double-quoted
// lowercase-hex string, the form required for an HTTP ETag header value.
// Sum(nil) does not reset the hash, so further writes may follow.
func etagString(etag hash.Hash) string {
	return fmt.Sprintf(`"%x"`, etag.Sum(nil))
}
99 func urlParse(what string) *url.URL {
100 if u, err := url.ParseRequestURI(what); err == nil {
101 if _, exists := renderableSchemes[u.Scheme]; exists {
108 func lineURLize(urlPrefix, line string) string {
109 cols := strings.Split(html.EscapeString(line), " ")
110 for i, col := range cols {
111 if u := urlParse(col); u != nil {
112 cols[i] = makeA(col, col)
115 cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(urlPrefix+"/$1", "$1"))
117 return strings.Join(cols, " ")
120 func lineURLizeInTemplate(urlPrefix, line interface{}) string {
121 return lineURLize(urlPrefix.(string), line.(string))
124 func startHeader(etag hash.Hash, gziped bool) string {
126 "Content-Type: text/html; charset=UTF-8",
127 "ETag: " + etagString(etag),
130 lines = append(lines, "Content-Encoding: gzip")
132 lines = append(lines, "")
133 lines = append(lines, "")
134 return strings.Join(lines, "\n")
// makeErr reports err back to the HTTP client as a plain-text CGI
// response. Only the Content-Type header (with its terminating blank
// line) is emitted here; the elided remainder of this function
// presumably prints err and aborts request processing —
// NOTE(review): confirm against the full source.
func makeErr(err error) {
	fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
// checkETag implements conditional-GET handling: when the client's
// If-None-Match header equals the ETag derived from etag's current
// digest, a "Status: 304" (Not Modified) CGI response echoing the ETag
// is written and no body is needed. The elided tail presumably
// terminates the process after the 304 — NOTE(review): confirm.
func checkETag(etag hash.Hash) {
	ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
	if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
		fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
// bytes2uuid renders a deterministic UUID-shaped string (8-4-4-4-12
// hex groups) derived from b, with the version nibble forced to 4.
// The elided lines presumably copy b into the 16-byte raw buffer —
// NOTE(review): confirm raw's construction (and whether the RFC 4122
// variant bits of byte 8 are set there) in the full source.
func bytes2uuid(b []byte) string {
	raw[6] = (raw[6] & 0x0F) | uint8(4<<4) // version 4
	return fmt.Sprintf("%x-%x-%x-%x-%x", raw[0:4], raw[4:6], raw[6:8], raw[8:10], raw[10:])
// CommitIterNext is the minimal commit-iteration contract used by the
// rendering code: it is satisfied both by go-git's log iterator
// (repo.Log) and by the local HashesIter used for per-topic post lists.
type CommitIterNext interface {
	// Next yields the next commit, or an error once the iteration is
	// exhausted — NOTE(review): the exact sentinel error is not
	// visible in this excerpt.
	Next() (*object.Commit, error)
163 cfgPath := os.Getenv("SGBLOG_CFG")
165 log.Fatalln("SGBLOG_CFG is not set")
167 cfg, err := readCfg(cfgPath)
172 pathInfo, exists := os.LookupEnv("PATH_INFO")
176 queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
181 etagHash, err := blake2b.New256(nil)
185 for _, s := range []string{
196 if _, err = etagHash.Write([]byte(s)); err != nil {
200 etagHashForWeb := []string{
204 cfg.CommentsNotesRef,
207 for _, gitURL := range cfg.GitURLs {
208 etagHashForWeb = append(etagHashForWeb, gitURL)
211 headHash, err := initRepo(cfg)
216 if notes, err := repo.Notes(); err == nil {
217 var notesRef *plumbing.Reference
218 var commentsRef *plumbing.Reference
219 notes.ForEach(func(ref *plumbing.Reference) error {
220 switch string(ref.Name()) {
221 case "refs/notes/commits":
223 case cfg.CommentsNotesRef:
229 if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
230 notesTree, _ = commentsCommit.Tree()
233 if commentsRef != nil {
234 if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
235 commentsTree, _ = commentsCommit.Tree()
240 var outBuf bytes.Buffer
243 var gzipWriter *gzip.Writer
244 acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
245 for _, encoding := range strings.Split(acceptEncoding, ", ") {
246 if encoding == "gzip" {
247 gzipWriter = gzip.NewWriter(&outBuf)
254 if offsetRaw, exists := queryValues["offset"]; exists {
255 offset, err = strconv.Atoi(offsetRaw[0])
260 repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
264 topicsCache, err := getTopicsCache(cfg, repoLog)
268 repoLog, err = repo.Log(&git.LogOptions{From: *headHash})
274 var commits CommitIterNext
276 if t, exists := queryValues["topic"]; exists {
278 hashes := topicsCache[topic]
280 makeErr(errors.New("no posts with that topic"))
282 if len(hashes) > offset {
283 hashes = hashes[offset:]
286 commits = &HashesIter{hashes}
288 for i := 0; i < offset; i++ {
289 if _, err = repoLog.Next(); err != nil {
297 entries := make([]TableEntry, 0, PageEntries)
299 for _, data := range etagHashForWeb {
300 etagHash.Write([]byte(data))
302 etagHash.Write([]byte("INDEX"))
303 etagHash.Write([]byte(topic))
304 for i := 0; i < PageEntries; i++ {
305 commit, err := commits.Next()
310 etagHash.Write(commit.Hash[:])
311 commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
312 etagHash.Write(commentsRaw)
313 topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
314 etagHash.Write(topicsRaw)
315 entries = append(entries, TableEntry{
317 CommentsRaw: commentsRaw,
318 TopicsRaw: topicsRaw,
323 for i, entry := range entries {
326 lines := msgSplit(entry.Commit.Message)
327 entry.Title = lines[0]
328 entry.LinesNum = len(lines) - 2
329 for _, line := range lines[2:] {
334 entry.DomainURLs = append(entry.DomainURLs, makeA(line, u.Host))
336 entry.CommentsNum = len(sgblog.ParseComments(entry.CommentsRaw))
337 entry.Topics = sgblog.ParseTopics(entry.TopicsRaw)
340 offsetPrev := offset - PageEntries
344 os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
345 err = TmplHTMLIndex.Execute(out, struct {
353 AtomCommentsFeed string
360 Version: sgblog.Version,
363 TopicsEnabled: topicsTree != nil,
364 Topics: topicsCache.Topics(),
365 CommentsEnabled: commentsTree != nil,
366 AtomPostsFeed: AtomPostsFeed,
367 AtomCommentsFeed: AtomCommentsFeed,
369 OffsetPrev: offsetPrev,
370 OffsetNext: offset + PageEntries,
377 } else if pathInfo == "/"+AtomPostsFeed {
378 commit, err := repo.CommitObject(*headHash)
384 if t, exists := queryValues["topic"]; exists {
388 etagHash.Write([]byte("ATOM POSTS"))
389 etagHash.Write([]byte(topic))
390 etagHash.Write(commit.Hash[:])
396 title = fmt.Sprintf("%s (topic: %s)", cfg.Title, topic)
398 idHasher, err := blake2b.New256(nil)
402 idHasher.Write([]byte("ATOM POSTS"))
403 idHasher.Write([]byte(cfg.AtomId))
404 idHasher.Write([]byte(topic))
407 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
408 Updated: atom.Time(commit.Author.When),
411 Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomPostsFeed,
413 Author: &atom.Person{Name: cfg.AtomAuthor},
416 repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
420 var commits CommitIterNext
424 topicsCache, err := getTopicsCache(cfg, repoLog)
428 hashes := topicsCache[topic]
430 makeErr(errors.New("no posts with that topic"))
432 commits = &HashesIter{hashes}
435 for i := 0; i < PageEntries; i++ {
436 commit, err = commits.Next()
440 lines := msgSplit(commit.Message)
441 var categories []atom.Category
442 for _, topic := range sgblog.ParseTopics(sgblog.GetNote(repo, topicsTree, commit.Hash)) {
443 categories = append(categories, atom.Category{Term: topic})
445 htmlized := make([]string, 0, len(lines))
446 htmlized = append(htmlized, "<pre>")
447 for _, l := range lines[2:] {
448 htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
450 htmlized = append(htmlized, "</pre>")
451 feed.Entry = append(feed.Entry, &atom.Entry{
453 ID: "urn:uuid:" + bytes2uuid(commit.Hash[:]),
456 Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
458 Published: atom.Time(commit.Author.When),
459 Updated: atom.Time(commit.Author.When),
460 Summary: &atom.Text{Type: "text", Body: lines[0]},
463 Body: strings.Join(htmlized, "\n"),
465 Category: categories,
468 data, err := xml.MarshalIndent(&feed, "", " ")
474 } else if pathInfo == "/"+AtomCommentsFeed {
475 commit, err := repo.CommitObject(commentsRef.Hash())
479 etagHash.Write([]byte("ATOM COMMENTS"))
480 etagHash.Write(commit.Hash[:])
482 idHasher, err := blake2b.New256(nil)
486 idHasher.Write([]byte("ATOM COMMENTS"))
487 idHasher.Write([]byte(cfg.AtomId))
489 Title: cfg.Title + " comments",
490 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
491 Updated: atom.Time(commit.Author.When),
494 Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomCommentsFeed,
496 Author: &atom.Person{Name: cfg.AtomAuthor},
498 repoLog, err := repo.Log(&git.LogOptions{From: commentsRef.Hash()})
502 for i := 0; i < PageEntries; i++ {
503 commit, err = repoLog.Next()
507 fileStats, err := commit.Stats()
511 t, err := commit.Tree()
515 commentedHash := plumbing.NewHash(strings.ReplaceAll(
516 fileStats[0].Name, "/", "",
518 commit, err = repo.CommitObject(commentedHash)
522 comments := sgblog.ParseComments(sgblog.GetNote(repo, t, commentedHash))
523 if len(comments) == 0 {
526 commentN := strconv.Itoa(len(comments) - 1)
527 lines := strings.Split(comments[len(comments)-1], "\n")
528 from := strings.TrimPrefix(lines[0], "From: ")
529 date := strings.TrimPrefix(lines[1], "Date: ")
530 htmlized := make([]string, 0, len(lines))
531 htmlized = append(htmlized, "<pre>")
532 for _, l := range lines[2:] {
533 htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
535 htmlized = append(htmlized, "</pre>")
537 idHasher.Write([]byte("COMMENT"))
538 idHasher.Write(commit.Hash[:])
539 idHasher.Write([]byte(commentN))
540 feed.Entry = append(feed.Entry, &atom.Entry{
542 "Comment %s for \"%s\" by %s",
543 commentN, msgSplit(commit.Message)[0], from,
545 Author: &atom.Person{Name: from},
546 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
549 Href: strings.Join([]string{
550 cfg.AtomBaseURL, cfg.URLPrefix, "/",
551 commit.Hash.String(), "#comment", commentN,
554 Published: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
555 Updated: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
558 Body: strings.Join(htmlized, "\n"),
562 data, err := xml.MarshalIndent(&feed, "", " ")
568 } else if sha1DigestRe.MatchString(pathInfo[1:]) {
569 commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1 : 1+sha1.Size*2]))
573 for _, data := range etagHashForWeb {
574 etagHash.Write([]byte(data))
576 etagHash.Write([]byte("ENTRY"))
577 etagHash.Write(commit.Hash[:])
578 atomCommentsURL := strings.Join([]string{
579 cfg.AtomBaseURL, cfg.URLPrefix, "/",
580 commit.Hash.String(), "/", AtomCommentsFeed,
582 commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
583 etagHash.Write(commentsRaw)
584 topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
585 etagHash.Write(topicsRaw)
586 if strings.HasSuffix(pathInfo, AtomCommentsFeed) {
587 etagHash.Write([]byte("ATOM COMMENTS"))
589 type Comment struct {
595 commentsRaw := sgblog.ParseComments(commentsRaw)
597 if len(commentsRaw) > PageEntries {
598 toSkip = len(commentsRaw) - PageEntries
600 comments := make([]Comment, 0, len(commentsRaw)-toSkip)
601 for i := len(commentsRaw) - 1; i >= toSkip; i-- {
602 lines := strings.Split(commentsRaw[i], "\n")
603 from := strings.TrimPrefix(lines[0], "From: ")
604 date := strings.TrimPrefix(lines[1], "Date: ")
605 comments = append(comments, Comment{
608 date: strings.Replace(date, " ", "T", 1),
612 idHasher, err := blake2b.New256(nil)
616 idHasher.Write([]byte("ATOM COMMENTS"))
617 idHasher.Write(commit.Hash[:])
619 Title: fmt.Sprintf("\"%s\" comments", msgSplit(commit.Message)[0]),
620 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
621 Link: []atom.Link{{Rel: "self", Href: atomCommentsURL}},
622 Author: &atom.Person{Name: cfg.AtomAuthor},
624 if len(comments) > 0 {
625 feed.Updated = atom.TimeStr(comments[0].date)
627 feed.Updated = atom.Time(commit.Author.When)
629 for _, comment := range comments {
631 idHasher.Write([]byte("COMMENT"))
632 idHasher.Write(commit.Hash[:])
633 idHasher.Write([]byte(comment.n))
634 htmlized := make([]string, 0, len(comment.body))
635 htmlized = append(htmlized, "<pre>")
636 for _, l := range comment.body {
639 lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l),
642 htmlized = append(htmlized, "</pre>")
643 feed.Entry = append(feed.Entry, &atom.Entry{
644 Title: fmt.Sprintf("Comment %s by %s", comment.n, comment.from),
645 Author: &atom.Person{Name: comment.from},
646 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
649 Href: strings.Join([]string{
652 commit.Hash.String(),
653 "#comment", comment.n,
656 Published: atom.TimeStr(
657 strings.Replace(comment.date, " ", "T", -1),
659 Updated: atom.TimeStr(
660 strings.Replace(comment.date, " ", "T", -1),
664 Body: strings.Join(htmlized, "\n"),
668 data, err := xml.MarshalIndent(&feed, "", " ")
675 notesRaw := sgblog.GetNote(repo, notesTree, commit.Hash)
676 etagHash.Write(notesRaw)
679 lines := msgSplit(commit.Message)
681 when := commit.Author.When.Format(sgblog.WhenFmt)
683 if len(commit.ParentHashes) > 0 {
684 parent = commit.ParentHashes[0].String()
686 commentsParsed := sgblog.ParseComments(commentsRaw)
687 comments := make([]CommentEntry, 0, len(commentsParsed))
688 for _, comment := range commentsParsed {
689 lines := strings.Split(comment, "\n")
690 comments = append(comments, CommentEntry{lines[:3], lines[3:]})
692 var notesLines []string
693 if len(notesRaw) > 0 {
694 notesLines = strings.Split(string(notesRaw), "\n")
697 os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
698 err = TmplHTMLEntry.Execute(out, struct {
704 AtomCommentsURL string
706 Commit *object.Commit
709 Comments []CommentEntry
712 Version: sgblog.Version,
715 TitleEscaped: url.PathEscape(fmt.Sprintf("Re: %s (%s)", title, commit.Hash)),
717 AtomCommentsURL: atomCommentsURL,
721 NoteLines: notesLines,
723 Topics: sgblog.ParseTopics(topicsRaw),
729 makeErr(errors.New("unknown URL action"))
731 out.Write([]byte("</body></html>\n"))
732 if gzipWriter != nil {
735 os.Stdout.Write(outBuf.Bytes())
739 os.Stdout.WriteString("Content-Type: application/atom+xml; charset=UTF-8\n")
740 os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
741 if gzipWriter != nil {
742 os.Stdout.WriteString("Content-Encoding: gzip\n")
745 os.Stdout.WriteString("\n")
746 os.Stdout.Write(outBuf.Bytes())