2 SGBlog -- Git-backed CGI/UCSPI blogging/phlogging/gemlogging engine
3 Copyright (C) 2020-2022 Sergey Matveev <stargrave@stargrave.org>
5 This program is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Affero General Public License as
7 published by the Free Software Foundation, version 3 of the License.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU General Public License for more details.
14 You should have received a copy of the GNU Affero General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>.
41 "github.com/go-git/go-git/v5"
42 "github.com/go-git/go-git/v5/plumbing"
43 "github.com/go-git/go-git/v5/plumbing/object"
44 "go.stargrave.org/sgblog"
45 "go.stargrave.org/sgblog/cmd/sgblog/atom"
46 "golang.org/x/crypto/blake2b"
// Filenames under which the two Atom feeds are served; also embedded
// into page links and feed self-references below.
// (Fragment: the enclosing `const (` opener is outside this view.)
50 AtomPostsFeed = "feed.atom"
51 AtomCommentsFeed = "comments.atom"
// URL schemes that urlParse/lineURLize will turn into clickable
// anchors (fragment: the map literal's entries are outside this view).
55 renderableSchemes = map[string]struct{}{
// HTML templates embedded at build time and parsed once during
// package init; template.Must panics on a parse error, which is the
// conventional fail-fast behavior for init-time template parsing.
68 //go:embed http-index.tmpl
69 TmplHTMLIndexRaw string
70 TmplHTMLIndex = template.Must(template.New("http-index").Parse(TmplHTMLIndexRaw))
72 //go:embed http-entry.tmpl
73 TmplHTMLEntryRaw string
74 TmplHTMLEntry = template.Must(template.New("http-entry").Funcs(
75 template.FuncMap{"lineURLize": lineURLizeInTemplate},
76 ).Parse(TmplHTMLEntryRaw))
// TableEntry is one row of the index page: a commit together with its
// rendered metadata (title, comment count, topics -- fields elided
// from this view; see the index-rendering loop in main).
79 type TableEntry struct {
// CommentEntry is one comment displayed on an entry page, split into
// header and body lines (fields elided from this view; populated from
// strings.Split(comment, "\n") with lines[:3]/lines[3:] in main).
91 type CommentEntry struct {
// makeA renders an HTML anchor element linking href with the given
// text.  Neither argument is escaped here: callers are expected to
// pass already-escaped values (lineURLize escapes the whole line
// before splitting it into words).
func makeA(href, text string) string {
	return `<a href="` + href + `">` + text + `</a>`
}
// etagString finalizes the running hash and renders it as a quoted
// lowercase-hex string -- the strong-validator form required by the
// HTTP ETag header (RFC 7232).  Sum appends to a copy of the state,
// so the hash may keep accumulating afterwards.
func etagString(etag hash.Hash) string {
	return `"` + hex.EncodeToString(etag.Sum(nil)) + `"`
}
104 func urlParse(what string) *url.URL {
105 if u, err := url.ParseRequestURI(what); err == nil {
106 if _, exists := renderableSchemes[u.Scheme]; exists {
113 func lineURLize(urlPrefix, line string) string {
114 cols := strings.Split(html.EscapeString(line), " ")
115 for i, col := range cols {
116 if u := urlParse(col); u != nil {
117 cols[i] = makeA(col, col)
120 cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(urlPrefix+"/$1", "$1"))
122 return strings.Join(cols, " ")
125 func lineURLizeInTemplate(urlPrefix, line interface{}) string {
126 return lineURLize(urlPrefix.(string), line.(string))
129 func startHeader(etag hash.Hash, gziped bool) string {
131 "Content-Type: text/html; charset=utf-8",
132 "ETag: " + etagString(etag),
135 lines = append(lines, "Content-Encoding: gzip")
137 lines = append(lines, "")
138 lines = append(lines, "")
139 return strings.Join(lines, "\n")
// makeErr emits a CGI error response: a Status pseudo-header with the
// given HTTP status code followed by a plain-text content type and the
// header-terminating blank line.
// NOTE(review): the tail of this function (presumably writing err to
// the body and terminating the request) is outside this view --
// confirm against the full source before relying on its behavior.
142 func makeErr(err error, status int) {
143 fmt.Println("Status:", status)
144 fmt.Print("Content-Type: text/plain; charset=utf-8\n\n")
// checkETag implements conditional-GET handling: when the client's
// If-None-Match header equals the ETag computed so far, reply
// 304 Not Modified with the matching ETag echoed back.
// NOTE(review): the lines after the Printf are outside this view;
// presumably the function terminates the request on a match -- verify
// against the full source.
149 func checkETag(etag hash.Hash) {
150 ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
151 if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
152 fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
// bytes2uuid deterministically derives a version-4-shaped UUID string
// from the first 16 bytes of b (shorter input is zero-padded).  The
// input slice is copied first, so callers' backing arrays -- notably
// commit.Hash -- are never mutated.
// NOTE(review): only the version nibble is forced; the RFC 4122
// variant bits in raw[8] are not set, so the result is not strictly
// compliant.  Deliberately left unchanged: these strings serve as
// stable urn:uuid feed IDs, and altering them would make every feed
// entry reappear as new in subscribers' readers.
func bytes2uuid(b []byte) string {
	raw := make([]byte, 16)
	copy(raw, b)
	raw[6] = (raw[6] & 0x0F) | uint8(4<<4) // version 4
	return fmt.Sprintf("%x-%x-%x-%x-%x", raw[0:4], raw[4:6], raw[6:8], raw[8:10], raw[10:])
}
164 type CommitIterNext interface {
165 Next() (*object.Commit, error)
169 cfgPath := os.Getenv("SGBLOG_CFG")
171 log.Fatalln("SGBLOG_CFG is not set")
173 cfg, err := readCfg(cfgPath)
178 pathInfo := os.Getenv("PATH_INFO")
179 if len(pathInfo) == 0 {
182 queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
184 makeErr(err, http.StatusBadRequest)
187 etagHash, err := blake2b.New256(nil)
191 for _, s := range []string{
202 if _, err = etagHash.Write([]byte(s)); err != nil {
206 etagHashForWeb := []string{
210 cfg.CommentsNotesRef,
213 for _, gitURL := range cfg.GitURLs {
214 etagHashForWeb = append(etagHashForWeb, gitURL)
217 headHash, err := initRepo(cfg)
219 makeErr(err, http.StatusInternalServerError)
222 if notes, err := repo.Notes(); err == nil {
223 var notesRef *plumbing.Reference
224 var commentsRef *plumbing.Reference
225 notes.ForEach(func(ref *plumbing.Reference) error {
226 switch string(ref.Name()) {
227 case "refs/notes/commits":
229 case cfg.CommentsNotesRef:
235 if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
236 notesTree, _ = commentsCommit.Tree()
239 if commentsRef != nil {
240 if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
241 commentsTree, _ = commentsCommit.Tree()
246 var outBuf bytes.Buffer
249 var gzipWriter *gzip.Writer
250 acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
251 for _, encoding := range strings.Split(acceptEncoding, ", ") {
252 if encoding == "gzip" {
253 gzipWriter = gzip.NewWriter(&outBuf)
260 if offsetRaw, exists := queryValues["offset"]; exists {
261 offset, err = strconv.Atoi(offsetRaw[0])
263 makeErr(err, http.StatusBadRequest)
266 repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
268 makeErr(err, http.StatusInternalServerError)
270 topicsCache, err := getTopicsCache(cfg, repoLog)
272 makeErr(err, http.StatusInternalServerError)
274 repoLog, err = repo.Log(&git.LogOptions{From: *headHash})
276 makeErr(err, http.StatusInternalServerError)
280 var commits CommitIterNext
282 if t, exists := queryValues["topic"]; exists {
284 hashes := topicsCache[topic]
286 makeErr(errors.New("no posts with that topic"), http.StatusBadRequest)
288 if len(hashes) > offset {
289 hashes = hashes[offset:]
292 commits = &HashesIter{hashes}
294 for i := 0; i < offset; i++ {
295 if _, err = repoLog.Next(); err != nil {
303 entries := make([]TableEntry, 0, PageEntries)
305 for _, data := range etagHashForWeb {
306 etagHash.Write([]byte(data))
308 etagHash.Write([]byte("INDEX"))
309 etagHash.Write([]byte(topic))
310 for i := 0; i < PageEntries; i++ {
311 commit, err := commits.Next()
316 etagHash.Write(commit.Hash[:])
317 commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
318 etagHash.Write(commentsRaw)
319 topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
320 etagHash.Write(topicsRaw)
321 entries = append(entries, TableEntry{
323 CommentsRaw: commentsRaw,
324 TopicsRaw: topicsRaw,
329 for i, entry := range entries {
332 lines := msgSplit(entry.Commit.Message)
333 entry.Title = lines[0]
334 entry.LinesNum = len(lines) - 2
335 for _, line := range lines[2:] {
340 entry.DomainURLs = append(entry.DomainURLs, makeA(line, u.Host))
342 entry.CommentsNum = len(sgblog.ParseComments(entry.CommentsRaw))
343 entry.Topics = sgblog.ParseTopics(entry.TopicsRaw)
346 offsetPrev := offset - PageEntries
350 os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
351 err = TmplHTMLIndex.Execute(out, struct {
359 AtomCommentsFeed string
366 Version: sgblog.Version,
369 TopicsEnabled: topicsTree != nil,
370 Topics: topicsCache.Topics(),
371 CommentsEnabled: commentsTree != nil,
372 AtomPostsFeed: AtomPostsFeed,
373 AtomCommentsFeed: AtomCommentsFeed,
375 OffsetPrev: offsetPrev,
376 OffsetNext: offset + PageEntries,
381 makeErr(err, http.StatusInternalServerError)
383 } else if pathInfo == "/twtxt.txt" {
384 commit, err := repo.CommitObject(*headHash)
386 makeErr(err, http.StatusInternalServerError)
388 etagHash.Write([]byte("TWTXT POSTS"))
389 etagHash.Write(commit.Hash[:])
391 repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
393 makeErr(err, http.StatusInternalServerError)
395 for i := 0; i < PageEntries; i++ {
396 commit, err = repoLog.Next()
402 commit.Author.When.Format(time.RFC3339),
403 msgSplit(commit.Message)[0],
406 os.Stdout.WriteString("Content-Type: text/plain; charset=utf-8\n")
407 os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
408 if gzipWriter != nil {
409 os.Stdout.WriteString("Content-Encoding: gzip\n")
412 os.Stdout.WriteString("\n")
413 os.Stdout.Write(outBuf.Bytes())
415 } else if pathInfo == "/"+AtomPostsFeed {
416 commit, err := repo.CommitObject(*headHash)
418 makeErr(err, http.StatusInternalServerError)
422 if t, exists := queryValues["topic"]; exists {
426 etagHash.Write([]byte("ATOM POSTS"))
427 etagHash.Write([]byte(topic))
428 etagHash.Write(commit.Hash[:])
434 title = fmt.Sprintf("%s (topic: %s)", cfg.Title, topic)
436 idHasher, err := blake2b.New256(nil)
440 idHasher.Write([]byte("ATOM POSTS"))
441 idHasher.Write([]byte(cfg.AtomId))
442 idHasher.Write([]byte(topic))
445 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
446 Updated: atom.Time(commit.Author.When),
449 Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomPostsFeed,
451 Author: &atom.Person{Name: cfg.AtomAuthor},
454 repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
456 makeErr(err, http.StatusInternalServerError)
458 var commits CommitIterNext
462 topicsCache, err := getTopicsCache(cfg, repoLog)
464 makeErr(err, http.StatusInternalServerError)
466 hashes := topicsCache[topic]
468 makeErr(errors.New("no posts with that topic"), http.StatusBadRequest)
470 commits = &HashesIter{hashes}
473 for i := 0; i < PageEntries; i++ {
474 commit, err = commits.Next()
478 lines := msgSplit(commit.Message)
479 var categories []atom.Category
480 for _, topic := range sgblog.ParseTopics(sgblog.GetNote(
481 repo, topicsTree, commit.Hash,
483 categories = append(categories, atom.Category{Term: topic})
485 htmlized := make([]string, 0, len(lines))
486 htmlized = append(htmlized, "<pre>")
487 for _, l := range lines[2:] {
488 htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
490 htmlized = append(htmlized, "</pre>")
491 feed.Entry = append(feed.Entry, &atom.Entry{
493 ID: "urn:uuid:" + bytes2uuid(commit.Hash[:]),
496 Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
498 Published: atom.Time(commit.Author.When),
499 Updated: atom.Time(commit.Author.When),
500 Summary: &atom.Text{Type: "text", Body: lines[0]},
503 Body: strings.Join(htmlized, "\n"),
505 Category: categories,
508 data, err := xml.MarshalIndent(&feed, "", " ")
510 makeErr(err, http.StatusInternalServerError)
512 out.Write([]byte(xml.Header))
515 } else if pathInfo == "/"+AtomCommentsFeed {
516 commit, err := repo.CommitObject(commentsRef.Hash())
518 makeErr(err, http.StatusInternalServerError)
520 etagHash.Write([]byte("ATOM COMMENTS"))
521 etagHash.Write(commit.Hash[:])
523 idHasher, err := blake2b.New256(nil)
527 idHasher.Write([]byte("ATOM COMMENTS"))
528 idHasher.Write([]byte(cfg.AtomId))
530 Title: cfg.Title + " comments",
531 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
532 Updated: atom.Time(commit.Author.When),
535 Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomCommentsFeed,
537 Author: &atom.Person{Name: cfg.AtomAuthor},
539 repoLog, err := repo.Log(&git.LogOptions{From: commentsRef.Hash()})
541 makeErr(err, http.StatusInternalServerError)
543 for i := 0; i < PageEntries; i++ {
544 commit, err = repoLog.Next()
548 fileStats, err := commit.Stats()
550 makeErr(err, http.StatusInternalServerError)
552 t, err := commit.Tree()
554 makeErr(err, http.StatusInternalServerError)
556 commentedHash := plumbing.NewHash(strings.ReplaceAll(
557 fileStats[0].Name, "/", "",
559 commit, err = repo.CommitObject(commentedHash)
563 comments := sgblog.ParseComments(sgblog.GetNote(repo, t, commentedHash))
564 if len(comments) == 0 {
567 commentN := strconv.Itoa(len(comments) - 1)
568 lines := strings.Split(comments[len(comments)-1], "\n")
569 from := strings.TrimPrefix(lines[0], "From: ")
570 date := strings.TrimPrefix(lines[1], "Date: ")
571 htmlized := make([]string, 0, len(lines))
572 htmlized = append(htmlized, "<pre>")
573 for _, l := range lines[2:] {
574 htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
576 htmlized = append(htmlized, "</pre>")
578 idHasher.Write([]byte("COMMENT"))
579 idHasher.Write(commit.Hash[:])
580 idHasher.Write([]byte(commentN))
581 feed.Entry = append(feed.Entry, &atom.Entry{
583 "Comment %s for \"%s\" by %s",
584 commentN, msgSplit(commit.Message)[0], from,
586 Author: &atom.Person{Name: from},
587 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
590 Href: strings.Join([]string{
591 cfg.AtomBaseURL, cfg.URLPrefix, "/",
592 commit.Hash.String(), "#comment", commentN,
595 Published: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
596 Updated: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
599 Body: strings.Join(htmlized, "\n"),
603 data, err := xml.MarshalIndent(&feed, "", " ")
605 makeErr(err, http.StatusInternalServerError)
607 out.Write([]byte(xml.Header))
610 } else if sha1DigestRe.MatchString(pathInfo[1:]) {
611 commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1 : 1+sha1.Size*2]))
613 makeErr(err, http.StatusBadRequest)
615 for _, data := range etagHashForWeb {
616 etagHash.Write([]byte(data))
618 etagHash.Write([]byte("ENTRY"))
619 etagHash.Write(commit.Hash[:])
620 atomCommentsURL := strings.Join([]string{
621 cfg.AtomBaseURL, cfg.URLPrefix, "/",
622 commit.Hash.String(), "/", AtomCommentsFeed,
624 commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
625 etagHash.Write(commentsRaw)
626 topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
627 etagHash.Write(topicsRaw)
628 if strings.HasSuffix(pathInfo, AtomCommentsFeed) {
629 etagHash.Write([]byte("ATOM COMMENTS"))
631 type Comment struct {
637 commentsRaw := sgblog.ParseComments(commentsRaw)
639 if len(commentsRaw) > PageEntries {
640 toSkip = len(commentsRaw) - PageEntries
642 comments := make([]Comment, 0, len(commentsRaw)-toSkip)
643 for i := len(commentsRaw) - 1; i >= toSkip; i-- {
644 lines := strings.Split(commentsRaw[i], "\n")
645 from := strings.TrimPrefix(lines[0], "From: ")
646 date := strings.TrimPrefix(lines[1], "Date: ")
647 comments = append(comments, Comment{
650 date: strings.Replace(date, " ", "T", 1),
654 idHasher, err := blake2b.New256(nil)
658 idHasher.Write([]byte("ATOM COMMENTS"))
659 idHasher.Write(commit.Hash[:])
661 Title: fmt.Sprintf("\"%s\" comments", msgSplit(commit.Message)[0]),
662 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
663 Link: []atom.Link{{Rel: "self", Href: atomCommentsURL}},
664 Author: &atom.Person{Name: cfg.AtomAuthor},
666 if len(comments) > 0 {
667 feed.Updated = atom.TimeStr(comments[0].date)
669 feed.Updated = atom.Time(commit.Author.When)
671 for _, comment := range comments {
673 idHasher.Write([]byte("COMMENT"))
674 idHasher.Write(commit.Hash[:])
675 idHasher.Write([]byte(comment.n))
676 htmlized := make([]string, 0, len(comment.body))
677 htmlized = append(htmlized, "<pre>")
678 for _, l := range comment.body {
681 lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l),
684 htmlized = append(htmlized, "</pre>")
685 feed.Entry = append(feed.Entry, &atom.Entry{
686 Title: fmt.Sprintf("Comment %s by %s", comment.n, comment.from),
687 Author: &atom.Person{Name: comment.from},
688 ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
691 Href: strings.Join([]string{
694 commit.Hash.String(),
695 "#comment", comment.n,
698 Published: atom.TimeStr(
699 strings.Replace(comment.date, " ", "T", -1),
701 Updated: atom.TimeStr(
702 strings.Replace(comment.date, " ", "T", -1),
706 Body: strings.Join(htmlized, "\n"),
710 data, err := xml.MarshalIndent(&feed, "", " ")
712 makeErr(err, http.StatusInternalServerError)
714 out.Write([]byte(xml.Header))
718 notesRaw := sgblog.GetNote(repo, notesTree, commit.Hash)
719 etagHash.Write(notesRaw)
722 lines := msgSplit(commit.Message)
724 when := commit.Author.When.Format(sgblog.WhenFmt)
726 if len(commit.ParentHashes) > 0 {
727 parent = commit.ParentHashes[0].String()
729 commentsParsed := sgblog.ParseComments(commentsRaw)
730 comments := make([]CommentEntry, 0, len(commentsParsed))
731 for _, comment := range commentsParsed {
732 lines := strings.Split(comment, "\n")
733 comments = append(comments, CommentEntry{lines[:3], lines[3:]})
735 var notesLines []string
736 if len(notesRaw) > 0 {
737 notesLines = strings.Split(string(notesRaw), "\n")
740 os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
741 err = TmplHTMLEntry.Execute(out, struct {
747 AtomCommentsURL string
749 Commit *object.Commit
752 Comments []CommentEntry
755 Version: sgblog.Version,
758 TitleEscaped: url.PathEscape(fmt.Sprintf("Re: %s (%s)", title, commit.Hash)),
760 AtomCommentsURL: atomCommentsURL,
764 NoteLines: notesLines,
766 Topics: sgblog.ParseTopics(topicsRaw),
769 makeErr(err, http.StatusInternalServerError)
772 makeErr(errors.New("unknown URL action"), http.StatusNotFound)
774 out.Write([]byte("</body></html>\n"))
775 if gzipWriter != nil {
778 os.Stdout.Write(outBuf.Bytes())
782 os.Stdout.WriteString("Content-Type: application/atom+xml; charset=utf-8\n")
783 os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
784 if gzipWriter != nil {
785 os.Stdout.WriteString("Content-Encoding: gzip\n")
788 os.Stdout.WriteString("\n")
789 os.Stdout.Write(outBuf.Bytes())