2 SGBlog -- Git-backed CGI/UCSPI blogging/phlogging/gemlogging engine
3 Copyright (C) 2020-2023 Sergey Matveev <stargrave@stargrave.org>
5 This program is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Affero General Public License as
7 published by the Free Software Foundation, version 3 of the License.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
14 You should have received a copy of the GNU Affero General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>.
41 "github.com/go-git/go-git/v5"
42 "github.com/go-git/go-git/v5/plumbing"
43 "github.com/go-git/go-git/v5/plumbing/object"
44 "github.com/vorlif/spreak"
45 "go.stargrave.org/sgblog"
46 "go.stargrave.org/sgblog/cmd/sgblog/atom"
47 "lukechampine.com/blake3"
51 AtomPostsFeed = "feed.atom"
52 AtomCommentsFeed = "comments.atom"
56 renderableSchemes = map[string]struct{}{
69 //go:embed http-index.tmpl
70 TmplHTMLIndexRaw string
71 TmplHTMLIndex = template.Must(template.New("http-index").Parse(TmplHTMLIndexRaw))
73 //go:embed http-entry.tmpl
74 TmplHTMLEntryRaw string
75 TmplHTMLEntry = template.Must(template.New("http-entry").Funcs(
76 template.FuncMap{"lineURLize": lineURLizeInTemplate},
77 ).Parse(TmplHTMLEntryRaw))
80 type TableEntry struct {
92 type CommentEntry struct {
// makeA renders an HTML anchor element linking to href with the
// given link text. Neither argument is escaped here; callers pass
// already-escaped text.
func makeA(href, text string) string {
	var b strings.Builder
	b.WriteString(`<a href="`)
	b.WriteString(href)
	b.WriteString(`">`)
	b.WriteString(text)
	b.WriteString(`</a>`)
	return b.String()
}
// etagString renders the current digest of etag as a quoted
// hexadecimal string, the form expected in an ETag header.
func etagString(etag hash.Hash) string {
	sum := etag.Sum(nil)
	hexed := hex.EncodeToString(sum)
	return `"` + hexed + `"`
}
// urlParse parses what as an absolute request URI and accepts it
// only when its scheme appears in renderableSchemes.
// NOTE(review): the remaining lines of this function are not visible
// here; presumably the accepted *url.URL is returned and all other
// cases yield nil — confirm against the full source.
func urlParse(what string) *url.URL {
if u, err := url.ParseRequestURI(what); err == nil {
if _, exists := renderableSchemes[u.Scheme]; exists {
// lineURLize HTML-escapes line and then processes each
// space-separated column: columns that parse as URLs with a
// renderable scheme become <a> links, and SHA1-looking digests are
// turned into links to urlPrefix/<digest> (i.e. to blog entries).
func lineURLize(urlPrefix, line string) string {
cols := strings.Split(html.EscapeString(line), " ")
for i, col := range cols {
if u := urlParse(col); u != nil {
cols[i] = makeA(col, col)
// Rewrite bare SHA1 digests into entry links under urlPrefix.
cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(urlPrefix+"/$1", "$1"))
return strings.Join(cols, " ")
126 func lineURLizeInTemplate(urlPrefix, line interface{}) string {
127 return lineURLize(urlPrefix.(string), line.(string))
// startHeader builds the leading CGI response header lines:
// Content-Type, the ETag derived from etag, optionally
// Content-Encoding: gzip, then the blank line ending the headers.
func startHeader(etag hash.Hash, gziped bool) string {
"Content-Type: text/html; charset=utf-8",
"ETag: " + etagString(etag),
lines = append(lines, "Content-Encoding: gzip")
// Two empty strings produce the trailing blank line once joined.
lines = append(lines, "")
lines = append(lines, "")
return strings.Join(lines, "\n")
// makeErr reports err back to the CGI client with the given HTTP
// status code and a plain-text content type.
// NOTE(review): the rest of this function is not visible here;
// presumably it prints err and terminates the process — confirm.
func makeErr(err error, status int) {
fmt.Println("Status:", status)
fmt.Print("Content-Type: text/plain; charset=utf-8\n\n")
// checkETag emits a 304 Not Modified response when the client's
// If-None-Match header equals the current ETag value.
// NOTE(review): trailing lines not visible; presumably the process
// exits after answering 304 — confirm against the full source.
func checkETag(etag hash.Hash) {
ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
// bytes2uuid formats bytes of b as an RFC 4122 version-4-style UUID
// string (8-4-4-4-12 hex groups), forcing the version nibble to 4.
// NOTE(review): the lines initializing raw from b are not visible
// in this view.
func bytes2uuid(b []byte) string {
raw[6] = (raw[6] & 0x0F) | uint8(4<<4) // version 4
return fmt.Sprintf("%x-%x-%x-%x-%x", raw[0:4], raw[4:6], raw[6:8], raw[8:10], raw[10:])
// CommitIterNext abstracts anything that yields commits one at a
// time; both go-git's log iterator and the topic-filtered
// HashesIter used below satisfy it.
type CommitIterNext interface {
Next() (*object.Commit, error)
// Load configuration from the file named by SGBLOG_CFG and
// initialize localization from it.
cfgPath := os.Getenv("SGBLOG_CFG")
log.Fatalln("SGBLOG_CFG is not set")
cfg, err := readCfg(cfgPath)
initLocalizer(cfg.Lang)
// PATH_INFO selects which page or feed is being requested.
pathInfo := os.Getenv("PATH_INFO")
if len(pathInfo) == 0 {
queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
makeErr(err, http.StatusBadRequest)
// Seed the response ETag with request-independent configuration
// values so any config change invalidates cached pages.
etagHash := blake3.New(32, nil)
for _, s := range []string{
if _, err = etagHash.Write([]byte(s)); err != nil {
etagHashForWeb := []string{
cfg.CommentsNotesRef,
etagHashForWeb = append(etagHashForWeb, cfg.GitURLs...)
// Open the git repository backing the blog and resolve its HEAD.
headHash, err := initRepo(cfg)
makeErr(err, http.StatusInternalServerError)
// Locate the git notes references holding per-post notes
// ("refs/notes/commits") and comments (cfg.CommentsNotesRef), and
// load their trees when present; absence just disables the feature.
if notes, err := repo.Notes(); err == nil {
var notesRef *plumbing.Reference
var commentsRef *plumbing.Reference
notes.ForEach(func(ref *plumbing.Reference) error {
switch string(ref.Name()) {
case "refs/notes/commits":
case cfg.CommentsNotesRef:
if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
notesTree, _ = commentsCommit.Tree()
if commentsRef != nil {
if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
commentsTree, _ = commentsCommit.Tree()
// Buffer the whole response body, gzip-compressing it when the
// client advertises gzip in Accept-Encoding.
var outBuf bytes.Buffer
var gzipWriter *gzip.Writer
acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
for _, encoding := range strings.Split(acceptEncoding, ", ") {
if encoding == "gzip" {
gzipWriter = gzip.NewWriter(&outBuf)
// Index page: list up to PageEntries posts, optionally filtered by
// ?topic= and paginated by ?offset=.
if offsetRaw, exists := queryValues["offset"]; exists {
offset, err = strconv.Atoi(offsetRaw[0])
makeErr(err, http.StatusBadRequest)
repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
makeErr(err, http.StatusInternalServerError)
topicsCache, err := getTopicsCache(cfg, repoLog)
makeErr(err, http.StatusInternalServerError)
// The log iterator was consumed by getTopicsCache; start a fresh one.
repoLog, err = repo.Log(&git.LogOptions{From: *headHash})
makeErr(err, http.StatusInternalServerError)
var commits CommitIterNext
if t, exists := queryValues["topic"]; exists {
hashes := topicsCache[topic]
makeErr(errors.New("no posts with that topic"), http.StatusBadRequest)
if len(hashes) > offset {
hashes = hashes[offset:]
commits = &HashesIter{hashes}
// No topic filter: skip offset commits in plain log order.
for i := 0; i < offset; i++ {
if _, err = repoLog.Next(); err != nil {
entries := make([]TableEntry, 0, PageEntries)
// Fold page-identifying state into the ETag.
for _, data := range etagHashForWeb {
etagHash.Write([]byte(data))
etagHash.Write([]byte("INDEX"))
etagHash.Write([]byte(topic))
// Collect one page of commits with their comments/topics notes.
for i := 0; i < PageEntries; i++ {
commit, err := commits.Next()
etagHash.Write(commit.Hash[:])
commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
etagHash.Write(commentsRaw)
topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
etagHash.Write(topicsRaw)
entries = append(entries, TableEntry{
CommentsRaw: commentsRaw,
TopicsRaw: topicsRaw,
// Derive displayed fields (title, line count, linked domains,
// comment count, topics) from each collected entry.
for i, entry := range entries {
lines := msgSplit(entry.Commit.Message)
entry.Title = lines[0]
entry.LinesNum = len(lines) - 2
for _, line := range lines[2:] {
entry.DomainURLs = append(entry.DomainURLs, makeA(line, u.Host))
entry.CommentsNum = len(sgblog.ParseComments(entry.CommentsRaw))
entry.Topics = sgblog.ParseTopics(entry.TopicsRaw)
offsetPrev := offset - PageEntries
// Emit headers, then render the index template into the buffer.
os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
err = TmplHTMLIndex.Execute(out, struct {
AtomCommentsFeed string
Version: sgblog.Version,
TopicsEnabled: topicsTree != nil,
Topics: topicsCache.Topics(),
CommentsEnabled: commentsTree != nil,
AtomPostsFeed: AtomPostsFeed,
AtomCommentsFeed: AtomCommentsFeed,
OffsetPrev: offsetPrev,
OffsetNext: offset + PageEntries,
makeErr(err, http.StatusInternalServerError)
// /twtxt.txt: plain-text feed of the latest PageEntries post
// titles, one "timestamp<TAB-ish>title" record per commit.
} else if pathInfo == "/twtxt.txt" {
commit, err := repo.CommitObject(*headHash)
makeErr(err, http.StatusInternalServerError)
etagHash.Write([]byte("TWTXT POSTS"))
etagHash.Write(commit.Hash[:])
repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
makeErr(err, http.StatusInternalServerError)
for i := 0; i < PageEntries; i++ {
commit, err = repoLog.Next()
commit.Author.When.Format(time.RFC3339),
msgSplit(commit.Message)[0],
os.Stdout.WriteString("Content-Type: text/plain; charset=utf-8\n")
os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
if gzipWriter != nil {
os.Stdout.WriteString("Content-Encoding: gzip\n")
os.Stdout.WriteString("\n")
os.Stdout.Write(outBuf.Bytes())
// Atom feed of posts, optionally narrowed to one ?topic=.
} else if pathInfo == "/"+AtomPostsFeed {
commit, err := repo.CommitObject(*headHash)
makeErr(err, http.StatusInternalServerError)
if t, exists := queryValues["topic"]; exists {
etagHash.Write([]byte("ATOM POSTS"))
etagHash.Write([]byte(topic))
etagHash.Write(commit.Hash[:])
title = fmt.Sprintf("%s (topic: %s)", cfg.Title, topic)
// Feed ID is a UUID derived from a hash over feed kind, configured
// AtomId and topic, so every (feed, topic) pair gets a stable ID.
idHasher := blake3.New(32, nil)
idHasher.Write([]byte("ATOM POSTS"))
idHasher.Write([]byte(cfg.AtomId))
idHasher.Write([]byte(topic))
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Updated: atom.Time(commit.Author.When),
Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomPostsFeed,
Author: &atom.Person{Name: cfg.AtomAuthor},
repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
makeErr(err, http.StatusInternalServerError)
var commits CommitIterNext
topicsCache, err := getTopicsCache(cfg, repoLog)
makeErr(err, http.StatusInternalServerError)
hashes := topicsCache[topic]
makeErr(errors.New("no posts with that topic"), http.StatusBadRequest)
commits = &HashesIter{hashes}
// Emit one Atom entry per commit on the page.
for i := 0; i < PageEntries; i++ {
commit, err = commits.Next()
lines := msgSplit(commit.Message)
var categories []atom.Category
for _, topic := range sgblog.ParseTopics(sgblog.GetNote(
repo, topicsTree, commit.Hash,
categories = append(categories, atom.Category{Term: topic})
// Body is the commit message rendered as URLized lines in <pre>.
htmlized := make([]string, 0, len(lines))
htmlized = append(htmlized, "<pre>")
for _, l := range lines[2:] {
htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
htmlized = append(htmlized, "</pre>")
links := []atom.Link{{
Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
// Attach image enclosures served from the image domain.
for _, img := range listImgs(cfg, commit.Hash) {
links = append(links, atom.Link{
Href: "http://" + cfg.ImgDomain + "/" + img.Path,
Length: uint(img.Size),
feed.Entry = append(feed.Entry, &atom.Entry{
ID: "urn:uuid:" + bytes2uuid(commit.Hash[:]),
Published: atom.Time(commit.Author.When),
Updated: atom.Time(commit.Author.When),
Summary: &atom.Text{Type: "text", Body: lines[0]},
Body: strings.Join(htmlized, "\n"),
Category: categories,
data, err := xml.MarshalIndent(&feed, "", " ")
makeErr(err, http.StatusInternalServerError)
out.Write([]byte(xml.Header))
// Atom feed of the latest comments across all posts, driven by the
// history of the comments notes reference.
} else if pathInfo == "/"+AtomCommentsFeed {
commit, err := repo.CommitObject(commentsRef.Hash())
makeErr(err, http.StatusInternalServerError)
etagHash.Write([]byte("ATOM COMMENTS"))
etagHash.Write(commit.Hash[:])
idHasher := blake3.New(32, nil)
idHasher.Write([]byte("ATOM COMMENTS"))
idHasher.Write([]byte(cfg.AtomId))
Title: cfg.Title + " comments",
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Updated: atom.Time(commit.Author.When),
Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomCommentsFeed,
Author: &atom.Person{Name: cfg.AtomAuthor},
// Walk the notes ref's own commit history: each notes commit
// touches the file of the post that received a comment.
repoLog, err := repo.Log(&git.LogOptions{From: commentsRef.Hash()})
makeErr(err, http.StatusInternalServerError)
for i := 0; i < PageEntries; i++ {
commit, err = repoLog.Next()
fileStats, err := commit.Stats()
makeErr(err, http.StatusInternalServerError)
t, err := commit.Tree()
makeErr(err, http.StatusInternalServerError)
// Notes paths fan out over directories; stripping "/" restores
// the commented post's commit hash.
commentedHash := plumbing.NewHash(strings.ReplaceAll(
fileStats[0].Name, "/", "",
commit, err = repo.CommitObject(commentedHash)
comments := sgblog.ParseComments(sgblog.GetNote(repo, t, commentedHash))
if len(comments) == 0 {
// Only the newest comment of each touched post is included.
commentN := strconv.Itoa(len(comments) - 1)
lines := strings.Split(comments[len(comments)-1], "\n")
from := strings.TrimPrefix(lines[0], "From: ")
date := strings.TrimPrefix(lines[1], "Date: ")
htmlized := make([]string, 0, len(lines))
htmlized = append(htmlized, "<pre>")
for _, l := range lines[2:] {
htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
htmlized = append(htmlized, "</pre>")
idHasher.Write([]byte("COMMENT"))
idHasher.Write(commit.Hash[:])
idHasher.Write([]byte(commentN))
feed.Entry = append(feed.Entry, &atom.Entry{
"Comment %s for \"%s\" by %s",
commentN, msgSplit(commit.Message)[0], from,
Author: &atom.Person{Name: from},
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Href: strings.Join([]string{
cfg.AtomBaseURL, cfg.URLPrefix, "/",
commit.Hash.String(), "#comment", commentN,
Published: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
Updated: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
Body: strings.Join(htmlized, "\n"),
data, err := xml.MarshalIndent(&feed, "", " ")
makeErr(err, http.StatusInternalServerError)
out.Write([]byte(xml.Header))
// A SHA1 digest in PATH_INFO addresses a single post. A trailing
// ".../comments.atom" serves that post's per-entry comment feed;
// otherwise (next chunk) the post's HTML page is rendered.
} else if sha1DigestRe.MatchString(pathInfo[1:]) {
commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1 : 1+sha1.Size*2]))
makeErr(err, http.StatusBadRequest)
for _, data := range etagHashForWeb {
etagHash.Write([]byte(data))
etagHash.Write([]byte("ENTRY"))
etagHash.Write(commit.Hash[:])
atomCommentsURL := strings.Join([]string{
cfg.AtomBaseURL, cfg.URLPrefix, "/",
commit.Hash.String(), "/", AtomCommentsFeed,
commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
etagHash.Write(commentsRaw)
topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
etagHash.Write(topicsRaw)
if strings.HasSuffix(pathInfo, AtomCommentsFeed) {
etagHash.Write([]byte("ATOM COMMENTS"))
type Comment struct {
commentsRaw := sgblog.ParseComments(commentsRaw)
// Keep only the newest PageEntries comments.
if len(commentsRaw) > PageEntries {
toSkip = len(commentsRaw) - PageEntries
comments := make([]Comment, 0, len(commentsRaw)-toSkip)
// Iterate newest-first; headers follow the "From:"/"Date:" layout.
for i := len(commentsRaw) - 1; i >= toSkip; i-- {
lines := strings.Split(commentsRaw[i], "\n")
from := strings.TrimPrefix(lines[0], "From: ")
date := strings.TrimPrefix(lines[1], "Date: ")
comments = append(comments, Comment{
date: strings.Replace(date, " ", "T", 1),
idHasher := blake3.New(32, nil)
idHasher.Write([]byte("ATOM COMMENTS"))
idHasher.Write(commit.Hash[:])
Title: fmt.Sprintf("\"%s\" comments", msgSplit(commit.Message)[0]),
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Link: []atom.Link{{Rel: "self", Href: atomCommentsURL}},
Author: &atom.Person{Name: cfg.AtomAuthor},
// Feed's Updated is the newest comment's date, or the post's
// own date when it has no comments yet.
if len(comments) > 0 {
feed.Updated = atom.TimeStr(comments[0].date)
feed.Updated = atom.Time(commit.Author.When)
for _, comment := range comments {
idHasher.Write([]byte("COMMENT"))
idHasher.Write(commit.Hash[:])
idHasher.Write([]byte(comment.n))
htmlized := make([]string, 0, len(comment.body))
htmlized = append(htmlized, "<pre>")
for _, l := range comment.body {
lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l),
htmlized = append(htmlized, "</pre>")
feed.Entry = append(feed.Entry, &atom.Entry{
Title: fmt.Sprintf("Comment %s by %s", comment.n, comment.from),
Author: &atom.Person{Name: comment.from},
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Href: strings.Join([]string{
commit.Hash.String(),
"#comment", comment.n,
Published: atom.TimeStr(
strings.Replace(comment.date, " ", "T", -1),
Updated: atom.TimeStr(
strings.Replace(comment.date, " ", "T", -1),
Body: strings.Join(htmlized, "\n"),
data, err := xml.MarshalIndent(&feed, "", " ")
makeErr(err, http.StatusInternalServerError)
out.Write([]byte(xml.Header))
// HTML page for a single post: commit message, optional note,
// parsed comments, topics and images rendered via TmplHTMLEntry.
notesRaw := sgblog.GetNote(repo, notesTree, commit.Hash)
etagHash.Write(notesRaw)
lines := msgSplit(commit.Message)
when := commit.Author.When.Format(sgblog.WhenFmt)
if len(commit.ParentHashes) > 0 {
parent = commit.ParentHashes[0].String()
// Comments are split into a 3-line header and the body lines.
commentsParsed := sgblog.ParseComments(commentsRaw)
comments := make([]CommentEntry, 0, len(commentsParsed))
for _, comment := range commentsParsed {
lines := strings.Split(comment, "\n")
comments = append(comments, CommentEntry{lines[:3], lines[3:]})
var notesLines []string
if len(notesRaw) > 0 {
notesLines = strings.Split(string(notesRaw), "\n")
os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
err = TmplHTMLEntry.Execute(out, struct {
AtomCommentsURL string
Commit *object.Commit
Comments []CommentEntry
Version: sgblog.Version,
TitleEscaped: url.PathEscape(fmt.Sprintf("Re: %s (%s)", title, commit.Hash)),
AtomCommentsURL: atomCommentsURL,
NoteLines: notesLines,
Topics: sgblog.ParseTopics(topicsRaw),
Imgs: listImgs(cfg, commit.Hash),
makeErr(err, http.StatusInternalServerError)
// Any other PATH_INFO is an unknown action: 404.
makeErr(errors.New("unknown URL action"), http.StatusNotFound)
out.Write([]byte("</body></html>\n"))
// Flush the (possibly gzipped) buffered body after the headers.
if gzipWriter != nil {
os.Stdout.Write(outBuf.Bytes())
os.Stdout.WriteString("Content-Type: application/atom+xml; charset=utf-8\n")
os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
if gzipWriter != nil {
os.Stdout.WriteString("Content-Encoding: gzip\n")
os.Stdout.WriteString("\n")
os.Stdout.Write(outBuf.Bytes())