2 SGBlog -- Git-backed CGI/UCSPI blogging/phlogging/gemlogging engine
3 Copyright (C) 2020-2021 Sergey Matveev <stargrave@stargrave.org>
5 This program is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Affero General Public License as
7 published by the Free Software Foundation, version 3 of the License.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 GNU General Public License for more details.
14 You should have received a copy of the GNU Affero General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>.
40 "github.com/go-git/go-git/v5"
41 "github.com/go-git/go-git/v5/plumbing"
42 "github.com/go-git/go-git/v5/plumbing/object"
43 "go.stargrave.org/sgblog"
44 "go.stargrave.org/sgblog/cmd/sgblog/atom"
45 "golang.org/x/crypto/blake2b"
// NOTE(review): fragment of package-level declarations; the enclosing
// const/var block delimiters are not visible in this chunk.
// Well-known Atom feed filenames served under the URL prefix.
AtomPostsFeed = "feed.atom"
AtomCommentsFeed = "comments.atom"
// URL schemes that urlParse/lineURLize will turn into clickable links.
renderableSchemes = map[string]struct{}{
// HTML templates embedded at build time and parsed once at startup.
//go:embed http-index.tmpl
TmplHTMLIndexRaw string
TmplHTMLIndex = template.Must(template.New("http-index").Parse(TmplHTMLIndexRaw))
//go:embed http-entry.tmpl
TmplHTMLEntryRaw string
// The entry template additionally gets the lineURLize helper function.
TmplHTMLEntry = template.Must(template.New("http-entry").Funcs(
template.FuncMap{"lineURLize": lineURLizeInTemplate},
).Parse(TmplHTMLEntryRaw))
// TableEntry holds one post row for the index page (fields not visible here).
type TableEntry struct {
// CommentEntry holds one parsed comment (header lines and body lines,
// judging by its construction in the entry-page code below).
type CommentEntry struct {
// makeA renders an HTML anchor element pointing at href with the given
// link text. No escaping is performed here; callers are expected to pass
// already HTML-escaped text (see lineURLize).
func makeA(href, text string) string {
	var b strings.Builder
	b.WriteString(`<a href="`)
	b.WriteString(href)
	b.WriteString(`">`)
	b.WriteString(text)
	b.WriteString(`</a>`)
	return b.String()
}
99 func etagString(etag hash.Hash) string {
100 return `"` + hex.EncodeToString(etag.Sum(nil)) + `"`
103 func urlParse(what string) *url.URL {
104 if u, err := url.ParseRequestURI(what); err == nil {
105 if _, exists := renderableSchemes[u.Scheme]; exists {
112 func lineURLize(urlPrefix, line string) string {
113 cols := strings.Split(html.EscapeString(line), " ")
114 for i, col := range cols {
115 if u := urlParse(col); u != nil {
116 cols[i] = makeA(col, col)
119 cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(urlPrefix+"/$1", "$1"))
121 return strings.Join(cols, " ")
124 func lineURLizeInTemplate(urlPrefix, line interface{}) string {
125 return lineURLize(urlPrefix.(string), line.(string))
128 func startHeader(etag hash.Hash, gziped bool) string {
130 "Content-Type: text/html; charset=UTF-8",
131 "ETag: " + etagString(etag),
134 lines = append(lines, "Content-Encoding: gzip")
136 lines = append(lines, "")
137 lines = append(lines, "")
138 return strings.Join(lines, "\n")
// makeErr writes a plain-text CGI error response: the Status
// pseudo-header with the given HTTP status code, a Content-Type header,
// and the blank line ending the header section.
// NOTE(review): the remainder of the function (presumably printing err
// and terminating the request) is not visible in this chunk.
func makeErr(err error, status int) {
fmt.Println("Status:", status)
fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
// checkETag compares the client's If-None-Match header against the
// computed ETag and, on an exact match, answers 304 Not Modified so the
// client can reuse its cached copy.
// NOTE(review): the tail of this function (presumably terminating the
// request after the 304) is not visible in this chunk.
func checkETag(etag hash.Hash) {
ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
// bytes2uuid deterministically derives a version-4-shaped UUID string
// from the leading bytes of b. Shorter inputs are zero-padded to
// 16 bytes; longer inputs are truncated.
// NOTE(review): the visible fragment was truncated (missing the 16-byte
// buffer declaration and copy); restored below.
func bytes2uuid(b []byte) string {
	var raw [16]byte
	copy(raw[:], b)
	raw[6] = (raw[6] & 0x0F) | uint8(4<<4) // version 4
	return fmt.Sprintf("%x-%x-%x-%x-%x", raw[0:4], raw[4:6], raw[6:8], raw[8:10], raw[10:])
}
163 type CommitIterNext interface {
164 Next() (*object.Commit, error)
// NOTE(review): this is a fragmentary view of main(); many intervening
// lines (error checks, closing braces, struct fields) are not visible.
// Configuration comes from the file named by SGBLOG_CFG (CGI convention).
cfgPath := os.Getenv("SGBLOG_CFG")
log.Fatalln("SGBLOG_CFG is not set")
cfg, err := readCfg(cfgPath)
// PATH_INFO selects what to render: index page, Atom feeds, or a post.
pathInfo := os.Getenv("PATH_INFO")
if len(pathInfo) == 0 {
queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
makeErr(err, http.StatusBadRequest)
// The ETag is a BLAKE2b-256 digest accumulated over everything that
// influences the rendered page.
etagHash, err := blake2b.New256(nil)
for _, s := range []string{
if _, err = etagHash.Write([]byte(s)); err != nil {
// Extra configuration values folded into the web pages' ETag.
etagHashForWeb := []string{
cfg.CommentsNotesRef,
for _, gitURL := range cfg.GitURLs {
etagHashForWeb = append(etagHashForWeb, gitURL)
// Open the backing git repository; headHash is its current HEAD.
headHash, err := initRepo(cfg)
makeErr(err, http.StatusInternalServerError)
// Resolve git-notes trees: refs/notes/commits carries per-post notes,
// cfg.CommentsNotesRef carries reader comments.
if notes, err := repo.Notes(); err == nil {
var notesRef *plumbing.Reference
var commentsRef *plumbing.Reference
notes.ForEach(func(ref *plumbing.Reference) error {
switch string(ref.Name()) {
case "refs/notes/commits":
case cfg.CommentsNotesRef:
if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
notesTree, _ = commentsCommit.Tree()
if commentsRef != nil {
if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
commentsTree, _ = commentsCommit.Tree()
// Buffer the response body; gzip it when the client advertises support.
var outBuf bytes.Buffer
var gzipWriter *gzip.Writer
acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
for _, encoding := range strings.Split(acceptEncoding, ", ") {
if encoding == "gzip" {
gzipWriter = gzip.NewWriter(&outBuf)
// Index page: paginated via ?offset=, optionally filtered by ?topic=.
if offsetRaw, exists := queryValues["offset"]; exists {
offset, err = strconv.Atoi(offsetRaw[0])
makeErr(err, http.StatusBadRequest)
repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
makeErr(err, http.StatusInternalServerError)
topicsCache, err := getTopicsCache(cfg, repoLog)
makeErr(err, http.StatusInternalServerError)
// Restart the log from HEAD (getTopicsCache presumably consumed the
// previous iterator).
repoLog, err = repo.Log(&git.LogOptions{From: *headHash})
makeErr(err, http.StatusInternalServerError)
var commits CommitIterNext
// With a topic filter, iterate the cached hashes; otherwise walk the
// plain log, skipping the first offset commits.
if t, exists := queryValues["topic"]; exists {
hashes := topicsCache[topic]
makeErr(errors.New("no posts with that topic"), http.StatusBadRequest)
if len(hashes) > offset {
hashes = hashes[offset:]
commits = &HashesIter{hashes}
for i := 0; i < offset; i++ {
if _, err = repoLog.Next(); err != nil {
entries := make([]TableEntry, 0, PageEntries)
// Fold everything page-relevant into the ETag before rendering.
for _, data := range etagHashForWeb {
etagHash.Write([]byte(data))
etagHash.Write([]byte("INDEX"))
etagHash.Write([]byte(topic))
// Collect up to PageEntries posts with their comments/topics notes.
for i := 0; i < PageEntries; i++ {
commit, err := commits.Next()
etagHash.Write(commit.Hash[:])
commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
etagHash.Write(commentsRaw)
topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
etagHash.Write(topicsRaw)
entries = append(entries, TableEntry{
CommentsRaw: commentsRaw,
TopicsRaw: topicsRaw,
// Derive per-entry presentation data from each raw commit message.
for i, entry := range entries {
lines := msgSplit(entry.Commit.Message)
entry.Title = lines[0]
entry.LinesNum = len(lines) - 2
for _, line := range lines[2:] {
entry.DomainURLs = append(entry.DomainURLs, makeA(line, u.Host))
entry.CommentsNum = len(sgblog.ParseComments(entry.CommentsRaw))
entry.Topics = sgblog.ParseTopics(entry.TopicsRaw)
// Pagination offsets for the prev/next navigation links.
offsetPrev := offset - PageEntries
os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
err = TmplHTMLIndex.Execute(out, struct {
AtomCommentsFeed string
Version: sgblog.Version,
TopicsEnabled: topicsTree != nil,
Topics: topicsCache.Topics(),
CommentsEnabled: commentsTree != nil,
AtomPostsFeed: AtomPostsFeed,
AtomCommentsFeed: AtomCommentsFeed,
OffsetPrev: offsetPrev,
OffsetNext: offset + PageEntries,
makeErr(err, http.StatusInternalServerError)
// Atom feed of recent posts, optionally filtered by ?topic=.
} else if pathInfo == "/"+AtomPostsFeed {
commit, err := repo.CommitObject(*headHash)
makeErr(err, http.StatusInternalServerError)
if t, exists := queryValues["topic"]; exists {
// Feed-level ETag: feed kind, chosen topic and HEAD commit hash.
etagHash.Write([]byte("ATOM POSTS"))
etagHash.Write([]byte(topic))
etagHash.Write(commit.Hash[:])
title = fmt.Sprintf("%s (topic: %s)", cfg.Title, topic)
// Stable feed identifier: a UUID derived from a BLAKE2b digest over
// the configured AtomId and the topic, so it is constant across runs.
idHasher, err := blake2b.New256(nil)
idHasher.Write([]byte("ATOM POSTS"))
idHasher.Write([]byte(cfg.AtomId))
idHasher.Write([]byte(topic))
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Updated: atom.Time(commit.Author.When),
Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomPostsFeed,
Author: &atom.Person{Name: cfg.AtomAuthor},
repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
makeErr(err, http.StatusInternalServerError)
var commits CommitIterNext
topicsCache, err := getTopicsCache(cfg, repoLog)
makeErr(err, http.StatusInternalServerError)
hashes := topicsCache[topic]
makeErr(errors.New("no posts with that topic"), http.StatusBadRequest)
commits = &HashesIter{hashes}
// Emit up to PageEntries feed entries.
for i := 0; i < PageEntries; i++ {
commit, err = commits.Next()
lines := msgSplit(commit.Message)
// Topics attached via git-notes become Atom categories.
var categories []atom.Category
for _, topic := range sgblog.ParseTopics(sgblog.GetNote(repo, topicsTree, commit.Hash)) {
categories = append(categories, atom.Category{Term: topic})
// Entry body: the commit message as preformatted HTML with
// URLs and commit digests linkified.
htmlized := make([]string, 0, len(lines))
htmlized = append(htmlized, "<pre>")
for _, l := range lines[2:] {
htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
htmlized = append(htmlized, "</pre>")
feed.Entry = append(feed.Entry, &atom.Entry{
ID: "urn:uuid:" + bytes2uuid(commit.Hash[:]),
Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
Published: atom.Time(commit.Author.When),
Updated: atom.Time(commit.Author.When),
Summary: &atom.Text{Type: "text", Body: lines[0]},
Body: strings.Join(htmlized, "\n"),
Category: categories,
data, err := xml.MarshalIndent(&feed, "", " ")
makeErr(err, http.StatusInternalServerError)
// Atom feed of the most recent comments across all posts, driven by
// the history of the comments notes ref.
} else if pathInfo == "/"+AtomCommentsFeed {
commit, err := repo.CommitObject(commentsRef.Hash())
makeErr(err, http.StatusInternalServerError)
etagHash.Write([]byte("ATOM COMMENTS"))
etagHash.Write(commit.Hash[:])
// Stable feed identifier derived from the configured AtomId.
idHasher, err := blake2b.New256(nil)
idHasher.Write([]byte("ATOM COMMENTS"))
idHasher.Write([]byte(cfg.AtomId))
Title: cfg.Title + " comments",
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Updated: atom.Time(commit.Author.When),
Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomCommentsFeed,
Author: &atom.Person{Name: cfg.AtomAuthor},
// Walk the notes-ref history; each commit there changed one note file
// whose path (slashes removed) names the commented post's hash.
repoLog, err := repo.Log(&git.LogOptions{From: commentsRef.Hash()})
makeErr(err, http.StatusInternalServerError)
for i := 0; i < PageEntries; i++ {
commit, err = repoLog.Next()
fileStats, err := commit.Stats()
makeErr(err, http.StatusInternalServerError)
t, err := commit.Tree()
makeErr(err, http.StatusInternalServerError)
commentedHash := plumbing.NewHash(strings.ReplaceAll(
fileStats[0].Name, "/", "",
commit, err = repo.CommitObject(commentedHash)
comments := sgblog.ParseComments(sgblog.GetNote(repo, t, commentedHash))
if len(comments) == 0 {
// The newest comment on that post; its header carries From:/Date:.
commentN := strconv.Itoa(len(comments) - 1)
lines := strings.Split(comments[len(comments)-1], "\n")
from := strings.TrimPrefix(lines[0], "From: ")
date := strings.TrimPrefix(lines[1], "Date: ")
htmlized := make([]string, 0, len(lines))
htmlized = append(htmlized, "<pre>")
for _, l := range lines[2:] {
htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
htmlized = append(htmlized, "</pre>")
// Per-comment UUID: folds post hash and comment index into idHasher.
idHasher.Write([]byte("COMMENT"))
idHasher.Write(commit.Hash[:])
idHasher.Write([]byte(commentN))
feed.Entry = append(feed.Entry, &atom.Entry{
"Comment %s for \"%s\" by %s",
commentN, msgSplit(commit.Message)[0], from,
Author: &atom.Person{Name: from},
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Href: strings.Join([]string{
cfg.AtomBaseURL, cfg.URLPrefix, "/",
commit.Hash.String(), "#comment", commentN,
Published: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
Updated: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
Body: strings.Join(htmlized, "\n"),
data, err := xml.MarshalIndent(&feed, "", " ")
makeErr(err, http.StatusInternalServerError)
// Single post page, addressed by its full SHA1 commit digest; the same
// branch also serves that post's per-entry Atom comments feed.
} else if sha1DigestRe.MatchString(pathInfo[1:]) {
commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1 : 1+sha1.Size*2]))
makeErr(err, http.StatusBadRequest)
for _, data := range etagHashForWeb {
etagHash.Write([]byte(data))
etagHash.Write([]byte("ENTRY"))
etagHash.Write(commit.Hash[:])
atomCommentsURL := strings.Join([]string{
cfg.AtomBaseURL, cfg.URLPrefix, "/",
commit.Hash.String(), "/", AtomCommentsFeed,
// Comments and topics are stored as git-notes attached to the commit.
commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
etagHash.Write(commentsRaw)
topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
etagHash.Write(topicsRaw)
// Per-entry Atom comments feed (URL ends with the feed filename).
if strings.HasSuffix(pathInfo, AtomCommentsFeed) {
etagHash.Write([]byte("ATOM COMMENTS"))
type Comment struct {
commentsRaw := sgblog.ParseComments(commentsRaw)
// Keep only the newest PageEntries comments.
if len(commentsRaw) > PageEntries {
toSkip = len(commentsRaw) - PageEntries
comments := make([]Comment, 0, len(commentsRaw)-toSkip)
for i := len(commentsRaw) - 1; i >= toSkip; i-- {
lines := strings.Split(commentsRaw[i], "\n")
from := strings.TrimPrefix(lines[0], "From: ")
date := strings.TrimPrefix(lines[1], "Date: ")
comments = append(comments, Comment{
date: strings.Replace(date, " ", "T", 1),
idHasher, err := blake2b.New256(nil)
idHasher.Write([]byte("ATOM COMMENTS"))
idHasher.Write(commit.Hash[:])
Title: fmt.Sprintf("\"%s\" comments", msgSplit(commit.Message)[0]),
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Link: []atom.Link{{Rel: "self", Href: atomCommentsURL}},
Author: &atom.Person{Name: cfg.AtomAuthor},
// Feed timestamp: newest comment if any, otherwise the post itself.
if len(comments) > 0 {
feed.Updated = atom.TimeStr(comments[0].date)
feed.Updated = atom.Time(commit.Author.When)
for _, comment := range comments {
idHasher.Write([]byte("COMMENT"))
idHasher.Write(commit.Hash[:])
idHasher.Write([]byte(comment.n))
htmlized := make([]string, 0, len(comment.body))
htmlized = append(htmlized, "<pre>")
for _, l := range comment.body {
lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l),
htmlized = append(htmlized, "</pre>")
feed.Entry = append(feed.Entry, &atom.Entry{
Title: fmt.Sprintf("Comment %s by %s", comment.n, comment.from),
Author: &atom.Person{Name: comment.from},
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Href: strings.Join([]string{
commit.Hash.String(),
"#comment", comment.n,
Published: atom.TimeStr(
strings.Replace(comment.date, " ", "T", -1),
Updated: atom.TimeStr(
strings.Replace(comment.date, " ", "T", -1),
Body: strings.Join(htmlized, "\n"),
data, err := xml.MarshalIndent(&feed, "", " ")
makeErr(err, http.StatusInternalServerError)
// Otherwise render the post's HTML page.
notesRaw := sgblog.GetNote(repo, notesTree, commit.Hash)
etagHash.Write(notesRaw)
lines := msgSplit(commit.Message)
when := commit.Author.When.Format(sgblog.WhenFmt)
if len(commit.ParentHashes) > 0 {
parent = commit.ParentHashes[0].String()
// Each stored comment splits into three header lines plus the body.
commentsParsed := sgblog.ParseComments(commentsRaw)
comments := make([]CommentEntry, 0, len(commentsParsed))
for _, comment := range commentsParsed {
lines := strings.Split(comment, "\n")
comments = append(comments, CommentEntry{lines[:3], lines[3:]})
var notesLines []string
if len(notesRaw) > 0 {
notesLines = strings.Split(string(notesRaw), "\n")
os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
err = TmplHTMLEntry.Execute(out, struct {
AtomCommentsURL string
Commit *object.Commit
Comments []CommentEntry
Version: sgblog.Version,
TitleEscaped: url.PathEscape(fmt.Sprintf("Re: %s (%s)", title, commit.Hash)),
AtomCommentsURL: atomCommentsURL,
NoteLines: notesLines,
Topics: sgblog.ParseTopics(topicsRaw),
makeErr(err, http.StatusInternalServerError)
// No URL handler matched PATH_INFO.
makeErr(errors.New("unknown URL action"), http.StatusNotFound)
// Finalize the buffered response and flush it to stdout. The HTML path
// closes the document and writes the (possibly gzipped) buffer; the
// Atom path below emits its own headers before the body.
out.Write([]byte("</body></html>\n"))
if gzipWriter != nil {
os.Stdout.Write(outBuf.Bytes())
os.Stdout.WriteString("Content-Type: application/atom+xml; charset=UTF-8\n")
os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
if gzipWriter != nil {
os.Stdout.WriteString("Content-Encoding: gzip\n")
os.Stdout.WriteString("\n")
os.Stdout.Write(outBuf.Bytes())