2 SGBlog -- Git-backed CGI/UCSPI blogging/phlogging/gemlogging engine
3 Copyright (C) 2020-2021 Sergey Matveev <stargrave@stargrave.org>
5 This program is free software: you can redistribute it and/or modify
6 it under the terms of the GNU Affero General Public License as
7 published by the Free Software Foundation, version 3 of the License.
9 This program is distributed in the hope that it will be useful,
10 but WITHOUT ANY WARRANTY; without even the implied warranty of
11 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
14 You should have received a copy of the GNU Affero General Public License
15 along with this program. If not, see <http://www.gnu.org/licenses/>.
40 "github.com/go-git/go-git/v5"
41 "github.com/go-git/go-git/v5/plumbing"
42 "github.com/go-git/go-git/v5/plumbing/object"
43 "go.stargrave.org/sgblog"
44 "go.stargrave.org/sgblog/cmd/sgblog/atom"
45 "golang.org/x/crypto/blake2b"
// Well-known feed file names served under the configured URL prefix.
AtomPostsFeed = "feed.atom"
AtomCommentsFeed = "comments.atom"
// renderableSchemes is a set (empty-struct values) of URL schemes that
// lineURLize will turn into clickable <a> links.
renderableSchemes = map[string]struct{}{
// HTML templates are embedded at build time and parsed once at program
// start; template.Must panics on a malformed template, which is the
// desired fail-fast behavior for an embedded asset.
//go:embed http-index.tmpl
TmplHTMLIndexRaw string
TmplHTMLIndex = template.Must(template.New("http-index").Parse(TmplHTMLIndexRaw))
// The entry template additionally exposes lineURLize to template code.
//go:embed http-entry.tmpl
TmplHTMLEntryRaw string
TmplHTMLEntry = template.Must(template.New("http-entry").Funcs(
template.FuncMap{"lineURLize": lineURLizeInTemplate},
).Parse(TmplHTMLEntryRaw))
// TableEntry is one post row rendered on the index page
// (fields are elided in this view of the source).
type TableEntry struct {
// CommentEntry is one comment rendered on an entry page; constructed
// below in main as {lines[:3], lines[3:]} (header lines, body lines).
type CommentEntry struct {
// makeA renders an HTML anchor: <a href="HREF">TEXT</a>.
// NOTE(review): neither argument is escaped here; callers appear to pass
// already-escaped text (lineURLize escapes the whole line first) —
// confirm before adding new call sites.
func makeA(href, text string) string {
return `<a href="` + href + `">` + text + `</a>`
// etagString returns the hash's current sum as a double-quoted hex
// string — the literal form required for an HTTP ETag header value.
func etagString(etag hash.Hash) string {
return `"` + hex.EncodeToString(etag.Sum(nil)) + `"`
// urlParse parses what as an absolute request URI and accepts it only
// when its scheme is in renderableSchemes; the elided fallthrough
// presumably returns nil for anything else — TODO confirm against the
// full source.
func urlParse(what string) *url.URL {
if u, err := url.ParseRequestURI(what); err == nil {
if _, exists := renderableSchemes[u.Scheme]; exists {
// lineURLize HTML-escapes a line of text and then, word by word, turns
// renderable URLs into <a> links and bare SHA1 hex digests into links
// under urlPrefix ("$1" is the digest captured by sha1DigestRe).
func lineURLize(urlPrefix, line string) string {
cols := strings.Split(html.EscapeString(line), " ")
for i, col := range cols {
if u := urlParse(col); u != nil {
// the word itself is both the href and the visible text
cols[i] = makeA(col, col)
// non-URL words: linkify any embedded commit digest instead
cols[i] = sha1DigestRe.ReplaceAllString(col, makeA(urlPrefix+"/$1", "$1"))
return strings.Join(cols, " ")
// lineURLizeInTemplate adapts lineURLize for template.FuncMap, whose
// calls pass arguments as interface{} values.
func lineURLizeInTemplate(urlPrefix, line interface{}) string {
return lineURLize(urlPrefix.(string), line.(string))
// startHeader builds the CGI response header block for HTML pages:
// Content-Type, the current ETag, and optionally Content-Encoding: gzip.
func startHeader(etag hash.Hash, gziped bool) string {
"Content-Type: text/html; charset=UTF-8",
"ETag: " + etagString(etag),
lines = append(lines, "Content-Encoding: gzip")
// two empty strings so the "\n" join ends with a blank line, which is
// the CGI header/body separator
lines = append(lines, "")
lines = append(lines, "")
return strings.Join(lines, "\n")
// makeErr reports an error to the CGI client: a Status header plus a
// plain-text body. The elided tail presumably prints err and terminates
// the process — TODO confirm.
func makeErr(err error, status int) {
fmt.Println("Status:", status)
fmt.Print("Content-Type: text/plain; charset=UTF-8\n\n")
// checkETag compares the client's If-None-Match header with the current
// ETag and, on an exact match, replies 304 Not Modified (the elided
// tail presumably exits so no body is generated — confirm).
func checkETag(etag hash.Hash) {
ifNoneMatch := os.Getenv("HTTP_IF_NONE_MATCH")
if ifNoneMatch != "" && ifNoneMatch == etagString(etag) {
fmt.Printf("Status: 304\nETag: %s\n\n", ifNoneMatch)
// bytes2uuid formats (a copy of) b's leading 16 bytes as a UUID string,
// forcing the version nibble to 4.
// NOTE(review): the RFC 4122 variant bits (high bits of raw[8]) are not
// set in the visible lines — confirm whether that is intentional; the
// IDs are only used as stable Atom identifiers.
func bytes2uuid(b []byte) string {
raw[6] = (raw[6] & 0x0F) | uint8(4<<4) // version 4
return fmt.Sprintf("%x-%x-%x-%x-%x", raw[0:4], raw[4:6], raw[6:8], raw[8:10], raw[10:])
// CommitIterNext abstracts anything that yields commits one at a time;
// satisfied both by go-git's log iterator and by HashesIter below.
type CommitIterNext interface {
Next() (*object.Commit, error)
// NOTE(review): fragmentary view — many interior lines (error checks,
// closing braces, struct fields) are elided. Code kept verbatim;
// comments only.
// --- configuration and CGI environment ---
cfgPath := os.Getenv("SGBLOG_CFG")
log.Fatalln("SGBLOG_CFG is not set")
cfg, err := readCfg(cfgPath)
pathInfo, exists := os.LookupEnv("PATH_INFO")
queryValues, err := url.ParseQuery(os.Getenv("QUERY_STRING"))
makeErr(err, http.StatusBadRequest)
// The ETag is a BLAKE2b-256 digest seeded with configuration values so
// that any configuration change invalidates client-cached pages.
etagHash, err := blake2b.New256(nil)
for _, s := range []string{
if _, err = etagHash.Write([]byte(s)); err != nil {
etagHashForWeb := []string{
cfg.CommentsNotesRef,
for _, gitURL := range cfg.GitURLs {
etagHashForWeb = append(etagHashForWeb, gitURL)
// --- repository and git-notes trees (post notes and comment notes) ---
headHash, err := initRepo(cfg)
makeErr(err, http.StatusInternalServerError)
if notes, err := repo.Notes(); err == nil {
var notesRef *plumbing.Reference
var commentsRef *plumbing.Reference
notes.ForEach(func(ref *plumbing.Reference) error {
switch string(ref.Name()) {
case "refs/notes/commits":
case cfg.CommentsNotesRef:
if commentsCommit, err := repo.CommitObject(notesRef.Hash()); err == nil {
notesTree, _ = commentsCommit.Tree()
if commentsRef != nil {
if commentsCommit, err := repo.CommitObject(commentsRef.Hash()); err == nil {
commentsTree, _ = commentsCommit.Tree()
// --- optional gzip; output is buffered so the ETag header can be
// emitted before the (already generated) body ---
var outBuf bytes.Buffer
var gzipWriter *gzip.Writer
acceptEncoding := os.Getenv("HTTP_ACCEPT_ENCODING")
// NOTE(review): splitting on ", " misses "gzip,deflate" (no space) and
// q-values like "gzip;q=1.0" — acceptable for typical browsers, but
// worth confirming.
for _, encoding := range strings.Split(acceptEncoding, ", ") {
if encoding == "gzip" {
gzipWriter = gzip.NewWriter(&outBuf)
// pagination offset from ?offset=N
if offsetRaw, exists := queryValues["offset"]; exists {
offset, err = strconv.Atoi(offsetRaw[0])
makeErr(err, http.StatusBadRequest)
repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
makeErr(err, http.StatusInternalServerError)
topicsCache, err := getTopicsCache(cfg, repoLog)
makeErr(err, http.StatusInternalServerError)
// the log iterator was consumed by getTopicsCache; reopen it
repoLog, err = repo.Log(&git.LogOptions{From: *headHash})
makeErr(err, http.StatusInternalServerError)
// --- branch 1: index page (paged list of posts, optionally filtered
// by ?topic=...) ---
var commits CommitIterNext
if t, exists := queryValues["topic"]; exists {
hashes := topicsCache[topic]
makeErr(errors.New("no posts with that topic"), http.StatusBadRequest)
if len(hashes) > offset {
hashes = hashes[offset:]
commits = &HashesIter{hashes}
// no topic filter: skip `offset` commits in plain log order
for i := 0; i < offset; i++ {
if _, err = repoLog.Next(); err != nil {
entries := make([]TableEntry, 0, PageEntries)
// fold everything that affects the rendered page into the ETag
for _, data := range etagHashForWeb {
etagHash.Write([]byte(data))
etagHash.Write([]byte("INDEX"))
etagHash.Write([]byte(topic))
for i := 0; i < PageEntries; i++ {
commit, err := commits.Next()
etagHash.Write(commit.Hash[:])
commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
etagHash.Write(commentsRaw)
topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
etagHash.Write(topicsRaw)
entries = append(entries, TableEntry{
CommentsRaw: commentsRaw,
TopicsRaw: topicsRaw,
// derive per-row display fields: title is the first message line,
// body starts after the blank separator line (hence the -2 / [2:])
for i, entry := range entries {
lines := msgSplit(entry.Commit.Message)
entry.Title = lines[0]
entry.LinesNum = len(lines) - 2
for _, line := range lines[2:] {
entry.DomainURLs = append(entry.DomainURLs, makeA(line, u.Host))
entry.CommentsNum = len(sgblog.ParseComments(entry.CommentsRaw))
entry.Topics = sgblog.ParseTopics(entry.TopicsRaw)
offsetPrev := offset - PageEntries
os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
err = TmplHTMLIndex.Execute(out, struct {
AtomCommentsFeed string
Version: sgblog.Version,
TopicsEnabled: topicsTree != nil,
Topics: topicsCache.Topics(),
CommentsEnabled: commentsTree != nil,
AtomPostsFeed: AtomPostsFeed,
AtomCommentsFeed: AtomCommentsFeed,
OffsetPrev: offsetPrev,
OffsetNext: offset + PageEntries,
makeErr(err, http.StatusInternalServerError)
// --- branch 2: Atom feed of posts ---
} else if pathInfo == "/"+AtomPostsFeed {
commit, err := repo.CommitObject(*headHash)
makeErr(err, http.StatusInternalServerError)
if t, exists := queryValues["topic"]; exists {
etagHash.Write([]byte("ATOM POSTS"))
etagHash.Write([]byte(topic))
etagHash.Write(commit.Hash[:])
title = fmt.Sprintf("%s (topic: %s)", cfg.Title, topic)
// feed ID is a UUID derived from a digest of feed kind + AtomId +
// topic, so it is stable across requests
idHasher, err := blake2b.New256(nil)
idHasher.Write([]byte("ATOM POSTS"))
idHasher.Write([]byte(cfg.AtomId))
idHasher.Write([]byte(topic))
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Updated: atom.Time(commit.Author.When),
Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomPostsFeed,
Author: &atom.Person{Name: cfg.AtomAuthor},
repoLog, err := repo.Log(&git.LogOptions{From: *headHash})
makeErr(err, http.StatusInternalServerError)
var commits CommitIterNext
topicsCache, err := getTopicsCache(cfg, repoLog)
makeErr(err, http.StatusInternalServerError)
hashes := topicsCache[topic]
makeErr(errors.New("no posts with that topic"), http.StatusBadRequest)
commits = &HashesIter{hashes}
// one feed entry per post, newest first, PageEntries at most
for i := 0; i < PageEntries; i++ {
commit, err = commits.Next()
lines := msgSplit(commit.Message)
var categories []atom.Category
for _, topic := range sgblog.ParseTopics(sgblog.GetNote(repo, topicsTree, commit.Hash)) {
categories = append(categories, atom.Category{Term: topic})
// body lines after the title/blank separator, linkified, in <pre>
htmlized := make([]string, 0, len(lines))
htmlized = append(htmlized, "<pre>")
for _, l := range lines[2:] {
htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
htmlized = append(htmlized, "</pre>")
feed.Entry = append(feed.Entry, &atom.Entry{
ID: "urn:uuid:" + bytes2uuid(commit.Hash[:]),
Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + commit.Hash.String(),
Published: atom.Time(commit.Author.When),
Updated: atom.Time(commit.Author.When),
Summary: &atom.Text{Type: "text", Body: lines[0]},
Body: strings.Join(htmlized, "\n"),
Category: categories,
data, err := xml.MarshalIndent(&feed, "", " ")
makeErr(err, http.StatusInternalServerError)
// --- branch 3: Atom feed of the latest comments (walks the comments
// notes ref's own history) ---
} else if pathInfo == "/"+AtomCommentsFeed {
commit, err := repo.CommitObject(commentsRef.Hash())
makeErr(err, http.StatusInternalServerError)
etagHash.Write([]byte("ATOM COMMENTS"))
etagHash.Write(commit.Hash[:])
idHasher, err := blake2b.New256(nil)
idHasher.Write([]byte("ATOM COMMENTS"))
idHasher.Write([]byte(cfg.AtomId))
Title: cfg.Title + " comments",
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Updated: atom.Time(commit.Author.When),
Href: cfg.AtomBaseURL + cfg.URLPrefix + "/" + AtomCommentsFeed,
Author: &atom.Person{Name: cfg.AtomAuthor},
repoLog, err := repo.Log(&git.LogOptions{From: commentsRef.Hash()})
makeErr(err, http.StatusInternalServerError)
for i := 0; i < PageEntries; i++ {
commit, err = repoLog.Next()
// each notes commit touches exactly the note file of the commented
// post; its path (slashes stripped, undoing any notes fan-out)
// reconstructs the commented commit's hash
fileStats, err := commit.Stats()
makeErr(err, http.StatusInternalServerError)
t, err := commit.Tree()
makeErr(err, http.StatusInternalServerError)
commentedHash := plumbing.NewHash(strings.ReplaceAll(
fileStats[0].Name, "/", "",
commit, err = repo.CommitObject(commentedHash)
comments := sgblog.ParseComments(sgblog.GetNote(repo, t, commentedHash))
if len(comments) == 0 {
// only the most recent comment of that post is reported here
commentN := strconv.Itoa(len(comments) - 1)
lines := strings.Split(comments[len(comments)-1], "\n")
from := strings.TrimPrefix(lines[0], "From: ")
date := strings.TrimPrefix(lines[1], "Date: ")
htmlized := make([]string, 0, len(lines))
htmlized = append(htmlized, "<pre>")
for _, l := range lines[2:] {
htmlized = append(htmlized, lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l))
htmlized = append(htmlized, "</pre>")
idHasher.Write([]byte("COMMENT"))
idHasher.Write(commit.Hash[:])
idHasher.Write([]byte(commentN))
feed.Entry = append(feed.Entry, &atom.Entry{
"Comment %s for \"%s\" by %s",
commentN, msgSplit(commit.Message)[0], from,
Author: &atom.Person{Name: from},
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Href: strings.Join([]string{
cfg.AtomBaseURL, cfg.URLPrefix, "/",
commit.Hash.String(), "#comment", commentN,
Published: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
Updated: atom.TimeStr(strings.Replace(date, " ", "T", -1)),
Body: strings.Join(htmlized, "\n"),
data, err := xml.MarshalIndent(&feed, "", " ")
makeErr(err, http.StatusInternalServerError)
// --- branch 4: single post, addressed by its SHA1 digest; either the
// post's own comments Atom feed or the HTML entry page ---
} else if sha1DigestRe.MatchString(pathInfo[1:]) {
commit, err := repo.CommitObject(plumbing.NewHash(pathInfo[1 : 1+sha1.Size*2]))
makeErr(err, http.StatusBadRequest)
for _, data := range etagHashForWeb {
etagHash.Write([]byte(data))
etagHash.Write([]byte("ENTRY"))
etagHash.Write(commit.Hash[:])
atomCommentsURL := strings.Join([]string{
cfg.AtomBaseURL, cfg.URLPrefix, "/",
commit.Hash.String(), "/", AtomCommentsFeed,
commentsRaw := sgblog.GetNote(repo, commentsTree, commit.Hash)
etagHash.Write(commentsRaw)
topicsRaw := sgblog.GetNote(repo, topicsTree, commit.Hash)
etagHash.Write(topicsRaw)
// 4a: /<hash>/comments.atom — per-post comments feed
if strings.HasSuffix(pathInfo, AtomCommentsFeed) {
etagHash.Write([]byte("ATOM COMMENTS"))
type Comment struct {
commentsRaw := sgblog.ParseComments(commentsRaw)
// keep only the newest PageEntries comments
if len(commentsRaw) > PageEntries {
toSkip = len(commentsRaw) - PageEntries
comments := make([]Comment, 0, len(commentsRaw)-toSkip)
// iterate newest-first
for i := len(commentsRaw) - 1; i >= toSkip; i-- {
lines := strings.Split(commentsRaw[i], "\n")
from := strings.TrimPrefix(lines[0], "From: ")
date := strings.TrimPrefix(lines[1], "Date: ")
comments = append(comments, Comment{
// NOTE(review): replace-count 1 here vs -1 elsewhere (branch 3 and
// below) — equivalent only if the date holds a single space; confirm.
date: strings.Replace(date, " ", "T", 1),
idHasher, err := blake2b.New256(nil)
idHasher.Write([]byte("ATOM COMMENTS"))
idHasher.Write(commit.Hash[:])
Title: fmt.Sprintf("\"%s\" comments", msgSplit(commit.Message)[0]),
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Link: []atom.Link{{Rel: "self", Href: atomCommentsURL}},
Author: &atom.Person{Name: cfg.AtomAuthor},
// feed Updated: newest comment's date, or the post date if none
if len(comments) > 0 {
feed.Updated = atom.TimeStr(comments[0].date)
feed.Updated = atom.Time(commit.Author.When)
for _, comment := range comments {
idHasher.Write([]byte("COMMENT"))
idHasher.Write(commit.Hash[:])
idHasher.Write([]byte(comment.n))
htmlized := make([]string, 0, len(comment.body))
htmlized = append(htmlized, "<pre>")
for _, l := range comment.body {
lineURLize(cfg.AtomBaseURL+cfg.URLPrefix, l),
htmlized = append(htmlized, "</pre>")
feed.Entry = append(feed.Entry, &atom.Entry{
Title: fmt.Sprintf("Comment %s by %s", comment.n, comment.from),
Author: &atom.Person{Name: comment.from},
ID: "urn:uuid:" + bytes2uuid(idHasher.Sum(nil)),
Href: strings.Join([]string{
commit.Hash.String(),
"#comment", comment.n,
Published: atom.TimeStr(
strings.Replace(comment.date, " ", "T", -1),
Updated: atom.TimeStr(
strings.Replace(comment.date, " ", "T", -1),
Body: strings.Join(htmlized, "\n"),
data, err := xml.MarshalIndent(&feed, "", " ")
makeErr(err, http.StatusInternalServerError)
// 4b: HTML entry page
notesRaw := sgblog.GetNote(repo, notesTree, commit.Hash)
etagHash.Write(notesRaw)
lines := msgSplit(commit.Message)
when := commit.Author.When.Format(sgblog.WhenFmt)
if len(commit.ParentHashes) > 0 {
parent = commit.ParentHashes[0].String()
commentsParsed := sgblog.ParseComments(commentsRaw)
comments := make([]CommentEntry, 0, len(commentsParsed))
for _, comment := range commentsParsed {
lines := strings.Split(comment, "\n")
// first three lines are the comment header, the rest the body
comments = append(comments, CommentEntry{lines[:3], lines[3:]})
var notesLines []string
if len(notesRaw) > 0 {
notesLines = strings.Split(string(notesRaw), "\n")
os.Stdout.Write([]byte(startHeader(etagHash, gzipWriter != nil)))
err = TmplHTMLEntry.Execute(out, struct {
AtomCommentsURL string
Commit *object.Commit
Comments []CommentEntry
Version: sgblog.Version,
TitleEscaped: url.PathEscape(fmt.Sprintf("Re: %s (%s)", title, commit.Hash)),
AtomCommentsURL: atomCommentsURL,
NoteLines: notesLines,
Topics: sgblog.ParseTopics(topicsRaw),
makeErr(err, http.StatusInternalServerError)
// --- no branch matched: unknown path ---
makeErr(errors.New("unknown URL action"), http.StatusNotFound)
// --- flush: close the document, finish gzip if active, then emit the
// buffered body (HTML path) or headers + body (Atom path) ---
out.Write([]byte("</body></html>\n"))
if gzipWriter != nil {
os.Stdout.Write(outBuf.Bytes())
os.Stdout.WriteString("Content-Type: application/atom+xml; charset=UTF-8\n")
os.Stdout.WriteString("ETag: " + etagString(etagHash) + "\n")
if gzipWriter != nil {
os.Stdout.WriteString("Content-Encoding: gzip\n")
os.Stdout.WriteString("\n")
os.Stdout.Write(outBuf.Bytes())